Compare commits
10 Commits
expiring-m...version/20
| Author | SHA1 | Date |
| --- | --- | --- |
| | 2fb097061d | |
| | 8962d17e03 | |
| | 8326e1490c | |
| | 091e4d3e4c | |
| | 6ee77edcbb | |
| | 763e2288bf | |
| | 9cdb177ca7 | |
| | 6070508058 | |
| | ec13a5d84d | |
| | 057de82b01 | |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2024.8.3
+current_version = 2024.8.0
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
7  .github/dependabot.yml  vendored
@@ -23,6 +23,7 @@ updates:
 - package-ecosystem: npm
 directories:
 - "/web"
+- "/tests/wdio"
 - "/web/sfe"
 schedule:
 interval: daily
@@ -43,11 +44,9 @@ updates:
 - "babel-*"
 eslint:
 patterns:
-- "@eslint/*"
 - "@typescript-eslint/*"
-- "eslint-*"
 - "eslint"
-- "typescript-eslint"
+- "eslint-*"
 storybook:
 patterns:
 - "@storybook/*"
@@ -55,12 +54,10 @@ updates:
 esbuild:
 patterns:
 - "@esbuild/*"
-- "esbuild*"
 rollup:
 patterns:
 - "@rollup/*"
 - "rollup-*"
-- "rollup*"
 swc:
 patterns:
 - "@swc/*"
2  .github/pull_request_template.md  vendored
@@ -1,7 +1,7 @@
 <!--
 👋 Hi there! Welcome.

-Please check the Contributing guidelines: https://docs.goauthentik.io/docs/developer-docs/#how-can-i-contribute
+Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
 -->

 ## Details
2  .github/workflows/api-ts-publish.yml  vendored
@@ -40,7 +40,7 @@ jobs:
 run: |
 export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
 npm i @goauthentik/api@$VERSION
-- uses: peter-evans/create-pull-request@v7
+- uses: peter-evans/create-pull-request@v6
 id: cpr
 with:
 token: ${{ steps.generate_token.outputs.token }}
18  .github/workflows/ci-main.yml  vendored
@@ -120,12 +120,6 @@ jobs:
 with:
 flags: unit
 token: ${{ secrets.CODECOV_TOKEN }}
-- if: ${{ !cancelled() }}
-uses: codecov/test-results-action@v1
-with:
-flags: unit
-file: unittest.xml
-token: ${{ secrets.CODECOV_TOKEN }}
 test-integration:
 runs-on: ubuntu-latest
 timeout-minutes: 30
@@ -144,12 +138,6 @@ jobs:
 with:
 flags: integration
 token: ${{ secrets.CODECOV_TOKEN }}
-- if: ${{ !cancelled() }}
-uses: codecov/test-results-action@v1
-with:
-flags: integration
-file: unittest.xml
-token: ${{ secrets.CODECOV_TOKEN }}
 test-e2e:
 name: test-e2e (${{ matrix.job.name }})
 runs-on: ubuntu-latest
@@ -202,12 +190,6 @@ jobs:
 with:
 flags: e2e
 token: ${{ secrets.CODECOV_TOKEN }}
-- if: ${{ !cancelled() }}
-uses: codecov/test-results-action@v1
-with:
-flags: e2e
-file: unittest.xml
-token: ${{ secrets.CODECOV_TOKEN }}
 ci-core-mark:
 needs:
 - lint
22  .github/workflows/ci-web.yml  vendored
@@ -24,11 +24,17 @@ jobs:
 - prettier-check
 project:
 - web
+- tests/wdio
 include:
 - command: tsc
 project: web
 - command: lit-analyse
 project: web
+exclude:
+- command: lint:lockfile
+project: tests/wdio
+- command: tsc
+project: tests/wdio
 steps:
 - uses: actions/checkout@v4
 - uses: actions/setup-node@v4
@@ -39,12 +45,21 @@ jobs:
 - working-directory: ${{ matrix.project }}/
 run: |
 npm ci
+${{ matrix.extra_setup }}
 - name: Generate API
 run: make gen-client-ts
 - name: Lint
 working-directory: ${{ matrix.project }}/
 run: npm run ${{ matrix.command }}
+ci-web-mark:
+needs:
+- lint
+runs-on: ubuntu-latest
+steps:
+- run: echo mark
 build:
+needs:
+- ci-web-mark
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v4
@@ -60,13 +75,6 @@ jobs:
 - name: build
 working-directory: web/
 run: npm run build
-ci-web-mark:
-needs:
-- build
-- lint
-runs-on: ubuntu-latest
-steps:
-- run: echo mark
 test:
 needs:
 - ci-web-mark
@@ -24,7 +24,7 @@ jobs:
 - name: Setup authentik env
 uses: ./.github/actions/setup
 - run: poetry run ak update_webauthn_mds
-- uses: peter-evans/create-pull-request@v7
+- uses: peter-evans/create-pull-request@v6
 id: cpr
 with:
 token: ${{ steps.generate_token.outputs.token }}
2  .github/workflows/image-compress.yml  vendored
@@ -42,7 +42,7 @@ jobs:
 with:
 githubToken: ${{ steps.generate_token.outputs.token }}
 compressOnly: ${{ github.event_name != 'pull_request' }}
-- uses: peter-evans/create-pull-request@v7
+- uses: peter-evans/create-pull-request@v6
 if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
 id: cpr
 with:
@@ -32,7 +32,7 @@ jobs:
 poetry run ak compilemessages
 make web-check-compile
 - name: Create Pull Request
-uses: peter-evans/create-pull-request@v7
+uses: peter-evans/create-pull-request@v6
 with:
 token: ${{ steps.generate_token.outputs.token }}
 branch: extract-compile-backend-translation
@@ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
 /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

 # Stage 5: Python dependencies
-FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS python-deps
+FROM ghcr.io/goauthentik/fips-python:3.12.5-slim-bookworm-fips-full AS python-deps

 ARG TARGETARCH
 ARG TARGETVARIANT
@@ -124,7 +124,7 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
 pip install --force-reinstall /wheels/*"

 # Stage 6: Run
-FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS final-image
+FROM ghcr.io/goauthentik/fips-python:3.12.5-slim-bookworm-fips-full AS final-image

 ARG VERSION
 ARG GIT_BUILD_HASH
5  Makefile
@@ -19,13 +19,14 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
 CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
 -I .github/codespell-words.txt \
 -S 'web/src/locales/**' \
--S 'website/docs/developer-docs/api/reference/**' \
+-S 'website/developer-docs/api/reference/**' \
 authentik \
 internal \
 cmd \
 web/src \
 website/src \
 website/blog \
+website/developer-docs \
 website/docs \
 website/integrations \
 website/src
@@ -204,7 +205,7 @@ gen: gen-build gen-client-ts
 web-build: web-install ## Build the Authentik UI
 cd web && npm run build

-web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
+web: web-lint-fix web-lint web-check-compile web-test ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it

 web-install: ## Install the necessary libraries to build the Authentik UI
 cd web && npm ci
@@ -34,7 +34,7 @@ For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/h

 ## Development

-See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github)
+See [Developer Documentation](https://goauthentik.io/developer-docs/?utm_source=github)

 ## Security

@@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni

 | Version | Supported |
 | -------- | --------- |
+| 2024.4.x | ✅ |
 | 2024.6.x | ✅ |
-| 2024.8.x | ✅ |

 ## Reporting a Vulnerability

@@ -2,7 +2,7 @@

 from os import environ

-__version__ = "2024.8.3"
+__version__ = "2024.8.0"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"


@@ -1,8 +1,10 @@
 """authentik admin tasks"""

+import re
+
 from django.core.cache import cache
+from django.core.validators import URLValidator
 from django.db import DatabaseError, InternalError, ProgrammingError
-from django.utils.translation import gettext_lazy as _
 from packaging.version import parse
 from requests import RequestException
 from structlog.stdlib import get_logger
@@ -19,6 +21,8 @@ LOGGER = get_logger()
 VERSION_NULL = "0.0.0"
 VERSION_CACHE_KEY = "authentik_latest_version"
 VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
+# Chop of the first ^ because we want to search the entire string
+URL_FINDER = URLValidator.regex.pattern[1:]
 LOCAL_VERSION = parse(__version__)


@@ -74,16 +78,10 @@ def update_latest_version(self: SystemTask):
 context__new_version=upstream_version,
 ).exists():
 return
-Event.new(
-EventAction.UPDATE_AVAILABLE,
-message=_(
-"New version {version} available!".format(
-version=upstream_version,
-)
-),
-new_version=upstream_version,
-changelog=data.get("stable", {}).get("changelog_url"),
-).save()
+event_dict = {"new_version": upstream_version}
+if match := re.search(URL_FINDER, data.get("stable", {}).get("changelog", "")):
+event_dict["message"] = f"Changelog: {match.group()}"
+Event.new(EventAction.UPDATE_AVAILABLE, **event_dict).save()
 except (RequestException, IndexError) as exc:
 cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
 self.set_error(exc)
@@ -17,7 +17,6 @@ RESPONSE_VALID = {
 "stable": {
 "version": "99999999.9999999",
 "changelog": "See https://goauthentik.io/test",
-"changelog_url": "https://goauthentik.io/test",
 "reason": "bugfix",
 },
 }
@@ -36,7 +35,7 @@ class TestAdminTasks(TestCase):
 Event.objects.filter(
 action=EventAction.UPDATE_AVAILABLE,
 context__new_version="99999999.9999999",
-context__message="New version 99999999.9999999 available!",
+context__message="Changelog: https://goauthentik.io/test",
 ).exists()
 )
 # test that a consecutive check doesn't create a duplicate event
@@ -46,7 +45,7 @@ class TestAdminTasks(TestCase):
 Event.objects.filter(
 action=EventAction.UPDATE_AVAILABLE,
 context__new_version="99999999.9999999",
-context__message="New version 99999999.9999999 available!",
+context__message="Changelog: https://goauthentik.io/test",
 )
 ),
 1,
@@ -51,11 +51,9 @@ class BlueprintInstanceSerializer(ModelSerializer):
 context = self.instance.context if self.instance else {}
 valid, logs = Importer.from_string(content, context).validate()
 if not valid:
+text_logs = "\n".join([x["event"] for x in logs])
 raise ValidationError(
-[
-_("Failed to validate blueprint"),
-*[f"- {x.event}" for x in logs],
-]
+_("Failed to validate blueprint: {logs}".format_map({"logs": text_logs}))
 )
 return content

@@ -29,7 +29,9 @@ def check_blueprint_v1_file(BlueprintInstance: type, db_alias, path: Path):
 if version != 1:
 return
 blueprint_file.seek(0)
-instance = BlueprintInstance.objects.using(db_alias).filter(path=path).first()
+instance: BlueprintInstance = (
+BlueprintInstance.objects.using(db_alias).filter(path=path).first()
+)
 rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
 meta = None
 if metadata:
@@ -78,5 +78,5 @@ class TestBlueprintsV1API(APITestCase):
 self.assertEqual(res.status_code, 400)
 self.assertJSONEqual(
 res.content.decode(),
-{"content": ["Failed to validate blueprint", "- Invalid blueprint version"]},
+{"content": ["Failed to validate blueprint: Invalid blueprint version"]},
 )
@@ -69,7 +69,7 @@ from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
 from authentik.tenants.models import Tenant

 # Context set when the serializer is created in a blueprint context
-# Update website/docs/customize/blueprints/v1/models.md when used
+# Update website/developer-docs/blueprints/v1/models.md when used
 SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"


@@ -429,7 +429,7 @@ class Importer:
 orig_import = deepcopy(self._import)
 if self._import.version != 1:
 self.logger.warning("Invalid blueprint version")
-return False, [LogEvent("Invalid blueprint version", log_level="warning", logger=None)]
+return False, [{"event": "Invalid blueprint version"}]
 with (
 transaction_rollback(),
 capture_logs() as logs,
@@ -30,10 +30,8 @@ from authentik.core.api.utils import (
 PassiveSerializer,
 )
 from authentik.core.expression.evaluator import PropertyMappingEvaluator
-from authentik.core.expression.exceptions import PropertyMappingExpressionException
 from authentik.core.models import Group, PropertyMapping, User
 from authentik.events.utils import sanitize_item
-from authentik.lib.utils.errors import exception_to_string
 from authentik.policies.api.exec import PolicyTestSerializer
 from authentik.rbac.decorators import permission_required

@@ -164,15 +162,12 @@ class PropertyMappingViewSet(

 response_data = {"successful": True, "result": ""}
 try:
-result = mapping.evaluate(dry_run=True, **context)
+result = mapping.evaluate(**context)
 response_data["result"] = dumps(
 sanitize_item(result), indent=(4 if format_result else None)
 )
-except PropertyMappingExpressionException as exc:
-response_data["result"] = exception_to_string(exc.exc)
-response_data["successful"] = False
 except Exception as exc:
-response_data["result"] = exception_to_string(exc)
+response_data["result"] = str(exc)
 response_data["successful"] = False
 response = PropertyMappingTestResultSerializer(response_data)
 return Response(response.data)
@@ -38,7 +38,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
 "name",
 "authentication_flow",
 "authorization_flow",
-"invalidation_flow",
 "property_mappings",
 "component",
 "assigned_application_slug",
@@ -51,7 +50,6 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
 ]
 extra_kwargs = {
 "authorization_flow": {"required": True, "allow_null": False},
-"invalidation_flow": {"required": True, "allow_null": False},
 }


@@ -678,13 +678,10 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 if not request.tenant.impersonation:
 LOGGER.debug("User attempted to impersonate", user=request.user)
 return Response(status=401)
-user_to_be = self.get_object()
-# Check both object-level perms and global perms
-if not request.user.has_perm(
-"authentik_core.impersonate", user_to_be
-) and not request.user.has_perm("authentik_core.impersonate"):
+if not request.user.has_perm("impersonate"):
 LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
 return Response(status=401)
+user_to_be = self.get_object()
 if user_to_be.pk == self.request.user.pk:
 LOGGER.debug("User attempted to impersonate themselves", user=request.user)
 return Response(status=401)
@@ -9,11 +9,10 @@ class Command(TenantCommand):

 def add_arguments(self, parser):
 parser.add_argument("--type", type=str, required=True)
-parser.add_argument("--all", action="store_true", default=False)
-parser.add_argument("usernames", nargs="*", type=str)
+parser.add_argument("--all", action="store_true")
+parser.add_argument("usernames", nargs="+", type=str)

 def handle_per_tenant(self, **options):
-print(options)
 new_type = UserTypes(options["type"])
 qs = (
 User.objects.exclude_anonymous()
@@ -23,9 +22,6 @@ class Command(TenantCommand):
 if options["usernames"] and options["all"]:
 self.stderr.write("--all and usernames specified, only one can be specified")
 return
-if not options["usernames"] and not options["all"]:
-self.stderr.write("--all or usernames must be specified")
-return
 if options["usernames"] and not options["all"]:
 qs = qs.filter(username__in=options["usernames"])
 updated = qs.update(type=new_type)
@@ -1,55 +0,0 @@
-# Generated by Django 5.0.9 on 2024-10-02 11:35
-
-import django.db.models.deletion
-from django.db import migrations, models
-
-from django.apps.registry import Apps
-from django.db import migrations, models
-from django.db.backends.base.schema import BaseDatabaseSchemaEditor
-
-
-def migrate_invalidation_flow_default(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
-from authentik.flows.models import FlowDesignation, FlowAuthenticationRequirement
-
-db_alias = schema_editor.connection.alias
-
-Flow = apps.get_model("authentik_flows", "Flow")
-Provider = apps.get_model("authentik_core", "Provider")
-
-# So this flow is managed via a blueprint, bue we're in a migration so we don't want to rely on that
-# since the blueprint is just an empty flow we can just create it here
-# and let it be managed by the blueprint later
-flow, _ = Flow.objects.using(db_alias).update_or_create(
-slug="default-provider-invalidation-flow",
-defaults={
-"name": "Logged out of application",
-"title": "You've logged out of %(app)s.",
-"authentication": FlowAuthenticationRequirement.NONE,
-"designation": FlowDesignation.INVALIDATION,
-},
-)
-Provider.objects.using(db_alias).filter(invalidation_flow=None).update(invalidation_flow=flow)
-
-
-class Migration(migrations.Migration):
-
-dependencies = [
-("authentik_core", "0039_source_group_matching_mode_alter_group_name_and_more"),
-("authentik_flows", "0027_auto_20231028_1424"),
-]
-
-operations = [
-migrations.AddField(
-model_name="provider",
-name="invalidation_flow",
-field=models.ForeignKey(
-default=None,
-help_text="Flow used ending the session from a provider.",
-null=True,
-on_delete=django.db.models.deletion.SET_DEFAULT,
-related_name="provider_invalidation",
-to="authentik_flows.flow",
-),
-),
-migrations.RunPython(migrate_invalidation_flow_default),
-]
@@ -391,23 +391,14 @@ class Provider(SerializerModel):
 ),
 related_name="provider_authentication",
 )

 authorization_flow = models.ForeignKey(
 "authentik_flows.Flow",
-# Set to cascade even though null is allowed, since most providers
-# still require an authorization flow set
 on_delete=models.CASCADE,
 null=True,
 help_text=_("Flow used when authorizing this provider."),
 related_name="provider_authorization",
 )
-invalidation_flow = models.ForeignKey(
-"authentik_flows.Flow",
-on_delete=models.SET_DEFAULT,
-default=None,
-null=True,
-help_text=_("Flow used ending the session from a provider."),
-related_name="provider_invalidation",
-)

 property_mappings = models.ManyToManyField("PropertyMapping", default=None, blank=True)

@@ -475,6 +466,8 @@ class ApplicationQuerySet(QuerySet):
 def with_provider(self) -> "QuerySet[Application]":
 qs = self.select_related("provider")
 for subclass in Provider.objects.get_queryset()._get_subclasses_recurse(Provider):
+if LOOKUP_SEP in subclass:
+continue
 qs = qs.select_related(f"provider__{subclass}")
 return qs

@@ -552,24 +545,15 @@ class Application(SerializerModel, PolicyBindingModel):
 if not self.provider:
 return None

-candidates = []
-base_class = Provider
-for subclass in base_class.objects.get_queryset()._get_subclasses_recurse(base_class):
-parent = self.provider
-for level in subclass.split(LOOKUP_SEP):
-try:
-parent = getattr(parent, level)
-except AttributeError:
-break
-if parent in candidates:
+for subclass in Provider.objects.get_queryset()._get_subclasses_recurse(Provider):
+# We don't care about recursion, skip nested models
+if LOOKUP_SEP in subclass:
 continue
-idx = subclass.count(LOOKUP_SEP)
-if type(parent) is not base_class:
-idx += 1
-candidates.insert(idx, parent)
-if not candidates:
+try:
+return getattr(self.provider, subclass)
+except AttributeError:
+pass
 return None
-return candidates[-1]

 def __str__(self):
 return str(self.name)
@@ -802,25 +786,12 @@ class ExpiringModel(models.Model):
 return self.delete(*args, **kwargs)

 @classmethod
-def _not_expired_filter(cls):
-return Q(expires__gt=now(), expiring=True) | Q(expiring=False)
-
-@classmethod
-def filter_not_expired(cls, delete_expired=False, **kwargs) -> QuerySet["ExpiringModel"]:
+def filter_not_expired(cls, **kwargs) -> QuerySet["Token"]:
 """Filer for tokens which are not expired yet or are not expiring,
 and match filters in `kwargs`"""
-if delete_expired:
-cls.delete_expired(**kwargs)
-return cls.objects.filter(cls._not_expired_filter()).filter(**kwargs)
+for obj in cls.objects.filter(**kwargs).filter(Q(expires__lt=now(), expiring=True)):
+obj.delete()
+return cls.objects.filter(**kwargs)

-@classmethod
-def delete_expired(cls, **kwargs) -> int:
-objects = cls.objects.all().exclude(cls._not_expired_filter()).filter(**kwargs)
-amount = 0
-for obj in objects:
-obj.expire_action()
-amount += 1
-return amount
-
 @property
 def is_expired(self) -> bool:
@@ -930,7 +901,7 @@ class PropertyMapping(SerializerModel, ManagedModel):
 except ControlFlowException as exc:
 raise exc
 except Exception as exc:
-raise PropertyMappingExpressionException(exc, self) from exc
+raise PropertyMappingExpressionException(self, exc) from exc

 def __str__(self):
 return f"Property Mapping {self.name}"
@@ -30,7 +30,12 @@ def clean_expired_models(self: SystemTask):
 messages = []
 for cls in ExpiringModel.__subclasses__():
 cls: ExpiringModel
-amount = cls.delete_expired()
+objects = (
+cls.objects.all().exclude(expiring=False).exclude(expiring=True, expires__gt=now())
+)
+amount = objects.count()
+for obj in objects:
+obj.expire_action()
 LOGGER.debug("Expired models", model=cls, amount=amount)
 messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}")
 # Special case
43  authentik/core/templates/if/end_session.html  Normal file
@@ -0,0 +1,43 @@
+{% extends 'login/base_full.html' %}
+
+{% load static %}
+{% load i18n %}
+
+{% block title %}
+{% trans 'End session' %} - {{ brand.branding_title }}
+{% endblock %}
+
+{% block card_title %}
+{% blocktrans with application=application.name %}
+You've logged out of {{ application }}.
+{% endblocktrans %}
+{% endblock %}
+
+{% block card %}
+<form method="POST" class="pf-c-form">
+<p>
+{% blocktrans with application=application.name branding_title=brand.branding_title %}
+You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your {{ branding_title }} account.
+{% endblocktrans %}
+</p>
+
+<a id="ak-back-home" href="{% url 'authentik_core:root-redirect' %}" class="pf-c-button pf-m-primary">
+{% trans 'Go back to overview' %}
+</a>
+
+<a id="logout" href="{% url 'authentik_flows:default-invalidation' %}" class="pf-c-button pf-m-secondary">
+{% blocktrans with branding_title=brand.branding_title %}
+Log out of {{ branding_title }}
+{% endblocktrans %}
+</a>
+
+{% if application.get_launch_url %}
+<a href="{{ application.get_launch_url }}" class="pf-c-button pf-m-secondary">
+{% blocktrans with application=application.name %}
+Log back into {{ application }}
+{% endblocktrans %}
+</a>
+{% endif %}
+
+</form>
+{% endblock %}
@@ -9,12 +9,9 @@ from rest_framework.test import APITestCase

 from authentik.core.models import Application
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow
-from authentik.lib.generators import generate_id
 from authentik.policies.dummy.models import DummyPolicy
 from authentik.policies.models import PolicyBinding
 from authentik.providers.oauth2.models import OAuth2Provider
-from authentik.providers.proxy.models import ProxyProvider
-from authentik.providers.saml.models import SAMLProvider


 class TestApplicationsAPI(APITestCase):
@@ -134,7 +131,6 @@ class TestApplicationsAPI(APITestCase):
 "assigned_application_name": "allowed",
 "assigned_application_slug": "allowed",
 "authentication_flow": None,
-"invalidation_flow": None,
 "authorization_flow": str(self.provider.authorization_flow.pk),
 "component": "ak-provider-oauth2-form",
 "meta_model_name": "authentik_providers_oauth2.oauth2provider",
@@ -187,7 +183,6 @@ class TestApplicationsAPI(APITestCase):
 "assigned_application_name": "allowed",
 "assigned_application_slug": "allowed",
 "authentication_flow": None,
-"invalidation_flow": None,
 "authorization_flow": str(self.provider.authorization_flow.pk),
 "component": "ak-provider-oauth2-form",
 "meta_model_name": "authentik_providers_oauth2.oauth2provider",
@@ -227,31 +222,3 @@ class TestApplicationsAPI(APITestCase):
 ],
 },
 )
-
-def test_get_provider(self):
-"""Ensure that proxy providers (at the time of writing that is the only provider
-that inherits from another proxy type (OAuth) instead of inheriting from the root
-provider class) is correctly looked up and selected from the database"""
-slug = generate_id()
-provider = ProxyProvider.objects.create(name=generate_id())
-Application.objects.create(
-name=generate_id(),
-slug=slug,
-provider=provider,
-)
-self.assertEqual(Application.objects.get(slug=slug).get_provider(), provider)
-self.assertEqual(
-Application.objects.with_provider().get(slug=slug).get_provider(), provider
-)
-
-slug = generate_id()
-provider = SAMLProvider.objects.create(name=generate_id())
-Application.objects.create(
-name=generate_id(),
-slug=slug,
-provider=provider,
-)
-self.assertEqual(Application.objects.get(slug=slug).get_provider(), provider)
-self.assertEqual(
-Application.objects.with_provider().get(slug=slug).get_provider(), provider
-)
@@ -3,10 +3,10 @@
 from json import loads

 from django.urls import reverse
-from guardian.shortcuts import assign_perm
 from rest_framework.test import APITestCase

-from authentik.core.tests.utils import create_test_admin_user, create_test_user
+from authentik.core.models import User
+from authentik.core.tests.utils import create_test_admin_user
 from authentik.tenants.utils import get_current_tenant


@@ -15,7 +15,7 @@ class TestImpersonation(APITestCase):

 def setUp(self) -> None:
 super().setUp()
-self.other_user = create_test_user()
+self.other_user = User.objects.create(username="to-impersonate")
 self.user = create_test_admin_user()

 def test_impersonate_simple(self):
@@ -44,46 +44,6 @@ class TestImpersonation(APITestCase):
 self.assertEqual(response_body["user"]["username"], self.user.username)
 self.assertNotIn("original", response_body)

-def test_impersonate_global(self):
-"""Test impersonation with global permissions"""
-new_user = create_test_user()
-assign_perm("authentik_core.impersonate", new_user)
-assign_perm("authentik_core.view_user", new_user)
-self.client.force_login(new_user)
-
-response = self.client.post(
-reverse(
-"authentik_api:user-impersonate",
-kwargs={"pk": self.other_user.pk},
-)
-)
-self.assertEqual(response.status_code, 201)
-
-response = self.client.get(reverse("authentik_api:user-me"))
-response_body = loads(response.content.decode())
-self.assertEqual(response_body["user"]["username"], self.other_user.username)
-self.assertEqual(response_body["original"]["username"], new_user.username)
-
-def test_impersonate_scoped(self):
-"""Test impersonation with scoped permissions"""
-new_user = create_test_user()
-assign_perm("authentik_core.impersonate", new_user, self.other_user)
-assign_perm("authentik_core.view_user", new_user, self.other_user)
-self.client.force_login(new_user)
-
-response = self.client.post(
-reverse(
-"authentik_api:user-impersonate",
-kwargs={"pk": self.other_user.pk},
-)
-)
-self.assertEqual(response.status_code, 201)
-
-response = self.client.get(reverse("authentik_api:user-me"))
-response_body = loads(response.content.decode())
-self.assertEqual(response_body["user"]["username"], self.other_user.username)
-self.assertEqual(response_body["original"]["username"], new_user.username)
-
 def test_impersonate_denied(self):
 """test impersonation without permissions"""
 self.client.force_login(self.other_user)
@@ -19,6 +19,7 @@ class TestTransactionalApplicationsAPI(APITestCase):
 """Test transactional Application + provider creation"""
 self.client.force_login(self.user)
 uid = generate_id()
+authorization_flow = create_test_flow()
 response = self.client.put(
 reverse("authentik_api:core-transactional-application"),
 data={
@@ -29,8 +30,7 @@ class TestTransactionalApplicationsAPI(APITestCase):
 "provider_model": "authentik_providers_oauth2.oauth2provider",
 "provider": {
 "name": uid,
-"authorization_flow": str(create_test_flow().pk),
-"invalidation_flow": str(create_test_flow().pk),
+"authorization_flow": str(authorization_flow.pk),
 },
 },
 )
@@ -56,16 +56,10 @@ class TestTransactionalApplicationsAPI(APITestCase):
 "provider": {
 "name": uid,
 "authorization_flow": "",
-"invalidation_flow": "",
 },
 },
 )
 self.assertJSONEqual(
 response.content.decode(),
-{
-"provider": {
-"authorization_flow": ["This field may not be null."],
-"invalidation_flow": ["This field may not be null."],
-}
-},
+{"provider": {"authorization_flow": ["This field may not be null."]}},
 )
@@ -24,6 +24,7 @@ from authentik.core.views.interface import (
 InterfaceView,
 RootRedirectView,
 )
+from authentik.core.views.session import EndSessionView
 from authentik.flows.views.interface import FlowInterfaceView
 from authentik.root.asgi_middleware import SessionMiddleware
 from authentik.root.messages.consumer import MessageConsumer
@@ -59,6 +60,11 @@ urlpatterns = [
 ensure_csrf_cookie(FlowInterfaceView.as_view()),
 name="if-flow",
 ),
+path(
+"if/session-end/<slug:application_slug>/",
+ensure_csrf_cookie(EndSessionView.as_view()),
+name="if-session-end",
+),
 # Fallback for WS
 path("ws/outpost/<uuid:pk>/", InterfaceView.as_view(template_name="if/admin.html")),
 path(
23  authentik/core/views/session.py  Normal file
@@ -0,0 +1,23 @@
+"""authentik Session Views"""
+
+from typing import Any
+
+from django.shortcuts import get_object_or_404
+from django.views.generic.base import TemplateView
+
+from authentik.core.models import Application
+from authentik.policies.views import PolicyAccessView
+
+
+class EndSessionView(TemplateView, PolicyAccessView):
+"""Allow the client to end the Session"""
+
+template_name = "if/end_session.html"
+
+def resolve_provider_application(self):
+self.application = get_object_or_404(Application, slug=self.kwargs["application_slug"])
+
+def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
+context = super().get_context_data(**kwargs)
+context["application"] = self.application
+return context
@@ -18,7 +18,7 @@ from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import ModelSerializer, PassiveSerializer
 from authentik.core.models import User, UserTypes
 from authentik.enterprise.license import LicenseKey, LicenseSummarySerializer
-from authentik.enterprise.models import License
+from authentik.enterprise.models import License, LicenseUsageStatus
 from authentik.rbac.decorators import permission_required
 from authentik.tenants.utils import get_unique_identifier

@@ -29,7 +29,7 @@ class EnterpriseRequiredMixin:

 def validate(self, attrs: dict) -> dict:
 """Check that a valid license exists"""
-if not LicenseKey.cached_summary().status.is_valid:
+if LicenseKey.cached_summary().status != LicenseUsageStatus.UNLICENSED:
 raise ValidationError(_("Enterprise is required to create/update this object."))
 return super().validate(attrs)

@@ -121,9 +121,6 @@ class LicenseKey:
 ),
 )
 except PyJWTError:
-unverified = decode(jwt, options={"verify_signature": False})
-if unverified["aud"] != get_license_aud():
-raise ValidationError("Invalid Install ID in license") from None
 raise ValidationError("Unable to verify license") from None
 return body

@@ -68,7 +68,6 @@ class TestEndpointsAPI(APITestCase):
 "name": self.provider.name,
 "authentication_flow": None,
 "authorization_flow": None,
-"invalidation_flow": None,
 "property_mappings": [],
 "connection_expiry": "hours=8",
 "delete_token_on_disconnect": False,
@@ -121,7 +120,6 @@ class TestEndpointsAPI(APITestCase):
 "name": self.provider.name,
 "authentication_flow": None,
 "authorization_flow": None,
-"invalidation_flow": None,
 "property_mappings": [],
 "component": "ak-provider-rac-form",
 "assigned_application_slug": self.app.slug,
@@ -151,7 +149,6 @@ class TestEndpointsAPI(APITestCase):
 "name": self.provider.name,
 "authentication_flow": None,
 "authorization_flow": None,
-"invalidation_flow": None,
 "property_mappings": [],
 "component": "ak-provider-rac-form",
 "assigned_application_slug": self.app.slug,
@@ -3,7 +3,7 @@
 from datetime import datetime

 from django.core.cache import cache
-from django.db.models.signals import post_delete, post_save, pre_save
+from django.db.models.signals import post_save, pre_save
 from django.dispatch import receiver
 from django.utils.timezone import get_current_timezone

@@ -27,9 +27,3 @@ def post_save_license(sender: type[License], instance: License, **_):
 """Trigger license usage calculation when license is saved"""
 cache.delete(CACHE_KEY_ENTERPRISE_LICENSE)
 enterprise_update_usage.delay()
-
-
-@receiver(post_delete, sender=License)
-def post_delete_license(sender: type[License], instance: License, **_):
-"""Clear license cache when license is deleted"""
-cache.delete(CACHE_KEY_ENTERPRISE_LICENSE)
@@ -69,5 +69,8 @@ class NotificationViewSet(
 @action(detail=False, methods=["post"])
 def mark_all_seen(self, request: Request) -> Response:
 """Mark all the user's notifications as seen"""
-Notification.objects.filter(user=request.user, seen=False).update(seen=True)
+notifications = Notification.objects.filter(user=request.user)
+for notification in notifications:
+notification.seen = True
+Notification.objects.bulk_update(notifications, ["seen"])
 return Response({}, status=204)
@@ -50,7 +50,7 @@ class ASNContextProcessor(MMDBContextProcessor):
 """Wrapper for Reader.asn"""
 with start_span(
 op="authentik.events.asn.asn",
-name=ip_address,
+description=ip_address,
 ):
 if not self.configured():
 return None
@@ -51,7 +51,7 @@ class GeoIPContextProcessor(MMDBContextProcessor):
 """Wrapper for Reader.city"""
 with start_span(
 op="authentik.events.geo.city",
-name=ip_address,
+description=ip_address,
 ):
 if not self.configured():
 return None
@@ -49,7 +49,6 @@ from authentik.policies.models import PolicyBindingModel
 from authentik.root.middleware import ClientIPMiddleware
 from authentik.stages.email.utils import TemplateEmailMessage
 from authentik.tenants.models import Tenant
-from authentik.tenants.utils import get_current_tenant

 LOGGER = get_logger()
 DISCORD_FIELD_LIMIT = 25
@@ -59,10 +58,6 @@ NOTIFICATION_SUMMARY_LENGTH = 75
 def default_event_duration():
 """Default duration an Event is saved.
 This is used as a fallback when no brand is available"""
-try:
-tenant = get_current_tenant()
-return now() + timedelta_from_string(tenant.event_retention)
-except Tenant.DoesNotExist:
 return now() + timedelta(days=365)


@@ -250,6 +245,12 @@ class Event(SerializerModel, ExpiringModel):
 if QS_QUERY in self.context["http_request"]["args"]:
 wrapped = self.context["http_request"]["args"][QS_QUERY]
 self.context["http_request"]["args"] = cleanse_dict(QueryDict(wrapped))
+if hasattr(request, "tenant"):
+tenant: Tenant = request.tenant
+# Because self.created only gets set on save, we can't use it's value here
+# hence we set self.created to now and then use it
+self.created = now()
+self.expires = self.created + timedelta_from_string(tenant.event_retention)
 if hasattr(request, "brand"):
 brand: Brand = request.brand
 self.brand = sanitize_dict(model_to_dict(brand))
@@ -13,7 +13,7 @@ from authentik.events.apps import SYSTEM_TASK_STATUS
 from authentik.events.models import Event, EventAction, SystemTask
 from authentik.events.tasks import event_notification_handler, gdpr_cleanup
 from authentik.flows.models import Stage
-from authentik.flows.planner import PLAN_CONTEXT_OUTPOST, PLAN_CONTEXT_SOURCE, FlowPlan
+from authentik.flows.planner import PLAN_CONTEXT_SOURCE, FlowPlan
 from authentik.flows.views.executor import SESSION_KEY_PLAN
 from authentik.root.monitoring import monitoring_set
 from authentik.stages.invitation.models import Invitation
@@ -38,9 +38,6 @@ def on_user_logged_in(sender, request: HttpRequest, user: User, **_):
 # Save the login method used
 kwargs[PLAN_CONTEXT_METHOD] = flow_plan.context[PLAN_CONTEXT_METHOD]
 kwargs[PLAN_CONTEXT_METHOD_ARGS] = flow_plan.context.get(PLAN_CONTEXT_METHOD_ARGS, {})
-if PLAN_CONTEXT_OUTPOST in flow_plan.context:
-# Save outpost context
-kwargs[PLAN_CONTEXT_OUTPOST] = flow_plan.context[PLAN_CONTEXT_OUTPOST]
 event = Event.new(EventAction.LOGIN, **kwargs).from_http(request, user=user)
 request.session[SESSION_LOGIN_EVENT] = event

@ -6,7 +6,6 @@ from django.db.models import Model
|
|||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
|
|
||||||
from authentik.core.models import default_token_key
|
from authentik.core.models import default_token_key
|
||||||
from authentik.events.models import default_event_duration
|
|
||||||
from authentik.lib.utils.reflection import get_apps
|
from authentik.lib.utils.reflection import get_apps
|
||||||
|
|
||||||
|
|
||||||
@ -21,7 +20,7 @@ def model_tester_factory(test_model: type[Model]) -> Callable:
|
|||||||
allowed = 0
|
allowed = 0
|
||||||
# Token-like objects need to lookup the current tenant to get the default token length
|
# Token-like objects need to lookup the current tenant to get the default token length
|
||||||
for field in test_model._meta.fields:
|
for field in test_model._meta.fields:
|
||||||
if field.default in [default_token_key, default_event_duration]:
|
if field.default == default_token_key:
|
||||||
allowed += 1
|
allowed += 1
|
||||||
with self.assertNumQueries(allowed):
|
with self.assertNumQueries(allowed):
|
||||||
str(test_model())
|
str(test_model())
|
||||||
|
@ -2,8 +2,7 @@
|
|||||||
|
|
||||||
from unittest.mock import MagicMock, patch
|
from unittest.mock import MagicMock, patch
|
||||||
|
|
||||||
from django.urls import reverse
|
from django.test import TestCase
|
||||||
from rest_framework.test import APITestCase
|
|
||||||
|
|
||||||
from authentik.core.models import Group, User
|
from authentik.core.models import Group, User
|
||||||
from authentik.events.models import (
|
from authentik.events.models import (
|
||||||
@ -11,7 +10,6 @@ from authentik.events.models import (
|
|||||||
EventAction,
|
EventAction,
|
||||||
Notification,
|
Notification,
|
||||||
NotificationRule,
|
NotificationRule,
|
||||||
NotificationSeverity,
|
|
||||||
NotificationTransport,
|
NotificationTransport,
|
||||||
NotificationWebhookMapping,
|
NotificationWebhookMapping,
|
||||||
TransportMode,
|
TransportMode,
|
||||||
@ -22,7 +20,7 @@ from authentik.policies.exceptions import PolicyException
|
|||||||
from authentik.policies.models import PolicyBinding
|
from authentik.policies.models import PolicyBinding
|
||||||
|
|
||||||
|
|
||||||
class TestEventsNotifications(APITestCase):
|
class TestEventsNotifications(TestCase):
|
||||||
"""Test Event Notifications"""
|
"""Test Event Notifications"""
|
||||||
|
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
@ -133,15 +131,3 @@ class TestEventsNotifications(APITestCase):
|
|||||||
Notification.objects.all().delete()
|
Notification.objects.all().delete()
|
||||||
Event.new(EventAction.CUSTOM_PREFIX).save()
|
Event.new(EventAction.CUSTOM_PREFIX).save()
|
||||||
self.assertEqual(Notification.objects.first().body, "foo")
|
self.assertEqual(Notification.objects.first().body, "foo")
|
||||||
|
|
||||||
def test_api_mark_all_seen(self):
|
|
||||||
"""Test mark_all_seen"""
|
|
||||||
self.client.force_login(self.user)
|
|
||||||
|
|
||||||
Notification.objects.create(
|
|
||||||
severity=NotificationSeverity.NOTICE, body="foo", user=self.user, seen=False
|
|
||||||
)
|
|
||||||
|
|
||||||
response = self.client.post(reverse("authentik_api:notification-mark-all-seen"))
|
|
||||||
self.assertEqual(response.status_code, 204)
|
|
||||||
self.assertFalse(Notification.objects.filter(body="foo", seen=False).exists())
|
|
||||||
|
@ -110,21 +110,8 @@ class FlowErrorChallenge(Challenge):
|
|||||||
class AccessDeniedChallenge(WithUserInfoChallenge):
|
class AccessDeniedChallenge(WithUserInfoChallenge):
|
||||||
"""Challenge when a flow's active stage calls `stage_invalid()`."""
|
"""Challenge when a flow's active stage calls `stage_invalid()`."""
|
||||||
|
|
||||||
component = CharField(default="ak-stage-access-denied")
|
|
||||||
|
|
||||||
error_message = CharField(required=False)
|
error_message = CharField(required=False)
|
||||||
|
component = CharField(default="ak-stage-access-denied")
|
||||||
|
|
||||||
class SessionEndChallenge(WithUserInfoChallenge):
|
|
||||||
"""Challenge for ending a session"""
|
|
||||||
|
|
||||||
component = CharField(default="ak-stage-session-end")
|
|
||||||
|
|
||||||
application_name = CharField(required=False)
|
|
||||||
application_launch_url = CharField(required=False)
|
|
||||||
|
|
||||||
invalidation_flow_url = CharField(required=False)
|
|
||||||
brand_name = CharField(required=True)
|
|
||||||
|
|
||||||
|
|
||||||
class PermissionDict(TypedDict):
|
class PermissionDict(TypedDict):
|
||||||
|
@ -6,18 +6,20 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
|||||||
|
|
||||||
|
|
||||||
def set_oobe_flow_authentication(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
def set_oobe_flow_authentication(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||||
from guardian.conf import settings as guardian_settings
|
from guardian.shortcuts import get_anonymous_user
|
||||||
|
|
||||||
Flow = apps.get_model("authentik_flows", "Flow")
|
Flow = apps.get_model("authentik_flows", "Flow")
|
||||||
User = apps.get_model("authentik_core", "User")
|
User = apps.get_model("authentik_core", "User")
|
||||||
|
|
||||||
db_alias = schema_editor.connection.alias
|
db_alias = schema_editor.connection.alias
|
||||||
|
|
||||||
users = (
|
users = User.objects.using(db_alias).exclude(username="akadmin")
|
||||||
User.objects.using(db_alias)
|
try:
|
||||||
.exclude(username="akadmin")
|
users = users.exclude(pk=get_anonymous_user().pk)
|
||||||
.exclude(username=guardian_settings.ANONYMOUS_USER_NAME)
|
|
||||||
)
|
except Exception: # nosec
|
||||||
|
pass
|
||||||
|
|
||||||
if users.exists():
|
if users.exists():
|
||||||
Flow.objects.using(db_alias).filter(slug="initial-setup").update(
|
Flow.objects.using(db_alias).filter(slug="initial-setup").update(
|
||||||
authentication="require_superuser"
|
authentication="require_superuser"
|
||||||
|
@ -107,9 +107,7 @@ class Stage(SerializerModel):
|
|||||||
|
|
||||||
|
|
||||||
def in_memory_stage(view: type["StageView"], **kwargs) -> Stage:
|
def in_memory_stage(view: type["StageView"], **kwargs) -> Stage:
|
||||||
"""Creates an in-memory stage instance, based on a `view` as view.
|
"""Creates an in-memory stage instance, based on a `view` as view."""
|
||||||
Any key-word arguments are set as attributes on the stage object,
|
|
||||||
accessible via `self.executor.current_stage`."""
|
|
||||||
stage = Stage()
|
stage = Stage()
|
||||||
# Because we can't pickle a locally generated function,
|
# Because we can't pickle a locally generated function,
|
||||||
# we set the view as a separate property and reference a generic function
|
# we set the view as a separate property and reference a generic function
|
||||||
|
@ -23,7 +23,6 @@ from authentik.flows.models import (
|
|||||||
in_memory_stage,
|
in_memory_stage,
|
||||||
)
|
)
|
||||||
from authentik.lib.config import CONFIG
|
from authentik.lib.config import CONFIG
|
||||||
from authentik.outposts.models import Outpost
|
|
||||||
from authentik.policies.engine import PolicyEngine
|
from authentik.policies.engine import PolicyEngine
|
||||||
from authentik.root.middleware import ClientIPMiddleware
|
from authentik.root.middleware import ClientIPMiddleware
|
||||||
|
|
||||||
@ -33,7 +32,6 @@ PLAN_CONTEXT_SSO = "is_sso"
|
|||||||
PLAN_CONTEXT_REDIRECT = "redirect"
|
PLAN_CONTEXT_REDIRECT = "redirect"
|
||||||
PLAN_CONTEXT_APPLICATION = "application"
|
PLAN_CONTEXT_APPLICATION = "application"
|
||||||
PLAN_CONTEXT_SOURCE = "source"
|
PLAN_CONTEXT_SOURCE = "source"
|
||||||
PLAN_CONTEXT_OUTPOST = "outpost"
|
|
||||||
# Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
|
# Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
|
||||||
# was restored.
|
# was restored.
|
||||||
PLAN_CONTEXT_IS_RESTORED = "is_restored"
|
PLAN_CONTEXT_IS_RESTORED = "is_restored"
|
||||||
@ -145,28 +143,15 @@ class FlowPlanner:
|
|||||||
and not request.user.is_superuser
|
and not request.user.is_superuser
|
||||||
):
|
):
|
||||||
raise FlowNonApplicableException()
|
raise FlowNonApplicableException()
|
||||||
outpost_user = ClientIPMiddleware.get_outpost_user(request)
|
|
||||||
if self.flow.authentication == FlowAuthenticationRequirement.REQUIRE_OUTPOST:
|
if self.flow.authentication == FlowAuthenticationRequirement.REQUIRE_OUTPOST:
|
||||||
|
outpost_user = ClientIPMiddleware.get_outpost_user(request)
|
||||||
if not outpost_user:
|
if not outpost_user:
|
||||||
raise FlowNonApplicableException()
|
raise FlowNonApplicableException()
|
||||||
if outpost_user:
|
|
||||||
outpost = Outpost.objects.filter(
|
|
||||||
# TODO: Since Outpost and user are not directly connected, we have to look up a user
|
|
||||||
# like this. This should ideally by in authentik/outposts/models.py
|
|
||||||
pk=outpost_user.username.replace("ak-outpost-", "")
|
|
||||||
).first()
|
|
||||||
if outpost:
|
|
||||||
return {
|
|
||||||
PLAN_CONTEXT_OUTPOST: {
|
|
||||||
"instance": outpost,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return {}
|
|
||||||
|
|
||||||
def plan(self, request: HttpRequest, default_context: dict[str, Any] | None = None) -> FlowPlan:
|
def plan(self, request: HttpRequest, default_context: dict[str, Any] | None = None) -> FlowPlan:
|
||||||
"""Check each of the flows' policies, check policies for each stage with PolicyBinding
|
"""Check each of the flows' policies, check policies for each stage with PolicyBinding
|
||||||
and return ordered list"""
|
and return ordered list"""
|
||||||
with start_span(op="authentik.flow.planner.plan", name=self.flow.slug) as span:
|
with start_span(op="authentik.flow.planner.plan", description=self.flow.slug) as span:
|
||||||
span: Span
|
span: Span
|
||||||
span.set_data("flow", self.flow)
|
span.set_data("flow", self.flow)
|
||||||
span.set_data("request", request)
|
span.set_data("request", request)
|
||||||
@ -174,12 +159,11 @@ class FlowPlanner:
|
|||||||
self._logger.debug(
|
self._logger.debug(
|
||||||
"f(plan): starting planning process",
|
"f(plan): starting planning process",
|
||||||
)
|
)
|
||||||
context = default_context or {}
|
|
||||||
# Bit of a workaround here, if there is a pending user set in the default context
|
# Bit of a workaround here, if there is a pending user set in the default context
|
||||||
# we use that user for our cache key
|
# we use that user for our cache key
|
||||||
# to make sure they don't get the generic response
|
# to make sure they don't get the generic response
|
||||||
if context and PLAN_CONTEXT_PENDING_USER in context:
|
if default_context and PLAN_CONTEXT_PENDING_USER in default_context:
|
||||||
user = context[PLAN_CONTEXT_PENDING_USER]
|
user = default_context[PLAN_CONTEXT_PENDING_USER]
|
||||||
else:
|
else:
|
||||||
user = request.user
|
user = request.user
|
||||||
# We only need to check the flow authentication if it's planned without a user
|
# We only need to check the flow authentication if it's planned without a user
|
||||||
@ -187,13 +171,14 @@ class FlowPlanner:
|
|||||||
# or if a flow is restarted due to `invalid_response_action` being set to
|
# or if a flow is restarted due to `invalid_response_action` being set to
|
||||||
# `restart_with_context`, which can only happen if the user was already authorized
|
# `restart_with_context`, which can only happen if the user was already authorized
|
||||||
# to use the flow
|
# to use the flow
|
||||||
context.update(self._check_authentication(request))
|
self._check_authentication(request)
|
||||||
# First off, check the flow's direct policy bindings
|
# First off, check the flow's direct policy bindings
|
||||||
# to make sure the user even has access to the flow
|
# to make sure the user even has access to the flow
|
||||||
engine = PolicyEngine(self.flow, user, request)
|
engine = PolicyEngine(self.flow, user, request)
|
||||||
engine.use_cache = self.use_cache
|
engine.use_cache = self.use_cache
|
||||||
span.set_data("context", cleanse_dict(context))
|
if default_context:
|
||||||
engine.request.context.update(context)
|
span.set_data("default_context", cleanse_dict(default_context))
|
||||||
|
engine.request.context.update(default_context)
|
||||||
engine.build()
|
engine.build()
|
||||||
result = engine.result
|
result = engine.result
|
||||||
if not result.passing:
|
if not result.passing:
|
||||||
@ -210,12 +195,12 @@ class FlowPlanner:
|
|||||||
key=cached_plan_key,
|
key=cached_plan_key,
|
||||||
)
|
)
|
||||||
# Reset the context as this isn't factored into caching
|
# Reset the context as this isn't factored into caching
|
||||||
cached_plan.context = context
|
cached_plan.context = default_context or {}
|
||||||
return cached_plan
|
return cached_plan
|
||||||
self._logger.debug(
|
self._logger.debug(
|
||||||
"f(plan): building plan",
|
"f(plan): building plan",
|
||||||
)
|
)
|
||||||
plan = self._build_plan(user, request, context)
|
plan = self._build_plan(user, request, default_context)
|
||||||
if self.use_cache:
|
if self.use_cache:
|
||||||
cache.set(cache_key(self.flow, user), plan, CACHE_TIMEOUT)
|
cache.set(cache_key(self.flow, user), plan, CACHE_TIMEOUT)
|
||||||
if not plan.bindings and not self.allow_empty_flows:
|
if not plan.bindings and not self.allow_empty_flows:
|
||||||
@ -233,7 +218,7 @@ class FlowPlanner:
|
|||||||
with (
|
with (
|
||||||
start_span(
|
start_span(
|
||||||
op="authentik.flow.planner.build_plan",
|
op="authentik.flow.planner.build_plan",
|
||||||
name=self.flow.slug,
|
description=self.flow.slug,
|
||||||
) as span,
|
) as span,
|
||||||
HIST_FLOWS_PLAN_TIME.labels(flow_slug=self.flow.slug).time(),
|
HIST_FLOWS_PLAN_TIME.labels(flow_slug=self.flow.slug).time(),
|
||||||
):
|
):
|
||||||
|
@ -13,7 +13,7 @@ from rest_framework.request import Request
|
|||||||
from sentry_sdk import start_span
|
from sentry_sdk import start_span
|
||||||
from structlog.stdlib import BoundLogger, get_logger
|
from structlog.stdlib import BoundLogger, get_logger
|
||||||
|
|
||||||
from authentik.core.models import Application, User
|
from authentik.core.models import User
|
||||||
from authentik.flows.challenge import (
|
from authentik.flows.challenge import (
|
||||||
AccessDeniedChallenge,
|
AccessDeniedChallenge,
|
||||||
Challenge,
|
Challenge,
|
||||||
@ -21,7 +21,6 @@ from authentik.flows.challenge import (
|
|||||||
ContextualFlowInfo,
|
ContextualFlowInfo,
|
||||||
HttpChallengeResponse,
|
HttpChallengeResponse,
|
||||||
RedirectChallenge,
|
RedirectChallenge,
|
||||||
SessionEndChallenge,
|
|
||||||
WithUserInfoChallenge,
|
WithUserInfoChallenge,
|
||||||
)
|
)
|
||||||
from authentik.flows.exceptions import StageInvalidException
|
from authentik.flows.exceptions import StageInvalidException
|
||||||
@ -126,7 +125,7 @@ class ChallengeStageView(StageView):
|
|||||||
with (
|
with (
|
||||||
start_span(
|
start_span(
|
||||||
op="authentik.flow.stage.challenge_invalid",
|
op="authentik.flow.stage.challenge_invalid",
|
||||||
name=self.__class__.__name__,
|
description=self.__class__.__name__,
|
||||||
),
|
),
|
||||||
HIST_FLOWS_STAGE_TIME.labels(
|
HIST_FLOWS_STAGE_TIME.labels(
|
||||||
stage_type=self.__class__.__name__, method="challenge_invalid"
|
stage_type=self.__class__.__name__, method="challenge_invalid"
|
||||||
@ -136,7 +135,7 @@ class ChallengeStageView(StageView):
|
|||||||
with (
|
with (
|
||||||
start_span(
|
start_span(
|
||||||
op="authentik.flow.stage.challenge_valid",
|
op="authentik.flow.stage.challenge_valid",
|
||||||
name=self.__class__.__name__,
|
description=self.__class__.__name__,
|
||||||
),
|
),
|
||||||
HIST_FLOWS_STAGE_TIME.labels(
|
HIST_FLOWS_STAGE_TIME.labels(
|
||||||
stage_type=self.__class__.__name__, method="challenge_valid"
|
stage_type=self.__class__.__name__, method="challenge_valid"
|
||||||
@ -162,7 +161,7 @@ class ChallengeStageView(StageView):
|
|||||||
with (
|
with (
|
||||||
start_span(
|
start_span(
|
||||||
op="authentik.flow.stage.get_challenge",
|
op="authentik.flow.stage.get_challenge",
|
||||||
name=self.__class__.__name__,
|
description=self.__class__.__name__,
|
||||||
),
|
),
|
||||||
HIST_FLOWS_STAGE_TIME.labels(
|
HIST_FLOWS_STAGE_TIME.labels(
|
||||||
stage_type=self.__class__.__name__, method="get_challenge"
|
stage_type=self.__class__.__name__, method="get_challenge"
|
||||||
@ -175,7 +174,7 @@ class ChallengeStageView(StageView):
|
|||||||
return self.executor.stage_invalid()
|
return self.executor.stage_invalid()
|
||||||
with start_span(
|
with start_span(
|
||||||
op="authentik.flow.stage._get_challenge",
|
op="authentik.flow.stage._get_challenge",
|
||||||
name=self.__class__.__name__,
|
description=self.__class__.__name__,
|
||||||
):
|
):
|
||||||
if not hasattr(challenge, "initial_data"):
|
if not hasattr(challenge, "initial_data"):
|
||||||
challenge.initial_data = {}
|
challenge.initial_data = {}
|
||||||
@ -231,7 +230,7 @@ class ChallengeStageView(StageView):
|
|||||||
return HttpChallengeResponse(challenge_response)
|
return HttpChallengeResponse(challenge_response)
|
||||||
|
|
||||||
|
|
||||||
class AccessDeniedStage(ChallengeStageView):
|
class AccessDeniedChallengeView(ChallengeStageView):
|
||||||
"""Used internally by FlowExecutor's stage_invalid()"""
|
"""Used internally by FlowExecutor's stage_invalid()"""
|
||||||
|
|
||||||
error_message: str | None
|
error_message: str | None
|
||||||
@ -269,31 +268,3 @@ class RedirectStage(ChallengeStageView):
|
|||||||
|
|
||||||
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
|
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
|
||||||
return HttpChallengeResponse(self.get_challenge())
|
return HttpChallengeResponse(self.get_challenge())
|
||||||
|
|
||||||
|
|
||||||
class SessionEndStage(ChallengeStageView):
|
|
||||||
"""Stage inserted when a flow is used as invalidation flow. By default shows actions
|
|
||||||
that the user is likely to take after signing out of a provider."""
|
|
||||||
|
|
||||||
def get_challenge(self, *args, **kwargs) -> Challenge:
|
|
||||||
application: Application | None = self.executor.plan.context.get(PLAN_CONTEXT_APPLICATION)
|
|
||||||
data = {
|
|
||||||
"component": "ak-stage-session-end",
|
|
||||||
"brand_name": self.request.brand.branding_title,
|
|
||||||
}
|
|
||||||
if application:
|
|
||||||
data["application_name"] = application.name
|
|
||||||
data["application_launch_url"] = application.get_launch_url(self.get_pending_user())
|
|
||||||
if self.request.brand.flow_invalidation:
|
|
||||||
data["invalidation_flow_url"] = reverse(
|
|
||||||
"authentik_core:if-flow",
|
|
||||||
kwargs={
|
|
||||||
"flow_slug": self.request.brand.flow_invalidation.slug,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
return SessionEndChallenge(data=data)
|
|
||||||
|
|
||||||
# This can never be reached since this challenge is created on demand and only the
|
|
||||||
# .get() method is called
|
|
||||||
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse: # pragma: no cover
|
|
||||||
return self.executor.cancel()
|
|
||||||
|
@ -54,7 +54,7 @@ from authentik.flows.planner import (
|
|||||||
FlowPlan,
|
FlowPlan,
|
||||||
FlowPlanner,
|
FlowPlanner,
|
||||||
)
|
)
|
||||||
from authentik.flows.stage import AccessDeniedStage, StageView
|
from authentik.flows.stage import AccessDeniedChallengeView, StageView
|
||||||
from authentik.lib.sentry import SentryIgnoredException
|
from authentik.lib.sentry import SentryIgnoredException
|
||||||
from authentik.lib.utils.errors import exception_to_string
|
from authentik.lib.utils.errors import exception_to_string
|
||||||
from authentik.lib.utils.reflection import all_subclasses, class_to_path
|
from authentik.lib.utils.reflection import all_subclasses, class_to_path
|
||||||
@ -153,7 +153,7 @@ class FlowExecutorView(APIView):
|
|||||||
return plan
|
return plan
|
||||||
|
|
||||||
def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse:
|
def dispatch(self, request: HttpRequest, flow_slug: str) -> HttpResponse:
|
||||||
with start_span(op="authentik.flow.executor.dispatch", name=self.flow.slug) as span:
|
with start_span(op="authentik.flow.executor.dispatch", description=self.flow.slug) as span:
|
||||||
span.set_data("authentik Flow", self.flow.slug)
|
span.set_data("authentik Flow", self.flow.slug)
|
||||||
get_params = QueryDict(request.GET.get(QS_QUERY, ""))
|
get_params = QueryDict(request.GET.get(QS_QUERY, ""))
|
||||||
if QS_KEY_TOKEN in get_params:
|
if QS_KEY_TOKEN in get_params:
|
||||||
@ -273,7 +273,7 @@ class FlowExecutorView(APIView):
|
|||||||
with (
|
with (
|
||||||
start_span(
|
start_span(
|
||||||
op="authentik.flow.executor.stage",
|
op="authentik.flow.executor.stage",
|
||||||
name=class_path,
|
description=class_path,
|
||||||
) as span,
|
) as span,
|
||||||
HIST_FLOW_EXECUTION_STAGE_TIME.labels(
|
HIST_FLOW_EXECUTION_STAGE_TIME.labels(
|
||||||
method=request.method.upper(),
|
method=request.method.upper(),
|
||||||
@ -324,7 +324,7 @@ class FlowExecutorView(APIView):
|
|||||||
with (
|
with (
|
||||||
start_span(
|
start_span(
|
||||||
op="authentik.flow.executor.stage",
|
op="authentik.flow.executor.stage",
|
||||||
name=class_path,
|
description=class_path,
|
||||||
) as span,
|
) as span,
|
||||||
HIST_FLOW_EXECUTION_STAGE_TIME.labels(
|
HIST_FLOW_EXECUTION_STAGE_TIME.labels(
|
||||||
method=request.method.upper(),
|
method=request.method.upper(),
|
||||||
@ -441,7 +441,7 @@ class FlowExecutorView(APIView):
|
|||||||
)
|
)
|
||||||
return self.restart_flow(keep_context)
|
return self.restart_flow(keep_context)
|
||||||
self.cancel()
|
self.cancel()
|
||||||
challenge_view = AccessDeniedStage(self, error_message)
|
challenge_view = AccessDeniedChallengeView(self, error_message)
|
||||||
challenge_view.request = self.request
|
challenge_view.request = self.request
|
||||||
return to_stage_response(self.request, challenge_view.get(self.request))
|
return to_stage_response(self.request, challenge_view.get(self.request))
|
||||||
|
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
# update website/docs/install-config/configuration/configuration.mdx
|
# update website/docs/installation/configuration.mdx
|
||||||
# This is the default configuration file
|
# This is the default configuration file
|
||||||
postgresql:
|
postgresql:
|
||||||
host: localhost
|
host: localhost
|
||||||
|
@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
import re
|
import re
|
||||||
import socket
|
import socket
|
||||||
|
from collections.abc import Iterable
|
||||||
from ipaddress import ip_address, ip_network
|
from ipaddress import ip_address, ip_network
|
||||||
from textwrap import indent
|
from textwrap import indent
|
||||||
from types import CodeType
|
from types import CodeType
|
||||||
@ -27,12 +28,6 @@ from authentik.stages.authenticator import devices_for_user
|
|||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
|
|
||||||
ARG_SANITIZE = re.compile(r"[:.-]")
|
|
||||||
|
|
||||||
|
|
||||||
def sanitize_arg(arg_name: str) -> str:
|
|
||||||
return re.sub(ARG_SANITIZE, "_", arg_name)
|
|
||||||
|
|
||||||
|
|
||||||
class BaseEvaluator:
|
class BaseEvaluator:
|
||||||
"""Validate and evaluate python-based expressions"""
|
"""Validate and evaluate python-based expressions"""
|
||||||
@ -182,9 +177,9 @@ class BaseEvaluator:
|
|||||||
proc = PolicyProcess(PolicyBinding(policy=policy), request=req, connection=None)
|
proc = PolicyProcess(PolicyBinding(policy=policy), request=req, connection=None)
|
||||||
return proc.profiling_wrapper()
|
return proc.profiling_wrapper()
|
||||||
|
|
||||||
def wrap_expression(self, expression: str) -> str:
|
def wrap_expression(self, expression: str, params: Iterable[str]) -> str:
|
||||||
"""Wrap expression in a function, call it, and save the result as `result`"""
|
"""Wrap expression in a function, call it, and save the result as `result`"""
|
||||||
handler_signature = ",".join(sanitize_arg(x) for x in self._context.keys())
|
handler_signature = ",".join(params)
|
||||||
full_expression = ""
|
full_expression = ""
|
||||||
full_expression += f"def handler({handler_signature}):\n"
|
full_expression += f"def handler({handler_signature}):\n"
|
||||||
full_expression += indent(expression, " ")
|
full_expression += indent(expression, " ")
|
||||||
@ -193,8 +188,8 @@ class BaseEvaluator:
|
|||||||
|
|
||||||
def compile(self, expression: str) -> CodeType:
|
def compile(self, expression: str) -> CodeType:
|
||||||
"""Parse expression. Raises SyntaxError or ValueError if the syntax is incorrect."""
|
"""Parse expression. Raises SyntaxError or ValueError if the syntax is incorrect."""
|
||||||
expression = self.wrap_expression(expression)
|
param_keys = self._context.keys()
|
||||||
return compile(expression, self._filename, "exec")
|
return compile(self.wrap_expression(expression, param_keys), self._filename, "exec")
|
||||||
|
|
||||||
def evaluate(self, expression_source: str) -> Any:
|
def evaluate(self, expression_source: str) -> Any:
|
||||||
"""Parse and evaluate expression. If the syntax is incorrect, a SyntaxError is raised.
|
"""Parse and evaluate expression. If the syntax is incorrect, a SyntaxError is raised.
|
||||||
@ -210,7 +205,7 @@ class BaseEvaluator:
|
|||||||
self.handle_error(exc, expression_source)
|
self.handle_error(exc, expression_source)
|
||||||
raise exc
|
raise exc
|
||||||
try:
|
try:
|
||||||
_locals = {sanitize_arg(x): y for x, y in self._context.items()}
|
_locals = self._context
|
||||||
# Yes this is an exec, yes it is potentially bad. Since we limit what variables are
|
# Yes this is an exec, yes it is potentially bad. Since we limit what variables are
|
||||||
# available here, and these policies can only be edited by admins, this is a risk
|
# available here, and these policies can only be edited by admins, this is a risk
|
||||||
# we're willing to take.
|
# we're willing to take.
|
||||||
|
@ -30,11 +30,6 @@ class TestHTTP(TestCase):
|
|||||||
request = self.factory.get("/", HTTP_X_FORWARDED_FOR="127.0.0.2")
|
request = self.factory.get("/", HTTP_X_FORWARDED_FOR="127.0.0.2")
|
||||||
self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.2")
|
self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.2")
|
||||||
|
|
||||||
def test_forward_for_invalid(self):
|
|
||||||
"""Test invalid forward for"""
|
|
||||||
request = self.factory.get("/", HTTP_X_FORWARDED_FOR="foobar")
|
|
||||||
self.assertEqual(ClientIPMiddleware.get_client_ip(request), ClientIPMiddleware.default_ip)
|
|
||||||
|
|
||||||
def test_fake_outpost(self):
|
def test_fake_outpost(self):
|
||||||
"""Test faked IP which is overridden by an outpost"""
|
"""Test faked IP which is overridden by an outpost"""
|
||||||
token = Token.objects.create(
|
token = Token.objects.create(
|
||||||
@ -58,17 +53,6 @@ class TestHTTP(TestCase):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.1")
|
self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.1")
|
||||||
# Invalid, not a real IP
|
|
||||||
self.user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
|
|
||||||
self.user.save()
|
|
||||||
request = self.factory.get(
|
|
||||||
"/",
|
|
||||||
**{
|
|
||||||
ClientIPMiddleware.outpost_remote_ip_header: "foobar",
|
|
||||||
ClientIPMiddleware.outpost_token_header: token.key,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.1")
|
|
||||||
# Valid
|
# Valid
|
||||||
self.user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
|
self.user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
|
||||||
self.user.save()
|
self.user.save()
|
||||||
|
@ -9,7 +9,7 @@ from uuid import uuid4
|
|||||||
from dacite.core import from_dict
|
from dacite.core import from_dict
|
||||||
from django.contrib.auth.models import Permission
|
from django.contrib.auth.models import Permission
|
||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
from django.db import models, transaction
|
from django.db import IntegrityError, models, transaction
|
||||||
from django.db.models.base import Model
|
from django.db.models.base import Model
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from guardian.models import UserObjectPermission
|
from guardian.models import UserObjectPermission
|
||||||
@ -53,7 +53,7 @@ class ServiceConnectionInvalid(SentryIgnoredException):
|
|||||||
class OutpostConfig:
|
class OutpostConfig:
|
||||||
"""Configuration an outpost uses to configure it self"""
|
"""Configuration an outpost uses to configure it self"""
|
||||||
|
|
||||||
# update website/docs/add-secure-apps/outposts/_config.md
|
# update website/docs/outposts/_config.md
|
||||||
|
|
||||||
authentik_host: str = ""
|
authentik_host: str = ""
|
||||||
authentik_host_insecure: bool = False
|
authentik_host_insecure: bool = False
|
||||||
@ -380,14 +380,13 @@ class Outpost(SerializerModel, ManagedModel):
|
|||||||
"""Get/create token for auto-generated user"""
|
"""Get/create token for auto-generated user"""
|
||||||
managed = f"goauthentik.io/outpost/{self.token_identifier}"
|
managed = f"goauthentik.io/outpost/{self.token_identifier}"
|
||||||
tokens = Token.filter_not_expired(
|
tokens = Token.filter_not_expired(
|
||||||
delete_expired=True,
|
|
||||||
identifier=self.token_identifier,
|
identifier=self.token_identifier,
|
||||||
intent=TokenIntents.INTENT_API,
|
intent=TokenIntents.INTENT_API,
|
||||||
managed=managed,
|
managed=managed,
|
||||||
)
|
)
|
||||||
token: Token | None = tokens.first()
|
if tokens.exists():
|
||||||
if token:
|
return tokens.first()
|
||||||
return token
|
try:
|
||||||
return Token.objects.create(
|
return Token.objects.create(
|
||||||
user=self.user,
|
user=self.user,
|
||||||
identifier=self.token_identifier,
|
identifier=self.token_identifier,
|
||||||
@ -396,6 +395,11 @@ class Outpost(SerializerModel, ManagedModel):
|
|||||||
expiring=False,
|
expiring=False,
|
||||||
managed=managed,
|
managed=managed,
|
||||||
)
|
)
|
||||||
|
except IntegrityError:
|
||||||
|
# Integrity error happens mostly when managed is reused
|
||||||
|
Token.objects.filter(managed=managed).delete()
|
||||||
|
Token.objects.filter(identifier=self.token_identifier).delete()
|
||||||
|
return self.token
|
||||||
|
|
||||||
def get_required_objects(self) -> Iterable[models.Model | str]:
|
def get_required_objects(self) -> Iterable[models.Model | str]:
|
||||||
"""Get an iterator of all objects the user needs read access to"""
|
"""Get an iterator of all objects the user needs read access to"""
|
||||||
|
@ -113,7 +113,7 @@ class PolicyEngine:
|
|||||||
with (
|
with (
|
||||||
start_span(
|
start_span(
|
||||||
op="authentik.policy.engine.build",
|
op="authentik.policy.engine.build",
|
||||||
name=self.__pbm,
|
description=self.__pbm,
|
||||||
) as span,
|
) as span,
|
||||||
HIST_POLICIES_ENGINE_TOTAL_TIME.labels(
|
HIST_POLICIES_ENGINE_TOTAL_TIME.labels(
|
||||||
obj_type=class_to_path(self.__pbm.__class__),
|
obj_type=class_to_path(self.__pbm.__class__),
|
||||||
|
@ -87,7 +87,6 @@ class LDAPOutpostConfigSerializer(ModelSerializer):
|
|||||||
|
|
||||||
application_slug = SerializerMethodField()
|
application_slug = SerializerMethodField()
|
||||||
bind_flow_slug = CharField(source="authorization_flow.slug")
|
bind_flow_slug = CharField(source="authorization_flow.slug")
|
||||||
unbind_flow_slug = SerializerMethodField()
|
|
||||||
|
|
||||||
def get_application_slug(self, instance: LDAPProvider) -> str:
|
def get_application_slug(self, instance: LDAPProvider) -> str:
|
||||||
"""Prioritise backchannel slug over direct application slug"""
|
"""Prioritise backchannel slug over direct application slug"""
|
||||||
@ -95,16 +94,6 @@ class LDAPOutpostConfigSerializer(ModelSerializer):
|
|||||||
return instance.backchannel_application.slug
|
return instance.backchannel_application.slug
|
||||||
return instance.application.slug
|
return instance.application.slug
|
||||||
|
|
||||||
def get_unbind_flow_slug(self, instance: LDAPProvider) -> str | None:
|
|
||||||
"""Get slug for unbind flow, defaulting to brand's default flow."""
|
|
||||||
flow = instance.invalidation_flow
|
|
||||||
if not flow and "request" in self.context:
|
|
||||||
request = self.context.get("request")
|
|
||||||
flow = request.brand.flow_invalidation
|
|
||||||
if not flow:
|
|
||||||
return None
|
|
||||||
return flow.slug
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = LDAPProvider
|
model = LDAPProvider
|
||||||
fields = [
|
fields = [
|
||||||
@ -112,7 +101,6 @@ class LDAPOutpostConfigSerializer(ModelSerializer):
|
|||||||
"name",
|
"name",
|
||||||
"base_dn",
|
"base_dn",
|
||||||
"bind_flow_slug",
|
"bind_flow_slug",
|
||||||
"unbind_flow_slug",
|
|
||||||
"application_slug",
|
"application_slug",
|
||||||
"certificate",
|
"certificate",
|
||||||
"tls_server_name",
|
"tls_server_name",
|
||||||
|
@ -4,13 +4,13 @@ from django.apps.registry import Apps
|
|||||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||||
|
|
||||||
from django.db import migrations
|
from django.db import migrations
|
||||||
|
from django.contrib.auth.management import create_permissions
|
||||||
|
|
||||||
|
|
||||||
def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||||
|
from guardian.shortcuts import assign_perm
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
from django.apps import apps as real_apps
|
from django.apps import apps as real_apps
|
||||||
from django.contrib.auth.management import create_permissions
|
|
||||||
from guardian.shortcuts import UserObjectPermission
|
|
||||||
|
|
||||||
db_alias = schema_editor.connection.alias
|
db_alias = schema_editor.connection.alias
|
||||||
|
|
||||||
@ -20,25 +20,14 @@ def migrate_search_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
|||||||
create_permissions(real_apps.get_app_config("authentik_providers_ldap"), using=db_alias)
|
create_permissions(real_apps.get_app_config("authentik_providers_ldap"), using=db_alias)
|
||||||
|
|
||||||
LDAPProvider = apps.get_model("authentik_providers_ldap", "ldapprovider")
|
LDAPProvider = apps.get_model("authentik_providers_ldap", "ldapprovider")
|
||||||
Permission = apps.get_model("auth", "Permission")
|
|
||||||
UserObjectPermission = apps.get_model("guardian", "UserObjectPermission")
|
|
||||||
ContentType = apps.get_model("contenttypes", "ContentType")
|
|
||||||
|
|
||||||
new_prem = Permission.objects.using(db_alias).get(codename="search_full_directory")
|
|
||||||
ct = ContentType.objects.using(db_alias).get(
|
|
||||||
app_label="authentik_providers_ldap",
|
|
||||||
model="ldapprovider",
|
|
||||||
)
|
|
||||||
|
|
||||||
for provider in LDAPProvider.objects.using(db_alias).all():
|
for provider in LDAPProvider.objects.using(db_alias).all():
|
||||||
if not provider.search_group:
|
for user_pk in (
|
||||||
continue
|
provider.search_group.users.using(db_alias).all().values_list("pk", flat=True)
|
||||||
for user in provider.search_group.users.using(db_alias).all():
|
):
|
||||||
UserObjectPermission.objects.using(db_alias).create(
|
# We need the correct user model instance to assign the permission
|
||||||
user=user,
|
assign_perm(
|
||||||
permission=new_prem,
|
"search_full_directory", User.objects.using(db_alias).get(pk=user_pk), provider
|
||||||
object_pk=provider.pk,
|
|
||||||
content_type=ct,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -46,7 +35,6 @@ class Migration(migrations.Migration):
|
|||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("authentik_providers_ldap", "0003_ldapprovider_mfa_support_and_more"),
|
("authentik_providers_ldap", "0003_ldapprovider_mfa_support_and_more"),
|
||||||
("guardian", "0002_generic_permissions_index"),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
|
@ -1,23 +0,0 @@
|
|||||||
# Generated by Django 5.0.9 on 2024-09-26 16:25
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
("authentik_providers_oauth2", "0018_alter_accesstoken_expires_and_more"),
|
|
||||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AddIndex(
|
|
||||||
model_name="accesstoken",
|
|
||||||
index=models.Index(fields=["token"], name="authentik_p_token_4bc870_idx"),
|
|
||||||
),
|
|
||||||
migrations.AddIndex(
|
|
||||||
model_name="refreshtoken",
|
|
||||||
index=models.Index(fields=["token"], name="authentik_p_token_1a841f_idx"),
|
|
||||||
),
|
|
||||||
]
|
|
@ -1,31 +0,0 @@
|
|||||||
# Generated by Django 5.0.9 on 2024-09-27 14:50
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
("authentik_providers_oauth2", "0019_accesstoken_authentik_p_token_4bc870_idx_and_more"),
|
|
||||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.RemoveIndex(
|
|
||||||
model_name="accesstoken",
|
|
||||||
name="authentik_p_token_4bc870_idx",
|
|
||||||
),
|
|
||||||
migrations.RemoveIndex(
|
|
||||||
model_name="refreshtoken",
|
|
||||||
name="authentik_p_token_1a841f_idx",
|
|
||||||
),
|
|
||||||
migrations.AddIndex(
|
|
||||||
model_name="accesstoken",
|
|
||||||
index=models.Index(fields=["token", "provider"], name="authentik_p_token_f99422_idx"),
|
|
||||||
),
|
|
||||||
migrations.AddIndex(
|
|
||||||
model_name="refreshtoken",
|
|
||||||
index=models.Index(fields=["token", "provider"], name="authentik_p_token_a1d921_idx"),
|
|
||||||
),
|
|
||||||
]
|
|
@ -376,9 +376,6 @@ class AccessToken(SerializerModel, ExpiringModel, BaseGrantModel):
|
|||||||
_id_token = models.TextField()
|
_id_token = models.TextField()
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
indexes = [
|
|
||||||
models.Index(fields=["token", "provider"]),
|
|
||||||
]
|
|
||||||
verbose_name = _("OAuth2 Access Token")
|
verbose_name = _("OAuth2 Access Token")
|
||||||
verbose_name_plural = _("OAuth2 Access Tokens")
|
verbose_name_plural = _("OAuth2 Access Tokens")
|
||||||
|
|
||||||
@ -422,9 +419,6 @@ class RefreshToken(SerializerModel, ExpiringModel, BaseGrantModel):
|
|||||||
_id_token = models.TextField(verbose_name=_("ID Token"))
|
_id_token = models.TextField(verbose_name=_("ID Token"))
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
indexes = [
|
|
||||||
models.Index(fields=["token", "provider"]),
|
|
||||||
]
|
|
||||||
verbose_name = _("OAuth2 Refresh Token")
|
verbose_name = _("OAuth2 Refresh Token")
|
||||||
verbose_name_plural = _("OAuth2 Refresh Tokens")
|
verbose_name_plural = _("OAuth2 Refresh Tokens")
|
||||||
|
|
||||||
|
@ -29,6 +29,7 @@ class TesOAuth2Introspection(OAuthTestCase):
|
|||||||
self.app = Application.objects.create(
|
self.app = Application.objects.create(
|
||||||
name=generate_id(), slug=generate_id(), provider=self.provider
|
name=generate_id(), slug=generate_id(), provider=self.provider
|
||||||
)
|
)
|
||||||
|
self.app.save()
|
||||||
self.user = create_test_admin_user()
|
self.user = create_test_admin_user()
|
||||||
self.auth = b64encode(
|
self.auth = b64encode(
|
||||||
f"{self.provider.client_id}:{self.provider.client_secret}".encode()
|
f"{self.provider.client_id}:{self.provider.client_secret}".encode()
|
||||||
@ -113,41 +114,6 @@ class TesOAuth2Introspection(OAuthTestCase):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_introspect_invalid_provider(self):
|
|
||||||
"""Test introspection (mismatched provider and token)"""
|
|
||||||
provider: OAuth2Provider = OAuth2Provider.objects.create(
|
|
||||||
name=generate_id(),
|
|
||||||
authorization_flow=create_test_flow(),
|
|
||||||
redirect_uris="",
|
|
||||||
signing_key=create_test_cert(),
|
|
||||||
)
|
|
||||||
auth = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()
|
|
||||||
|
|
||||||
token: AccessToken = AccessToken.objects.create(
|
|
||||||
provider=self.provider,
|
|
||||||
user=self.user,
|
|
||||||
token=generate_id(),
|
|
||||||
auth_time=timezone.now(),
|
|
||||||
_scope="openid user profile",
|
|
||||||
_id_token=json.dumps(
|
|
||||||
asdict(
|
|
||||||
IDToken("foo", "bar"),
|
|
||||||
)
|
|
||||||
),
|
|
||||||
)
|
|
||||||
res = self.client.post(
|
|
||||||
reverse("authentik_providers_oauth2:token-introspection"),
|
|
||||||
HTTP_AUTHORIZATION=f"Basic {auth}",
|
|
||||||
data={"token": token.token},
|
|
||||||
)
|
|
||||||
self.assertEqual(res.status_code, 200)
|
|
||||||
self.assertJSONEqual(
|
|
||||||
res.content.decode(),
|
|
||||||
{
|
|
||||||
"active": False,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_introspect_invalid_auth(self):
|
def test_introspect_invalid_auth(self):
|
||||||
"""Test introspect (invalid auth)"""
|
"""Test introspect (invalid auth)"""
|
||||||
res = self.client.post(
|
res = self.client.post(
|
||||||
|
@ -12,7 +12,6 @@ from authentik.providers.oauth2.api.tokens import (
|
|||||||
)
|
)
|
||||||
from authentik.providers.oauth2.views.authorize import AuthorizationFlowInitView
|
from authentik.providers.oauth2.views.authorize import AuthorizationFlowInitView
|
||||||
from authentik.providers.oauth2.views.device_backchannel import DeviceView
|
from authentik.providers.oauth2.views.device_backchannel import DeviceView
|
||||||
from authentik.providers.oauth2.views.end_session import EndSessionView
|
|
||||||
from authentik.providers.oauth2.views.introspection import TokenIntrospectionView
|
from authentik.providers.oauth2.views.introspection import TokenIntrospectionView
|
||||||
from authentik.providers.oauth2.views.jwks import JWKSView
|
from authentik.providers.oauth2.views.jwks import JWKSView
|
||||||
from authentik.providers.oauth2.views.provider import ProviderInfoView
|
from authentik.providers.oauth2.views.provider import ProviderInfoView
|
||||||
@ -45,7 +44,7 @@ urlpatterns = [
|
|||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"<slug:application_slug>/end-session/",
|
"<slug:application_slug>/end-session/",
|
||||||
EndSessionView.as_view(),
|
RedirectView.as_view(pattern_name="authentik_core:if-session-end", query_string=True),
|
||||||
name="end-session",
|
name="end-session",
|
||||||
),
|
),
|
||||||
path("<slug:application_slug>/jwks/", JWKSView.as_view(), name="jwks"),
|
path("<slug:application_slug>/jwks/", JWKSView.as_view(), name="jwks"),
|
||||||
|
@ -1,45 +0,0 @@
|
|||||||
"""oauth2 provider end_session Views"""
|
|
||||||
|
|
||||||
from django.http import Http404, HttpRequest, HttpResponse
|
|
||||||
from django.shortcuts import get_object_or_404
|
|
||||||
|
|
||||||
from authentik.core.models import Application
|
|
||||||
from authentik.flows.models import Flow, in_memory_stage
|
|
||||||
from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, FlowPlanner
|
|
||||||
from authentik.flows.stage import SessionEndStage
|
|
||||||
from authentik.flows.views.executor import SESSION_KEY_PLAN
|
|
||||||
from authentik.lib.utils.urls import redirect_with_qs
|
|
||||||
from authentik.policies.views import PolicyAccessView
|
|
||||||
|
|
||||||
|
|
||||||
class EndSessionView(PolicyAccessView):
|
|
||||||
"""Redirect to application's provider's invalidation flow"""
|
|
||||||
|
|
||||||
flow: Flow
|
|
||||||
|
|
||||||
def resolve_provider_application(self):
|
|
||||||
self.application = get_object_or_404(Application, slug=self.kwargs["application_slug"])
|
|
||||||
self.provider = self.application.get_provider()
|
|
||||||
if not self.provider:
|
|
||||||
raise Http404
|
|
||||||
self.flow = self.provider.invalidation_flow or self.request.brand.flow_invalidation
|
|
||||||
if not self.flow:
|
|
||||||
raise Http404
|
|
||||||
|
|
||||||
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
|
||||||
"""Dispatch the flow planner for the invalidation flow"""
|
|
||||||
planner = FlowPlanner(self.flow)
|
|
||||||
planner.allow_empty_flows = True
|
|
||||||
plan = planner.plan(
|
|
||||||
request,
|
|
||||||
{
|
|
||||||
PLAN_CONTEXT_APPLICATION: self.application,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
plan.insert_stage(in_memory_stage(SessionEndStage))
|
|
||||||
request.session[SESSION_KEY_PLAN] = plan
|
|
||||||
return redirect_with_qs(
|
|
||||||
"authentik_core:if-flow",
|
|
||||||
self.request.GET,
|
|
||||||
flow_slug=self.flow.slug,
|
|
||||||
)
|
|
@ -46,10 +46,10 @@ class TokenIntrospectionParams:
|
|||||||
if not provider:
|
if not provider:
|
||||||
raise TokenIntrospectionError
|
raise TokenIntrospectionError
|
||||||
|
|
||||||
access_token = AccessToken.objects.filter(token=raw_token, provider=provider).first()
|
access_token = AccessToken.objects.filter(token=raw_token).first()
|
||||||
if access_token:
|
if access_token:
|
||||||
return TokenIntrospectionParams(access_token, provider)
|
return TokenIntrospectionParams(access_token, provider)
|
||||||
refresh_token = RefreshToken.objects.filter(token=raw_token, provider=provider).first()
|
refresh_token = RefreshToken.objects.filter(token=raw_token).first()
|
||||||
if refresh_token:
|
if refresh_token:
|
||||||
return TokenIntrospectionParams(refresh_token, provider)
|
return TokenIntrospectionParams(refresh_token, provider)
|
||||||
LOGGER.debug("Token does not exist", token=raw_token)
|
LOGGER.debug("Token does not exist", token=raw_token)
|
||||||
|
@ -28,7 +28,7 @@ class ProxyDockerController(DockerController):
|
|||||||
labels = super()._get_labels()
|
labels = super()._get_labels()
|
||||||
labels["traefik.enable"] = "true"
|
labels["traefik.enable"] = "true"
|
||||||
labels[f"traefik.http.routers.{traefik_name}-router.rule"] = (
|
labels[f"traefik.http.routers.{traefik_name}-router.rule"] = (
|
||||||
f"({' || '.join([f'Host({host})' for host in hosts])})"
|
f"({' || '.join([f'Host(`{host}`)' for host in hosts])})"
|
||||||
f" && PathPrefix(`/outpost.goauthentik.io`)"
|
f" && PathPrefix(`/outpost.goauthentik.io`)"
|
||||||
)
|
)
|
||||||
labels[f"traefik.http.routers.{traefik_name}-router.tls"] = "true"
|
labels[f"traefik.http.routers.{traefik_name}-router.tls"] = "true"
|
||||||
|
@ -24,7 +24,6 @@ class ProxyProviderTests(APITestCase):
|
|||||||
"name": generate_id(),
|
"name": generate_id(),
|
||||||
"mode": ProxyMode.PROXY,
|
"mode": ProxyMode.PROXY,
|
||||||
"authorization_flow": create_test_flow().pk.hex,
|
"authorization_flow": create_test_flow().pk.hex,
|
||||||
"invalidation_flow": create_test_flow().pk.hex,
|
|
||||||
"external_host": "http://localhost",
|
"external_host": "http://localhost",
|
||||||
"internal_host": "http://localhost",
|
"internal_host": "http://localhost",
|
||||||
"basic_auth_enabled": True,
|
"basic_auth_enabled": True,
|
||||||
@ -42,7 +41,6 @@ class ProxyProviderTests(APITestCase):
|
|||||||
"name": generate_id(),
|
"name": generate_id(),
|
||||||
"mode": ProxyMode.PROXY,
|
"mode": ProxyMode.PROXY,
|
||||||
"authorization_flow": create_test_flow().pk.hex,
|
"authorization_flow": create_test_flow().pk.hex,
|
||||||
"invalidation_flow": create_test_flow().pk.hex,
|
|
||||||
"external_host": "http://localhost",
|
"external_host": "http://localhost",
|
||||||
"internal_host": "http://localhost",
|
"internal_host": "http://localhost",
|
||||||
"basic_auth_enabled": True,
|
"basic_auth_enabled": True,
|
||||||
@ -66,7 +64,6 @@ class ProxyProviderTests(APITestCase):
|
|||||||
"name": generate_id(),
|
"name": generate_id(),
|
||||||
"mode": ProxyMode.PROXY,
|
"mode": ProxyMode.PROXY,
|
||||||
"authorization_flow": create_test_flow().pk.hex,
|
"authorization_flow": create_test_flow().pk.hex,
|
||||||
"invalidation_flow": create_test_flow().pk.hex,
|
|
||||||
"external_host": "http://localhost",
|
"external_host": "http://localhost",
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
@ -85,7 +82,6 @@ class ProxyProviderTests(APITestCase):
|
|||||||
"name": name,
|
"name": name,
|
||||||
"mode": ProxyMode.PROXY,
|
"mode": ProxyMode.PROXY,
|
||||||
"authorization_flow": create_test_flow().pk.hex,
|
"authorization_flow": create_test_flow().pk.hex,
|
||||||
"invalidation_flow": create_test_flow().pk.hex,
|
|
||||||
"external_host": "http://localhost",
|
"external_host": "http://localhost",
|
||||||
"internal_host": "http://localhost",
|
"internal_host": "http://localhost",
|
||||||
},
|
},
|
||||||
@ -103,7 +99,6 @@ class ProxyProviderTests(APITestCase):
|
|||||||
"name": name,
|
"name": name,
|
||||||
"mode": ProxyMode.PROXY,
|
"mode": ProxyMode.PROXY,
|
||||||
"authorization_flow": create_test_flow().pk.hex,
|
"authorization_flow": create_test_flow().pk.hex,
|
||||||
"invalidation_flow": create_test_flow().pk.hex,
|
|
||||||
"external_host": "http://localhost",
|
"external_host": "http://localhost",
|
||||||
"internal_host": "http://localhost",
|
"internal_host": "http://localhost",
|
||||||
},
|
},
|
||||||
@ -119,7 +114,6 @@ class ProxyProviderTests(APITestCase):
|
|||||||
"name": name,
|
"name": name,
|
||||||
"mode": ProxyMode.PROXY,
|
"mode": ProxyMode.PROXY,
|
||||||
"authorization_flow": create_test_flow().pk.hex,
|
"authorization_flow": create_test_flow().pk.hex,
|
||||||
"invalidation_flow": create_test_flow().pk.hex,
|
|
||||||
"external_host": "http://localhost",
|
"external_host": "http://localhost",
|
||||||
"internal_host": "http://localhost",
|
"internal_host": "http://localhost",
|
||||||
},
|
},
|
||||||
|
@ -188,9 +188,6 @@ class SAMLProviderImportSerializer(PassiveSerializer):
|
|||||||
authorization_flow = PrimaryKeyRelatedField(
|
authorization_flow = PrimaryKeyRelatedField(
|
||||||
queryset=Flow.objects.filter(designation=FlowDesignation.AUTHORIZATION),
|
queryset=Flow.objects.filter(designation=FlowDesignation.AUTHORIZATION),
|
||||||
)
|
)
|
||||||
invalidation_flow = PrimaryKeyRelatedField(
|
|
||||||
queryset=Flow.objects.filter(designation=FlowDesignation.INVALIDATION),
|
|
||||||
)
|
|
||||||
file = FileField()
|
file = FileField()
|
||||||
|
|
||||||
|
|
||||||
@ -280,9 +277,7 @@ class SAMLProviderViewSet(UsedByMixin, ModelViewSet):
|
|||||||
try:
|
try:
|
||||||
metadata = ServiceProviderMetadataParser().parse(file.read().decode())
|
metadata = ServiceProviderMetadataParser().parse(file.read().decode())
|
||||||
metadata.to_provider(
|
metadata.to_provider(
|
||||||
data.validated_data["name"],
|
data.validated_data["name"], data.validated_data["authorization_flow"]
|
||||||
data.validated_data["authorization_flow"],
|
|
||||||
data.validated_data["invalidation_flow"],
|
|
||||||
)
|
)
|
||||||
except ValueError as exc: # pragma: no cover
|
except ValueError as exc: # pragma: no cover
|
||||||
LOGGER.warning(str(exc))
|
LOGGER.warning(str(exc))
|
||||||
|
@ -49,13 +49,12 @@ class ServiceProviderMetadata:
|
|||||||
|
|
||||||
signing_keypair: CertificateKeyPair | None = None
|
signing_keypair: CertificateKeyPair | None = None
|
||||||
|
|
||||||
def to_provider(
|
def to_provider(self, name: str, authorization_flow: Flow) -> SAMLProvider:
|
||||||
self, name: str, authorization_flow: Flow, invalidation_flow: Flow
|
|
||||||
) -> SAMLProvider:
|
|
||||||
"""Create a SAMLProvider instance from the details. `name` is required,
|
"""Create a SAMLProvider instance from the details. `name` is required,
|
||||||
as depending on the metadata CertificateKeypairs might have to be created."""
|
as depending on the metadata CertificateKeypairs might have to be created."""
|
||||||
provider = SAMLProvider.objects.create(
|
provider = SAMLProvider.objects.create(
|
||||||
name=name, authorization_flow=authorization_flow, invalidation_flow=invalidation_flow
|
name=name,
|
||||||
|
authorization_flow=authorization_flow,
|
||||||
)
|
)
|
||||||
provider.issuer = self.entity_id
|
provider.issuer = self.entity_id
|
||||||
provider.sp_binding = self.acs_binding
|
provider.sp_binding = self.acs_binding
|
||||||
|
@ -47,12 +47,11 @@ class TestSAMLProviderAPI(APITestCase):
|
|||||||
data={
|
data={
|
||||||
"name": generate_id(),
|
"name": generate_id(),
|
||||||
"authorization_flow": create_test_flow().pk,
|
"authorization_flow": create_test_flow().pk,
|
||||||
"invalidation_flow": create_test_flow().pk,
|
|
||||||
"acs_url": "http://localhost",
|
"acs_url": "http://localhost",
|
||||||
"signing_kp": cert.pk,
|
"signing_kp": cert.pk,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.assertEqual(response.status_code, 400)
|
self.assertEqual(400, response.status_code)
|
||||||
self.assertJSONEqual(
|
self.assertJSONEqual(
|
||||||
response.content,
|
response.content,
|
||||||
{
|
{
|
||||||
@ -69,13 +68,12 @@ class TestSAMLProviderAPI(APITestCase):
|
|||||||
data={
|
data={
|
||||||
"name": generate_id(),
|
"name": generate_id(),
|
||||||
"authorization_flow": create_test_flow().pk,
|
"authorization_flow": create_test_flow().pk,
|
||||||
"invalidation_flow": create_test_flow().pk,
|
|
||||||
"acs_url": "http://localhost",
|
"acs_url": "http://localhost",
|
||||||
"signing_kp": cert.pk,
|
"signing_kp": cert.pk,
|
||||||
"sign_assertion": True,
|
"sign_assertion": True,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.assertEqual(response.status_code, 201)
|
self.assertEqual(201, response.status_code)
|
||||||
|
|
||||||
def test_metadata(self):
|
def test_metadata(self):
|
||||||
"""Test metadata export (normal)"""
|
"""Test metadata export (normal)"""
|
||||||
@ -133,7 +131,6 @@ class TestSAMLProviderAPI(APITestCase):
|
|||||||
"file": metadata,
|
"file": metadata,
|
||||||
"name": generate_id(),
|
"name": generate_id(),
|
||||||
"authorization_flow": create_test_flow(FlowDesignation.AUTHORIZATION).pk,
|
"authorization_flow": create_test_flow(FlowDesignation.AUTHORIZATION).pk,
|
||||||
"invalidation_flow": create_test_flow(FlowDesignation.INVALIDATION).pk,
|
|
||||||
},
|
},
|
||||||
format="multipart",
|
format="multipart",
|
||||||
)
|
)
|
||||||
|
@ -82,7 +82,7 @@ class TestServiceProviderMetadataParser(TestCase):
     def test_simple(self):
         """Test simple metadata without Signing"""
         metadata = ServiceProviderMetadataParser().parse(load_fixture("fixtures/simple.xml"))
-        provider = metadata.to_provider("test", self.flow, self.flow)
+        provider = metadata.to_provider("test", self.flow)
         self.assertEqual(provider.acs_url, "http://localhost:8080/saml/acs")
         self.assertEqual(provider.issuer, "http://localhost:8080/saml/metadata")
         self.assertEqual(provider.sp_binding, SAMLBindings.POST)
@ -95,7 +95,7 @@ class TestServiceProviderMetadataParser(TestCase):
         """Test Metadata with signing cert"""
         create_test_cert()
         metadata = ServiceProviderMetadataParser().parse(load_fixture("fixtures/cert.xml"))
-        provider = metadata.to_provider("test", self.flow, self.flow)
+        provider = metadata.to_provider("test", self.flow)
         self.assertEqual(provider.acs_url, "http://localhost:8080/apps/user_saml/saml/acs")
         self.assertEqual(provider.issuer, "http://localhost:8080/apps/user_saml/saml/metadata")
         self.assertEqual(provider.sp_binding, SAMLBindings.POST)
@ -1,8 +1,8 @@
 """SLO Views"""

-from django.http import Http404, HttpRequest
+from django.http import HttpRequest
 from django.http.response import HttpResponse
-from django.shortcuts import get_object_or_404
+from django.shortcuts import get_object_or_404, redirect
 from django.utils.decorators import method_decorator
 from django.views.decorators.clickjacking import xframe_options_sameorigin
 from django.views.decorators.csrf import csrf_exempt
@ -10,11 +10,6 @@ from structlog.stdlib import get_logger

 from authentik.core.models import Application
 from authentik.events.models import Event, EventAction
-from authentik.flows.models import Flow, in_memory_stage
-from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, FlowPlanner
-from authentik.flows.stage import SessionEndStage
-from authentik.flows.views.executor import SESSION_KEY_PLAN
-from authentik.lib.utils.urls import redirect_with_qs
 from authentik.lib.views import bad_request_message
 from authentik.policies.views import PolicyAccessView
 from authentik.providers.saml.exceptions import CannotHandleAssertion
@ -33,16 +28,11 @@ class SAMLSLOView(PolicyAccessView):
     """ "SAML SLO Base View, which plans a flow and injects our final stage.
     Calls get/post handler."""

-    flow: Flow
-
     def resolve_provider_application(self):
         self.application = get_object_or_404(Application, slug=self.kwargs["application_slug"])
         self.provider: SAMLProvider = get_object_or_404(
             SAMLProvider, pk=self.application.provider_id
         )
-        self.flow = self.provider.invalidation_flow or self.request.brand.flow_invalidation
-        if not self.flow:
-            raise Http404

     def check_saml_request(self) -> HttpRequest | None:
         """Handler to verify the SAML Request. Must be implemented by a subclass"""
@ -55,20 +45,9 @@ class SAMLSLOView(PolicyAccessView):
         method_response = self.check_saml_request()
         if method_response:
             return method_response
-        planner = FlowPlanner(self.flow)
-        planner.allow_empty_flows = True
-        plan = planner.plan(
-            request,
-            {
-                PLAN_CONTEXT_APPLICATION: self.application,
-            },
-        )
-        plan.insert_stage(in_memory_stage(SessionEndStage))
-        request.session[SESSION_KEY_PLAN] = plan
-        return redirect_with_qs(
-            "authentik_core:if-flow",
-            self.request.GET,
-            flow_slug=self.flow.slug,
+        return redirect(
+            "authentik_core:if-session-end",
+            application_slug=self.kwargs["application_slug"],
         )

     def post(self, request: HttpRequest, application_slug: str) -> HttpResponse:
@ -26,7 +26,6 @@ class SCIMProviderSerializer(ProviderSerializer):
             "verbose_name_plural",
             "meta_model_name",
             "url",
-            "verify_certificates",
             "token",
             "exclude_users_service_account",
             "filter_group",
@ -42,7 +42,6 @@ class SCIMClient[TModel: "Model", TConnection: "Model", TSchema: "BaseModel"](
     def __init__(self, provider: SCIMProvider):
         super().__init__(provider)
         self._session = get_http_session()
-        self._session.verify = provider.verify_certificates
         self.provider = provider
         # Remove trailing slashes as we assume the URL doesn't have any
         base_url = provider.url
@ -1,18 +0,0 @@
-# Generated by Django 5.0.9 on 2024-09-19 14:02
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("authentik_providers_scim", "0009_alter_scimmapping_options"),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="scimprovider",
-            name="verify_certificates",
-            field=models.BooleanField(default=True),
-        ),
-    ]
@ -68,7 +68,6 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider):

     url = models.TextField(help_text=_("Base URL to SCIM requests, usually ends in /v2"))
     token = models.TextField(help_text=_("Authentication token"))
-    verify_certificates = models.BooleanField(default=True)

     property_mappings_group = models.ManyToManyField(
         PropertyMapping,
@ -22,7 +22,7 @@ def create_admin_group(user: User) -> Group:
     return group


-def create_recovery_token(user: User, expiry: datetime, generated_from: str) -> tuple[Token, str]:
+def create_recovery_token(user: User, expiry: datetime, generated_from: str) -> (Token, str):
     """Create recovery token and associated link"""
     _now = now()
     token = Token.objects.create(
@ -87,11 +87,7 @@ def task_error_hook(task_id: str, exception: Exception, traceback, *args, **kwar

 def _get_startup_tasks_default_tenant() -> list[Callable]:
     """Get all tasks to be run on startup for the default tenant"""
-    from authentik.outposts.tasks import outpost_connection_discovery
-
-    return [
-        outpost_connection_discovery,
-    ]
+    return []


 def _get_startup_tasks_all_tenants() -> list[Callable]:
@ -2,7 +2,6 @@

 from collections.abc import Callable
 from hashlib import sha512
-from ipaddress import ip_address
 from time import perf_counter, time
 from typing import Any

@ -175,7 +174,6 @@ class ClientIPMiddleware:

     def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
         self.get_response = get_response
-        self.logger = get_logger().bind()

     def _get_client_ip_from_meta(self, meta: dict[str, Any]) -> str:
         """Attempt to get the client's IP by checking common HTTP Headers.
@ -187,15 +185,10 @@ class ClientIPMiddleware:
             "HTTP_X_FORWARDED_FOR",
             "REMOTE_ADDR",
         )
-        try:
         for _header in headers:
             if _header in meta:
                 ips: list[str] = meta.get(_header).split(",")
-                # Ensure the IP parses as a valid IP
-                return str(ip_address(ips[0].strip()))
-            return self.default_ip
-        except ValueError as exc:
-            self.logger.debug("Invalid remote IP", exc=exc)
+                return ips[0].strip()
         return self.default_ip

     # FIXME: this should probably not be in `root` but rather in a middleware in `outposts`
@ -228,16 +221,12 @@ class ClientIPMiddleware:
             )
             return None
         # Update sentry scope to include correct IP
-        sentry_user = Scope.get_isolation_scope()._user or {}
-        sentry_user["ip_address"] = delegated_ip
-        Scope.get_isolation_scope().set_user(sentry_user)
+        user = Scope.get_isolation_scope()._user or {}
+        user["ip_address"] = delegated_ip
+        Scope.get_isolation_scope().set_user(user)
         # Set the outpost service account on the request
         setattr(request, self.request_attr_outpost_user, user)
-        try:
-            return str(ip_address(delegated_ip))
-        except ValueError as exc:
-            self.logger.debug("Invalid remote IP from Outpost", exc=exc)
-            return None
+        return delegated_ip

     def _get_client_ip(self, request: HttpRequest | None) -> str:
         """Attempt to get the client's IP by checking common HTTP Headers.
@ -1,7 +1,6 @@
 """authentik storage backends"""

 import os
-from urllib.parse import parse_qsl, urlsplit

 from django.conf import settings
 from django.core.exceptions import SuspiciousOperation
@ -111,34 +110,3 @@ class S3Storage(BaseS3Storage):
         if self.querystring_auth:
             return url
         return self._strip_signing_parameters(url)
-
-    def _strip_signing_parameters(self, url):
-        # Boto3 does not currently support generating URLs that are unsigned. Instead
-        # we take the signed URLs and strip any querystring params related to signing
-        # and expiration.
-        # Note that this may end up with URLs that are still invalid, especially if
-        # params are passed in that only work with signed URLs, e.g. response header
-        # params.
-        # The code attempts to strip all query parameters that match names of known
-        # parameters from v2 and v4 signatures, regardless of the actual signature
-        # version used.
-        split_url = urlsplit(url)
-        qs = parse_qsl(split_url.query, keep_blank_values=True)
-        blacklist = {
-            "x-amz-algorithm",
-            "x-amz-credential",
-            "x-amz-date",
-            "x-amz-expires",
-            "x-amz-signedheaders",
-            "x-amz-signature",
-            "x-amz-security-token",
-            "awsaccesskeyid",
-            "expires",
-            "signature",
-        }
-        filtered_qs = ((key, val) for key, val in qs if key.lower() not in blacklist)
-        # Note: Parameters that did not have a value in the original query string will
-        # have an '=' sign appended to it, e.g ?foo&bar becomes ?foo=&bar=
-        joined_qs = ("=".join(keyval) for keyval in filtered_qs)
-        split_url = split_url._replace(query="&".join(joined_qs))
-        return split_url.geturl()
@ -3,7 +3,6 @@
 from typing import Any

 from django.core.cache import cache
-from django.utils.translation import gettext_lazy as _
 from drf_spectacular.utils import extend_schema, inline_serializer
 from guardian.shortcuts import get_objects_for_user
 from rest_framework.decorators import action
@ -40,8 +39,9 @@ class LDAPSourceSerializer(SourceSerializer):
         """Get cached source connectivity"""
         return cache.get(CACHE_KEY_STATUS + source.slug, None)

-    def validate_sync_users_password(self, sync_users_password: bool) -> bool:
+    def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
         """Check that only a single source has password_sync on"""
+        sync_users_password = attrs.get("sync_users_password", True)
         if sync_users_password:
             sources = LDAPSource.objects.filter(sync_users_password=True)
             if self.instance:
@ -49,31 +49,11 @@ class LDAPSourceSerializer(SourceSerializer):
             if sources.exists():
                 raise ValidationError(
                     {
-                        "sync_users_password": _(
+                        "sync_users_password": (
                             "Only a single LDAP Source with password synchronization is allowed"
                         )
                     }
                 )
-        return sync_users_password
-
-    def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
-        """Validate property mappings with sync_ flags"""
-        types = ["user", "group"]
-        for type in types:
-            toggle_value = attrs.get(f"sync_{type}s", False)
-            mappings_field = f"{type}_property_mappings"
-            mappings_value = attrs.get(mappings_field, [])
-            if toggle_value and len(mappings_value) == 0:
-                raise ValidationError(
-                    {
-                        mappings_field: _(
-                            (
-                                "When 'Sync {type}s' is enabled, '{type}s property "
-                                "mappings' cannot be empty."
-                            ).format(type=type)
-                        )
-                    }
-                )
         return super().validate(attrs)

     class Meta:
@ -186,8 +166,7 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
         for sync_class in SYNC_CLASSES:
             class_name = sync_class.name()
             all_objects.setdefault(class_name, [])
-            for page in sync_class(source).get_objects(size_limit=10):
-                for obj in page:
+            for obj in sync_class(source).get_objects(size_limit=10):
                 obj: dict
                 obj.pop("raw_attributes", None)
                 obj.pop("raw_dn", None)
@ -26,16 +26,17 @@ def sync_ldap_source_on_save(sender, instance: LDAPSource, **_):
     """Ensure that source is synced on save (if enabled)"""
     if not instance.enabled:
         return
-    ldap_connectivity_check.delay(instance.pk)
     # Don't sync sources when they don't have any property mappings. This will only happen if:
     # - the user forgets to set them or
     # - the source is newly created, this is the first save event
     #   and the mappings are created with an m2m event
-    if instance.sync_users and not instance.user_property_mappings.exists():
-        return
-    if instance.sync_groups and not instance.group_property_mappings.exists():
+    if (
+        not instance.user_property_mappings.exists()
+        or not instance.group_property_mappings.exists()
+    ):
         return
     ldap_sync_single.delay(instance.pk)
+    ldap_connectivity_check.delay(instance.pk)


 @receiver(password_validate)
@ -38,11 +38,7 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
             search_base=self.base_dn_groups,
             search_filter=self._source.group_object_filter,
             search_scope=SUBTREE,
-            attributes=[
-                ALL_ATTRIBUTES,
-                ALL_OPERATIONAL_ATTRIBUTES,
-                self._source.object_uniqueness_field,
-            ],
+            attributes=[ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES],
             **kwargs,
         )

@ -57,9 +53,9 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
                 continue
             attributes = group.get("attributes", {})
             group_dn = flatten(flatten(group.get("entryDN", group.get("dn"))))
-            if not attributes.get(self._source.object_uniqueness_field):
+            if self._source.object_uniqueness_field not in attributes:
                 self.message(
-                    f"Uniqueness field not found/not set in attributes: '{group_dn}'",
+                    f"Cannot find uniqueness field in attributes: '{group_dn}'",
                     attributes=attributes.keys(),
                     dn=group_dn,
                 )
@ -40,11 +40,7 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
             search_base=self.base_dn_users,
             search_filter=self._source.user_object_filter,
             search_scope=SUBTREE,
-            attributes=[
-                ALL_ATTRIBUTES,
-                ALL_OPERATIONAL_ATTRIBUTES,
-                self._source.object_uniqueness_field,
-            ],
+            attributes=[ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES],
             **kwargs,
         )

@ -59,9 +55,9 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
                 continue
             attributes = user.get("attributes", {})
             user_dn = flatten(user.get("entryDN", user.get("dn")))
-            if not attributes.get(self._source.object_uniqueness_field):
+            if self._source.object_uniqueness_field not in attributes:
                 self.message(
-                    f"Uniqueness field not found/not set in attributes: '{user_dn}'",
+                    f"Cannot find uniqueness field in attributes: '{user_dn}'",
                     attributes=attributes.keys(),
                     dn=user_dn,
                 )
4 authentik/sources/ldap/sync/vendor/ms_ad.py vendored
@ -78,9 +78,7 @@ class MicrosoftActiveDirectory(BaseLDAPSynchronizer):
         # /useraccountcontrol-manipulate-account-properties
         uac_bit = attributes.get("userAccountControl", 512)
         uac = UserAccountControl(uac_bit)
-        is_active = (
-            UserAccountControl.ACCOUNTDISABLE not in uac and UserAccountControl.LOCKOUT not in uac
-        )
+        is_active = UserAccountControl.ACCOUNTDISABLE not in uac
         if is_active != user.is_active:
             user.is_active = is_active
             user.save()
@ -50,35 +50,3 @@ class LDAPAPITests(APITestCase):
             }
         )
         self.assertFalse(serializer.is_valid())
-
-    def test_sync_users_mapping_empty(self):
-        """Check that when sync_users is enabled, property mappings must be set"""
-        serializer = LDAPSourceSerializer(
-            data={
-                "name": "foo",
-                "slug": " foo",
-                "server_uri": "ldaps://1.2.3.4",
-                "bind_cn": "",
-                "bind_password": LDAP_PASSWORD,
-                "base_dn": "dc=foo",
-                "sync_users": True,
-                "user_property_mappings": [],
-            }
-        )
-        self.assertFalse(serializer.is_valid())
-
-    def test_sync_groups_mapping_empty(self):
-        """Check that when sync_groups is enabled, property mappings must be set"""
-        serializer = LDAPSourceSerializer(
-            data={
-                "name": "foo",
-                "slug": " foo",
-                "server_uri": "ldaps://1.2.3.4",
-                "bind_cn": "",
-                "bind_password": LDAP_PASSWORD,
-                "base_dn": "dc=foo",
-                "sync_groups": True,
-                "group_property_mappings": [],
-            }
-        )
-        self.assertFalse(serializer.is_valid())
@ -15,13 +15,12 @@ from authentik.sources.oauth.models import OAuthSource
 from authentik.sources.oauth.types.registry import SourceType, registry
 from authentik.sources.oauth.views.callback import OAuthCallback
 from authentik.sources.oauth.views.redirect import OAuthRedirect
-from authentik.stages.identification.stage import LoginChallengeMixin

 LOGGER = get_logger()
 APPLE_CLIENT_ID_PARTS = 3


-class AppleLoginChallenge(LoginChallengeMixin, Challenge):
+class AppleLoginChallenge(Challenge):
     """Special challenge for apple-native authentication flow, which happens on the client."""

     client_id = CharField()
@ -19,10 +19,9 @@ from authentik.core.models import (
 from authentik.core.types import UILoginButton, UserSettingSerializer
 from authentik.flows.challenge import Challenge, ChallengeResponse
 from authentik.lib.generators import generate_id
-from authentik.stages.identification.stage import LoginChallengeMixin


-class PlexAuthenticationChallenge(LoginChallengeMixin, Challenge):
+class PlexAuthenticationChallenge(Challenge):
     """Challenge shown to the user in identification stage"""

     client_id = CharField()
@ -1,26 +0,0 @@
-# Generated by Django 5.0.9 on 2024-10-10 15:45
-
-from django.db import migrations
-from django.apps.registry import Apps
-
-from django.db.backends.base.schema import BaseDatabaseSchemaEditor
-
-
-def fix_X509SubjectName(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
-    db_alias = schema_editor.connection.alias
-
-    SAMLSource = apps.get_model("authentik_sources_saml", "SAMLSource")
-    SAMLSource.objects.using(db_alias).filter(
-        name_id_policy="urn:oasis:names:tc:SAML:2.0:nameid-format:X509SubjectName"
-    ).update(name_id_policy="urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName")
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("authentik_sources_saml", "0016_samlsource_encryption_kp"),
-    ]
-
-    operations = [
-        migrations.RunPython(fix_X509SubjectName),
-    ]
@ -19,7 +19,7 @@ NS_MAP = {
 SAML_NAME_ID_FORMAT_EMAIL = "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress"
 SAML_NAME_ID_FORMAT_PERSISTENT = "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent"
 SAML_NAME_ID_FORMAT_UNSPECIFIED = "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified"
-SAML_NAME_ID_FORMAT_X509 = "urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName"
+SAML_NAME_ID_FORMAT_X509 = "urn:oasis:names:tc:SAML:2.0:nameid-format:X509SubjectName"
 SAML_NAME_ID_FORMAT_WINDOWS = "urn:oasis:names:tc:SAML:2.0:nameid-format:WindowsDomainQualifiedName"
 SAML_NAME_ID_FORMAT_TRANSIENT = "urn:oasis:names:tc:SAML:2.0:nameid-format:transient"

@ -1,5 +1,6 @@
 """SAML Service Provider Metadata Processor"""

+from collections.abc import Iterator
 from typing import Optional

 from django.http import HttpRequest
@ -12,6 +13,11 @@ from authentik.sources.saml.processors.constants import (
     NS_SAML_METADATA,
     NS_SIGNATURE,
     SAML_BINDING_POST,
+    SAML_NAME_ID_FORMAT_EMAIL,
+    SAML_NAME_ID_FORMAT_PERSISTENT,
+    SAML_NAME_ID_FORMAT_TRANSIENT,
+    SAML_NAME_ID_FORMAT_WINDOWS,
+    SAML_NAME_ID_FORMAT_X509,
 )


@ -54,10 +60,19 @@ class MetadataProcessor:
             return key_descriptor
         return None

-    def get_name_id_format(self) -> Element:
+    def get_name_id_formats(self) -> Iterator[Element]:
+        """Get compatible NameID Formats"""
+        formats = [
+            SAML_NAME_ID_FORMAT_EMAIL,
+            SAML_NAME_ID_FORMAT_PERSISTENT,
+            SAML_NAME_ID_FORMAT_X509,
+            SAML_NAME_ID_FORMAT_WINDOWS,
+            SAML_NAME_ID_FORMAT_TRANSIENT,
+        ]
+        for name_id_format in formats:
             element = Element(f"{{{NS_SAML_METADATA}}}NameIDFormat")
-        element.text = self.source.name_id_policy
-        return element
+            element.text = name_id_format
+            yield element

     def build_entity_descriptor(self) -> str:
         """Build full EntityDescriptor"""
@ -77,7 +92,8 @@ class MetadataProcessor:
         if encryption_descriptor is not None:
             sp_sso_descriptor.append(encryption_descriptor)

-        sp_sso_descriptor.append(self.get_name_id_format())
+        for name_id_format in self.get_name_id_formats():
+            sp_sso_descriptor.append(name_id_format)

         assertion_consumer_service = SubElement(
             sp_sso_descriptor, f"{{{NS_SAML_METADATA}}}AssertionConsumerService"
File diff suppressed because one or more lines are too long
@ -96,9 +96,8 @@ class ConsentStageView(ChallengeStageView):
         if PLAN_CONTEXT_PENDING_USER in self.executor.plan.context:
             user = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER]

-        # Remove expired consents to prevent database unique constraints errors
         consent: UserConsent | None = UserConsent.filter_not_expired(
-            delete_expired=True, user=user, application=application
+            user=user, application=application
         ).first()
         self.executor.plan.context[PLAN_CONTEXT_CONSENT] = consent

@ -26,31 +26,23 @@ from authentik.flows.models import FlowDesignation
 from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
 from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, ChallengeStageView
 from authentik.flows.views.executor import SESSION_KEY_APPLICATION_PRE, SESSION_KEY_GET
-from authentik.lib.utils.reflection import all_subclasses
 from authentik.lib.utils.urls import reverse_with_qs
 from authentik.root.middleware import ClientIPMiddleware
+from authentik.sources.oauth.types.apple import AppleLoginChallenge
+from authentik.sources.plex.models import PlexAuthenticationChallenge
 from authentik.stages.identification.models import IdentificationStage
 from authentik.stages.identification.signals import identification_failed
 from authentik.stages.password.stage import authenticate


-class LoginChallengeMixin:
-    """Base login challenge for Identification stage"""
-
-
-def get_login_serializers():
-    mapping = {
-        RedirectChallenge().fields["component"].default: RedirectChallenge,
-    }
-    for cls in all_subclasses(LoginChallengeMixin):
-        mapping[cls().fields["component"].default] = cls
-    return mapping
-
-
 @extend_schema_field(
     PolymorphicProxySerializer(
         component_name="LoginChallengeTypes",
-        serializers=get_login_serializers,
+        serializers={
+            RedirectChallenge().fields["component"].default: RedirectChallenge,
+            PlexAuthenticationChallenge().fields["component"].default: PlexAuthenticationChallenge,
+            AppleLoginChallenge().fields["component"].default: AppleLoginChallenge,
+        },
         resource_type_field_name="component",
     )
 )
@ -104,7 +96,7 @@ class IdentificationChallengeResponse(ChallengeResponse):
         if not pre_user:
             with start_span(
                 op="authentik.stages.identification.validate_invalid_wait",
-                name="Sleep random time on invalid user identifier",
+                description="Sleep random time on invalid user identifier",
             ):
                 # Sleep a random time (between 90 and 210ms) to "prevent" user enumeration attacks
                 sleep(0.030 * SystemRandom().randint(3, 7))
@ -146,7 +138,7 @@ class IdentificationChallengeResponse(ChallengeResponse):
             try:
                 with start_span(
                     op="authentik.stages.identification.authenticate",
-                    name="User authenticate call (combo stage)",
+                    description="User authenticate call (combo stage)",
                 ):
                     user = authenticate(
                         self.stage.request,
@ -49,7 +49,7 @@ def authenticate(
     LOGGER.debug("Attempting authentication...", backend=backend_path)
     with start_span(
         op="authentik.stages.password.authenticate",
-        name=backend_path,
+        description=backend_path,
     ):
         user = backend.authenticate(request, **credentials)
     if user is None:
@ -38,7 +38,7 @@ LOGGER = get_logger()
 class FieldTypes(models.TextChoices):
     """Field types an Prompt can be"""

-    # update website/docs/add-secure-apps/flows-stages/stages/prompt/index.md
+    # update website/docs/flow/stages/prompt/index.md

     # Simple text field
     TEXT = "text", _("Text: Simple Text input")
@ -82,5 +82,3 @@ entries:
       order: 10
       target: !KeyOf default-authentication-flow-password-binding
       policy: !KeyOf default-authentication-flow-password-optional
-      attrs:
-        failure_result: true
@ -1,13 +0,0 @@
-version: 1
-metadata:
-  name: Default - Provider invalidation flow
-entries:
-  - attrs:
-      designation: invalidation
-      name: Logged out of application
-      title: You've logged out of %(app)s.
-      authentication: none
-    identifiers:
-      slug: default-provider-invalidation-flow
-    model: authentik_flows.flow
-    id: flow
@ -2,7 +2,7 @@
     "$schema": "http://json-schema.org/draft-07/schema",
     "$id": "https://goauthentik.io/blueprints/schema.json",
     "type": "object",
-    "title": "authentik 2024.8.3 Blueprint schema",
+    "title": "authentik 2024.8.0 Blueprint schema",
     "required": [
         "version",
         "entries"
@ -5117,12 +5117,6 @@
                     "title": "Authorization flow",
                     "description": "Flow used when authorizing this provider."
                 },
-                "invalidation_flow": {
-                    "type": "string",
-                    "format": "uuid",
-                    "title": "Invalidation flow",
-                    "description": "Flow used ending the session from a provider."
-                },
                 "property_mappings": {
                     "type": "array",
                     "items": {
@ -5293,12 +5287,6 @@
                     "title": "Authorization flow",
                     "description": "Flow used when authorizing this provider."
                 },
-                "invalidation_flow": {
-                    "type": "string",
-                    "format": "uuid",
-                    "title": "Invalidation flow",
-                    "description": "Flow used ending the session from a provider."
-                },
                 "property_mappings": {
                     "type": "array",
                     "items": {
@ -5440,12 +5428,6 @@
                     "title": "Authorization flow",
                     "description": "Flow used when authorizing this provider."
                 },
-                "invalidation_flow": {
-                    "type": "string",
-                    "format": "uuid",
-                    "title": "Invalidation flow",
-                    "description": "Flow used ending the session from a provider."
-                },
                 "property_mappings": {
                     "type": "array",
                     "items": {
@ -5581,12 +5563,6 @@
                     "title": "Authorization flow",
                     "description": "Flow used when authorizing this provider."
                 },
-                "invalidation_flow": {
-                    "type": "string",
-                    "format": "uuid",
-                    "title": "Invalidation flow",
-                    "description": "Flow used ending the session from a provider."
-                },
                 "property_mappings": {
                     "type": "array",
                     "items": {
@ -5712,12 +5688,6 @@
                     "title": "Authorization flow",
                     "description": "Flow used when authorizing this provider."
                 },
-                "invalidation_flow": {
-                    "type": "string",
-                    "format": "uuid",
-                    "title": "Invalidation flow",
-                    "description": "Flow used ending the session from a provider."
-                },
                 "property_mappings": {
                     "type": "array",
                     "items": {
@ -5956,10 +5926,6 @@
                     "title": "Url",
                     "description": "Base URL to SCIM requests, usually ends in /v2"
                 },
-                "verify_certificates": {
-                    "type": "boolean",
-                    "title": "Verify certificates"
-                },
                 "token": {
                     "type": "string",
                     "minLength": 1,
@ -7601,7 +7567,7 @@
                     "enum": [
                         "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress",
                         "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent",
-                        "urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName",
+                        "urn:oasis:names:tc:SAML:2.0:nameid-format:X509SubjectName",
                         "urn:oasis:names:tc:SAML:2.0:nameid-format:WindowsDomainQualifiedName",
                         "urn:oasis:names:tc:SAML:2.0:nameid-format:transient"
                     ],
@ -12795,12 +12761,6 @@
                     "title": "Authorization flow",
                     "description": "Flow used when authorizing this provider."
                 },
-                "invalidation_flow": {
-                    "type": "string",
-                    "format": "uuid",
-                    "title": "Invalidation flow",
-                    "description": "Flow used ending the session from a provider."
-                },
                 "property_mappings": {
                     "type": "array",
                     "items": {
@ -31,7 +31,7 @@ services:
     volumes:
       - redis:/data
   server:
-    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.3}
+    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.0}
     restart: unless-stopped
     command: server
     environment:
@ -52,7 +52,7 @@ services:
       - postgresql
       - redis
   worker:
-    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.3}
+    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.8.0}
     restart: unless-stopped
     command: worker
     environment:
Some files were not shown because too many files have changed in this diff