Compare commits
174 Commits
version/20...hack-close
SHA1
87bf75e51c
1ba1a1def5
782d95b4a3
5803c39e91
e5322a6dd3
364edfb4a8
de16988cac
a2714ab1f1
5347dd7022
aaddb76962
b08f8d8e0c
664bc19bba
f315360be1
4ac255d579
3f9f57f0fd
3569eb15b1
94836a3ce7
f272d14fcf
17fe595528
3cce6d79eb
7ac5c8eaa6
7316f126de
d645965a33
47abbcf8b8
e86a41b83d
f2293c0f5b
da3393abb4
211da35a93
0b8c501326
18472c231a
e51bef218a
486e17920e
505bad0895
e4b7691181
ba5adad53d
2b1dee6aed
b976acff42
78092ddfea
22d013817f
56224fc712
86d64b2234
a320aec9d0
7be94df00c
346c6e6a85
8d4b7ce8d3
56cf14e5ef
69543c14d3
f3f07f2c98
4647fbacb0
4359fab560
f8b36e1737
c50148072e
deda3a57ee
8f0c0fae62
2015463fe0
d435a65cfd
a728dad166
e0564b3770
d50f92d8b4
03f3ad89df
e604e70395
1db048bdaf
3d973e7ce3
9bc3327f03
f1979e12cc
121cc6ac98
9b7c30d44c
82935ddf11
989ff5a464
0b5870f16e
36e16a270b
09ffdea1f0
2889974e73
15ce7423f6
d12db62a6d
546425acde
7e46af3f45
2f469d2709
fb4e4dc8db
60d5936d73
28d9d4a16d
31913a620d
2ac38869fe
9601d00a31
e4358dafd7
b144d28805
e103eb9369
e9dbab011f
1ca3f15398
b6e8342466
c1eef9278d
12c4ac704f
14ebd55121
5c7384eecc
cfbf7d3a9a
f0cf4ba5d8
e207e8dff5
c70d3bd182
84a5e6a487
3a527b7680
a1c2931b3b
e67c0c2144
5f8c06a088
b5fe8afd27
d359dc5b09
2e63a7c7e9
41af486006
cf799fca03
db4f61549d
27879d9d95
1029b897ea
85d1993ddf
de9ac08d91
9a06908984
911d7f435b
7eef86a3f4
77662c9a51
ca25c46ee1
59ae774712
c350560d59
810a4fd14a
b4a1a1e664
5ca65003f1
b0bce60e5e
ff9b48a2d4
8f1785924f
af25ba7508
8ccd500d5b
40709e93de
31cabbd64c
f7a0163a70
0db1d9598d
db8ae4e0f1
84542080d6
a95ce95b50
e655683eec
443b572413
6836ba2226
1e3c9c26ea
145f011eba
095b5bfc78
15d9f39a9e
9d07f1e83e
f4e94bff1f
6345c7fa8e
2e9dc2d5ea
8f05f4d29c
5b2496c190
6893356c30
943608e554
6c065bd7e3
dfff445ddd
e08f8d5fb5
30a7a6cbe9
d6af506a78
57893e0125
080ac6b5bb
d2c4bcf25d
c3560c3f05
50bbb0a9d2
6839c5b750
622c0faebf
935821857a
5fe737326e
ff0d3c3d63
fcdf165dfe
ae7ea4dd11
0c917ac3ed
657c17a12f
8828eefbe4
02063f7d92
24244d6ff4
4e5ea05987
f8be8f2268
```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2023.6.0
+current_version = 2023.6.1
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
```
.github/workflows/ci-main.yml (vendored, 2 changes)

```diff
@@ -112,7 +112,7 @@ jobs:
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: Create k8s Kind Cluster
-        uses: helm/kind-action@v1.7.0
+        uses: helm/kind-action@v1.8.0
       - name: run integration
         run: |
           poetry run coverage run manage.py test tests/integration
```
.github/workflows/translation-rename.yml (vendored, new file, +39)

```diff
@@ -0,0 +1,39 @@
+# Rename transifex pull requests to have a correct naming
+name: authentik-translation-transifex-rename
+
+on:
+  pull_request:
+    types: [opened, reopened]
+
+jobs:
+  rename_pr:
+    runs-on: ubuntu-latest
+    if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
+    steps:
+      - id: generate_token
+        uses: tibdex/github-app-token@v1
+        with:
+          app_id: ${{ secrets.GH_APP_ID }}
+          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+      - name: Get current title
+        id: title
+        env:
+          GH_TOKEN: ${{ steps.generate_token.outputs.token }}
+        run: |
+          title=$(curl -q -L \
+            -H "Accept: application/vnd.github+json" \
+            -H "Authorization: Bearer ${GH_TOKEN}" \
+            -H "X-GitHub-Api-Version: 2022-11-28" \
+            https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
+          echo "title=${title}" >> "$GITHUB_OUTPUT"
+      - name: Rename
+        env:
+          GH_TOKEN: ${{ steps.generate_token.outputs.token }}
+        run: |
+          curl -L \
+            -X PATCH \
+            -H "Accept: application/vnd.github+json" \
+            -H "Authorization: Bearer ${GH_TOKEN}" \
+            -H "X-GitHub-Api-Version: 2022-11-28" \
+            https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
+            -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
```
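For readers less familiar with the GitHub REST API used by this workflow, here is a minimal Python equivalent of the two curl calls. It is an illustrative sketch only: the repository name, PR number, and token are placeholders for values the workflow derives from its Actions context.

```python
"""Minimal Python equivalent of the workflow's two curl calls (sketch).

OWNER_REPO, PR_NUMBER, and GH_TOKEN are placeholders for values the
workflow takes from the GitHub Actions context.
"""
import os

import requests

OWNER_REPO = "goauthentik/authentik"  # ${GITHUB_REPOSITORY} in the workflow
PR_NUMBER = 1234                      # github.event.pull_request.number (hypothetical)
API = f"https://api.github.com/repos/{OWNER_REPO}/pulls/{PR_NUMBER}"
HEADERS = {
    "Accept": "application/vnd.github+json",
    "Authorization": f"Bearer {os.environ['GH_TOKEN']}",
    "X-GitHub-Api-Version": "2022-11-28",
}

# Step 1: read the current title (the `jq -r .title` part)
title = requests.get(API, headers=HEADERS, timeout=10).json()["title"]

# Step 2: PATCH the pull request with the prefixed title
requests.patch(API, headers=HEADERS, json={"title": f"translate: {title}"}, timeout=10)
```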
.gitignore (vendored, 1 change)

```diff
@@ -204,3 +204,4 @@ data/
 
 # Local Netlify folder
 .netlify
+.ruff_cache
```
Dockerfile (12 changes)

```diff
@@ -31,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
     poetry export -f requirements.txt --dev --output requirements-dev.txt
 
 # Stage 4: Build go proxy
-FROM docker.io/golang:1.20.5-bullseye AS go-builder
+FROM docker.io/golang:1.20.6-bullseye AS go-builder
 
 WORKDIR /work
 
@@ -47,20 +47,18 @@ COPY ./go.sum /work/go.sum
 RUN go build -o /work/authentik ./cmd/server/
 
 # Stage 5: MaxMind GeoIP
-FROM ghcr.io/maxmind/geoipupdate:v5.1 as geoip
+FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip
 
 ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
 ENV GEOIPUPDATE_VERBOSE="true"
+ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
+ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"
 
 USER root
 RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
     mkdir -p /usr/share/GeoIP && \
-    /bin/sh -c "\
-    export GEOIPUPDATE_ACCOUNT_ID=$(cat /run/secrets/GEOIPUPDATE_ACCOUNT_ID); \
-    export GEOIPUPDATE_LICENSE_KEY=$(cat /run/secrets/GEOIPUPDATE_LICENSE_KEY); \
-    /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0 \
-    "
+    /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
 
 # Stage 6: Run
 FROM docker.io/python:3.11.4-slim-bullseye AS final-image
```
Makefile (3 changes)

```diff
@@ -145,7 +145,8 @@ web-lint-fix:
 
 web-lint:
 	cd web && npm run lint
-	cd web && npm run lit-analyse
+	# TODO: The analyzer hasn't run correctly in awhile.
+	# cd web && npm run lit-analyse
 
 web-check-compile:
 	cd web && npm run tsc
```
SECURITY.md (50 changes)

```diff
@@ -1,44 +1,48 @@
 authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce it's existence and release the fixed version.
 
+## What authentik classifies as a CVE
+
+CVE (Common Vulnerability and Exposure) is a system designed to aggregate all vulnerabilities. As such, a CVE will be issued when there is a either vulnerability or exposure. Per NIST, A vulnerability is:
+
+“Weakness in an information system, system security procedures, internal controls, or implementation that could be exploited or triggered by a threat source.”
+
+If it is determined that the issue does qualify as a CVE, a CVE number will be issued to the reporter from GitHub.
+
+Even if the issue is not a CVE, we still greatly appreciate your help in hardening authentik.
+
 ## Supported Versions
 
 (.x being the latest patch release for each version)
 
 | Version | Supported |
-| --------- | ------------------ |
-| 2023.4.x | :white_check_mark: |
-| 2023.5.x | :white_check_mark: |
+| --- | --- |
+| 2023.5.x | ✅ |
+| 2023.6.x | ✅ |
 
 ## Reporting a Vulnerability
 
-To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the bug.
+To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the issue.
 
-## Criticality levels
+## Severity levels
 
-### High
-
-- Authorization bypass
-- Circumvention of policies
-
-### Moderate
-
-- Denial-of-Service attacks
-
-### Low
-
-- Unvalidated redirects
-- Issues requiring uncommon setups
+authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories:
+
+| 0.0 | None |
+| 0.1 – 3.9 | Low |
+| 4.0 – 6.9 | Medium |
+| 7.0 – 8.9 | High |
+| 9.0 – 10.0 | Critical |
 
 ## Disclosure process
 
-1. Issue is reported via Email as listed above.
+1. Report from Github or Issue is reported via Email as listed above.
 2. The authentik Security team will try to reproduce the issue and ask for more information if required.
-3. A criticality level is assigned.
+3. A severity level is assigned.
 4. A fix is created, and if possible tested by the issue reporter.
 5. The fix is backported to other supported versions, and if possible a workaround for other versions is created.
-6. An announcement is sent out with a fixed release date and criticality level of the issue. The announcement will be sent at least 24 hours before the release of the fix
+6. An announcement is sent out with a fixed release date and severity level of the issue. The announcement will be sent at least 24 hours before the release of the security fix.
 7. The fixed version is released for the supported versions.
 
 ## Getting security notifications
 
 To get security notifications, subscribe to the mailing list [here](https://groups.google.com/g/authentik-security-announcements) or join the [discord](https://goauthentik.io/discord) server.
```
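The new CVSS-to-severity mapping in SECURITY.md is simple enough to express in code. This is an illustrative sketch of the table above, not part of the diff or of authentik's codebase:

```python
def cvss_severity(score: float) -> str:
    """Map a CVSS v3 base score (0.0-10.0) to the severity buckets
    listed in SECURITY.md. Illustrative sketch only."""
    if not 0.0 <= score <= 10.0:
        raise ValueError("CVSS scores range from 0.0 to 10.0")
    if score == 0.0:
        return "None"
    if score <= 3.9:
        return "Low"
    if score <= 6.9:
        return "Medium"
    if score <= 8.9:
        return "High"
    return "Critical"


assert cvss_severity(7.5) == "High"
assert cvss_severity(0.0) == "None"
```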
```diff
@@ -2,7 +2,7 @@
 from os import environ
 from typing import Optional
 
-__version__ = "2023.6.0"
+__version__ = "2023.6.1"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
 
 
```
```diff
@@ -58,7 +58,7 @@ def clear_update_notifications():
 @prefill_task
 def update_latest_version(self: MonitoredTask):
     """Update latest version info"""
-    if CONFIG.y_bool("disable_update_check"):
+    if CONFIG.get_bool("disable_update_check"):
         cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
         self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."]))
         return
```
```diff
@@ -9,7 +9,7 @@ from rest_framework.exceptions import AuthenticationFailed
 
 from authentik.api.authentication import bearer_auth
 from authentik.blueprints.tests import reconcile_app
-from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
+from authentik.core.models import Token, TokenIntents, User, UserTypes
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow
 from authentik.lib.generators import generate_id
 from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
@@ -57,8 +57,8 @@ class TestAPIAuth(TestCase):
     @reconcile_app("authentik_outposts")
     def test_managed_outpost_success(self):
         """Test managed outpost"""
-        user = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
-        self.assertEqual(user.attributes[USER_ATTRIBUTE_SA], True)
+        user: User = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
+        self.assertEqual(user.type, UserTypes.INTERNAL_SERVICE_ACCOUNT)
 
     def test_jwt_valid(self):
         """Test valid JWT"""
```
```diff
@@ -3,6 +3,7 @@ from pathlib import Path
 
 from django.conf import settings
 from django.db import models
+from django.dispatch import Signal
 from drf_spectacular.utils import extend_schema
 from rest_framework.fields import (
     BooleanField,
@@ -21,6 +22,8 @@ from authentik.core.api.utils import PassiveSerializer
 from authentik.events.geo import GEOIP_READER
 from authentik.lib.config import CONFIG
 
+capabilities = Signal()
+
 
 class Capabilities(models.TextChoices):
     """Define capabilities which influence which APIs can/should be used"""
@@ -67,12 +70,15 @@ class ConfigView(APIView):
             caps.append(Capabilities.CAN_SAVE_MEDIA)
         if GEOIP_READER.enabled:
             caps.append(Capabilities.CAN_GEO_IP)
-        if CONFIG.y_bool("impersonation"):
+        if CONFIG.get_bool("impersonation"):
             caps.append(Capabilities.CAN_IMPERSONATE)
         if settings.DEBUG:  # pragma: no cover
             caps.append(Capabilities.CAN_DEBUG)
         if "authentik.enterprise" in settings.INSTALLED_APPS:
             caps.append(Capabilities.IS_ENTERPRISE)
+        for _, result in capabilities.send(sender=self):
+            if result:
+                caps.append(result)
         return caps
 
     def get_config(self) -> ConfigSerializer:
@@ -80,17 +86,17 @@ class ConfigView(APIView):
         return ConfigSerializer(
             {
                 "error_reporting": {
-                    "enabled": CONFIG.y("error_reporting.enabled"),
-                    "sentry_dsn": CONFIG.y("error_reporting.sentry_dsn"),
-                    "environment": CONFIG.y("error_reporting.environment"),
-                    "send_pii": CONFIG.y("error_reporting.send_pii"),
-                    "traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)),
+                    "enabled": CONFIG.get("error_reporting.enabled"),
+                    "sentry_dsn": CONFIG.get("error_reporting.sentry_dsn"),
+                    "environment": CONFIG.get("error_reporting.environment"),
+                    "send_pii": CONFIG.get("error_reporting.send_pii"),
+                    "traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)),
                 },
                 "capabilities": self.get_capabilities(),
-                "cache_timeout": int(CONFIG.y("redis.cache_timeout")),
-                "cache_timeout_flows": int(CONFIG.y("redis.cache_timeout_flows")),
-                "cache_timeout_policies": int(CONFIG.y("redis.cache_timeout_policies")),
-                "cache_timeout_reputation": int(CONFIG.y("redis.cache_timeout_reputation")),
+                "cache_timeout": int(CONFIG.get("redis.cache_timeout")),
+                "cache_timeout_flows": int(CONFIG.get("redis.cache_timeout_flows")),
+                "cache_timeout_policies": int(CONFIG.get("redis.cache_timeout_policies")),
+                "cache_timeout_reputation": int(CONFIG.get("redis.cache_timeout_reputation")),
             }
         )
 
```
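This is one of many hunks in the compare that rename `CONFIG.y`/`CONFIG.y_bool` to `CONFIG.get`/`CONFIG.get_bool`. The sketch below illustrates the dotted-path accessor pattern both names refer to; the shape is an assumption for illustration, not authentik's actual ConfigLoader:

```python
"""Minimal sketch of a dotted-path config accessor, illustrating the
CONFIG.y / CONFIG.y_bool -> CONFIG.get / CONFIG.get_bool rename seen
throughout this compare. Assumed shape, not authentik's ConfigLoader."""
from typing import Any


class ConfigLoader:
    def __init__(self, raw: dict):
        self._raw = raw

    def get(self, path: str, default: Any = None) -> Any:
        """Walk nested dicts along a dotted path, e.g. 'error_reporting.enabled'."""
        node: Any = self._raw
        for part in path.split("."):
            if not isinstance(node, dict) or part not in node:
                return default
            node = node[part]
        return node

    def get_bool(self, path: str, default: bool = False) -> bool:
        """Coerce common string spellings of truth to bool."""
        value = self.get(path, default)
        if isinstance(value, str):
            return value.strip().lower() in ("true", "yes", "1")
        return bool(value)


CONFIG = ConfigLoader({"error_reporting": {"enabled": "true"}, "impersonation": True})
assert CONFIG.get("error_reporting.enabled") == "true"
assert CONFIG.get_bool("impersonation") is True
```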
```diff
@@ -21,9 +21,14 @@ _other_urls = []
 for _authentik_app in get_apps():
     try:
         api_urls = import_module(f"{_authentik_app.name}.urls")
-    except (ModuleNotFoundError, ImportError):
+    except (ModuleNotFoundError, ImportError) as exc:
+        LOGGER.warning("Could not import app's URLs", app_name=_authentik_app.name, exc=exc)
         continue
     if not hasattr(api_urls, "api_urlpatterns"):
+        LOGGER.debug(
+            "App does not define API URLs",
+            app_name=_authentik_app.name,
+        )
         continue
     urls: list = getattr(api_urls, "api_urlpatterns")
     for url in urls:
```
```diff
@@ -30,7 +30,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
         return
     blueprint_file.seek(0)
     instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first()
-    rel_path = path.relative_to(Path(CONFIG.y("blueprints_dir")))
+    rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
     meta = None
     if metadata:
         meta = from_dict(BlueprintMetadata, metadata)
@@ -55,7 +55,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
     Flow = apps.get_model("authentik_flows", "Flow")
 
     db_alias = schema_editor.connection.alias
-    for file in glob(f"{CONFIG.y('blueprints_dir')}/**/*.yaml", recursive=True):
+    for file in glob(f"{CONFIG.get('blueprints_dir')}/**/*.yaml", recursive=True):
         check_blueprint_v1_file(BlueprintInstance, Path(file))
 
     for blueprint in BlueprintInstance.objects.using(db_alias).all():
@@ -82,7 +82,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
     def retrieve_file(self) -> str:
         """Get blueprint from path"""
         try:
-            base = Path(CONFIG.y("blueprints_dir"))
+            base = Path(CONFIG.get("blueprints_dir"))
             full_path = base.joinpath(Path(self.path)).resolve()
             if not str(full_path).startswith(str(base.resolve())):
                 raise BlueprintRetrievalFailed("Invalid blueprint path")
@@ -62,7 +62,7 @@ def start_blueprint_watcher():
     if _file_watcher_started:
         return
     observer = Observer()
-    observer.schedule(BlueprintEventHandler(), CONFIG.y("blueprints_dir"), recursive=True)
+    observer.schedule(BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True)
     observer.start()
     _file_watcher_started = True
 
@@ -80,7 +80,7 @@ class BlueprintEventHandler(FileSystemEventHandler):
         blueprints_discovery.delay()
         if isinstance(event, FileModifiedEvent):
             path = Path(event.src_path)
-            root = Path(CONFIG.y("blueprints_dir")).absolute()
+            root = Path(CONFIG.get("blueprints_dir")).absolute()
             rel_path = str(path.relative_to(root))
             for instance in BlueprintInstance.objects.filter(path=rel_path):
                 LOGGER.debug("modified blueprint file, starting apply", instance=instance)
@@ -101,7 +101,7 @@ def blueprints_find_dict():
 def blueprints_find():
     """Find blueprints and return valid ones"""
     blueprints = []
-    root = Path(CONFIG.y("blueprints_dir"))
+    root = Path(CONFIG.get("blueprints_dir"))
     for path in root.rglob("**/*.yaml"):
         # Check if any part in the path starts with a dot and assume a hidden file
         if any(part for part in path.parts if part.startswith(".")):
```
```diff
@@ -15,7 +15,7 @@ from django.utils.http import urlencode
 from django.utils.text import slugify
 from django.utils.timezone import now
 from django.utils.translation import gettext as _
-from django_filters.filters import BooleanFilter, CharFilter, ModelMultipleChoiceFilter
+from django_filters.filters import BooleanFilter, CharFilter, ModelMultipleChoiceFilter, UUIDFilter
 from django_filters.filterset import FilterSet
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import (
@@ -59,7 +59,6 @@ from authentik.core.middleware import (
     SESSION_KEY_IMPERSONATE_USER,
 )
 from authentik.core.models import (
-    USER_ATTRIBUTE_SA,
     USER_ATTRIBUTE_TOKEN_EXPIRING,
     USER_PATH_SERVICE_ACCOUNT,
     AuthenticatedSession,
@@ -67,6 +66,7 @@ from authentik.core.models import (
     Token,
     TokenIntents,
     User,
+    UserTypes,
 )
 from authentik.events.models import Event, EventAction
 from authentik.flows.exceptions import FlowNonApplicableException
@@ -147,6 +147,18 @@ class UserSerializer(ModelSerializer):
             raise ValidationError(_("No empty segments in user path allowed."))
         return path
 
+    def validate_type(self, user_type: str) -> str:
+        """Validate user type, internal_service_account is an internal value"""
+        if (
+            self.instance
+            and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT
+            and user_type != UserTypes.INTERNAL_SERVICE_ACCOUNT.value
+        ):
+            raise ValidationError("Can't change internal service account to other user type.")
+        if not self.instance and user_type == UserTypes.INTERNAL_SERVICE_ACCOUNT.value:
+            raise ValidationError("Setting a user to internal service account is not allowed.")
+        return user_type
+
     class Meta:
         model = User
         fields = [
@@ -163,6 +175,7 @@ class UserSerializer(ModelSerializer):
             "attributes",
             "uid",
             "path",
+            "type",
         ]
         extra_kwargs = {
             "name": {"allow_blank": True},
@@ -211,6 +224,7 @@ class UserSelfSerializer(ModelSerializer):
             "avatar",
             "uid",
             "settings",
+            "type",
         ]
         extra_kwargs = {
             "is_active": {"read_only": True},
@@ -284,7 +298,7 @@ class UsersFilter(FilterSet):
     )
 
     is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser")
-    uuid = CharFilter(field_name="uuid")
+    uuid = UUIDFilter(field_name="uuid")
 
     path = CharFilter(
         field_name="path",
@@ -329,6 +343,7 @@ class UsersFilter(FilterSet):
             "attributes",
             "groups_by_name",
             "groups_by_pk",
+            "type",
         ]
 
 
@@ -421,7 +436,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
         user: User = User.objects.create(
             username=username,
             name=username,
-            attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: expiring},
+            type=UserTypes.SERVICE_ACCOUNT,
+            attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: expiring},
             path=USER_PATH_SERVICE_ACCOUNT,
         )
         user.set_unusable_password()
@@ -580,7 +596,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
     @action(detail=True, methods=["POST"])
     def impersonate(self, request: Request, pk: int) -> Response:
         """Impersonate a user"""
-        if not CONFIG.y_bool("impersonation"):
+        if not CONFIG.get_bool("impersonation"):
             LOGGER.debug("User attempted to impersonate", user=request.user)
             return Response(status=401)
         if not request.user.has_perm("impersonate"):
```
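These hunks expose the new `type` field on the user API and make it filterable. Below is a hypothetical client-side example; the endpoint path and bearer-token auth are assumptions based on authentik's usual /api/v3 layout, not stated anywhere in this diff:

```python
"""Hypothetical client call listing service-account users via the new
`type` filter. The endpoint path and auth scheme are assumptions based
on authentik's usual /api/v3 layout, not confirmed by this diff."""
import requests

resp = requests.get(
    "https://authentik.example.com/api/v3/core/users/",
    params={"type": "service_account"},
    headers={"Authorization": "Bearer <api-token>"},
    timeout=10,
)
resp.raise_for_status()
for user in resp.json()["results"]:
    print(user["username"], user["type"])
```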
```diff
@@ -18,7 +18,7 @@ class Command(BaseCommand):
 
     def handle(self, **options):
         close_old_connections()
-        if CONFIG.y_bool("remote_debug"):
+        if CONFIG.get_bool("remote_debug"):
             import debugpy
 
             debugpy.listen(("0.0.0.0", 6900))  # nosec
```
authentik/core/migrations/0030_user_type.py (new file, +43)

```diff
@@ -0,0 +1,43 @@
+# Generated by Django 4.1.7 on 2023-05-21 11:44
+
+from django.apps.registry import Apps
+from django.db import migrations, models
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+
+
+def migrate_user_type(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
+    db_alias = schema_editor.connection.alias
+    User = apps.get_model("authentik_core", "User")
+
+    from authentik.core.models import UserTypes
+
+    for user in User.objects.using(db_alias).all():
+        user.type = UserTypes.INTERNAL
+        if "goauthentik.io/user/service-account" in user.attributes:
+            user.type = UserTypes.SERVICE_ACCOUNT
+        if "goauthentik.io/user/override-ips" in user.attributes:
+            user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
+        user.save()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("authentik_core", "0029_provider_backchannel_applications_and_more"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="user",
+            name="type",
+            field=models.TextField(
+                choices=[
+                    ("default", "Default"),
+                    ("external", "External"),
+                    ("service_account", "Service Account"),
+                    ("internal_service_account", "Internal Service Account"),
+                ],
+                default="default",
+            ),
+        ),
+        migrations.RunPython(migrate_user_type),
+    ]
```
authentik/core/migrations/0031_alter_user_type.py (new file, +41)

```diff
@@ -0,0 +1,41 @@
+# Generated by Django 4.1.10 on 2023-07-21 12:54
+
+from django.apps.registry import Apps
+from django.db import migrations, models
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+
+
+def migrate_user_type_v2(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
+    db_alias = schema_editor.connection.alias
+    User = apps.get_model("authentik_core", "User")
+
+    from authentik.core.models import UserTypes
+
+    for user in User.objects.using(db_alias).all():
+        if user.type != "default":
+            continue
+        user.type = UserTypes.INTERNAL
+        user.save()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("authentik_core", "0030_user_type"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="user",
+            name="type",
+            field=models.TextField(
+                choices=[
+                    ("internal", "Internal"),
+                    ("external", "External"),
+                    ("service_account", "Service Account"),
+                    ("internal_service_account", "Internal Service Account"),
+                ],
+                default="internal",
+            ),
+        ),
+        migrations.RunPython(migrate_user_type_v2),
+    ]
```
```diff
@@ -36,7 +36,6 @@ from authentik.root.install_id import get_install_id
 
 LOGGER = get_logger()
 USER_ATTRIBUTE_DEBUG = "goauthentik.io/user/debug"
-USER_ATTRIBUTE_SA = "goauthentik.io/user/service-account"
 USER_ATTRIBUTE_GENERATED = "goauthentik.io/user/generated"
 USER_ATTRIBUTE_EXPIRES = "goauthentik.io/user/expires"
 USER_ATTRIBUTE_DELETE_ON_LOGOUT = "goauthentik.io/user/delete-on-logout"
@@ -45,8 +44,6 @@ USER_ATTRIBUTE_TOKEN_EXPIRING = "goauthentik.io/user/token-expires"  # nosec
 USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username"
 USER_ATTRIBUTE_CHANGE_NAME = "goauthentik.io/user/can-change-name"
 USER_ATTRIBUTE_CHANGE_EMAIL = "goauthentik.io/user/can-change-email"
-USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips"
-
 USER_PATH_SYSTEM_PREFIX = "goauthentik.io"
 USER_PATH_SERVICE_ACCOUNT = USER_PATH_SYSTEM_PREFIX + "/service-accounts"
 
@@ -63,7 +60,22 @@ def default_token_key():
     """Default token key"""
     # We use generate_id since the chars in the key should be easy
     # to use in Emails (for verification) and URLs (for recovery)
-    return generate_id(int(CONFIG.y("default_token_length")))
+    return generate_id(int(CONFIG.get("default_token_length")))
 
 
+class UserTypes(models.TextChoices):
+    """User types, both for grouping, licensing and permissions in the case
+    of the internal_service_account"""
+
+    INTERNAL = "internal"
+    EXTERNAL = "external"
+
+    # User-created service accounts
+    SERVICE_ACCOUNT = "service_account"
+
+    # Special user type for internally managed and created service
+    # accounts, such as outpost users
+    INTERNAL_SERVICE_ACCOUNT = "internal_service_account"
+
+
 class Group(SerializerModel):
@@ -149,6 +161,7 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
     uuid = models.UUIDField(default=uuid4, editable=False, unique=True)
     name = models.TextField(help_text=_("User's display name."))
     path = models.TextField(default="users")
+    type = models.TextField(choices=UserTypes.choices, default=UserTypes.INTERNAL)
 
     sources = models.ManyToManyField("Source", through="UserSourceConnection")
     ak_groups = models.ManyToManyField("Group", related_name="users")
```
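A quick illustration of how the new enum behaves; the equality and label semantics are standard Django TextChoices behavior, and the count queries mirror how the enterprise license code later tallies users per type (Django must be configured for these imports to work):

```python
"""Illustrative sketch of UserTypes semantics (standard Django
TextChoices behavior); the per-type counts echo the license accounting
added later in this compare. Requires a configured Django project."""
from authentik.core.models import User, UserTypes

# TextChoices members are str subclasses, so they compare equal to the stored value
assert UserTypes.SERVICE_ACCOUNT == "service_account"
assert UserTypes.INTERNAL.label == "Internal"

# Per-type tallies, e.g. the inputs to license accounting
internal = User.objects.filter(type=UserTypes.INTERNAL).count()
external = User.objects.filter(type=UserTypes.EXTERNAL).count()
print(f"{internal} internal / {external} external users")
```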
```diff
@@ -1,6 +1,4 @@
 """authentik core signals"""
-from typing import TYPE_CHECKING
-
 from django.contrib.auth.signals import user_logged_in, user_logged_out
 from django.contrib.sessions.backends.cache import KEY_PREFIX
 from django.core.cache import cache
@@ -10,16 +8,13 @@ from django.db.models.signals import post_save, pre_delete, pre_save
 from django.dispatch import receiver
 from django.http.request import HttpRequest
 
-from authentik.core.models import Application, AuthenticatedSession, BackchannelProvider
+from authentik.core.models import Application, AuthenticatedSession, BackchannelProvider, User
 
 # Arguments: user: User, password: str
 password_changed = Signal()
 # Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
 login_failed = Signal()
 
-if TYPE_CHECKING:
-    from authentik.core.models import User
-
 
 @receiver(post_save, sender=Application)
 def post_save_application(sender: type[Model], instance, created: bool, **_):
@@ -35,7 +30,7 @@ def post_save_application(sender: type[Model], instance, created: bool, **_):
 
 
 @receiver(user_logged_in)
-def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
+def user_logged_in_session(sender, request: HttpRequest, user: User, **_):
     """Create an AuthenticatedSession from request"""
 
     session = AuthenticatedSession.from_request(request, user)
@@ -44,7 +39,7 @@ def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
 
 
 @receiver(user_logged_out)
-def user_logged_out_session(sender, request: HttpRequest, user: "User", **_):
+def user_logged_out_session(sender, request: HttpRequest, user: User, **_):
     """Delete AuthenticatedSession if it exists"""
     AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete()
 
```
```diff
@@ -8,11 +8,11 @@ from django.urls.base import reverse
 from rest_framework.test import APITestCase
 
 from authentik.core.models import (
-    USER_ATTRIBUTE_SA,
     USER_ATTRIBUTE_TOKEN_EXPIRING,
     AuthenticatedSession,
     Token,
     User,
+    UserTypes,
 )
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_tenant
 from authentik.flows.models import FlowDesignation
@@ -141,7 +141,8 @@ class TestUsersAPI(APITestCase):
 
         user_filter = User.objects.filter(
             username="test-sa",
-            attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True, USER_ATTRIBUTE_SA: True},
+            type=UserTypes.SERVICE_ACCOUNT,
+            attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True},
         )
         self.assertTrue(user_filter.exists())
         user: User = user_filter.first()
@@ -166,7 +167,8 @@ class TestUsersAPI(APITestCase):
 
         user_filter = User.objects.filter(
             username="test-sa",
-            attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: False, USER_ATTRIBUTE_SA: True},
+            type=UserTypes.SERVICE_ACCOUNT,
+            attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: False},
         )
         self.assertTrue(user_filter.exists())
         user: User = user_filter.first()
@@ -192,7 +194,8 @@ class TestUsersAPI(APITestCase):
 
         user_filter = User.objects.filter(
             username="test-sa",
-            attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True, USER_ATTRIBUTE_SA: True},
+            type=UserTypes.SERVICE_ACCOUNT,
+            attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True},
         )
         self.assertTrue(user_filter.exists())
         user: User = user_filter.first()
@@ -218,7 +221,8 @@ class TestUsersAPI(APITestCase):
 
         user_filter = User.objects.filter(
             username="test-sa",
-            attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True, USER_ATTRIBUTE_SA: True},
+            type=UserTypes.SERVICE_ACCOUNT,
+            attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True},
        )
         self.assertTrue(user_filter.exists())
         user: User = user_filter.first()
```
```diff
@@ -46,7 +46,7 @@ def certificate_discovery(self: MonitoredTask):
     certs = {}
     private_keys = {}
     discovered = 0
-    for file in glob(CONFIG.y("cert_discovery_dir") + "/**", recursive=True):
+    for file in glob(CONFIG.get("cert_discovery_dir") + "/**", recursive=True):
         path = Path(file)
         if not path.exists():
             continue
```
authentik/enterprise/__init__.py (new file, empty)

authentik/enterprise/api.py (new file, +154)
```diff
@@ -0,0 +1,154 @@
+"""Enterprise API Views"""
+from datetime import datetime, timedelta
+
+from django.utils.timezone import now
+from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.utils import extend_schema, inline_serializer
+from rest_framework.decorators import action
+from rest_framework.fields import BooleanField, CharField, DateTimeField, IntegerField
+from rest_framework.permissions import IsAdminUser, IsAuthenticated
+from rest_framework.request import Request
+from rest_framework.response import Response
+from rest_framework.serializers import ModelSerializer
+from rest_framework.viewsets import ModelViewSet
+
+from authentik.api.decorators import permission_required
+from authentik.core.api.used_by import UsedByMixin
+from authentik.core.api.utils import PassiveSerializer
+from authentik.core.models import User, UserTypes
+from authentik.enterprise.models import License, LicenseKey
+from authentik.root.install_id import get_install_id
+
+
+class LicenseSerializer(ModelSerializer):
+    """License Serializer"""
+
+    def validate_key(self, key: str) -> str:
+        """Validate the license key (install_id and signature)"""
+        LicenseKey.validate(key)
+        return key
+
+    class Meta:
+        model = License
+        fields = [
+            "license_uuid",
+            "name",
+            "key",
+            "expiry",
+            "users",
+            "external_users",
+        ]
+        extra_kwargs = {
+            "name": {"read_only": True},
+            "expiry": {"read_only": True},
+            "users": {"read_only": True},
+            "external_users": {"read_only": True},
+        }
+
+
+class LicenseSummary(PassiveSerializer):
+    """Serializer for license status"""
+
+    users = IntegerField(required=True)
+    external_users = IntegerField(required=True)
+    valid = BooleanField()
+    show_admin_warning = BooleanField()
+    show_user_warning = BooleanField()
+    read_only = BooleanField()
+    latest_valid = DateTimeField()
+    has_license = BooleanField()
+
+
+class LicenseForecastSerializer(PassiveSerializer):
+    """Serializer for license forecast"""
+
+    users = IntegerField(required=True)
+    external_users = IntegerField(required=True)
+    forecasted_users = IntegerField(required=True)
+    forecasted_external_users = IntegerField(required=True)
+
+
+class LicenseViewSet(UsedByMixin, ModelViewSet):
+    """License Viewset"""
+
+    queryset = License.objects.all()
+    serializer_class = LicenseSerializer
+    search_fields = ["name"]
+    ordering = ["name"]
+    filterset_fields = ["name"]
+
+    @permission_required(None, ["authentik_enterprise.view_license"])
+    @extend_schema(
+        request=OpenApiTypes.NONE,
+        responses={
+            200: inline_serializer("InstallIDSerializer", {"install_id": CharField(required=True)}),
+        },
+    )
+    @action(detail=False, methods=["GET"], permission_classes=[IsAdminUser])
+    def get_install_id(self, request: Request) -> Response:
+        """Get install_id"""
+        return Response(
+            data={
+                "install_id": get_install_id(),
+            }
+        )
+
+    @extend_schema(
+        request=OpenApiTypes.NONE,
+        responses={
+            200: LicenseSummary(),
+        },
+    )
+    @action(detail=False, methods=["GET"], permission_classes=[IsAuthenticated])
+    def summary(self, request: Request) -> Response:
+        """Get the total license status"""
+        total = LicenseKey.get_total()
+        last_valid = LicenseKey.last_valid_date()
+        # TODO: move this to a different place?
+        show_admin_warning = last_valid < now() - timedelta(weeks=2)
+        show_user_warning = last_valid < now() - timedelta(weeks=4)
+        read_only = last_valid < now() - timedelta(weeks=6)
+        latest_valid = datetime.fromtimestamp(total.exp)
+        response = LicenseSummary(
+            data={
+                "users": total.users,
+                "external_users": total.external_users,
+                "valid": total.is_valid(),
+                "show_admin_warning": show_admin_warning,
+                "show_user_warning": show_user_warning,
+                "read_only": read_only,
+                "latest_valid": latest_valid,
+                "has_license": License.objects.all().count() > 0,
+            }
+        )
+        response.is_valid(raise_exception=True)
+        return Response(response.data)
+
+    @permission_required(None, ["authentik_enterprise.view_license"])
+    @extend_schema(
+        request=OpenApiTypes.NONE,
+        responses={
+            200: LicenseForecastSerializer(),
+        },
+    )
+    @action(detail=False, methods=["GET"])
+    def forecast(self, request: Request) -> Response:
+        """Forecast how many users will be required in a year"""
+        last_month = now() - timedelta(days=30)
+        # Forecast for default users
+        users_in_last_month = User.objects.filter(
+            type=UserTypes.INTERNAL, date_joined__gte=last_month
+        ).count()
+        # Forecast for external users
+        external_in_last_month = LicenseKey.get_external_user_count()
+        forecast_for_months = 12
+        response = LicenseForecastSerializer(
+            data={
+                "users": LicenseKey.get_default_user_count(),
+                "external_users": LicenseKey.get_external_user_count(),
+                "forecasted_users": (users_in_last_month * forecast_for_months),
+                "forecasted_external_users": (external_in_last_month * forecast_for_months),
+            }
+        )
+        response.is_valid(raise_exception=True)
+        return Response(response.data)
```
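For orientation, a hypothetical client for the two read-only actions above. The `/api/v3/enterprise/license/...` paths are inferred from the viewset's `@action` names and authentik's usual URL layout; they are not stated in this diff:

```python
"""Hypothetical client for the new license endpoints. The paths are
inferred from the viewset's @action names and authentik's usual
/api/v3 layout, not confirmed by this diff."""
import requests

BASE = "https://authentik.example.com/api/v3/enterprise/license"
HEADERS = {"Authorization": "Bearer <api-token>"}

summary = requests.get(f"{BASE}/summary/", headers=HEADERS, timeout=10).json()
if not summary["valid"]:
    print("License limits exceeded; last within limits:", summary["latest_valid"])

forecast = requests.get(f"{BASE}/forecast/", headers=HEADERS, timeout=10).json()
print("Projected internal users in 12 months:", forecast["forecasted_users"])
```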
```diff
@@ -9,3 +9,7 @@ class AuthentikEnterpriseConfig(ManagedAppConfig):
     label = "authentik_enterprise"
     verbose_name = "authentik Enterprise"
     default = True
+
+    def reconcile_load_enterprise_signals(self):
+        """Load enterprise signals"""
+        self.import_module("authentik.enterprise.signals")
```
authentik/enterprise/migrations/0001_initial.py (new file, +52)

```diff
@@ -0,0 +1,52 @@
+# Generated by Django 4.1.10 on 2023-07-06 12:51
+
+import uuid
+
+from django.db import migrations, models
+
+import authentik.enterprise.models
+
+
+class Migration(migrations.Migration):
+    initial = True
+
+    dependencies = []
+
+    operations = [
+        migrations.CreateModel(
+            name="License",
+            fields=[
+                (
+                    "license_uuid",
+                    models.UUIDField(
+                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
+                    ),
+                ),
+                ("key", models.TextField(unique=True)),
+                ("name", models.TextField()),
+                ("expiry", models.DateTimeField()),
+                ("users", models.BigIntegerField()),
+                ("external_users", models.BigIntegerField()),
+            ],
+        ),
+        migrations.CreateModel(
+            name="LicenseUsage",
+            fields=[
+                ("expiring", models.BooleanField(default=True)),
+                ("expires", models.DateTimeField(default=authentik.enterprise.models.usage_expiry)),
+                (
+                    "usage_uuid",
+                    models.UUIDField(
+                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
+                    ),
+                ),
+                ("user_count", models.BigIntegerField()),
+                ("external_user_count", models.BigIntegerField()),
+                ("within_limits", models.BooleanField()),
+                ("record_date", models.DateTimeField(auto_now_add=True)),
+            ],
+            options={
+                "abstract": False,
+            },
+        ),
+    ]
```
authentik/enterprise/migrations/__init__.py (new file, empty)

authentik/enterprise/models.py (new file, +185)
@ -0,0 +1,185 @@
"""Enterprise models"""
from base64 import b64decode
from binascii import Error
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from enum import Enum
from functools import lru_cache
from time import mktime
from uuid import uuid4

from cryptography.exceptions import InvalidSignature
from cryptography.x509 import Certificate, load_der_x509_certificate, load_pem_x509_certificate
from dacite import from_dict
from django.db import models
from django.db.models.query import QuerySet
from django.utils.timezone import now
from guardian.shortcuts import get_anonymous_user
from jwt import PyJWTError, decode, get_unverified_header
from rest_framework.exceptions import ValidationError

from authentik.core.models import ExpiringModel, User, UserTypes
from authentik.root.install_id import get_install_id


@lru_cache()
def get_licensing_key() -> Certificate:
    """Get Root CA PEM"""
    with open("authentik/enterprise/public.pem", "rb") as _key:
        return load_pem_x509_certificate(_key.read())


def get_license_aud() -> str:
    """Get the JWT audience field"""
    return f"enterprise.goauthentik.io/license/{get_install_id()}"


class LicenseFlags(Enum):
    """License flags"""


@dataclass
class LicenseKey:
    """License JWT claims"""

    aud: str
    exp: int

    name: str
    users: int
    external_users: int
    flags: list[LicenseFlags] = field(default_factory=list)

    @staticmethod
    def validate(jwt: str) -> "LicenseKey":
        """Validate the license from a given JWT"""
        try:
            headers = get_unverified_header(jwt)
        except PyJWTError:
            raise ValidationError("Unable to verify license")
        x5c: list[str] = headers.get("x5c", [])
        if len(x5c) < 1:
            raise ValidationError("Unable to verify license")
        try:
            our_cert = load_der_x509_certificate(b64decode(x5c[0]))
            intermediate = load_der_x509_certificate(b64decode(x5c[1]))
            our_cert.verify_directly_issued_by(intermediate)
            intermediate.verify_directly_issued_by(get_licensing_key())
        except (InvalidSignature, TypeError, ValueError, Error):
            raise ValidationError("Unable to verify license")
        try:
            body = from_dict(
                LicenseKey,
                decode(
                    jwt,
                    our_cert.public_key(),
                    algorithms=["ES512"],
                    audience=get_license_aud(),
                ),
            )
        except PyJWTError:
            raise ValidationError("Unable to verify license")
        return body

    @staticmethod
    def get_total() -> "LicenseKey":
        """Get a summarized version of all (not expired) licenses"""
        active_licenses = License.objects.filter(expiry__gte=now())
        total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0)
        for lic in active_licenses:
            total.users += lic.users
            total.external_users += lic.external_users
            exp_ts = int(mktime(lic.expiry.timetuple()))
            if total.exp == 0:
                total.exp = exp_ts
            if exp_ts <= total.exp:
                total.exp = exp_ts
            total.flags.extend(lic.status.flags)
        return total

    @staticmethod
    def base_user_qs() -> QuerySet:
        """Base query set for all users"""
        return User.objects.all().exclude(pk=get_anonymous_user().pk)

    @staticmethod
    def get_default_user_count():
        """Get current default user count"""
        return LicenseKey.base_user_qs().filter(type=UserTypes.INTERNAL).count()

    @staticmethod
    def get_external_user_count():
        """Get current external user count"""
        # Count since start of the month
        last_month = now().replace(day=1)
        return (
            LicenseKey.base_user_qs()
            .filter(type=UserTypes.EXTERNAL, last_login__gte=last_month)
            .count()
        )

    def is_valid(self) -> bool:
        """Check if the given license body covers all users

        Only checks the current count, no historical data is checked"""
        default_users = self.get_default_user_count()
        if default_users > self.users:
            return False
        active_users = self.get_external_user_count()
        if active_users > self.external_users:
            return False
        return True

    def record_usage(self):
        """Capture the current validity status and metrics and save them"""
        LicenseUsage.objects.create(
            user_count=self.get_default_user_count(),
            external_user_count=self.get_external_user_count(),
            within_limits=self.is_valid(),
        )

    @staticmethod
    def last_valid_date() -> datetime:
        """Get the last date the license was valid"""
        usage: LicenseUsage = (
            LicenseUsage.filter_not_expired(within_limits=True).order_by("-record_date").first()
        )
        if not usage:
            return now()
        return usage.record_date


class License(models.Model):
    """An authentik enterprise license"""

    license_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
    key = models.TextField(unique=True)

    name = models.TextField()
    expiry = models.DateTimeField()
    users = models.BigIntegerField()
    external_users = models.BigIntegerField()

    @property
    def status(self) -> LicenseKey:
        """Get parsed license status"""
        return LicenseKey.validate(self.key)


def usage_expiry():
    """Keep license usage records for 3 months"""
    return now() + timedelta(days=30 * 3)


class LicenseUsage(ExpiringModel):
    """a single license usage record"""

    expires = models.DateTimeField(default=usage_expiry)

    usage_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)

    user_count = models.BigIntegerField()
    external_user_count = models.BigIntegerField()
    within_limits = models.BooleanField()

    record_date = models.DateTimeField(auto_now_add=True)
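Editor's note — how these pieces fit together: `License` rows store the raw JWT, the `pre_save` signal (in authentik/enterprise/signals.py below) denormalizes the parsed claims onto the row, and `LicenseKey.get_total()` folds all unexpired licenses into one summary claim set. A sketch of that flow, not part of this changeset; `license_jwt` is a hypothetical, already-issued license JWT:

# Sketch only (not part of this diff); license_jwt is assumed valid.
from authentik.enterprise.models import License, LicenseKey

# Validation is implicit: the pre_save signal reads License.status, which
# calls LicenseKey.validate() and raises ValidationError on a bad JWT.
lic = License.objects.create(key=license_jwt)

# Sum users/external_users across all unexpired licenses; exp becomes the
# earliest expiry of the set.
total = LicenseKey.get_total()

print(total.is_valid())  # licensed limits vs. live user counts
total.record_usage()     # persist a snapshot, as the scheduled task does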
authentik/enterprise/policy.py (new file, 46 lines)
@@ -0,0 +1,46 @@
"""Enterprise license policies"""
from typing import Optional

from rest_framework.serializers import BaseSerializer

from authentik.core.models import User, UserTypes
from authentik.enterprise.models import LicenseKey
from authentik.policies.models import Policy
from authentik.policies.types import PolicyRequest, PolicyResult
from authentik.policies.views import PolicyAccessView


class EnterprisePolicy(Policy):
    """Check that a user is correctly licensed for the request"""

    @property
    def component(self) -> str:
        return ""

    @property
    def serializer(self) -> type[BaseSerializer]:
        raise NotImplementedError

    def passes(self, request: PolicyRequest) -> PolicyResult:
        if not LicenseKey.get_total().is_valid():
            return PolicyResult(False)
        if request.user.type != UserTypes.INTERNAL:
            return PolicyResult(False)
        return PolicyResult(True)


class EnterprisePolicyAccessView(PolicyAccessView):
    """PolicyAccessView which also checks enterprise licensing"""

    def user_has_access(self, user: Optional[User] = None) -> PolicyResult:
        user = user or self.request.user
        request = PolicyRequest(user)
        request.http_request = self.request
        result = super().user_has_access(user)
        enterprise_result = EnterprisePolicy().passes(request)
        if not enterprise_result.passing:
            return enterprise_result
        return result

    def resolve_provider_application(self):
        raise NotImplementedError
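Editor's note — downstream enterprise-gated views would subclass `EnterprisePolicyAccessView` and supply `resolve_provider_application`; the license and user-type checks then run alongside the regular access policies. A hypothetical subclass, sketched only to show the contract (nothing below is part of this diff):

# Illustrative sketch only; ExampleEnterpriseView is not part of this change.
from authentik.enterprise.policy import EnterprisePolicyAccessView


class ExampleEnterpriseView(EnterprisePolicyAccessView):
    """Hypothetical enterprise-only view"""

    def resolve_provider_application(self):
        # Resolve and store the provider/application this view protects,
        # analogous to other PolicyAccessView subclasses in authentik.
        ...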
authentik/enterprise/public.pem (new file, 26 lines)
@@ -0,0 +1,26 @@
-----BEGIN CERTIFICATE-----
MIIEdzCCA/6gAwIBAgIUQrj1jxn4q/BB38B2SwTrvGyrZLMwCgYIKoZIzj0EAwMw
ge8xCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1T
YW4gRnJhbmNpc2NvMSQwIgYDVQQJExs1NDggTWFya2V0IFN0cmVldCBQbWIgNzAx
NDgxDjAMBgNVBBETBTk0MTA0MSAwHgYDVQQKExdBdXRoZW50aWsgU2VjdXJpdHkg
SW5jLjEcMBoGA1UECxMTRW50ZXJwcmlzZSBMaWNlbnNlczE9MDsGA1UEAxM0QXV0
aGVudGlrIFNlY3VyaXR5IEluYy4gRW50ZXJwcmlzZSBMaWNlbnNpbmcgUm9vdCBY
MTAgFw0yMzA3MDQxNzQ3NDBaGA8yMTIzMDYxMDE3NDgxMFowge8xCzAJBgNVBAYT
AlVTMRMwEQYDVQQIEwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1TYW4gRnJhbmNpc2Nv
MSQwIgYDVQQJExs1NDggTWFya2V0IFN0cmVldCBQbWIgNzAxNDgxDjAMBgNVBBET
BTk0MTA0MSAwHgYDVQQKExdBdXRoZW50aWsgU2VjdXJpdHkgSW5jLjEcMBoGA1UE
CxMTRW50ZXJwcmlzZSBMaWNlbnNlczE9MDsGA1UEAxM0QXV0aGVudGlrIFNlY3Vy
aXR5IEluYy4gRW50ZXJwcmlzZSBMaWNlbnNpbmcgUm9vdCBYMTB2MBAGByqGSM49
AgEGBSuBBAAiA2IABNbPJH6nDbSshpDsDHBRL0UcZVXWCK30txqcMKU+YFmLB6iR
PJiHjHA8Z+5aP4eNH6onA5xqykQf65tvbFBA1LB/6HqMArU/tYVVQx4+o9hRBxF5
RrzXucUg2br+RX8aa6OCAVUwggFRMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
BTADAQH/MB0GA1UdDgQWBBRHpR3/ptPgN0yHVfUjyJOEmsPZqTAfBgNVHSMEGDAW
gBRHpR3/ptPgN0yHVfUjyJOEmsPZqTCBoAYIKwYBBQUHAQEEgZMwgZAwRwYIKwYB
BQUHMAGGO2h0dHBzOi8vdmF1bHQuY3VzdG9tZXJzLmdvYXV0aGVudGlrLmlvL3Yx
L2xpY2Vuc2luZy1jYS9vY3NwMEUGCCsGAQUFBzAChjlodHRwczovL3ZhdWx0LmN1
c3RvbWVycy5nb2F1dGhlbnRpay5pby92MS9saWNlbnNpbmctY2EvY2EwSwYDVR0f
BEQwQjBAoD6gPIY6aHR0cHM6Ly92YXVsdC5jdXN0b21lcnMuZ29hdXRoZW50aWsu
aW8vdjEvbGljZW5zaW5nLWNhL2NybDAKBggqhkjOPQQDAwNnADBkAjB0+YA1yjEO
g43CCYUJXz9m9CNIkjOPUI0jO4UtvSj8j067TKRbX6IL/29HxPtQoYACME8eZHBJ
Ljcog0oeBgjr4wK8bobgknr5wrm70rrNNpbSAjDvTvXMQeAShGgsftEquQ==
-----END CERTIFICATE-----
@@ -1 +1,12 @@
 """Enterprise additional settings"""
+from celery.schedules import crontab
+
+from authentik.lib.utils.time import fqdn_rand
+
+CELERY_BEAT_SCHEDULE = {
+    "enterprise_calculate_license": {
+        "task": "authentik.enterprise.tasks.calculate_license",
+        "schedule": crontab(minute=fqdn_rand("calculate_license"), hour="*/8"),
+        "options": {"queue": "authentik_scheduled"},
+    }
+}
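Editor's note — `fqdn_rand` is imported from authentik.lib.utils.time but its implementation is not part of this diff; presumably it hashes the host's FQDN together with the seed string into a stable minute, so multiple instances spread the task instead of all firing at minute 0. A sketch of that idea, under that assumption:

# Assumption-based sketch of the fqdn_rand idea; the real helper lives in
# authentik.lib.utils.time and may differ.
from hashlib import sha256
from socket import getfqdn


def fqdn_rand_sketch(seed: str, stop: int = 60) -> int:
    """Map this host's FQDN plus a seed to a stable value in [0, stop)."""
    digest = sha256(f"{getfqdn()}-{seed}".encode()).hexdigest()
    return int(digest, 16) % stop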
authentik/enterprise/signals.py (new file, 18 lines)
@@ -0,0 +1,18 @@
"""Enterprise signals"""
from datetime import datetime

from django.db.models.signals import pre_save
from django.dispatch import receiver
from django.utils.timezone import get_current_timezone

from authentik.enterprise.models import License


@receiver(pre_save, sender=License)
def pre_save_license(sender: type[License], instance: License, **_):
    """Extract data from license jwt and save it into model"""
    status = instance.status
    instance.name = status.name
    instance.users = status.users
    instance.external_users = status.external_users
    instance.expiry = datetime.fromtimestamp(status.exp, tz=get_current_timezone())
authentik/enterprise/tasks.py (new file, 10 lines)
@@ -0,0 +1,10 @@
"""Enterprise tasks"""
from authentik.enterprise.models import LicenseKey
from authentik.root.celery import CELERY_APP


@CELERY_APP.task()
def calculate_license():
    """Calculate licensing status"""
    total = LicenseKey.get_total()
    total.record_usage()
authentik/enterprise/tests/__init__.py (new file, 0 lines)
authentik/enterprise/tests/test_license.py (new file, 64 lines)
@@ -0,0 +1,64 @@
"""Enterprise license tests"""
from datetime import timedelta
from time import mktime
from unittest.mock import MagicMock, patch

from django.test import TestCase
from django.utils.timezone import now
from rest_framework.exceptions import ValidationError

from authentik.enterprise.models import License, LicenseKey
from authentik.lib.generators import generate_id

_exp = int(mktime((now() + timedelta(days=3000)).timetuple()))


class TestEnterpriseLicense(TestCase):
    """Enterprise license tests"""

    @patch(
        "authentik.enterprise.models.LicenseKey.validate",
        MagicMock(
            return_value=LicenseKey(
                aud="",
                exp=_exp,
                name=generate_id(),
                users=100,
                external_users=100,
            )
        ),
    )
    def test_valid(self):
        """Check license verification"""
        lic = License.objects.create(key=generate_id())
        self.assertTrue(lic.status.is_valid())
        self.assertEqual(lic.users, 100)

    def test_invalid(self):
        """Test invalid license"""
        with self.assertRaises(ValidationError):
            License.objects.create(key=generate_id())

    @patch(
        "authentik.enterprise.models.LicenseKey.validate",
        MagicMock(
            return_value=LicenseKey(
                aud="",
                exp=_exp,
                name=generate_id(),
                users=100,
                external_users=100,
            )
        ),
    )
    def test_valid_multiple(self):
        """Check license verification"""
        lic = License.objects.create(key=generate_id())
        self.assertTrue(lic.status.is_valid())
        lic2 = License.objects.create(key=generate_id())
        self.assertTrue(lic2.status.is_valid())
        total = LicenseKey.get_total()
        self.assertEqual(total.users, 200)
        self.assertEqual(total.external_users, 200)
        self.assertEqual(total.exp, _exp)
        self.assertTrue(total.is_valid())
authentik/enterprise/urls.py (new file, 7 lines)
@@ -0,0 +1,7 @@
"""API URLs"""

from authentik.enterprise.api import LicenseViewSet

api_urlpatterns = [
    ("enterprise/license", LicenseViewSet),
]
@@ -33,7 +33,7 @@ class GeoIPReader:

     def __open(self):
         """Get GeoIP Reader, if configured, otherwise none"""
-        path = CONFIG.y("geoip")
+        path = CONFIG.get("geoip")
         if path == "" or not path:
             return
         try:
@@ -46,7 +46,7 @@ class GeoIPReader:
     def __check_expired(self):
         """Check if the modification date of the GeoIP database has
         changed, and reload it if so"""
-        path = CONFIG.y("geoip")
+        path = CONFIG.get("geoip")
         try:
             mtime = stat(path).st_mtime
             diff = self.__last_mtime < mtime
@@ -76,9 +76,20 @@ class TaskInfo:
             return cache.get_many(cache.keys(CACHE_KEY_PREFIX + name)).values()
         return cache.get(CACHE_KEY_PREFIX + name, None)

+    @property
+    def full_name(self) -> str:
+        """Get the full cache key with task name and UID"""
+        key = CACHE_KEY_PREFIX + self.task_name
+        if self.result.uid:
+            uid_suffix = f":{self.result.uid}"
+            key += uid_suffix
+            if not self.task_name.endswith(uid_suffix):
+                self.task_name += uid_suffix
+        return key
+
     def delete(self):
         """Delete task info from cache"""
-        return cache.delete(CACHE_KEY_PREFIX + self.task_name)
+        return cache.delete(self.full_name)

     def update_metrics(self):
         """Update prometheus metrics"""
@@ -92,17 +103,13 @@ class TaskInfo:
         GAUGE_TASKS.labels(
             task_name=self.task_name.split(":")[0],
             task_uid=self.result.uid or "",
-            status=self.result.status.value,
+            status=self.result.status.name.lower(),
         ).set(duration)

     def save(self, timeout_hours=6):
         """Save task into cache"""
-        key = CACHE_KEY_PREFIX + self.task_name
-        if self.result.uid:
-            key += f":{self.result.uid}"
-            self.task_name += f":{self.result.uid}"
         self.update_metrics()
-        cache.set(key, self, timeout=timeout_hours * 60 * 60)
+        cache.set(self.full_name, self, timeout=timeout_hours * 60 * 60)


 class MonitoredTask(Task):
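Editor's note — the new `full_name` property centralizes the cache-key construction that `save()` previously did inline. The resulting key shape, illustrated with an assumed prefix (the real `CACHE_KEY_PREFIX` constant is defined elsewhere in authentik.events.monitored_tasks and is not shown in this diff):

# Illustration of the key shape only; CACHE_KEY_PREFIX value is assumed.
CACHE_KEY_PREFIX = "goauthentik.io/events/tasks/"

task_name, uid = "connection_discovery", "local"
key = f"{CACHE_KEY_PREFIX}{task_name}:{uid}"
# full_name would return e.g.
# "goauthentik.io/events/tasks/connection_discovery:local"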
authentik/events/tests/test_tasks.py (new file, 43 lines)
@@ -0,0 +1,43 @@
"""Test Monitored tasks"""
from django.test import TestCase

from authentik.events.monitored_tasks import MonitoredTask, TaskInfo, TaskResult, TaskResultStatus
from authentik.lib.generators import generate_id
from authentik.root.celery import CELERY_APP


class TestMonitoredTasks(TestCase):
    """Test Monitored tasks"""

    def test_failed_successful_remove_state(self):
        """Test that a task with `save_on_success` set to `False` that failed saves
        a state, and upon successful completion will delete the state"""
        should_fail = True
        uid = generate_id()

        @CELERY_APP.task(
            bind=True,
            base=MonitoredTask,
        )
        def test_task(self: MonitoredTask):
            self.save_on_success = False
            self.set_uid(uid)
            self.set_status(
                TaskResult(TaskResultStatus.ERROR if should_fail else TaskResultStatus.SUCCESSFUL)
            )

        # First test successful run
        should_fail = False
        test_task.delay().get()
        self.assertIsNone(TaskInfo.by_name(f"test_task:{uid}"))

        # Then test failed
        should_fail = True
        test_task.delay().get()
        info = TaskInfo.by_name(f"test_task:{uid}")
        self.assertEqual(info.result.status, TaskResultStatus.ERROR)

        # Then after that, the state should be removed
        should_fail = False
        test_task.delay().get()
        self.assertIsNone(TaskInfo.by_name(f"test_task:{uid}"))
@@ -33,7 +33,7 @@ PLAN_CONTEXT_SOURCE = "source"
 # Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
 # was restored.
 PLAN_CONTEXT_IS_RESTORED = "is_restored"
-CACHE_TIMEOUT = int(CONFIG.y("redis.cache_timeout_flows"))
+CACHE_TIMEOUT = int(CONFIG.get("redis.cache_timeout_flows"))
 CACHE_PREFIX = "goauthentik.io/flows/planner/"
@@ -18,7 +18,6 @@ from authentik.flows.planner import FlowPlan, FlowPlanner
 from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageView
 from authentik.flows.tests import FlowTestCase
 from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_PLAN, FlowExecutorView
-from authentik.lib.config import CONFIG
 from authentik.lib.generators import generate_id
 from authentik.policies.dummy.models import DummyPolicy
 from authentik.policies.models import PolicyBinding
@@ -85,7 +84,6 @@ class TestFlowExecutor(FlowTestCase):
             FlowDesignation.AUTHENTICATION,
         )

-        CONFIG.update_from_dict({"domain": "testserver"})
         response = self.client.get(
             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
         )
@@ -111,7 +109,6 @@ class TestFlowExecutor(FlowTestCase):
             denied_action=FlowDeniedAction.CONTINUE,
         )

-        CONFIG.update_from_dict({"domain": "testserver"})
         response = self.client.get(
             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
         )
@@ -128,7 +125,6 @@ class TestFlowExecutor(FlowTestCase):
             FlowDesignation.AUTHENTICATION,
         )

-        CONFIG.update_from_dict({"domain": "testserver"})
         dest = "/unique-string"
         url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug})
         response = self.client.get(url + f"?{NEXT_ARG_NAME}={dest}")
@@ -145,7 +141,6 @@ class TestFlowExecutor(FlowTestCase):
             FlowDesignation.AUTHENTICATION,
         )

-        CONFIG.update_from_dict({"domain": "testserver"})
         response = self.client.get(
             reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
         )
@@ -1,10 +0,0 @@
-"""authentik lib app config"""
-from django.apps import AppConfig
-
-
-class AuthentikLibConfig(AppConfig):
-    """authentik lib app config"""
-
-    name = "authentik.lib"
-    label = "authentik_lib"
-    verbose_name = "authentik lib"
@@ -175,7 +175,7 @@ def get_avatar(user: "User") -> str:
         "initials": avatar_mode_generated,
         "gravatar": avatar_mode_gravatar,
     }
-    modes: str = CONFIG.y("avatars", "none")
+    modes: str = CONFIG.get("avatars", "none")
     for mode in modes.split(","):
         avatar = None
         if mode in mode_map:
@@ -2,13 +2,15 @@
 import os
 from collections.abc import Mapping
 from contextlib import contextmanager
+from dataclasses import dataclass, field
+from enum import Enum
 from glob import glob
-from json import dumps, loads
+from json import JSONEncoder, dumps, loads
 from json.decoder import JSONDecodeError
 from pathlib import Path
 from sys import argv, stderr
 from time import time
-from typing import Any
+from typing import Any, Optional
 from urllib.parse import urlparse

 import yaml
@@ -32,15 +34,44 @@ def get_path_from_dict(root: dict, path: str, sep=".", default=None) -> Any:
     return root


+@dataclass
+class Attr:
+    """Single configuration attribute"""
+
+    class Source(Enum):
+        """Sources a configuration attribute can come from, determines what should be done with
+        Attr.source (and if it's set at all)"""
+
+        UNSPECIFIED = "unspecified"
+        ENV = "env"
+        CONFIG_FILE = "config_file"
+        URI = "uri"
+
+    value: Any
+
+    source_type: Source = field(default=Source.UNSPECIFIED)
+
+    # depending on source_type, might contain the environment variable or the path
+    # to the config file containing this change or the file containing this value
+    source: Optional[str] = field(default=None)
+
+
+class AttrEncoder(JSONEncoder):
+    """JSON encoder that can deal with `Attr` classes"""
+
+    def default(self, o: Any) -> Any:
+        if isinstance(o, Attr):
+            return o.value
+        return super().default(o)
+
+
 class ConfigLoader:
     """Search through SEARCH_PATHS and load configuration. Environment variables starting with
     `ENV_PREFIX` are also applied.

     A variable like AUTHENTIK_POSTGRESQL__HOST would translate to postgresql.host"""

-    loaded_file = []
-
-    def __init__(self):
+    def __init__(self, **kwargs):
         super().__init__()
         self.__config = {}
         base_dir = Path(__file__).parent.joinpath(Path("../..")).resolve()
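Editor's note — with this change every stored configuration value is wrapped in an `Attr` that remembers where it came from, and `AttrEncoder` unwraps the wrappers when the config tree is dumped as JSON. A short sketch, not part of the diff:

# Sketch: wrapping a value in Attr and serializing back to plain JSON.
from json import dumps

from authentik.lib.config import Attr, AttrEncoder

# An attribute that (hypothetically) came from an environment variable
attr = Attr("redis", Attr.Source.ENV, "AUTHENTIK_REDIS__HOST")
print(attr.value, attr.source_type, attr.source)

# AttrEncoder collapses Attr instances back to their raw values
print(dumps({"redis": {"host": attr}}, cls=AttrEncoder))
# -> {"redis": {"host": "redis"}}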
@@ -65,6 +96,7 @@ class ConfigLoader:
         # Update config with env file
         self.update_from_file(env_file)
         self.update_from_env()
+        self.update(self.__config, kwargs)

     def log(self, level: str, message: str, **kwargs):
         """Custom Log method, we want to ensure ConfigLoader always logs JSON even when
@@ -86,22 +118,34 @@ class ConfigLoader:
         else:
             if isinstance(value, str):
                 value = self.parse_uri(value)
+            elif isinstance(value, Attr) and isinstance(value.value, str):
+                value = self.parse_uri(value.value)
+            elif not isinstance(value, Attr):
+                value = Attr(value)
             root[key] = value
         return root

-    def parse_uri(self, value: str) -> str:
+    def refresh(self, key: str):
+        """Update a single value"""
+        attr: Attr = get_path_from_dict(self.raw, key)
+        if attr.source_type != Attr.Source.URI:
+            return
+        attr.value = self.parse_uri(attr.source).value
+
+    def parse_uri(self, value: str) -> Attr:
         """Parse string values which start with a URI"""
         url = urlparse(value)
+        parsed_value = value
         if url.scheme == "env":
-            value = os.getenv(url.netloc, url.query)
+            parsed_value = os.getenv(url.netloc, url.query)
         if url.scheme == "file":
             try:
                 with open(url.path, "r", encoding="utf8") as _file:
-                    value = _file.read().strip()
+                    parsed_value = _file.read().strip()
             except OSError as exc:
                 self.log("error", f"Failed to read config value from {url.path}: {exc}")
-                value = url.query
-        return value
+                parsed_value = url.query
+        return Attr(parsed_value, Attr.Source.URI, value)

     def update_from_file(self, path: Path):
         """Update config from file contents"""
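Editor's note — because `parse_uri` now returns an `Attr` whose `source` keeps the original URI, `refresh()` can re-resolve a single `file://`-backed key in place after the backing file changes, mirroring the new `test_uri_file_update` test further down. A sketch (the secret path is illustrative):

# Sketch: re-resolving a file-backed value without a full reload.
# /run/secrets/db_password is a hypothetical path for illustration.
from authentik.lib.config import ConfigLoader

config = ConfigLoader(db_password="file:///run/secrets/db_password")
print(config.get("db_password"))  # file contents at load time

# After the secret is rotated on disk, re-read just this key
config.refresh("db_password")
print(config.get("db_password"))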
@@ -110,7 +154,6 @@ class ConfigLoader:
         try:
             self.update(self.__config, yaml.safe_load(file))
             self.log("debug", "Loaded config", file=str(path))
-            self.loaded_file.append(path)
         except yaml.YAMLError as exc:
             raise ImproperlyConfigured from exc
         except PermissionError as exc:
@@ -121,10 +164,6 @@ class ConfigLoader:
                 error=str(exc),
             )

-    def update_from_dict(self, update: dict):
-        """Update config from dict"""
-        self.__config.update(update)
-
     def update_from_env(self):
         """Check environment variables"""
         outer = {}
@@ -145,7 +184,7 @@ class ConfigLoader:
                 value = loads(value)
             except JSONDecodeError:
                 pass
-            current_obj[dot_parts[-1]] = value
+            current_obj[dot_parts[-1]] = Attr(value, Attr.Source.ENV, key)
             idx += 1
         if idx > 0:
             self.log("debug", "Loaded environment variables", count=idx)
@@ -154,28 +193,32 @@ class ConfigLoader:
     @contextmanager
     def patch(self, path: str, value: Any):
         """Context manager for unittests to patch a value"""
-        original_value = self.y(path)
-        self.y_set(path, value)
+        original_value = self.get(path)
+        self.set(path, value)
         try:
             yield
         finally:
-            self.y_set(path, original_value)
+            self.set(path, original_value)

     @property
     def raw(self) -> dict:
         """Get raw config dictionary"""
         return self.__config

-    # pylint: disable=invalid-name
-    def y(self, path: str, default=None, sep=".") -> Any:
+    def get(self, path: str, default=None, sep=".") -> Any:
         """Access attribute by using yaml path"""
         # Walk sub_dicts before parsing path
         root = self.raw
         # Walk each component of the path
-        return get_path_from_dict(root, path, sep=sep, default=default)
+        attr: Attr = get_path_from_dict(root, path, sep=sep, default=Attr(default))
+        return attr.value

-    def y_set(self, path: str, value: Any, sep="."):
-        """Set value using same syntax as y()"""
+    def get_bool(self, path: str, default=False) -> bool:
+        """Wrapper for get that converts value into boolean"""
+        return str(self.get(path, default)).lower() == "true"
+
+    def set(self, path: str, value: Any, sep="."):
+        """Set value using same syntax as get()"""
         # Walk sub_dicts before parsing path
         root = self.raw
         # Walk each component of the path
@@ -184,17 +227,14 @@ class ConfigLoader:
             if comp not in root:
                 root[comp] = {}
             root = root.get(comp, {})
-        root[path_parts[-1]] = value
+        root[path_parts[-1]] = Attr(value)

-    def y_bool(self, path: str, default=False) -> bool:
-        """Wrapper for y that converts value into boolean"""
-        return str(self.y(path, default)).lower() == "true"
-

 CONFIG = ConfigLoader()


 if __name__ == "__main__":
     if len(argv) < 2:
-        print(dumps(CONFIG.raw, indent=4))
+        print(dumps(CONFIG.raw, indent=4, cls=AttrEncoder))
     else:
-        print(CONFIG.y(argv[1]))
+        print(CONFIG.get(argv[1]))
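Editor's note — the renamed accessors keep the old `y`/`y_set`/`y_bool` semantics while transparently round-tripping through `Attr`: `set()` wraps the value, `get()` unwraps it, so callers never see the wrapper. Usage, as a sketch:

# Sketch of the renamed accessor API (keys taken from elsewhere in this diff).
from authentik.lib.config import CONFIG

CONFIG.set("avatars", "initials")
print(CONFIG.get("avatars"))                        # "initials"
print(CONFIG.get("does.not.exist", "fallback"))     # "fallback"
print(CONFIG.get_bool("error_reporting.send_pii"))  # False unless set to "true"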
@@ -73,6 +73,7 @@ outposts:

 ldap:
   task_timeout_hours: 2
+  page_size: 50
   tls:
     ciphers: null
@@ -51,18 +51,18 @@ class SentryTransport(HttpTransport):

 def sentry_init(**sentry_init_kwargs):
     """Configure sentry SDK"""
-    sentry_env = CONFIG.y("error_reporting.environment", "customer")
+    sentry_env = CONFIG.get("error_reporting.environment", "customer")
     kwargs = {
         "environment": sentry_env,
-        "send_default_pii": CONFIG.y_bool("error_reporting.send_pii", False),
+        "send_default_pii": CONFIG.get_bool("error_reporting.send_pii", False),
         "_experiments": {
-            "profiles_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.1)),
+            "profiles_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.1)),
         },
     }
     kwargs.update(**sentry_init_kwargs)
     # pylint: disable=abstract-class-instantiated
     sentry_sdk_init(
-        dsn=CONFIG.y("error_reporting.sentry_dsn"),
+        dsn=CONFIG.get("error_reporting.sentry_dsn"),
         integrations=[
             ArgvIntegration(),
             StdlibIntegration(),
@@ -92,7 +92,7 @@ def traces_sampler(sampling_context: dict) -> float:
         return 0
     if _type == "websocket":
         return 0
-    return float(CONFIG.y("error_reporting.sample_rate", 0.1))
+    return float(CONFIG.get("error_reporting.sample_rate", 0.1))


 def before_send(event: dict, hint: dict) -> Optional[dict]:
@@ -16,23 +16,23 @@ class TestConfig(TestCase):
         config = ConfigLoader()
         environ[ENV_PREFIX + "_test__test"] = "bar"
         config.update_from_env()
-        self.assertEqual(config.y("test.test"), "bar")
+        self.assertEqual(config.get("test.test"), "bar")

     def test_patch(self):
         """Test patch decorator"""
         config = ConfigLoader()
-        config.y_set("foo.bar", "bar")
-        self.assertEqual(config.y("foo.bar"), "bar")
+        config.set("foo.bar", "bar")
+        self.assertEqual(config.get("foo.bar"), "bar")
         with config.patch("foo.bar", "baz"):
-            self.assertEqual(config.y("foo.bar"), "baz")
-        self.assertEqual(config.y("foo.bar"), "bar")
+            self.assertEqual(config.get("foo.bar"), "baz")
+        self.assertEqual(config.get("foo.bar"), "bar")

     def test_uri_env(self):
         """Test URI parsing (environment)"""
         config = ConfigLoader()
         environ["foo"] = "bar"
-        self.assertEqual(config.parse_uri("env://foo"), "bar")
-        self.assertEqual(config.parse_uri("env://foo?bar"), "bar")
+        self.assertEqual(config.parse_uri("env://foo").value, "bar")
+        self.assertEqual(config.parse_uri("env://foo?bar").value, "bar")

     def test_uri_file(self):
         """Test URI parsing (file load)"""
@@ -41,11 +41,31 @@ class TestConfig(TestCase):
         write(file, "foo".encode())
         _, file2_name = mkstemp()
         chmod(file2_name, 0o000)  # Remove all permissions so we can't read the file
-        self.assertEqual(config.parse_uri(f"file://{file_name}"), "foo")
-        self.assertEqual(config.parse_uri(f"file://{file2_name}?def"), "def")
+        self.assertEqual(config.parse_uri(f"file://{file_name}").value, "foo")
+        self.assertEqual(config.parse_uri(f"file://{file2_name}?def").value, "def")
         unlink(file_name)
         unlink(file2_name)

+    def test_uri_file_update(self):
+        """Test URI parsing (file load and update)"""
+        file, file_name = mkstemp()
+        write(file, "foo".encode())
+        config = ConfigLoader(file_test=f"file://{file_name}")
+        self.assertEqual(config.get("file_test"), "foo")
+
+        # Update config file
+        write(file, "bar".encode())
+        config.refresh("file_test")
+        self.assertEqual(config.get("file_test"), "foobar")
+
+        unlink(file_name)
+
+    def test_uri_env_full(self):
+        """Test URI set as env variable"""
+        environ["AUTHENTIK_TEST_VAR"] = "file:///foo?bar"
+        config = ConfigLoader()
+        self.assertEqual(config.get("test_var"), "bar")
+
     def test_file_update(self):
         """Test update_from_file"""
         config = ConfigLoader()

(Note on `test_uri_file_update`: `write` here is `os.write` on the same file descriptor, which continues at the current offset, so after the second write the file contains "foobar" — which is exactly what the refreshed value asserts.)
@@ -1,7 +1,7 @@
 """Test HTTP Helpers"""
 from django.test import RequestFactory, TestCase

-from authentik.core.models import USER_ATTRIBUTE_CAN_OVERRIDE_IP, Token, TokenIntents
+from authentik.core.models import Token, TokenIntents, UserTypes
 from authentik.core.tests.utils import create_test_admin_user
 from authentik.lib.utils.http import OUTPOST_REMOTE_IP_HEADER, OUTPOST_TOKEN_HEADER, get_client_ip
 from authentik.lib.views import bad_request_message
@@ -53,7 +53,7 @@ class TestHTTP(TestCase):
         )
         self.assertEqual(get_client_ip(request), "127.0.0.1")
         # Valid
-        self.user.attributes[USER_ATTRIBUTE_CAN_OVERRIDE_IP] = True
+        self.user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
         self.user.save()
         request = self.factory.get(
             "/",
@@ -33,9 +33,8 @@ def _get_client_ip_from_meta(meta: dict[str, Any]) -> str:

 def _get_outpost_override_ip(request: HttpRequest) -> Optional[str]:
     """Get the actual remote IP when set by an outpost. Only
-    allowed when the request is authenticated, by a user with USER_ATTRIBUTE_CAN_OVERRIDE_IP set
-    to outpost"""
-    from authentik.core.models import USER_ATTRIBUTE_CAN_OVERRIDE_IP, Token, TokenIntents
+    allowed when the request is authenticated, by an outpost internal service account"""
+    from authentik.core.models import Token, TokenIntents, UserTypes

     if OUTPOST_REMOTE_IP_HEADER not in request.META or OUTPOST_TOKEN_HEADER not in request.META:
         return None
@@ -51,7 +50,7 @@ def _get_outpost_override_ip(request: HttpRequest) -> Optional[str]:
         LOGGER.warning("Attempted remote-ip override without token", fake_ip=fake_ip)
         return None
     user = token.user
-    if not user.group_attributes(request).get(USER_ATTRIBUTE_CAN_OVERRIDE_IP, False):
+    if user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT:
         LOGGER.warning(
             "Remote-IP override: user doesn't have permission",
             user=user,
@@ -50,7 +50,7 @@ def get_env() -> str:
     """Get environment in which authentik is currently running"""
     if "CI" in os.environ:
         return "ci"
-    if CONFIG.y_bool("debug"):
+    if CONFIG.get_bool("debug"):
         return "dev"
     if SERVICE_HOST_ENV_NAME in os.environ:
         return "kubernetes"
@@ -97,7 +97,7 @@ class BaseController:
         if self.outpost.config.container_image is not None:
             return self.outpost.config.container_image

-        image_name_template: str = CONFIG.y("outposts.container_image_base")
+        image_name_template: str = CONFIG.get("outposts.container_image_base")
         return image_name_template % {
             "type": self.outpost.type,
             "version": __version__,
@@ -1,16 +1,22 @@
 """Base Kubernetes Reconciler"""
+from dataclasses import asdict
+from json import dumps
 from typing import TYPE_CHECKING, Generic, Optional, TypeVar

+from dacite.core import from_dict
 from django.utils.text import slugify
-from kubernetes.client import V1ObjectMeta
+from jsonpatch import JsonPatchConflict, JsonPatchException, JsonPatchTestFailed, apply_patch
+from kubernetes.client import ApiClient, V1ObjectMeta
 from kubernetes.client.exceptions import ApiException, OpenApiException
 from kubernetes.client.models.v1_deployment import V1Deployment
 from kubernetes.client.models.v1_pod import V1Pod
+from requests import Response
 from structlog.stdlib import get_logger
 from urllib3.exceptions import HTTPError

 from authentik import __version__
 from authentik.outposts.apps import MANAGED_OUTPOST
+from authentik.outposts.controllers.base import ControllerException
 from authentik.outposts.controllers.k8s.triggers import NeedsRecreate, NeedsUpdate

 if TYPE_CHECKING:
@@ -34,11 +40,23 @@ class KubernetesObjectReconciler(Generic[T]):
         self.namespace = controller.outpost.config.kubernetes_namespace
         self.logger = get_logger().bind(type=self.__class__.__name__)

+    def get_patch(self):
+        """Get any patches that apply to this CRD"""
+        patches = self.controller.outpost.config.kubernetes_json_patches
+        if not patches:
+            return None
+        return patches.get(self.reconciler_name(), None)
+
     @property
     def is_embedded(self) -> bool:
         """Return true if the current outpost is embedded"""
         return self.controller.outpost.managed == MANAGED_OUTPOST

+    @staticmethod
+    def reconciler_name() -> str:
+        """A name this reconciler is identified by in the configuration"""
+        raise NotImplementedError
+
     @property
     def noop(self) -> bool:
         """Return true if this object should not be created/updated/deleted in this cluster"""
@@ -55,6 +73,32 @@ class KubernetesObjectReconciler(Generic[T]):
             }
         ).lower()

+    def get_patched_reference_object(self) -> T:
+        """Get patched reference object"""
+        reference = self.get_reference_object()
+        patch = self.get_patch()
+        try:
+            json = ApiClient().sanitize_for_serialization(reference)
+        # Custom objects will not be known to the clients openapi types
+        except AttributeError:
+            json = asdict(reference)
+        try:
+            ref = json
+            if patch is not None:
+                ref = apply_patch(json, patch)
+        except (JsonPatchException, JsonPatchConflict, JsonPatchTestFailed) as exc:
+            raise ControllerException(f"JSON Patch failed: {exc}") from exc
+        mock_response = Response()
+        mock_response.data = dumps(ref)
+
+        try:
+            result = ApiClient().deserialize(mock_response, reference.__class__.__name__)
+        # Custom objects will not be known to the clients openapi types
+        except AttributeError:
+            result = from_dict(reference.__class__, data=ref)
+
+        return result
+
     # pylint: disable=invalid-name
     def up(self):
         """Create object if it doesn't exist, update if needed or recreate if needed."""
@@ -62,7 +106,7 @@ class KubernetesObjectReconciler(Generic[T]):
         if self.noop:
             self.logger.debug("Object is noop")
             return
-        reference = self.get_reference_object()
+        reference = self.get_patched_reference_object()
         try:
             try:
                 current = self.retrieve()
@@ -129,6 +173,16 @@ class KubernetesObjectReconciler(Generic[T]):
         if current.metadata.labels != reference.metadata.labels:
             raise NeedsUpdate()

+        patch = self.get_patch()
+        if patch is not None:
+            current_json = ApiClient().sanitize_for_serialization(current)
+
+            try:
+                if apply_patch(current_json, patch) != current_json:
+                    raise NeedsUpdate()
+            except (JsonPatchException, JsonPatchConflict, JsonPatchTestFailed) as exc:
+                raise ControllerException(f"JSON Patch failed: {exc}") from exc
+
     def create(self, reference: T):
         """API Wrapper to create object"""
         raise NotImplementedError
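Editor's note — both sites lean on RFC 6902 semantics: `get_patched_reference_object` applies the configured patch to the serialized reference object before deserializing it back into a typed model, and `reconcile` treats a patch that still changes the live object as drift. A standalone illustration of that idempotence check with the jsonpatch library (data is made up):

# Standalone illustration of the idempotence check; values are made up.
from jsonpatch import apply_patch

patch = [{"op": "replace", "path": "/spec/replicas", "value": 3}]

live = {"spec": {"replicas": 1}}
# The patch changes the live object -> deployed state predates the patch,
# so reconcile would raise NeedsUpdate
assert apply_patch(live, patch) != live

already_patched = {"spec": {"replicas": 3}}
# A no-op application means the cluster already matches the patched reference
assert apply_patch(already_patched, patch) == already_patched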
@@ -43,6 +43,10 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
         self.api = AppsV1Api(controller.client)
         self.outpost = self.controller.outpost

+    @staticmethod
+    def reconciler_name() -> str:
+        return "deployment"
+
     def reconcile(self, current: V1Deployment, reference: V1Deployment):
         compare_ports(
             current.spec.template.spec.containers[0].ports,
@@ -24,6 +24,10 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
         super().__init__(controller)
         self.api = CoreV1Api(controller.client)

+    @staticmethod
+    def reconciler_name() -> str:
+        return "secret"
+
     def reconcile(self, current: V1Secret, reference: V1Secret):
         super().reconcile(current, reference)
         for key in reference.data.keys():
@@ -20,6 +20,10 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
         super().__init__(controller)
         self.api = CoreV1Api(controller.client)

+    @staticmethod
+    def reconciler_name() -> str:
+        return "service"
+
     def reconcile(self, current: V1Service, reference: V1Service):
         compare_ports(current.spec.ports, reference.spec.ports)
         # run the base reconcile last, as that will probably raise NeedsUpdate
@@ -71,6 +71,10 @@ class PrometheusServiceMonitorReconciler(KubernetesObjectReconciler[PrometheusSe
         self.api_ex = ApiextensionsV1Api(controller.client)
         self.api = CustomObjectsApi(controller.client)

+    @staticmethod
+    def reconciler_name() -> str:
+        return "prometheus servicemonitor"
+
     @property
     def noop(self) -> bool:
         return (not self._crd_exists()) or (self.is_embedded)
@@ -64,12 +64,19 @@ class KubernetesController(BaseController):
         super().__init__(outpost, connection)
         self.client = KubernetesClient(connection)
         self.reconcilers = {
-            "secret": SecretReconciler,
-            "deployment": DeploymentReconciler,
-            "service": ServiceReconciler,
-            "prometheus servicemonitor": PrometheusServiceMonitorReconciler,
+            SecretReconciler.reconciler_name(): SecretReconciler,
+            DeploymentReconciler.reconciler_name(): DeploymentReconciler,
+            ServiceReconciler.reconciler_name(): ServiceReconciler,
+            PrometheusServiceMonitorReconciler.reconciler_name(): (
+                PrometheusServiceMonitorReconciler
+            ),
         }
-        self.reconcile_order = ["secret", "deployment", "service", "prometheus servicemonitor"]
+        self.reconcile_order = [
+            SecretReconciler.reconciler_name(),
+            DeploymentReconciler.reconciler_name(),
+            ServiceReconciler.reconciler_name(),
+            PrometheusServiceMonitorReconciler.reconciler_name(),
+        ]

     def up(self):
         try:
@@ -1,7 +1,7 @@
 """Outpost models"""
 from dataclasses import asdict, dataclass, field
 from datetime import datetime
-from typing import Iterable, Optional
+from typing import Any, Iterable, Optional
 from uuid import uuid4

 from dacite.core import from_dict
@@ -20,13 +20,12 @@ from structlog.stdlib import get_logger
 from authentik import __version__, get_build_hash
 from authentik.blueprints.models import ManagedModel
 from authentik.core.models import (
-    USER_ATTRIBUTE_CAN_OVERRIDE_IP,
-    USER_ATTRIBUTE_SA,
     USER_PATH_SYSTEM_PREFIX,
     Provider,
     Token,
     TokenIntents,
     User,
+    UserTypes,
 )
 from authentik.crypto.models import CertificateKeyPair
 from authentik.events.models import Event, EventAction
@@ -59,7 +58,7 @@ class OutpostConfig:
     authentik_host_insecure: bool = False
     authentik_host_browser: str = ""

-    log_level: str = CONFIG.y("log_level")
+    log_level: str = CONFIG.get("log_level")
     object_naming_template: str = field(default="ak-outpost-%(name)s")

     container_image: Optional[str] = field(default=None)
@@ -76,6 +75,7 @@ class OutpostConfig:
     kubernetes_service_type: str = field(default="ClusterIP")
     kubernetes_disabled_components: list[str] = field(default_factory=list)
     kubernetes_image_pull_secrets: list[str] = field(default_factory=list)
+    kubernetes_json_patches: Optional[dict[str, list[dict[str, Any]]]] = field(default=None)


 class OutpostModel(Model):
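Editor's note — the new field is keyed by `reconciler_name()` (`secret`, `deployment`, `service`, `prometheus servicemonitor`), each value being a list of RFC 6902 operations that `get_patch()` hands to the matching reconciler. A hypothetical outpost configuration fragment (values are illustrative, not from this diff):

# Hypothetical outpost config value; keys must match reconciler_name().
outpost_config = {
    "kubernetes_json_patches": {
        "deployment": [
            {
                "op": "add",
                "path": "/spec/template/metadata/annotations/example",
                "value": "patched-by-config",
            }
        ]
    }
}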
@@ -346,8 +346,7 @@ class Outpost(SerializerModel, ManagedModel):
             user: User = User.objects.create(username=self.user_identifier)
             user.set_unusable_password()
             user_created = True
-        user.attributes[USER_ATTRIBUTE_SA] = True
-        user.attributes[USER_ATTRIBUTE_CAN_OVERRIDE_IP] = True
+        user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
         user.name = f"Outpost {self.name} Service-Account"
         user.path = USER_PATH_OUTPOSTS
         user.save()
@@ -256,7 +256,7 @@ def _outpost_single_update(outpost: Outpost, layer=None):
 def outpost_connection_discovery(self: MonitoredTask):
     """Checks the local environment and create Service connections."""
     status = TaskResult(TaskResultStatus.SUCCESSFUL)
-    if not CONFIG.y_bool("outposts.discover"):
+    if not CONFIG.get_bool("outposts.discover"):
         status.messages.append("Outpost integration discovery is disabled")
         self.set_status(status)
         return
@@ -64,7 +64,7 @@ class PolicyEngine:
         self.use_cache = True
         self.__expected_result_count = 0

-    def _iter_bindings(self) -> Iterator[PolicyBinding]:
+    def iterate_bindings(self) -> Iterator[PolicyBinding]:
         """Make sure all Policies are their respective classes"""
         return (
             PolicyBinding.objects.filter(target=self.__pbm, enabled=True)
@@ -88,7 +88,7 @@ class PolicyEngine:
         span: Span
         span.set_data("pbm", self.__pbm)
         span.set_data("request", self.request)
-        for binding in self._iter_bindings():
+        for binding in self.iterate_bindings():
             self.__expected_result_count += 1

             self._check_policy_type(binding)
@ -19,7 +19,7 @@ from authentik.policies.types import CACHE_PREFIX, PolicyRequest, PolicyResult
|
|||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
|
|
||||||
FORK_CTX = get_context("fork")
|
FORK_CTX = get_context("fork")
|
||||||
CACHE_TIMEOUT = int(CONFIG.y("redis.cache_timeout_policies"))
|
CACHE_TIMEOUT = int(CONFIG.get("redis.cache_timeout_policies"))
|
||||||
PROCESS_CLASS = FORK_CTX.Process
|
PROCESS_CLASS = FORK_CTX.Process
|
||||||
|
|
||||||
|
|
||||||
|
@ -13,7 +13,7 @@ from authentik.policies.reputation.tasks import save_reputation
|
|||||||
from authentik.stages.identification.signals import identification_failed
|
from authentik.stages.identification.signals import identification_failed
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
CACHE_TIMEOUT = int(CONFIG.y("redis.cache_timeout_reputation"))
|
CACHE_TIMEOUT = int(CONFIG.get("redis.cache_timeout_reputation"))
|
||||||
|
|
||||||
|
|
||||||
def update_score(request: HttpRequest, identifier: str, amount: int):
|
def update_score(request: HttpRequest, identifier: str, amount: int):
|
||||||
|
@ -6,11 +6,12 @@ from django.urls import reverse
|
|||||||
from jwt import decode
|
from jwt import decode
|
||||||
|
|
||||||
from authentik.blueprints.tests import apply_blueprint
|
from authentik.blueprints.tests import apply_blueprint
|
||||||
from authentik.core.models import USER_ATTRIBUTE_SA, Application, Group, Token, TokenIntents
|
from authentik.core.models import Application, Group, Token, TokenIntents, UserTypes
|
||||||
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
|
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
|
||||||
from authentik.policies.models import PolicyBinding
|
from authentik.policies.models import PolicyBinding
|
||||||
from authentik.providers.oauth2.constants import (
|
from authentik.providers.oauth2.constants import (
|
||||||
GRANT_TYPE_CLIENT_CREDENTIALS,
|
GRANT_TYPE_CLIENT_CREDENTIALS,
|
||||||
|
GRANT_TYPE_PASSWORD,
|
||||||
SCOPE_OPENID,
|
SCOPE_OPENID,
|
||||||
SCOPE_OPENID_EMAIL,
|
SCOPE_OPENID_EMAIL,
|
||||||
SCOPE_OPENID_PROFILE,
|
SCOPE_OPENID_PROFILE,
|
||||||
@ -37,7 +38,7 @@ class TestTokenClientCredentials(OAuthTestCase):
|
|||||||
self.provider.property_mappings.set(ScopeMapping.objects.all())
|
self.provider.property_mappings.set(ScopeMapping.objects.all())
|
||||||
self.app = Application.objects.create(name="test", slug="test", provider=self.provider)
|
self.app = Application.objects.create(name="test", slug="test", provider=self.provider)
|
||||||
self.user = create_test_admin_user("sa")
|
self.user = create_test_admin_user("sa")
|
||||||
self.user.attributes[USER_ATTRIBUTE_SA] = True
|
self.user.type = UserTypes.SERVICE_ACCOUNT
|
||||||
self.user.save()
|
self.user.save()
|
||||||
self.token = Token.objects.create(
|
self.token = Token.objects.create(
|
||||||
identifier="sa-token",
|
identifier="sa-token",
|
||||||
@ -150,3 +151,28 @@ class TestTokenClientCredentials(OAuthTestCase):
|
|||||||
)
|
)
|
||||||
self.assertEqual(jwt["given_name"], self.user.name)
|
self.assertEqual(jwt["given_name"], self.user.name)
|
||||||
self.assertEqual(jwt["preferred_username"], self.user.username)
|
self.assertEqual(jwt["preferred_username"], self.user.username)
|
||||||
|
|
||||||
|
def test_successful_password(self):
|
||||||
|
"""test successful (password grant)"""
|
||||||
|
response = self.client.post(
|
||||||
|
reverse("authentik_providers_oauth2:token"),
|
||||||
|
{
|
||||||
|
"grant_type": GRANT_TYPE_PASSWORD,
|
||||||
|
"scope": f"{SCOPE_OPENID} {SCOPE_OPENID_EMAIL} {SCOPE_OPENID_PROFILE}",
|
||||||
|
"client_id": self.provider.client_id,
|
||||||
|
"username": "sa",
|
||||||
|
"password": self.token.key,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
self.assertEqual(response.status_code, 200)
|
||||||
|
body = loads(response.content.decode())
|
||||||
|
self.assertEqual(body["token_type"], TOKEN_TYPE)
|
||||||
|
_, alg = self.provider.jwt_key
|
||||||
|
jwt = decode(
|
||||||
|
body["access_token"],
|
||||||
|
key=self.provider.signing_key.public_key,
|
||||||
|
algorithms=[alg],
|
||||||
|
audience=self.provider.client_id,
|
||||||
|
)
|
||||||
|
self.assertEqual(jwt["given_name"], self.user.name)
|
||||||
|
self.assertEqual(jwt["preferred_username"], self.user.username)
|
||||||
|
@ -46,7 +46,7 @@ class DeviceView(View):
|
|||||||
|
|
||||||
def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||||
throttle = AnonRateThrottle()
|
throttle = AnonRateThrottle()
|
||||||
throttle.rate = CONFIG.y("throttle.providers.oauth2.device", "20/hour")
|
throttle.rate = CONFIG.get("throttle.providers.oauth2.device", "20/hour")
|
||||||
throttle.num_requests, throttle.duration = throttle.parse_rate(throttle.rate)
|
throttle.num_requests, throttle.duration = throttle.parse_rate(throttle.rate)
|
||||||
if not throttle.allow_request(request, self):
|
if not throttle.allow_request(request, self):
|
||||||
return HttpResponse(status=429)
|
return HttpResponse(status=429)
|
||||||
|
@ -459,13 +459,13 @@ class TokenView(View):
|
|||||||
if self.params.grant_type == GRANT_TYPE_REFRESH_TOKEN:
|
if self.params.grant_type == GRANT_TYPE_REFRESH_TOKEN:
|
||||||
LOGGER.debug("Refreshing refresh token")
|
LOGGER.debug("Refreshing refresh token")
|
||||||
return TokenResponse(self.create_refresh_response())
|
return TokenResponse(self.create_refresh_response())
|
||||||
if self.params.grant_type == GRANT_TYPE_CLIENT_CREDENTIALS:
|
if self.params.grant_type in [GRANT_TYPE_CLIENT_CREDENTIALS, GRANT_TYPE_PASSWORD]:
|
||||||
LOGGER.debug("Client credentials grant")
|
LOGGER.debug("Client credentials/password grant")
|
||||||
return TokenResponse(self.create_client_credentials_response())
|
return TokenResponse(self.create_client_credentials_response())
|
||||||
if self.params.grant_type == GRANT_TYPE_DEVICE_CODE:
|
if self.params.grant_type == GRANT_TYPE_DEVICE_CODE:
|
||||||
LOGGER.debug("Device code grant")
|
LOGGER.debug("Device code grant")
|
||||||
return TokenResponse(self.create_device_code_response())
|
return TokenResponse(self.create_device_code_response())
|
||||||
raise ValueError(f"Invalid grant_type: {self.params.grant_type}")
|
raise TokenError("unsupported_grant_type")
|
||||||
except (TokenError, DeviceCodeError) as error:
|
except (TokenError, DeviceCodeError) as error:
|
||||||
return TokenResponse(error.create_dict(), status=400)
|
return TokenResponse(error.create_dict(), status=400)
|
||||||
except UserAuthError as error:
|
except UserAuthError as error:
|
||||||
|
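Note: the token endpoint now accepts the resource-owner password grant alongside client_credentials; per the test above, a service account's username and token key serve as the credentials, and unknown grant types now produce a proper OAuth error response (unsupported_grant_type, HTTP 400) instead of an uncaught ValueError. A sketch of a client using the new grant; the deployment URL below is an assumption:

    import requests

    resp = requests.post(
        "https://authentik.example.com/application/o/token/",  # assumed URL
        data={
            "grant_type": "password",
            "client_id": "my-client-id",
            "username": "sa",            # service-account username
            "password": "<token key>",   # the token key acts as the password
            "scope": "openid email profile",
        },
        timeout=10,
    )
    resp.raise_for_status()
    access_token = resp.json()["access_token"]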
@@ -31,6 +31,10 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]):
         super().__init__(controller)
         self.api = NetworkingV1Api(controller.client)
 
+    @staticmethod
+    def reconciler_name() -> str:
+        return "ingress"
+
     def _check_annotations(self, reference: V1Ingress):
         """Check that all annotations *we* set are correct"""
         for key, value in self.get_ingress_annotations().items():
@@ -17,24 +17,28 @@ class TraefikMiddlewareReconciler(KubernetesObjectReconciler):
         if not self.reconciler.crd_exists():
             self.reconciler = Traefik2MiddlewareReconciler(controller)
 
+    @staticmethod
+    def reconciler_name() -> str:
+        return "traefik middleware"
+
     @property
     def noop(self) -> bool:
         return self.reconciler.noop
 
     def reconcile(self, current: TraefikMiddleware, reference: TraefikMiddleware):
-        return self.reconcile(current, reference)
+        return self.reconciler.reconcile(current, reference)
 
     def get_reference_object(self) -> TraefikMiddleware:
-        return self.get_reference_object()
+        return self.reconciler.get_reference_object()
 
     def create(self, reference: TraefikMiddleware):
-        return self.create(reference)
+        return self.reconciler.create(reference)
 
     def delete(self, reference: TraefikMiddleware):
-        return self.delete(reference)
+        return self.reconciler.delete(reference)
 
     def retrieve(self) -> TraefikMiddleware:
-        return self.retrieve()
+        return self.reconciler.retrieve()
 
     def update(self, current: TraefikMiddleware, reference: TraefikMiddleware):
-        return self.update(current, reference)
+        return self.reconciler.update(current, reference)
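Note: the six wrapper methods above previously called themselves rather than the wrapped reconciler, so any invocation recursed until RecursionError; the fix forwards each call to self.reconciler. The pattern in miniature, with illustrative names:

    class Inner:
        def reconcile(self) -> str:
            return "reconciled"

    class BuggyWrapper:
        def __init__(self):
            self.inner = Inner()

        def reconcile(self) -> str:
            return self.reconcile()  # calls itself -> RecursionError

    class FixedWrapper(BuggyWrapper):
        def reconcile(self) -> str:
            return self.inner.reconcile()  # forwards to the wrapped object

    assert FixedWrapper().reconcile() == "reconciled"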
@@ -67,6 +67,10 @@ class Traefik3MiddlewareReconciler(KubernetesObjectReconciler[TraefikMiddleware]
         self.crd_version = "v1alpha1"
         self.crd_plural = "middlewares"
 
+    @staticmethod
+    def reconciler_name() -> str:
+        return "traefik middleware"
+
     @property
     def noop(self) -> bool:
         if not ProxyProvider.objects.filter(
@@ -16,7 +16,9 @@ class ProxyKubernetesController(KubernetesController):
             DeploymentPort(9300, "http-metrics", "tcp"),
             DeploymentPort(9443, "https", "tcp"),
         ]
-        self.reconcilers["ingress"] = IngressReconciler
-        self.reconcilers["traefik middleware"] = TraefikMiddlewareReconciler
-        self.reconcile_order.append("ingress")
-        self.reconcile_order.append("traefik middleware")
+        self.reconcilers[IngressReconciler.reconciler_name()] = IngressReconciler
+        self.reconcilers[
+            TraefikMiddlewareReconciler.reconciler_name()
+        ] = TraefikMiddlewareReconciler
+        self.reconcile_order.append(IngressReconciler.reconciler_name())
+        self.reconcile_order.append(TraefikMiddlewareReconciler.reconciler_name())
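Note: reconcilers are now registered under a static reconciler_name() instead of repeated string literals, so the registry key and the class cannot drift apart. A minimal sketch of that registry pattern, with illustrative types:

    class Reconciler:
        @staticmethod
        def reconciler_name() -> str:
            raise NotImplementedError

    class IngressLike(Reconciler):
        @staticmethod
        def reconciler_name() -> str:
            return "ingress"

    reconcilers: dict[str, type[Reconciler]] = {}
    reconcile_order: list[str] = []

    reconcilers[IngressLike.reconciler_name()] = IngressLike
    reconcile_order.append(IngressLike.reconciler_name())
    assert reconcile_order == ["ingress"]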
@@ -1,17 +1,11 @@
 """SCIM Provider models"""
 from django.db import models
-from django.db.models import Q, QuerySet
+from django.db.models import QuerySet
 from django.utils.translation import gettext_lazy as _
 from guardian.shortcuts import get_anonymous_user
 from rest_framework.serializers import Serializer
 
-from authentik.core.models import (
-    USER_ATTRIBUTE_SA,
-    BackchannelProvider,
-    Group,
-    PropertyMapping,
-    User,
-)
+from authentik.core.models import BackchannelProvider, Group, PropertyMapping, User, UserTypes
 
 
 class SCIMProvider(BackchannelProvider):
@@ -38,17 +32,8 @@ class SCIMProvider(BackchannelProvider):
         according to the provider's settings"""
         base = User.objects.all().exclude(pk=get_anonymous_user().pk)
         if self.exclude_users_service_account:
-            base = base.filter(
-                Q(
-                    **{
-                        f"attributes__{USER_ATTRIBUTE_SA}__isnull": True,
-                    }
-                )
-                | Q(
-                    **{
-                        f"attributes__{USER_ATTRIBUTE_SA}": False,
-                    }
-                )
+            base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
+                type=UserTypes.INTERNAL_SERVICE_ACCOUNT
             )
         if self.filter_group:
             base = base.filter(ak_groups__in=[self.filter_group])
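Note: service accounts are now modelled as a first-class type field on User rather than a boolean JSON attribute, so the exclusion collapses from two Q objects over attributes (where "missing" had to be treated as "not a service account") into two plain exclude() calls. A toy illustration of the filter semantics; the enum values match the schema additions later in this compare:

    from enum import Enum

    class UserType(str, Enum):
        INTERNAL = "internal"
        EXTERNAL = "external"
        SERVICE_ACCOUNT = "service_account"
        INTERNAL_SERVICE_ACCOUNT = "internal_service_account"

    users = [
        {"name": "alice", "type": UserType.INTERNAL},
        {"name": "outpost-sa", "type": UserType.INTERNAL_SERVICE_ACCOUNT},
        {"name": "api-sa", "type": UserType.SERVICE_ACCOUNT},
    ]
    humans = [
        u for u in users
        if u["type"] not in (UserType.SERVICE_ACCOUNT, UserType.INTERNAL_SERVICE_ACCOUNT)
    ]
    assert [u["name"] for u in humans] == ["alice"]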
0 authentik/root/db/__init__.py Normal file

15 authentik/root/db/base.py Normal file
@@ -0,0 +1,15 @@
+"""authentik database backend"""
+from django_prometheus.db.backends.postgresql.base import DatabaseWrapper as BaseDatabaseWrapper
+
+from authentik.lib.config import CONFIG
+
+
+class DatabaseWrapper(BaseDatabaseWrapper):
+    """database backend which supports rotating credentials"""
+
+    def get_connection_params(self):
+        CONFIG.refresh("postgresql.password")
+        conn_params = super().get_connection_params()
+        conn_params["user"] = CONFIG.get("postgresql.user")
+        conn_params["password"] = CONFIG.get("postgresql.password")
+        return conn_params
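Note: the new backend wraps django_prometheus' PostgreSQL backend and re-reads user/password from CONFIG each time connection parameters are built, so credentials rotated on disk (for example a remounted secret) are picked up on the next (re)connect without a restart. The settings change later in this diff points ENGINE at this package; a sketch of the wiring, where the comment about Django's backend loading is an assumption:

    # Django imports <ENGINE>.base.DatabaseWrapper when opening connections, so
    # pointing ENGINE at the package below activates the rotating wrapper.
    DATABASES = {
        "default": {
            "ENGINE": "authentik.root.db",  # resolves to authentik/root/db/base.py
            # get_connection_params() then refreshes postgresql.password per connect
        }
    }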
@@ -26,15 +26,15 @@ def get_install_id_raw():
     """Get install_id without django loaded, this is required for the startup when we get
     the install_id but django isn't loaded yet and we can't use the function above."""
     conn = connect(
-        dbname=CONFIG.y("postgresql.name"),
-        user=CONFIG.y("postgresql.user"),
-        password=CONFIG.y("postgresql.password"),
-        host=CONFIG.y("postgresql.host"),
-        port=int(CONFIG.y("postgresql.port")),
-        sslmode=CONFIG.y("postgresql.sslmode"),
-        sslrootcert=CONFIG.y("postgresql.sslrootcert"),
-        sslcert=CONFIG.y("postgresql.sslcert"),
-        sslkey=CONFIG.y("postgresql.sslkey"),
+        dbname=CONFIG.get("postgresql.name"),
+        user=CONFIG.get("postgresql.user"),
+        password=CONFIG.get("postgresql.password"),
+        host=CONFIG.get("postgresql.host"),
+        port=int(CONFIG.get("postgresql.port")),
+        sslmode=CONFIG.get("postgresql.sslmode"),
+        sslrootcert=CONFIG.get("postgresql.sslrootcert"),
+        sslcert=CONFIG.get("postgresql.sslcert"),
+        sslkey=CONFIG.get("postgresql.sslkey"),
     )
     cursor = conn.cursor()
     cursor.execute("SELECT id FROM authentik_install_id LIMIT 1;")
@@ -10,6 +10,8 @@ from django.contrib.sessions.exceptions import SessionInterrupted
 from django.contrib.sessions.middleware import SessionMiddleware as UpstreamSessionMiddleware
 from django.http.request import HttpRequest
 from django.http.response import HttpResponse
+from django.middleware.csrf import CSRF_SESSION_KEY
+from django.middleware.csrf import CsrfViewMiddleware as UpstreamCsrfViewMiddleware
 from django.utils.cache import patch_vary_headers
 from django.utils.http import http_date
 from jwt import PyJWTError, decode, encode
@@ -131,6 +133,29 @@ class SessionMiddleware(UpstreamSessionMiddleware):
         return response
 
 
+class CsrfViewMiddleware(UpstreamCsrfViewMiddleware):
+    """Dynamically set secure depending if the upstream connection is TLS or not"""
+
+    def _set_csrf_cookie(self, request: HttpRequest, response: HttpResponse):
+        if settings.CSRF_USE_SESSIONS:
+            if request.session.get(CSRF_SESSION_KEY) != request.META["CSRF_COOKIE"]:
+                request.session[CSRF_SESSION_KEY] = request.META["CSRF_COOKIE"]
+        else:
+            secure = SessionMiddleware.is_secure(request)
+            response.set_cookie(
+                settings.CSRF_COOKIE_NAME,
+                request.META["CSRF_COOKIE"],
+                max_age=settings.CSRF_COOKIE_AGE,
+                domain=settings.CSRF_COOKIE_DOMAIN,
+                path=settings.CSRF_COOKIE_PATH,
+                secure=secure,
+                httponly=settings.CSRF_COOKIE_HTTPONLY,
+                samesite=settings.CSRF_COOKIE_SAMESITE,
+            )
+            # Set the Vary header since content varies with the CSRF cookie.
+            patch_vary_headers(response, ("Cookie",))
+
+
 class ChannelsLoggingMiddleware:
     """Logging middleware for channels"""
 
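Note: Django's stock CsrfViewMiddleware takes the cookie's secure flag from the static CSRF_COOKIE_SECURE setting; the override above decides per request via SessionMiddleware.is_secure, so one deployment can serve plain HTTP and HTTPS at the same time. A hypothetical sketch of such per-request TLS detection; authentik's actual is_secure implementation is not shown in this diff and may differ:

    from django.http import HttpRequest

    def is_secure(request: HttpRequest) -> bool:
        if request.is_secure():  # direct TLS termination
            return True
        # Behind a reverse proxy, the original scheme typically arrives in a header
        return request.META.get("HTTP_X_FORWARDED_PROTO", "") == "https"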
@@ -24,8 +24,8 @@ BASE_DIR = Path(__file__).absolute().parent.parent.parent
 STATICFILES_DIRS = [BASE_DIR / Path("web")]
 MEDIA_ROOT = BASE_DIR / Path("media")
 
-DEBUG = CONFIG.y_bool("debug")
-SECRET_KEY = CONFIG.y("secret_key")
+DEBUG = CONFIG.get_bool("debug")
+SECRET_KEY = CONFIG.get("secret_key")
 
 INTERNAL_IPS = ["127.0.0.1"]
 ALLOWED_HOSTS = ["*"]
@@ -40,7 +40,7 @@ CSRF_COOKIE_NAME = "authentik_csrf"
 CSRF_HEADER_NAME = "HTTP_X_AUTHENTIK_CSRF"
 LANGUAGE_COOKIE_NAME = "authentik_language"
 SESSION_COOKIE_NAME = "authentik_session"
-SESSION_COOKIE_DOMAIN = CONFIG.y("cookie_domain", None)
+SESSION_COOKIE_DOMAIN = CONFIG.get("cookie_domain", None)
 
 AUTHENTICATION_BACKENDS = [
     "django.contrib.auth.backends.ModelBackend",
@@ -66,7 +66,6 @@ INSTALLED_APPS = [
     "authentik.crypto",
     "authentik.events",
     "authentik.flows",
-    "authentik.lib",
     "authentik.outposts",
     "authentik.policies.dummy",
     "authentik.policies.event_matcher",
@@ -146,6 +145,7 @@ SPECTACULAR_SETTINGS = {
         "PromptTypeEnum": "authentik.stages.prompt.models.FieldTypes",
         "LDAPAPIAccessMode": "authentik.providers.ldap.models.APIAccessMode",
         "UserVerificationEnum": "authentik.stages.authenticator_webauthn.models.UserVerification",
+        "UserTypeEnum": "authentik.core.models.UserTypes",
     },
     "ENUM_ADD_EXPLICIT_BLANK_NULL_CHOICE": False,
     "POSTPROCESSING_HOOKS": [
@@ -178,26 +178,26 @@ REST_FRAMEWORK = {
     "TEST_REQUEST_DEFAULT_FORMAT": "json",
     "DEFAULT_THROTTLE_CLASSES": ["rest_framework.throttling.AnonRateThrottle"],
     "DEFAULT_THROTTLE_RATES": {
-        "anon": CONFIG.y("throttle.default"),
+        "anon": CONFIG.get("throttle.default"),
     },
 }
 
 _redis_protocol_prefix = "redis://"
 _redis_celery_tls_requirements = ""
-if CONFIG.y_bool("redis.tls", False):
+if CONFIG.get_bool("redis.tls", False):
     _redis_protocol_prefix = "rediss://"
-    _redis_celery_tls_requirements = f"?ssl_cert_reqs={CONFIG.y('redis.tls_reqs')}"
+    _redis_celery_tls_requirements = f"?ssl_cert_reqs={CONFIG.get('redis.tls_reqs')}"
 _redis_url = (
     f"{_redis_protocol_prefix}:"
-    f"{quote_plus(CONFIG.y('redis.password'))}@{quote_plus(CONFIG.y('redis.host'))}:"
-    f"{int(CONFIG.y('redis.port'))}"
+    f"{quote_plus(CONFIG.get('redis.password'))}@{quote_plus(CONFIG.get('redis.host'))}:"
+    f"{int(CONFIG.get('redis.port'))}"
 )
 
 CACHES = {
     "default": {
         "BACKEND": "django_redis.cache.RedisCache",
-        "LOCATION": f"{_redis_url}/{CONFIG.y('redis.db')}",
-        "TIMEOUT": int(CONFIG.y("redis.cache_timeout", 300)),
+        "LOCATION": f"{_redis_url}/{CONFIG.get('redis.db')}",
+        "TIMEOUT": int(CONFIG.get("redis.cache_timeout", 300)),
         "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
         "KEY_PREFIX": "authentik_cache",
     }
@@ -225,7 +225,7 @@ MIDDLEWARE = [
     "authentik.events.middleware.AuditMiddleware",
     "django.middleware.security.SecurityMiddleware",
     "django.middleware.common.CommonMiddleware",
-    "django.middleware.csrf.CsrfViewMiddleware",
+    "authentik.root.middleware.CsrfViewMiddleware",
     "django.contrib.messages.middleware.MessageMiddleware",
     "django.middleware.clickjacking.XFrameOptionsMiddleware",
     "authentik.core.middleware.ImpersonateMiddleware",
@@ -237,7 +237,7 @@ ROOT_URLCONF = "authentik.root.urls"
 TEMPLATES = [
     {
         "BACKEND": "django.template.backends.django.DjangoTemplates",
-        "DIRS": [CONFIG.y("email.template_dir")],
+        "DIRS": [CONFIG.get("email.template_dir")],
         "APP_DIRS": True,
         "OPTIONS": {
             "context_processors": [
@@ -257,7 +257,7 @@ CHANNEL_LAYERS = {
     "default": {
         "BACKEND": "channels_redis.core.RedisChannelLayer",
         "CONFIG": {
-            "hosts": [f"{_redis_url}/{CONFIG.y('redis.db')}"],
+            "hosts": [f"{_redis_url}/{CONFIG.get('redis.db')}"],
             "prefix": "authentik_channels",
         },
     },
@@ -269,34 +269,37 @@ CHANNEL_LAYERS = {
 
 DATABASES = {
     "default": {
-        "ENGINE": "django_prometheus.db.backends.postgresql",
-        "HOST": CONFIG.y("postgresql.host"),
-        "NAME": CONFIG.y("postgresql.name"),
-        "USER": CONFIG.y("postgresql.user"),
-        "PASSWORD": CONFIG.y("postgresql.password"),
-        "PORT": int(CONFIG.y("postgresql.port")),
-        "SSLMODE": CONFIG.y("postgresql.sslmode"),
-        "SSLROOTCERT": CONFIG.y("postgresql.sslrootcert"),
-        "SSLCERT": CONFIG.y("postgresql.sslcert"),
-        "SSLKEY": CONFIG.y("postgresql.sslkey"),
+        "ENGINE": "authentik.root.db",
+        "HOST": CONFIG.get("postgresql.host"),
+        "NAME": CONFIG.get("postgresql.name"),
+        "USER": CONFIG.get("postgresql.user"),
+        "PASSWORD": CONFIG.get("postgresql.password"),
+        "PORT": int(CONFIG.get("postgresql.port")),
+        "SSLMODE": CONFIG.get("postgresql.sslmode"),
+        "SSLROOTCERT": CONFIG.get("postgresql.sslrootcert"),
+        "SSLCERT": CONFIG.get("postgresql.sslcert"),
+        "SSLKEY": CONFIG.get("postgresql.sslkey"),
     }
 }
 
-if CONFIG.y_bool("postgresql.use_pgbouncer", False):
+if CONFIG.get_bool("postgresql.use_pgbouncer", False):
     # https://docs.djangoproject.com/en/4.0/ref/databases/#transaction-pooling-server-side-cursors
     DATABASES["default"]["DISABLE_SERVER_SIDE_CURSORS"] = True
     # https://docs.djangoproject.com/en/4.0/ref/databases/#persistent-connections
     DATABASES["default"]["CONN_MAX_AGE"] = None  # persistent
 
 # Email
-EMAIL_HOST = CONFIG.y("email.host")
-EMAIL_PORT = int(CONFIG.y("email.port"))
-EMAIL_HOST_USER = CONFIG.y("email.username")
-EMAIL_HOST_PASSWORD = CONFIG.y("email.password")
-EMAIL_USE_TLS = CONFIG.y_bool("email.use_tls", False)
-EMAIL_USE_SSL = CONFIG.y_bool("email.use_ssl", False)
-EMAIL_TIMEOUT = int(CONFIG.y("email.timeout"))
-DEFAULT_FROM_EMAIL = CONFIG.y("email.from")
+# These values should never actually be used, emails are only sent from email stages, which
+# loads the config directly from CONFIG
+# See authentik/stages/email/models.py, line 105
+EMAIL_HOST = CONFIG.get("email.host")
+EMAIL_PORT = int(CONFIG.get("email.port"))
+EMAIL_HOST_USER = CONFIG.get("email.username")
+EMAIL_HOST_PASSWORD = CONFIG.get("email.password")
+EMAIL_USE_TLS = CONFIG.get_bool("email.use_tls", False)
+EMAIL_USE_SSL = CONFIG.get_bool("email.use_ssl", False)
+EMAIL_TIMEOUT = int(CONFIG.get("email.timeout"))
+DEFAULT_FROM_EMAIL = CONFIG.get("email.from")
 SERVER_EMAIL = DEFAULT_FROM_EMAIL
 EMAIL_SUBJECT_PREFIX = "[authentik] "
 
@@ -344,15 +347,15 @@ CELERY = {
     },
     "task_create_missing_queues": True,
     "task_default_queue": "authentik",
-    "broker_url": f"{_redis_url}/{CONFIG.y('redis.db')}{_redis_celery_tls_requirements}",
-    "result_backend": f"{_redis_url}/{CONFIG.y('redis.db')}{_redis_celery_tls_requirements}",
+    "broker_url": f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
+    "result_backend": f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
 }
 
 # Sentry integration
 env = get_env()
-_ERROR_REPORTING = CONFIG.y_bool("error_reporting.enabled", False)
+_ERROR_REPORTING = CONFIG.get_bool("error_reporting.enabled", False)
 if _ERROR_REPORTING:
-    sentry_env = CONFIG.y("error_reporting.environment", "customer")
+    sentry_env = CONFIG.get("error_reporting.environment", "customer")
     sentry_init()
     set_tag("authentik.uuid", sha512(str(SECRET_KEY).encode("ascii")).hexdigest()[:16])
 
@@ -366,7 +369,7 @@ MEDIA_URL = "/media/"
 TEST = False
 TEST_RUNNER = "authentik.root.test_runner.PytestTestRunner"
 # We can't check TEST here as its set later by the test runner
-LOG_LEVEL = CONFIG.y("log_level").upper() if "TF_BUILD" not in os.environ else "DEBUG"
+LOG_LEVEL = CONFIG.get("log_level").upper() if "TF_BUILD" not in os.environ else "DEBUG"
 # We could add a custom level to stdlib logging and structlog, but it's not easy or clean
 # https://stackoverflow.com/questions/54505487/custom-log-level-not-working-with-structlog
 # Additionally, the entire code uses debug as highest level so that would have to be re-written too

@@ -31,14 +31,14 @@ class PytestTestRunner:  # pragma: no cover
 
         settings.TEST = True
         settings.CELERY["task_always_eager"] = True
-        CONFIG.y_set("avatars", "none")
-        CONFIG.y_set("geoip", "tests/GeoLite2-City-Test.mmdb")
-        CONFIG.y_set("blueprints_dir", "./blueprints")
-        CONFIG.y_set(
+        CONFIG.set("avatars", "none")
+        CONFIG.set("geoip", "tests/GeoLite2-City-Test.mmdb")
+        CONFIG.set("blueprints_dir", "./blueprints")
+        CONFIG.set(
             "outposts.container_image_base",
             f"ghcr.io/goauthentik/dev-%(type)s:{get_docker_tag()}",
         )
-        CONFIG.y_set("error_reporting.sample_rate", 0)
+        CONFIG.set("error_reporting.sample_rate", 0)
         sentry_init(
             environment="testing",
             send_default_pii=True,

@@ -3,7 +3,10 @@ from django.core.management.base import BaseCommand
 from structlog.stdlib import get_logger
 
 from authentik.sources.ldap.models import LDAPSource
-from authentik.sources.ldap.tasks import ldap_sync_single
+from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer
+from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer
+from authentik.sources.ldap.sync.users import UserLDAPSynchronizer
+from authentik.sources.ldap.tasks import ldap_sync_paginator
 
 LOGGER = get_logger()
 
@@ -20,4 +23,10 @@ class Command(BaseCommand):
             if not source:
                 LOGGER.warning("Source does not exist", slug=source_slug)
                 continue
-            ldap_sync_single(source)
+            tasks = (
+                ldap_sync_paginator(source, UserLDAPSynchronizer)
+                + ldap_sync_paginator(source, GroupLDAPSynchronizer)
+                + ldap_sync_paginator(source, MembershipLDAPSynchronizer)
+            )
+            for task in tasks:
+                task()
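Note: the management command no longer hands the whole sync to a Celery task; it builds the per-page task signatures itself and invokes them inline, so a manual sync runs synchronously in the foreground. A sketch of the signature-call semantics, with a toy task rather than authentik's:

    from celery import Celery

    app = Celery("sketch", broker="memory://")

    @app.task
    def sync_page(page: int) -> int:
        return page * 2

    # .si() builds an immutable signature; calling it with () executes the task
    # inline in the current process instead of sending it to a worker.
    signatures = [sync_page.si(page) for page in range(3)]
    assert [sig() for sig in signatures] == [0, 2, 4]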
@@ -136,7 +136,7 @@ class LDAPSource(Source):
             chmod(private_key_file, 0o600)
             tls_kwargs["local_private_key_file"] = private_key_file
             tls_kwargs["local_certificate_file"] = certificate_file
-        if ciphers := CONFIG.y("ldap.tls.ciphers", None):
+        if ciphers := CONFIG.get("ldap.tls.ciphers", None):
             tls_kwargs["ciphers"] = ciphers.strip()
         if self.sni:
             tls_kwargs["sni"] = self.server_uri.split(",", maxsplit=1)[0].strip()

@@ -9,6 +9,7 @@ from structlog.stdlib import BoundLogger, get_logger
 
 from authentik.core.exceptions import PropertyMappingExpressionException
 from authentik.events.models import Event, EventAction
+from authentik.lib.config import CONFIG
 from authentik.lib.merge import MERGE_LIST_UNIQUE
 from authentik.sources.ldap.auth import LDAP_DISTINGUISHED_NAME
 from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
@@ -92,7 +93,7 @@ class BaseLDAPSynchronizer:
         types_only=False,
         get_operational_attributes=False,
         controls=None,
-        paged_size=5,
+        paged_size=int(CONFIG.get("ldap.page_size", 50)),
         paged_criticality=False,
     ):
         """Search in pages, returns each page"""

@@ -49,7 +49,7 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
             uniq = self._flatten(attributes[self._source.object_uniqueness_field])
             try:
                 defaults = self.build_user_properties(user_dn, **attributes)
-                self._logger.debug("Creating user with attributes", **defaults)
+                self._logger.debug("Writing user with attributes", **defaults)
                 if "username" not in defaults:
                     raise IntegrityError("Username was not set by propertymappings")
                 ak_user, created = self.update_or_create_attributes(

12 authentik/sources/ldap/sync/vendor/freeipa.py vendored
@@ -20,6 +20,7 @@ class FreeIPA(BaseLDAPSynchronizer):
 
     def sync(self, attributes: dict[str, Any], user: User, created: bool):
         self.check_pwd_last_set(attributes, user, created)
+        self.check_nsaccountlock(attributes, user)
 
     def check_pwd_last_set(self, attributes: dict[str, Any], user: User, created: bool):
         """Check krbLastPwdChange"""
@@ -37,3 +38,14 @@ class FreeIPA(BaseLDAPSynchronizer):
             )
             user.set_unusable_password()
             user.save()
+
+    def check_nsaccountlock(self, attributes: dict[str, Any], user: User):
+        """https://www.port389.org/docs/389ds/howto/howto-account-inactivation.html"""
+        # This is more of a 389-ds quirk rather than FreeIPA, but FreeIPA uses
+        # 389-ds and this will trigger regardless
+        if "nsaccountlock" not in attributes:
+            return
+        is_active = attributes.get("nsaccountlock", False)
+        if is_active != user.is_active:
+            user.is_active = is_active
+            user.save()

6 authentik/sources/ldap/sync/vendor/ms_ad.py vendored
@@ -78,5 +78,7 @@ class MicrosoftActiveDirectory(BaseLDAPSynchronizer):
         # /useraccountcontrol-manipulate-account-properties
         uac_bit = attributes.get("userAccountControl", 512)
         uac = UserAccountControl(uac_bit)
-        user.is_active = UserAccountControl.ACCOUNTDISABLE not in uac
-        user.save()
+        is_active = UserAccountControl.ACCOUNTDISABLE not in uac
+        if is_active != user.is_active:
+            user.is_active = is_active
+            user.save()

@@ -30,12 +30,15 @@ CACHE_KEY_PREFIX = "goauthentik.io/sources/ldap/page/"
 def ldap_sync_all():
     """Sync all sources"""
     for source in LDAPSource.objects.filter(enabled=True):
-        ldap_sync_single(source)
+        ldap_sync_single(source.pk)
 
 
 @CELERY_APP.task()
-def ldap_sync_single(source: LDAPSource):
+def ldap_sync_single(source_pk: str):
     """Sync a single source"""
+    source: LDAPSource = LDAPSource.objects.filter(pk=source_pk).first()
+    if not source:
+        return
     task = chain(
         # User and group sync can happen at once, they have no dependencies on each other
         group(
@@ -56,7 +59,7 @@ def ldap_sync_paginator(source: LDAPSource, sync: type[BaseLDAPSynchronizer]) ->
     signatures = []
     for page in sync_inst.get_objects():
         page_cache_key = CACHE_KEY_PREFIX + str(uuid4())
-        cache.set(page_cache_key, page)
+        cache.set(page_cache_key, page, 60 * 60 * int(CONFIG.get("ldap.task_timeout_hours")))
         page_sync = ldap_sync.si(source.pk, class_to_path(sync), page_cache_key)
         signatures.append(page_sync)
     return signatures
@@ -65,15 +68,14 @@ def ldap_sync_paginator(source: LDAPSource, sync: type[BaseLDAPSynchronizer]) ->
 @CELERY_APP.task(
     bind=True,
     base=MonitoredTask,
-    soft_time_limit=60 * 60 * int(CONFIG.y("ldap.task_timeout_hours")),
-    task_time_limit=60 * 60 * int(CONFIG.y("ldap.task_timeout_hours")),
+    soft_time_limit=60 * 60 * int(CONFIG.get("ldap.task_timeout_hours")),
+    task_time_limit=60 * 60 * int(CONFIG.get("ldap.task_timeout_hours")),
 )
 def ldap_sync(self: MonitoredTask, source_pk: str, sync_class: str, page_cache_key: str):
     """Synchronization of an LDAP Source"""
-    self.result_timeout_hours = int(CONFIG.y("ldap.task_timeout_hours"))
-    try:
-        source: LDAPSource = LDAPSource.objects.get(pk=source_pk)
-    except LDAPSource.DoesNotExist:
+    self.result_timeout_hours = int(CONFIG.get("ldap.task_timeout_hours"))
+    source: LDAPSource = LDAPSource.objects.filter(pk=source_pk).first()
+    if not source:
         # Because the source couldn't be found, we don't have a UID
         # to set the state with
         return
@@ -84,6 +86,12 @@ def ldap_sync(self: MonitoredTask, source_pk: str, sync_class: str, page_cache_k
     sync_inst: BaseLDAPSynchronizer = sync(source)
     page = cache.get(page_cache_key)
     if not page:
+        error_message = (
+            f"Could not find page in cache: {page_cache_key}. "
+            + "Try increasing ldap.task_timeout_hours"
+        )
+        LOGGER.warning(error_message)
+        self.set_status(TaskResult(TaskResultStatus.ERROR, [error_message]))
         return
     cache.touch(page_cache_key)
     count = sync_inst.sync(page)
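Note: ldap_sync_single now receives the source's primary key instead of the model instance: Celery serializes task arguments (JSON by default), so ORM objects should not cross the broker, and re-fetching by pk also tolerates a source deleted while the task sat in the queue. Pages are additionally cached with a TTL matching ldap.task_timeout_hours, and a missing page now marks the task as errored with a hint instead of silently returning. The re-fetch pattern in miniature, with a toy in-memory table standing in for the ORM:

    from typing import Optional

    SOURCES: dict[str, dict] = {"src-1": {"slug": "corp-ldap", "enabled": True}}

    def sync_source(source_pk: str) -> Optional[str]:
        source = SOURCES.get(source_pk)  # re-fetch when the task actually runs
        if not source:
            return None  # deleted while the task was queued -> nothing to do
        return f"synced {source['slug']}"

    assert sync_source("src-1") == "synced corp-ldap"
    assert sync_source("gone") is None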
@@ -8,12 +8,14 @@ from authentik.blueprints.tests import apply_blueprint
 from authentik.core.models import Group, User
 from authentik.core.tests.utils import create_test_admin_user
 from authentik.events.models import Event, EventAction
+from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
 from authentik.lib.generators import generate_key
+from authentik.lib.utils.reflection import class_to_path
 from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
 from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer
 from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer
 from authentik.sources.ldap.sync.users import UserLDAPSynchronizer
-from authentik.sources.ldap.tasks import ldap_sync_all
+from authentik.sources.ldap.tasks import ldap_sync, ldap_sync_all
 from authentik.sources.ldap.tests.mock_ad import mock_ad_connection
 from authentik.sources.ldap.tests.mock_slapd import mock_slapd_connection
 
@@ -33,6 +35,14 @@ class LDAPSyncTests(TestCase):
             additional_group_dn="ou=groups",
         )
 
+    def test_sync_missing_page(self):
+        """Test sync with missing page"""
+        connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))
+        with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
+            ldap_sync.delay(self.source.pk, class_to_path(UserLDAPSynchronizer), "foo").get()
+        status = TaskInfo.by_name("ldap_sync:ldap:users:foo")
+        self.assertEqual(status.result.status, TaskResultStatus.ERROR)
+
     def test_sync_error(self):
         """Test user sync"""
         self.source.property_mappings.set(

@@ -13,6 +13,7 @@ from rest_framework.serializers import BaseSerializer
 from structlog.stdlib import get_logger
 
 from authentik.flows.models import Stage
+from authentik.lib.config import CONFIG
 
 LOGGER = get_logger()
 
@@ -104,7 +105,16 @@ class EmailStage(Stage):
     def backend(self) -> BaseEmailBackend:
         """Get fully configured Email Backend instance"""
         if self.use_global_settings:
-            return self.backend_class()
+            CONFIG.refresh("email.password")
+            return self.backend_class(
+                host=CONFIG.get("email.host"),
+                port=int(CONFIG.get("email.port")),
+                username=CONFIG.get("email.username"),
+                password=CONFIG.get("email.password"),
+                use_tls=CONFIG.get_bool("email.use_tls", False),
+                use_ssl=CONFIG.get_bool("email.use_ssl", False),
+                timeout=int(CONFIG.get("email.timeout")),
+            )
         return self.backend_class(
             host=self.host,
             port=self.port,
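Note: with use_global_settings, the backend is now built from CONFIG at send time (after refreshing email.password) rather than from Django's EMAIL_* settings, which are frozen at process start; the settings.py comment earlier in this diff points here. A sketch of the same construction, assuming Django's SMTP backend and a CONFIG-like accessor passed in as cfg:

    from django.core.mail.backends.smtp import EmailBackend

    def build_backend(cfg) -> EmailBackend:
        cfg.refresh("email.password")  # pick up rotated SMTP credentials first
        return EmailBackend(
            host=cfg.get("email.host"),
            port=int(cfg.get("email.port")),
            username=cfg.get("email.username"),
            password=cfg.get("email.password"),
            use_tls=cfg.get_bool("email.use_tls", False),
            use_ssl=cfg.get_bool("email.use_ssl", False),
            timeout=int(cfg.get("email.timeout")),
        )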
@@ -13,6 +13,7 @@ from authentik.flows.models import FlowDesignation, FlowStageBinding, FlowToken
 from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan
 from authentik.flows.tests import FlowTestCase
 from authentik.flows.views.executor import QS_KEY_TOKEN, SESSION_KEY_PLAN
+from authentik.lib.config import CONFIG
 from authentik.stages.email.models import EmailStage
 from authentik.stages.email.stage import PLAN_CONTEXT_EMAIL_OVERRIDE
 
@@ -120,7 +121,7 @@ class TestEmailStage(FlowTestCase):
     def test_use_global_settings(self):
         """Test use_global_settings"""
         host = "some-unique-string"
-        with self.settings(EMAIL_HOST=host):
+        with CONFIG.patch("email.host", host):
             self.assertEqual(EmailStage(use_global_settings=True).backend.host, host)
 
     def test_token(self):

@@ -179,7 +179,7 @@ class ListPolicyEngine(PolicyEngine):
         self.__list = policies
         self.use_cache = False
 
-    def _iter_bindings(self) -> Iterator[PolicyBinding]:
+    def iterate_bindings(self) -> Iterator[PolicyBinding]:
         for policy in self.__list:
             yield PolicyBinding(
                 policy=policy,

@@ -78,7 +78,7 @@ class CurrentTenantSerializer(PassiveSerializer):
     ui_footer_links = ListField(
         child=FooterLinkSerializer(),
         read_only=True,
-        default=CONFIG.y("footer_links", []),
+        default=CONFIG.get("footer_links", []),
     )
     ui_theme = ChoiceField(
         choices=Themes.choices,

@@ -24,7 +24,7 @@ class TestTenants(APITestCase):
                 "branding_favicon": "/static/dist/assets/icons/icon.png",
                 "branding_title": "authentik",
                 "matched_domain": tenant.domain,
-                "ui_footer_links": CONFIG.y("footer_links"),
+                "ui_footer_links": CONFIG.get("footer_links"),
                 "ui_theme": Themes.AUTOMATIC,
                 "default_locale": "",
             },
@@ -43,7 +43,7 @@ class TestTenants(APITestCase):
                 "branding_favicon": "/static/dist/assets/icons/icon.png",
                 "branding_title": "custom",
                 "matched_domain": "bar.baz",
-                "ui_footer_links": CONFIG.y("footer_links"),
+                "ui_footer_links": CONFIG.get("footer_links"),
                 "ui_theme": Themes.AUTOMATIC,
                 "default_locale": "",
             },
@@ -59,7 +59,7 @@ class TestTenants(APITestCase):
                 "branding_favicon": "/static/dist/assets/icons/icon.png",
                 "branding_title": "authentik",
                 "matched_domain": "fallback",
-                "ui_footer_links": CONFIG.y("footer_links"),
+                "ui_footer_links": CONFIG.get("footer_links"),
                 "ui_theme": Themes.AUTOMATIC,
                 "default_locale": "",
             },

@@ -36,7 +36,7 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
         trace = span.to_traceparent()
     return {
         "tenant": tenant,
-        "footer_links": CONFIG.y("footer_links"),
+        "footer_links": CONFIG.get("footer_links"),
         "sentry_trace": trace,
         "version": get_full_version(),
     }

@@ -94,21 +94,21 @@ entries:
       prompt_data = request.context.get("prompt_data")
 
       if not request.user.group_attributes(request.http_request).get(
-          USER_ATTRIBUTE_CHANGE_EMAIL, CONFIG.y_bool("default_user_change_email", True)
+          USER_ATTRIBUTE_CHANGE_EMAIL, CONFIG.get_bool("default_user_change_email", True)
       ):
           if prompt_data.get("email") != request.user.email:
               ak_message("Not allowed to change email address.")
               return False
 
       if not request.user.group_attributes(request.http_request).get(
-          USER_ATTRIBUTE_CHANGE_NAME, CONFIG.y_bool("default_user_change_name", True)
+          USER_ATTRIBUTE_CHANGE_NAME, CONFIG.get_bool("default_user_change_name", True)
       ):
           if prompt_data.get("name") != request.user.name:
               ak_message("Not allowed to change name.")
               return False
 
       if not request.user.group_attributes(request.http_request).get(
-          USER_ATTRIBUTE_CHANGE_USERNAME, CONFIG.y_bool("default_user_change_username", True)
+          USER_ATTRIBUTE_CHANGE_USERNAME, CONFIG.get_bool("default_user_change_username", True)
       ):
           if prompt_data.get("username") != request.user.username:
               ak_message("Not allowed to change username.")

@@ -3213,7 +3213,6 @@
         "authentik.crypto",
         "authentik.events",
         "authentik.flows",
-        "authentik.lib",
         "authentik.outposts",
         "authentik.policies.dummy",
         "authentik.policies.event_matcher",
@@ -3980,6 +3979,16 @@
                 "type": "string",
                 "minLength": 1,
                 "title": "Path"
+            },
+            "type": {
+                "type": "string",
+                "enum": [
+                    "internal",
+                    "external",
+                    "service_account",
+                    "internal_service_account"
+                ],
+                "title": "Type"
             }
         },
         "required": [
@@ -4171,6 +4180,16 @@
                 "type": "string",
                 "minLength": 1,
                 "title": "Path"
+            },
+            "type": {
+                "type": "string",
+                "enum": [
+                    "internal",
+                    "external",
+                    "service_account",
+                    "internal_service_account"
+                ],
+                "title": "Type"
             }
         },
         "required": [
@@ -4366,6 +4385,16 @@
                 "type": "string",
                 "minLength": 1,
                 "title": "Path"
+            },
+            "type": {
+                "type": "string",
+                "enum": [
+                    "internal",
+                    "external",
+                    "service_account",
+                    "internal_service_account"
+                ],
+                "title": "Type"
             }
         },
         "required": [
@@ -6522,6 +6551,16 @@
                 "type": "string",
                 "minLength": 1,
                 "title": "Path"
+            },
+            "type": {
+                "type": "string",
+                "enum": [
+                    "internal",
+                    "external",
+                    "service_account",
+                    "internal_service_account"
+                ],
+                "title": "Type"
             }
         },
         "required": [
@@ -7257,6 +7296,16 @@
                 "type": "string",
                 "minLength": 1,
                 "title": "Path"
+            },
+            "type": {
+                "type": "string",
+                "enum": [
+                    "internal",
+                    "external",
+                    "service_account",
+                    "internal_service_account"
+                ],
+                "title": "Type"
             }
         },
         "required": [
@@ -8334,6 +8383,16 @@
                 "minLength": 1,
                 "title": "Path"
             },
+            "type": {
+                "type": "string",
+                "enum": [
+                    "internal",
+                    "external",
+                    "service_account",
+                    "internal_service_account"
+                ],
+                "title": "Type"
+            },
             "password": {
                 "type": "string",
                 "minLength": 1,
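Note: each user definition in the blueprint schema gains the new type enum shown above. A sketch validating an entry against that fragment with the jsonschema package; the fragment is copied from the diff, the full blueprint schema is much larger:

    from jsonschema import ValidationError, validate

    fragment = {
        "type": "object",
        "properties": {
            "type": {
                "type": "string",
                "enum": [
                    "internal",
                    "external",
                    "service_account",
                    "internal_service_account",
                ],
            }
        },
    }

    validate({"type": "internal"}, fragment)  # passes
    try:
        validate({"type": "robot"}, fragment)  # not in the enum
    except ValidationError as exc:
        print(exc.message)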
|
@ -1,36 +0,0 @@
|
|||||||
# This file is used for development and debugging, and should not be used for production instances
|
|
||||||
|
|
||||||
version: '3.5'
|
|
||||||
|
|
||||||
services:
|
|
||||||
flower:
|
|
||||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.5.4}
|
|
||||||
restart: unless-stopped
|
|
||||||
command: worker-status
|
|
||||||
environment:
|
|
||||||
AUTHENTIK_REDIS__HOST: redis
|
|
||||||
AUTHENTIK_POSTGRESQL__HOST: postgresql
|
|
||||||
AUTHENTIK_POSTGRESQL__USER: ${PG_USER:-authentik}
|
|
||||||
AUTHENTIK_POSTGRESQL__NAME: ${PG_DB:-authentik}
|
|
||||||
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS}
|
|
||||||
env_file:
|
|
||||||
- .env
|
|
||||||
ports:
|
|
||||||
- "9001:9000"
|
|
||||||
depends_on:
|
|
||||||
- postgresql
|
|
||||||
- redis
|
|
||||||
server:
|
|
||||||
environment:
|
|
||||||
AUTHENTIK_REMOTE_DEBUG: "true"
|
|
||||||
PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT: "true"
|
|
||||||
ports:
|
|
||||||
- 6800:6800
|
|
||||||
worker:
|
|
||||||
environment:
|
|
||||||
CELERY_RDB_HOST: "0.0.0.0"
|
|
||||||
CELERY_RDBSIG: "1"
|
|
||||||
AUTHENTIK_REMOTE_DEBUG: "true"
|
|
||||||
PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT: "true"
|
|
||||||
ports:
|
|
||||||
- 6900:6900
|
|
@@ -32,7 +32,7 @@ services:
     volumes:
       - redis:/data
   server:
-    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.6.0}
+    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.6.1}
     restart: unless-stopped
     command: server
     environment:
@@ -53,7 +53,7 @@ services:
       - postgresql
       - redis
   worker:
-    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.6.0}
+    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.6.1}
     restart: unless-stopped
     command: worker
     environment:
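Both bumped `image:` lines rely on Compose's `${VAR:-default}` interpolation: the tag comes from `AUTHENTIK_TAG` when it is set and non-empty, and falls back to `2023.6.1` otherwise. A minimal Go sketch of that lookup rule, with a hypothetical `envOr` helper:

```go
package main

import (
	"fmt"
	"os"
)

// envOr mirrors Compose's ${KEY:-fallback}: the fallback is used when the
// variable is unset or empty. (A hypothetical helper, not part of the diff.)
func envOr(key, fallback string) string {
	if v, ok := os.LookupEnv(key); ok && v != "" {
		return v
	}
	return fallback
}

func main() {
	image := envOr("AUTHENTIK_IMAGE", "ghcr.io/goauthentik/server")
	tag := envOr("AUTHENTIK_TAG", "2023.6.1")
	fmt.Println(image + ":" + tag) // ghcr.io/goauthentik/server:2023.6.1 by default
}
```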
go.mod
@@ -26,7 +26,7 @@ require (
 	github.com/sirupsen/logrus v1.9.3
 	github.com/spf13/cobra v1.7.0
 	github.com/stretchr/testify v1.8.4
-	goauthentik.io/api/v3 v3.2023054.4
+	goauthentik.io/api/v3 v3.2023061.6
 	golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
 	golang.org/x/oauth2 v0.10.0
 	golang.org/x/sync v0.3.0
go.sum
@@ -1070,8 +1070,8 @@ go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqe
 go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
 go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
 go.uber.org/goleak v1.1.10 h1:z+mqJhf6ss6BSfSM671tgKyZBFPTTJM+HLxnhPC3wu0=
-goauthentik.io/api/v3 v3.2023054.4 h1:wnONALlxADR42TpW5xKKsGkJ/G8oNDQsWiwdlMsG2Ig=
-goauthentik.io/api/v3 v3.2023054.4/go.mod h1:tC7qK9VSP0zJah5p5xHFnjZt/4dAkXVwcrWyZNGYhwQ=
+goauthentik.io/api/v3 v3.2023061.6 h1:4zbo0Dtx42HLYObizIlTWAk7iBvCv9kmCvzBxMElkIk=
+goauthentik.io/api/v3 v3.2023061.6/go.mod h1:tC7qK9VSP0zJah5p5xHFnjZt/4dAkXVwcrWyZNGYhwQ=
 golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
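go.mod and go.sum move `goauthentik.io/api/v3` from v3.2023054.4 to v3.2023061.6 in lockstep; in go.sum, the `h1:` entry is the checksum of the whole module tree while the `/go.mod` entry covers only its go.mod file, which is why both lines change together. A minimal consumption sketch follows, assuming the standard openapi-generator client surface (`NewConfiguration`/`NewAPIClient`); the host value is a placeholder, not taken from the diff.

```go
package main

import (
	"fmt"

	api "goauthentik.io/api/v3"
)

func main() {
	// After bumping the dependency (e.g. `go get goauthentik.io/api/v3@v3.2023061.6`
	// followed by `go mod tidy`), the generated client is constructed as usual.
	// NewConfiguration/NewAPIClient are the standard openapi-generator entry
	// points; the field values below are assumptions for illustration.
	cfg := api.NewConfiguration()
	cfg.Host = "authentik.example.com" // hypothetical host
	cfg.Scheme = "https"
	client := api.NewAPIClient(cfg)
	fmt.Printf("client configured: %T\n", client)
}
```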
@@ -29,4 +29,4 @@ func UserAgent() string {
 	return fmt.Sprintf("authentik@%s", FullVersion())
 }
 
-const VERSION = "2023.6.0"
+const VERSION = "2023.6.1"
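The only change in this file is the version constant, which feeds the user agent built a few lines up. A sketch of the resulting string; the build-suffix handling in `FullVersion` is an assumption here, only `VERSION` and `UserAgent` come from the diff.

```go
package main

import (
	"fmt"
	"os"
)

const VERSION = "2023.6.1"

// FullVersion is a sketch: authentik appends a build identifier to VERSION
// when one is set; the env variable name and format are assumptions.
func FullVersion() string {
	version := VERSION
	if build, ok := os.LookupEnv("GIT_BUILD_HASH"); ok && build != "" {
		version += "." + build
	}
	return version
}

func UserAgent() string {
	return fmt.Sprintf("authentik@%s", FullVersion())
}

func main() {
	fmt.Println(UserAgent()) // e.g. authentik@2023.6.1
}
```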
@@ -8,6 +8,7 @@ import (
 	"net/http/cookiejar"
 	"net/url"
 	"strings"
+	"time"
 
 	"github.com/getsentry/sentry-go"
 	"github.com/prometheus/client_golang/prometheus"
@@ -21,10 +22,20 @@ import (
 
 var (
 	FlowTimingGet = promauto.NewHistogramVec(prometheus.HistogramOpts{
+		Name: "authentik_outpost_flow_timing_get_seconds",
+		Help: "Duration it took to get a challenge in seconds",
+	}, []string{"stage", "flow"})
+	FlowTimingPost = promauto.NewHistogramVec(prometheus.HistogramOpts{
+		Name: "authentik_outpost_flow_timing_post_seconds",
+		Help: "Duration it took to send a challenge in seconds",
+	}, []string{"stage", "flow"})
+
+	// NOTE: the following metrics are kept for compatibility purpose
+	FlowTimingGetLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
 		Name: "authentik_outpost_flow_timing_get",
 		Help: "Duration it took to get a challenge",
 	}, []string{"stage", "flow"})
-	FlowTimingPost = promauto.NewHistogramVec(prometheus.HistogramOpts{
+	FlowTimingPostLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
 		Name: "authentik_outpost_flow_timing_post",
 		Help: "Duration it took to send a challenge",
 	}, []string{"stage", "flow"})
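The rewritten var block registers new histograms with a `_seconds` suffix that record durations in the base unit, as Prometheus naming conventions recommend, while the old nanosecond-valued histograms live on under `*Legacy` names so existing dashboards keep working. A self-contained sketch of the same dual-registration pattern, with illustrative metric and label names rather than the real ones:

```go
package main

import (
	"fmt"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
)

// A new base-unit histogram (seconds) observed alongside a legacy one that
// is kept only for compatibility with existing dashboards.
var (
	timing = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Name: "example_request_timing_seconds",
		Help: "Duration it took to handle a request in seconds",
	}, []string{"stage"})
	timingLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Name: "example_request_timing",
		Help: "Duration it took to handle a request",
	}, []string{"stage"})
)

func observe(stage string, d time.Duration) {
	labels := prometheus.Labels{"stage": stage}
	timing.With(labels).Observe(d.Seconds())      // new: seconds
	timingLegacy.With(labels).Observe(float64(d)) // legacy: raw nanoseconds
}

func main() {
	observe("challenge", 250*time.Millisecond)
	fmt.Println("observed into both histograms")
}
```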
@@ -186,6 +197,10 @@ func (fe *FlowExecutor) getInitialChallenge() (*api.ChallengeTypes, error) {
 	FlowTimingGet.With(prometheus.Labels{
 		"stage": ch.GetComponent(),
 		"flow":  fe.flowSlug,
+	}).Observe(float64(gcsp.EndTime.Sub(gcsp.StartTime)) / float64(time.Second))
+	FlowTimingGetLegacy.With(prometheus.Labels{
+		"stage": ch.GetComponent(),
+		"flow":  fe.flowSlug,
 	}).Observe(float64(gcsp.EndTime.Sub(gcsp.StartTime)))
 	return challenge, nil
 }
@@ -243,6 +258,10 @@ func (fe *FlowExecutor) solveFlowChallenge(challenge *api.ChallengeTypes, depth
 	FlowTimingPost.With(prometheus.Labels{
 		"stage": ch.GetComponent(),
 		"flow":  fe.flowSlug,
+	}).Observe(float64(scsp.EndTime.Sub(scsp.StartTime)) / float64(time.Second))
+	FlowTimingPostLegacy.With(prometheus.Labels{
+		"stage": ch.GetComponent(),
+		"flow":  fe.flowSlug,
 	}).Observe(float64(scsp.EndTime.Sub(scsp.StartTime)))
 
 	if depth >= 10 {
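In both call sites, the new observation divides the raw `time.Duration` (an int64 nanosecond count) by `float64(time.Second)`, which is exactly what `Duration.Seconds()` computes; the legacy observation keeps recording raw nanoseconds. A quick check:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// time.Duration is an int64 nanosecond count, so dividing by
	// float64(time.Second) yields seconds, which is what the new
	// Observe calls above record; Duration.Seconds() is equivalent.
	d := 1500 * time.Millisecond
	fmt.Println(float64(d) / float64(time.Second)) // 1.5
	fmt.Println(d.Seconds())                       // 1.5
	// The legacy Observe keeps recording float64(d), i.e. nanoseconds.
	fmt.Println(float64(d)) // 1.5e+09
}
```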
@@ -2,6 +2,7 @@ package ldap
 
 import (
 	"net"
+	"time"
 
 	"beryju.io/ldap"
 	"github.com/getsentry/sentry-go"
@@ -20,6 +21,11 @@ func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LD
 			"outpost_name": ls.ac.Outpost.Name,
 			"type":         "bind",
 			"app":          selectedApp,
+		}).Observe(float64(span.EndTime.Sub(span.StartTime)) / float64(time.Second))
+		metrics.RequestsLegacy.With(prometheus.Labels{
+			"outpost_name": ls.ac.Outpost.Name,
+			"type":         "bind",
+			"app":          selectedApp,
 		}).Observe(float64(span.EndTime.Sub(span.StartTime)))
 		req.Log().WithField("took-ms", span.EndTime.Sub(span.StartTime).Milliseconds()).Info("Bind request")
 	}()
@@ -49,6 +55,12 @@ func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LD
 			"reason":       "no_provider",
 			"app":          "",
 		}).Inc()
+		metrics.RequestsRejectedLegacy.With(prometheus.Labels{
+			"outpost_name": ls.ac.Outpost.Name,
+			"type":         "bind",
+			"reason":       "no_provider",
+			"app":          "",
+		}).Inc()
 
 		return ldap.LDAPResultInsufficientAccessRights, nil
 	}
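The LDAP outpost gets the same treatment: bind timings are observed in seconds plus a `RequestsLegacy` histogram, and rejected binds increment a `RequestsRejectedLegacy` counter with the identical label set. A sketch of that rejected-bind pattern, with placeholder metric and variable names (the real ones live in the outpost's `metrics` package, which the diff does not show):

```go
package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
)

// Placeholder counters mirroring the label set used in the diff above.
var (
	requestsRejected = promauto.NewCounterVec(prometheus.CounterOpts{
		Name: "example_ldap_requests_rejected_total",
		Help: "Total number of rejected LDAP requests",
	}, []string{"outpost_name", "type", "reason", "app"})
	requestsRejectedLegacy = promauto.NewCounterVec(prometheus.CounterOpts{
		Name: "example_ldap_requests_rejected",
		Help: "Total number of rejected LDAP requests (legacy name)",
	}, []string{"outpost_name", "type", "reason", "app"})
)

func rejectBind(outpost string) {
	labels := prometheus.Labels{
		"outpost_name": outpost,
		"type":         "bind",
		"reason":       "no_provider",
		"app":          "",
	}
	// Both counters are incremented with the same labels, so dashboards can
	// migrate off the legacy metric at their own pace.
	requestsRejected.With(labels).Inc()
	requestsRejectedLegacy.With(labels).Inc()
}

func main() {
	rejectBind("ldap-outpost")
	fmt.Println("rejection recorded")
}
```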
Some files were not shown because too many files have changed in this diff.