Compare commits
172 Commits
enterprise...fix/issue_
| SHA1 | Author | Date |
|---|---|---|
| 13c8cbf03a | |||
| 1776981f29 | |||
| 5a4df95011 | |||
| f2927e5725 | |||
| 8ee90826fc | |||
| 8c7d4d2f5e | |||
| d72def0368 | |||
| 5bcf501842 | |||
| 13fc216c68 | |||
| 27aed4b315 | |||
| 84b5992e55 | |||
| 7eb985f636 | |||
| d3172ae904 | |||
| 88662b54c1 | |||
| b38bc8c1c4 | |||
| a9b648842a | |||
| 5fda531e2b | |||
| 921a3e6eb8 | |||
| fd898bea66 | |||
| cbf9ee55ae | |||
| 590ee7d9d4 | |||
| b8cd1d1ae2 | |||
| 9f9524fbcb | |||
| 1df87cdf77 | |||
| 6383550914 | |||
| 10771b4779 | |||
| fcaf1193ed | |||
| b9f6093e6f | |||
| 47f6d59758 | |||
| 59d20e3bc0 | |||
| ae347cd1c5 | |||
| 7653a35caa | |||
| dc9b12fd37 | |||
| b7dac0674a | |||
| 5a17dea765 | |||
| 044547c316 | |||
| 6a84e7e6b0 | |||
| 6d4bb77960 | |||
| 1b588b98bc | |||
| 3eccef88aa | |||
| 8f50dfa0c5 | |||
| 8417d8508f | |||
| b2c2fc001b | |||
| f60312cbbc | |||
| 7614b17a05 | |||
| 8947376edb | |||
| ce23209ae8 | |||
| 0b806b7130 | |||
| 9538cf4690 | |||
| 63da458fb3 | |||
| 873dab29a9 | |||
| 1e96c80593 | |||
| ee4a922234 | |||
| 37a2eff716 | |||
| 50e2f1c474 | |||
| ab7338b50e | |||
| bcdc6fcd36 | |||
| 98c3e0d68b | |||
| a2b82b6448 | |||
| 0456ace646 | |||
| d3a11ce810 | |||
| bfd1445c69 | |||
| c2b3e9b05c | |||
| 2c7d841e4a | |||
| c5d13c4a15 | |||
| 079ef6e114 | |||
| 98bfca0b4d | |||
| a247bd5b9f | |||
| 27856ec301 | |||
| e4a8c05d25 | |||
| cb2e0c6d54 | |||
| f37e1ca642 | |||
| 70b1f05a84 | |||
| 192ed8f494 | |||
| b69d77d270 | |||
| 35b6801ba0 | |||
| f9e6f57aad | |||
| 868261c883 | |||
| b6442c233d | |||
| 74292e6c23 | |||
| 3e2cf4fd30 | |||
| 05cbb4ce0c | |||
| c93d85731c | |||
| d163afe87c | |||
| eac2c9a12b | |||
| c10e4a9063 | |||
| 4e4adcc672 | |||
| bb20576d84 | |||
| 5f315bddbd | |||
| 9e0404646b | |||
| 45883ff86b | |||
| 915f5689c6 | |||
| ce1ea926f8 | |||
| 2e3624ea82 | |||
| 4e52fb7e52 | |||
| 7e36fb2153 | |||
| 2b00754324 | |||
| 12a73ef306 | |||
| 4469db9b23 | |||
| b7beac6795 | |||
| ad27f268dc | |||
| a3f86115e1 | |||
| 75eb025ef4 | |||
| efb3803371 | |||
| 904d6cd81b | |||
| b445cff4c9 | |||
| 89437ac73b | |||
| e354e110ca | |||
| cf5eea74ee | |||
| 54433e614a | |||
| 78a02ff1f0 | |||
| 749e015414 | |||
| 2c9bf4befe | |||
| f14b2fd4c5 | |||
| cda764c5fd | |||
| 4cee9f3a31 | |||
| 9972b43399 | |||
| d4805f326f | |||
| 38864e8e9a | |||
| 5618545248 | |||
| 876feccd51 | |||
| 2e28683381 | |||
| 5d803a9bf3 | |||
| c7b3272cf6 | |||
| 2688fa4fe8 | |||
| b713660e5d | |||
| de237aab10 | |||
| 4068d67424 | |||
| ab6595b597 | |||
| 0f89b6b746 | |||
| 45f74debd9 | |||
| 5a52225ee2 | |||
| d36f0d187b | |||
| b7bfbff2fe | |||
| 46d8be8d20 | |||
| 58158f61e4 | |||
| 9543800442 | |||
| c0adac3625 | |||
| cd7dce2cae | |||
| 09570a30f9 | |||
| 8617bb098d | |||
| c47fb2612a | |||
| 23c0d90b3e | |||
| 593ae3b52e | |||
| 7a62965928 | |||
| 2d060576c7 | |||
| a51252e1d3 | |||
| 20904776bb | |||
| 4a50c1f640 | |||
| 41555c88c4 | |||
| 408e6ec34e | |||
| 5bc65e253b | |||
| f5d1f72d22 | |||
| ec9e815e7a | |||
| b0671e26c8 | |||
| f185a41813 | |||
| a2211135bc | |||
| b082849fb5 | |||
| e933fd5692 | |||
| 38649e5347 | |||
| ff91ecf873 | |||
| 15ee17ea60 | |||
| 75a6d8c0c5 | |||
| ef4d532b9c | |||
| 985d491073 | |||
| 2bdc415068 | |||
| 547e5be7a2 | |||
| 1bc99e48e0 | |||
| 349f66e53c | |||
| 9e0a9f4eee | |||
| 727404c9a4 | |||
| 0fa4637640 |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2025.2.1
+current_version = 2025.2.2
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?

.github/ISSUE_TEMPLATE/docs_issue.md (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
+---
+name: Documentation issue
+about: Suggest an improvement or report a problem
+title: ""
+labels: documentation
+assignees: ""
+---
+
+**Do you see an area that can be clarified or expanded, a technical inaccuracy, or a broken link? Please describe.**
+A clear and concise description of what the problem is, or where the document can be improved. Ex. I believe we need more details about [...]
+
+**Provide the URL or link to the exact page in the documentation to which you are referring.**
+If there are multiple pages, list them all, and be sure to state the header or section where the content is.
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Additional context**
+Add any other context or screenshots about the documentation issue here.
+
+**Consider opening a PR!**
+If the issue is one that you can fix, or even make a good pass at, we'd appreciate a PR. For more information about making a contribution to the docs, and using our Style Guide and our templates, refer to ["Writing documentation"](https://docs.goauthentik.io/docs/developer-docs/docs/writing-documentation).

@@ -44,7 +44,6 @@ if is_release:
     ]
     if not prerelease:
         image_tags += [
-            f"{name}:latest",
            f"{name}:{version_family}",
         ]
 else:

.github/actions/setup/action.yml (vendored, 16 changes)
@@ -9,17 +9,22 @@ inputs:
 runs:
   using: "composite"
   steps:
-    - name: Install poetry & deps
+    - name: Install apt deps
       shell: bash
       run: |
-        pipx install poetry || true
         sudo apt-get update
         sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext libkrb5-dev krb5-kdc krb5-user krb5-admin-server
-    - name: Setup python and restore poetry
+    - name: Install uv
+      uses: astral-sh/setup-uv@v5
+      with:
+        enable-cache: true
+    - name: Setup python
       uses: actions/setup-python@v5
       with:
         python-version-file: "pyproject.toml"
-        cache: "poetry"
+    - name: Install Python deps
+      shell: bash
+      run: uv sync --all-extras --dev --frozen
     - name: Setup node
       uses: actions/setup-node@v4
       with:
@@ -39,10 +44,9 @@ runs:
       run: |
         export PSQL_TAG=${{ inputs.postgresql_version }}
         docker compose -f .github/actions/setup/docker-compose.yml up -d
-        poetry sync
         cd web && npm ci
     - name: Generate config
-      shell: poetry run python {0}
+      shell: uv run python {0}
       run: |
         from authentik.lib.generators import generate_id
         from yaml import safe_dump

.github/dependabot.yml (vendored, 2 changes)
@@ -98,7 +98,7 @@ updates:
       prefix: "lifecycle/aws:"
     labels:
       - dependencies
-  - package-ecosystem: pip
+  - package-ecosystem: uv
     directory: "/"
     schedule:
       interval: daily

.github/workflows/ci-aws-cfn.yml (vendored, 2 changes)
@@ -33,7 +33,7 @@ jobs:
          npm ci
      - name: Check changes have been applied
        run: |
-          poetry run make aws-cfn
+          uv run make aws-cfn
          git diff --exit-code
  ci-aws-cfn-mark:
    if: always()

.github/workflows/ci-main.yml (vendored, 32 changes)
@@ -34,7 +34,7 @@ jobs:
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: run job
-        run: poetry run make ci-${{ matrix.job }}
+        run: uv run make ci-${{ matrix.job }}
  test-migrations:
    runs-on: ubuntu-latest
    steps:
@@ -42,7 +42,7 @@ jobs:
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: run migrations
-        run: poetry run python -m lifecycle.migrate
+        run: uv run python -m lifecycle.migrate
  test-make-seed:
    runs-on: ubuntu-latest
    steps:
@@ -69,19 +69,21 @@ jobs:
          fetch-depth: 0
      - name: checkout stable
        run: |
-          # Delete all poetry envs
-          rm -rf /home/runner/.cache/pypoetry
          # Copy current, latest config to local
+          # Temporarly comment the .github backup while migrating to uv
          cp authentik/lib/default.yml local.env.yml
-          cp -R .github ..
+          # cp -R .github ..
          cp -R scripts ..
          git checkout $(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
-          rm -rf .github/ scripts/
-          mv ../.github ../scripts .
+          # rm -rf .github/ scripts/
+          # mv ../.github ../scripts .
+          rm -rf scripts/
+          mv ../scripts .
      - name: Setup authentik env (stable)
        uses: ./.github/actions/setup
        with:
          postgresql_version: ${{ matrix.psql }}
+        continue-on-error: true
      - name: run migrations to stable
        run: poetry run python -m lifecycle.migrate
      - name: checkout current code
@@ -91,15 +93,13 @@ jobs:
          git reset --hard HEAD
          git clean -d -fx .
          git checkout $GITHUB_SHA
-          # Delete previous poetry env
-          rm -rf /home/runner/.cache/pypoetry/virtualenvs/*
      - name: Setup authentik env (ensure latest deps are installed)
        uses: ./.github/actions/setup
        with:
          postgresql_version: ${{ matrix.psql }}
      - name: migrate to latest
        run: |
-          poetry run python -m lifecycle.migrate
+          uv run python -m lifecycle.migrate
      - name: run tests
        env:
          # Test in the main database that we just migrated from the previous stable version
@@ -108,7 +108,7 @@ jobs:
          CI_RUN_ID: ${{ matrix.run_id }}
          CI_TOTAL_RUNS: "5"
        run: |
-          poetry run make ci-test
+          uv run make ci-test
  test-unittest:
    name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
    runs-on: ubuntu-latest
@@ -133,7 +133,7 @@ jobs:
          CI_RUN_ID: ${{ matrix.run_id }}
          CI_TOTAL_RUNS: "5"
        run: |
-          poetry run make ci-test
+          uv run make ci-test
      - if: ${{ always() }}
        uses: codecov/codecov-action@v5
        with:
@@ -156,8 +156,8 @@ jobs:
        uses: helm/kind-action@v1.12.0
      - name: run integration
        run: |
-          poetry run coverage run manage.py test tests/integration
-          poetry run coverage xml
+          uv run coverage run manage.py test tests/integration
+          uv run coverage xml
      - if: ${{ always() }}
        uses: codecov/codecov-action@v5
        with:
@@ -214,8 +214,8 @@ jobs:
          npm run build
      - name: run e2e
        run: |
-          poetry run coverage run manage.py test ${{ matrix.job.glob }}
-          poetry run coverage xml
+          uv run coverage run manage.py test ${{ matrix.job.glob }}
+          uv run coverage xml
      - if: ${{ always() }}
        uses: codecov/codecov-action@v5
        with:

.github/workflows/ci-outpost.yml (vendored, 2 changes)
@@ -29,7 +29,7 @@ jobs:
      - name: Generate API
        run: make gen-client-go
      - name: golangci-lint
-        uses: golangci/golangci-lint-action@v6
+        uses: golangci/golangci-lint-action@v7
        with:
          version: latest
          args: --timeout 5000s --verbose

@@ -2,7 +2,7 @@ name: authentik-gen-update-webauthn-mds
 on:
   workflow_dispatch:
   schedule:
-    - cron: '30 1 1,15 * *'
+    - cron: "30 1 1,15 * *"

 env:
   POSTGRES_DB: authentik
@@ -24,7 +24,7 @@ jobs:
          token: ${{ steps.generate_token.outputs.token }}
      - name: Setup authentik env
        uses: ./.github/actions/setup
-      - run: poetry run ak update_webauthn_mds
+      - run: uv run ak update_webauthn_mds
      - uses: peter-evans/create-pull-request@v7
        id: cpr
        with:

.github/workflows/publish-source-docs.yml (vendored, 4 changes)
@@ -21,8 +21,8 @@ jobs:
        uses: ./.github/actions/setup
      - name: generate docs
        run: |
-          poetry run make migrate
-          poetry run ak build_source_docs
+          uv run make migrate
+          uv run ak build_source_docs
      - name: Publish
        uses: netlify/actions/cli@master
        with:

.github/workflows/semgrep.yml (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
+name: authentik-semgrep
+on:
+  workflow_dispatch: {}
+  pull_request: {}
+  push:
+    branches:
+      - main
+      - master
+    paths:
+      - .github/workflows/semgrep.yml
+  schedule:
+    # random HH:MM to avoid a load spike on GitHub Actions at 00:00
+    - cron: '12 15 * * *'
+jobs:
+  semgrep:
+    name: semgrep/ci
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+    env:
+      SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }}
+    container:
+      image: semgrep/semgrep
+    if: (github.actor != 'dependabot[bot]')
+    steps:
+      - uses: actions/checkout@v4
+      - run: semgrep ci

@@ -36,10 +36,10 @@ jobs:
        run: make gen-client-ts
      - name: run extract
        run: |
-          poetry run make i18n-extract
+          uv run make i18n-extract
      - name: run compile
        run: |
-          poetry run ak compilemessages
+          uv run ak compilemessages
          make web-check-compile
      - name: Create Pull Request
        if: ${{ github.event_name != 'pull_request' }}

.vscode/tasks.json (vendored, 46 changes)
@@ -3,8 +3,13 @@
   "tasks": [
     {
       "label": "authentik/core: make",
-      "command": "poetry",
-      "args": ["run", "make", "lint-fix", "lint"],
+      "command": "uv",
+      "args": [
+        "run",
+        "make",
+        "lint-fix",
+        "lint"
+      ],
       "presentation": {
         "panel": "new"
       },
@@ -12,8 +17,12 @@
     },
     {
       "label": "authentik/core: run",
-      "command": "poetry",
-      "args": ["run", "ak", "server"],
+      "command": "uv",
+      "args": [
+        "run",
+        "ak",
+        "server"
+      ],
       "group": "build",
       "presentation": {
         "panel": "dedicated",
@@ -23,13 +32,17 @@
     {
       "label": "authentik/web: make",
       "command": "make",
-      "args": ["web"],
+      "args": [
+        "web"
+      ],
       "group": "build"
     },
     {
       "label": "authentik/web: watch",
       "command": "make",
-      "args": ["web-watch"],
+      "args": [
+        "web-watch"
+      ],
       "group": "build",
       "presentation": {
         "panel": "dedicated",
@@ -39,19 +52,26 @@
     {
       "label": "authentik: install",
       "command": "make",
-      "args": ["install", "-j4"],
+      "args": [
+        "install",
+        "-j4"
+      ],
       "group": "build"
     },
     {
       "label": "authentik/website: make",
       "command": "make",
-      "args": ["website"],
+      "args": [
+        "website"
+      ],
       "group": "build"
     },
     {
       "label": "authentik/website: watch",
       "command": "make",
-      "args": ["website-watch"],
+      "args": [
+        "website-watch"
+      ],
       "group": "build",
       "presentation": {
         "panel": "dedicated",
@@ -60,8 +80,12 @@
     },
     {
       "label": "authentik/api: generate",
-      "command": "poetry",
-      "args": ["run", "make", "gen"],
+      "command": "uv",
+      "args": [
+        "run",
+        "make",
+        "gen"
+      ],
       "group": "build"
     }
   ]

@@ -10,7 +10,7 @@ schemas/ @goauthentik/backend
 scripts/ @goauthentik/backend
 tests/ @goauthentik/backend
 pyproject.toml @goauthentik/backend
-poetry.lock @goauthentik/backend
+uv.lock @goauthentik/backend
 go.mod @goauthentik/backend
 go.sum @goauthentik/backend
 # Infrastructure

@@ -5,7 +5,7 @@
 We as members, contributors, and leaders pledge to make participation in our
 community a harassment-free experience for everyone, regardless of age, body
 size, visible or invisible disability, ethnicity, sex characteristics, gender
-identity and expression, level of experience, education, socio-economic status,
+identity and expression, level of experience, education, socioeconomic status,
 nationality, personal appearance, race, religion, or sexual identity
 and orientation.

Dockerfile (85 changes)
@@ -93,53 +93,59 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     mkdir -p /usr/share/GeoIP && \
     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

-# Stage 5: Python dependencies
-FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps
+# Stage 5: Download uv
+FROM ghcr.io/astral-sh/uv:0.6.10 AS uv
+# Stage 6: Base python image
+FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-base
+
+ENV VENV_PATH="/ak-root/.venv" \
+    PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
+    UV_COMPILE_BYTECODE=1 \
+    UV_LINK_MODE=copy \
+    UV_NATIVE_TLS=1 \
+    UV_PYTHON_DOWNLOADS=0
+
+WORKDIR /ak-root/
+
+COPY --from=uv /uv /uvx /bin/
+
+# Stage 7: Python dependencies
+FROM python-base AS python-deps
+
 ARG TARGETARCH
 ARG TARGETVARIANT

-WORKDIR /ak-root/poetry
-
-ENV VENV_PATH="/ak-root/venv" \
-    POETRY_VIRTUALENVS_CREATE=false \
-    PATH="/ak-root/venv/bin:$PATH"
-
 RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache

+ENV PATH="/root/.cargo/bin:$PATH"
+
 RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
     apt-get update && \
     # Required for installing pip packages
-    apt-get install -y --no-install-recommends build-essential pkg-config libpq-dev libkrb5-dev
-
-RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
-    --mount=type=bind,target=./poetry.lock,src=./poetry.lock \
-    --mount=type=cache,target=/root/.cache/pip \
-    --mount=type=cache,target=/root/.cache/pypoetry \
-    pip install --no-cache cffi && \
-    apt-get update && \
     apt-get install -y --no-install-recommends \
-    build-essential libffi-dev \
-    # Required for cryptography
-    curl pkg-config \
-    # Required for lxml
-    libxslt-dev zlib1g-dev \
-    # Required for xmlsec
-    libltdl-dev \
-    # Required for kadmin
-    sccache clang && \
-    curl https://sh.rustup.rs -sSf | sh -s -- -y && \
-    . "$HOME/.cargo/env" && \
-    python -m venv /ak-root/venv/ && \
-    bash -c "source ${VENV_PATH}/bin/activate && \
-    pip3 install --upgrade pip poetry && \
-    poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \
-    poetry install --only=main --no-ansi --no-interaction --no-root && \
-    pip uninstall cryptography -y && \
-    poetry install --only=main --no-ansi --no-interaction --no-root"
+    # Build essentials
+    build-essential pkg-config libffi-dev git \
+    # cryptography
+    curl \
+    # libxml
+    libxslt-dev zlib1g-dev \
+    # postgresql
+    libpq-dev \
+    # python-kadmin-rs
+    clang libkrb5-dev sccache \
+    # xmlsec
+    libltdl-dev && \
+    curl https://sh.rustup.rs -sSf | sh -s -- -y

-# Stage 6: Run
-FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image
+ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec"
+
+RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \
+    --mount=type=bind,target=uv.lock,src=uv.lock \
+    --mount=type=cache,target=/root/.cache/uv \
+    uv sync --frozen --no-install-project --no-dev
+
+# Stage 8: Run
+FROM python-base AS final-image

 ARG VERSION
 ARG GIT_BUILD_HASH
@@ -171,7 +177,7 @@ RUN apt-get update && \

 COPY ./authentik/ /authentik
 COPY ./pyproject.toml /
-COPY ./poetry.lock /
+COPY ./uv.lock /
 COPY ./schemas /schemas
 COPY ./locale /locale
 COPY ./tests /tests
@@ -180,7 +186,7 @@ COPY ./blueprints /blueprints
 COPY ./lifecycle/ /lifecycle
 COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
 COPY --from=go-builder /go/authentik /bin/authentik
-COPY --from=python-deps /ak-root/venv /ak-root/venv
+COPY --from=python-deps /ak-root/.venv /ak-root/.venv
 COPY --from=web-builder /work/web/dist/ /web/dist/
 COPY --from=web-builder /work/web/authentik/ /web/authentik/
 COPY --from=website-builder /work/website/build/ /website/help/
@@ -191,9 +197,6 @@ USER 1000
 ENV TMPDIR=/dev/shm/ \
     PYTHONDONTWRITEBYTECODE=1 \
     PYTHONUNBUFFERED=1 \
-    PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
-    VENV_PATH="/ak-root/venv" \
-    POETRY_VIRTUALENVS_CREATE=false \
     GOFIPS=1

 HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]

Makefile (55 changes)
@@ -4,7 +4,7 @@
 PWD = $(shell pwd)
 UID = $(shell id -u)
 GID = $(shell id -g)
-NPM_VERSION = $(shell poetry run python -m scripts.generate_semver)
+NPM_VERSION = $(shell python -m scripts.generate_semver)
 PY_SOURCES = authentik tests scripts lifecycle .github
 DOCKER_IMAGE ?= "authentik:test"

@@ -12,9 +12,9 @@ GEN_API_TS = "gen-ts-api"
 GEN_API_PY = "gen-py-api"
 GEN_API_GO = "gen-go-api"

-pg_user := $(shell poetry run python -m authentik.lib.config postgresql.user 2>/dev/null)
-pg_host := $(shell poetry run python -m authentik.lib.config postgresql.host 2>/dev/null)
-pg_name := $(shell poetry run python -m authentik.lib.config postgresql.name 2>/dev/null)
+pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null)
+pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null)
+pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null)

 all: lint-fix lint test gen web ## Lint, build, and test everything

@@ -32,34 +32,37 @@ go-test:
     go test -timeout 0 -v -race -cover ./...

 test: ## Run the server tests and produce a coverage report (locally)
-    poetry run coverage run manage.py test --keepdb authentik
-    poetry run coverage html
-    poetry run coverage report
+    uv run coverage run manage.py test --keepdb authentik
+    uv run coverage html
+    uv run coverage report

 lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors.
-    poetry run black $(PY_SOURCES)
-    poetry run ruff check --fix $(PY_SOURCES)
+    uv run black $(PY_SOURCES)
+    uv run ruff check --fix $(PY_SOURCES)

 lint-codespell: ## Reports spelling errors.
-    poetry run codespell -w
+    uv run codespell -w

 lint: ## Lint the python and golang sources
-    poetry run bandit -c pyproject.toml -r $(PY_SOURCES)
+    uv run bandit -c pyproject.toml -r $(PY_SOURCES)
     golangci-lint run -v

 core-install:
-    poetry install
+    uv sync --frozen

 migrate: ## Run the Authentik Django server's migrations
-    poetry run python -m lifecycle.migrate
+    uv run python -m lifecycle.migrate

 i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service

 aws-cfn:
     cd lifecycle/aws && npm run aws-cfn

+run: ## Run the main authentik server process
+    uv run ak server
+
 core-i18n-extract:
-    poetry run ak makemessages \
+    uv run ak makemessages \
         --add-location file \
         --no-obsolete \
         --ignore web \
@@ -90,11 +93,11 @@ gen-build: ## Extract the schema from the database
     AUTHENTIK_DEBUG=true \
     AUTHENTIK_TENANTS__ENABLED=true \
     AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
-    poetry run ak make_blueprint_schema > blueprints/schema.json
+    uv run ak make_blueprint_schema > blueprints/schema.json
     AUTHENTIK_DEBUG=true \
     AUTHENTIK_TENANTS__ENABLED=true \
     AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
-    poetry run ak spectacular --file schema.yml
+    uv run ak spectacular --file schema.yml

 gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
     git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
@@ -145,7 +148,7 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python
     docker run \
         --rm -v ${PWD}:/local \
         --user ${UID}:${GID} \
-        docker.io/openapitools/openapi-generator-cli:v7.4.0 generate \
+        docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
         -i /local/schema.yml \
         -g python \
         -o /local/${GEN_API_PY} \
@@ -173,7 +176,7 @@ gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
     rm -rf ./${GEN_API_GO}/config.yaml ./${GEN_API_GO}/templates/

 gen-dev-config: ## Generate a local development config file
-    poetry run scripts/generate_config.py
+    uv run scripts/generate_config.py

 gen: gen-build gen-client-ts

@@ -254,21 +257,21 @@ ci--meta-debug:
     node --version

 ci-black: ci--meta-debug
-    poetry run black --check $(PY_SOURCES)
+    uv run black --check $(PY_SOURCES)

 ci-ruff: ci--meta-debug
-    poetry run ruff check $(PY_SOURCES)
+    uv run ruff check $(PY_SOURCES)

 ci-codespell: ci--meta-debug
-    poetry run codespell -s
+    uv run codespell -s

 ci-bandit: ci--meta-debug
-    poetry run bandit -r $(PY_SOURCES)
+    uv run bandit -r $(PY_SOURCES)

 ci-pending-migrations: ci--meta-debug
-    poetry run ak makemigrations --check
+    uv run ak makemigrations --check

 ci-test: ci--meta-debug
-    poetry run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
-    poetry run coverage report
-    poetry run coverage xml
+    uv run coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
+    uv run coverage report
+    uv run coverage xml

@@ -2,7 +2,7 @@

 from os import environ

-__version__ = "2025.2.1"
+__version__ = "2025.2.2"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

@@ -59,7 +59,7 @@ class SystemInfoSerializer(PassiveSerializer):
            if not isinstance(value, str):
                continue
            actual_value = value
-            if raw_session in actual_value:
+            if raw_session is not None and raw_session in actual_value:
                actual_value = actual_value.replace(
                    raw_session, SafeExceptionReporterFilter.cleansed_substitute
                )

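Note on the `raw_session` guard above: when a request carries no session cookie, `raw_session` is `None`, and using it as the left operand of `in` against a string raises `TypeError`. A minimal standalone sketch of the failure mode and the guarded version (illustrative only, not authentik code):

```python
def scrub(value: str, raw_session: str | None) -> str:
    """Replace the raw session key in value, tolerating requests without a session."""
    cleansed = "********************"
    # Without the None check, `raw_session in value` raises:
    # TypeError: 'in <string>' requires string as left operand, not NoneType
    if raw_session is not None and raw_session in value:
        return value.replace(raw_session, cleansed)
    return value

print(scrub("sessionid=abc123", "abc123"))  # sessionid=********************
print(scrub("sessionid=abc123", None))      # unchanged, and no TypeError
```
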
@@ -49,6 +49,8 @@ class BrandSerializer(ModelSerializer):
            "branding_title",
            "branding_logo",
            "branding_favicon",
+            "branding_custom_css",
+            "branding_default_flow_background",
            "flow_authentication",
            "flow_invalidation",
            "flow_recovery",
@@ -86,6 +88,7 @@ class CurrentBrandSerializer(PassiveSerializer):
    branding_title = CharField()
    branding_logo = CharField(source="branding_logo_url")
    branding_favicon = CharField(source="branding_favicon_url")
+    branding_custom_css = CharField()
    ui_footer_links = ListField(
        child=FooterLinkSerializer(),
        read_only=True,
@@ -125,6 +128,7 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
        "branding_title",
        "branding_logo",
        "branding_favicon",
+        "branding_default_flow_background",
        "flow_authentication",
        "flow_invalidation",
        "flow_recovery",

@@ -0,0 +1,35 @@
+# Generated by Django 5.0.12 on 2025-02-22 01:51
+
+from pathlib import Path
+from django.db import migrations, models
+from django.apps.registry import Apps
+
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+
+
+def migrate_custom_css(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
+    Brand = apps.get_model("authentik_brands", "brand")
+
+    db_alias = schema_editor.connection.alias
+
+    path = Path("/web/dist/custom.css")
+    if not path.exists():
+        return
+    css = path.read_text()
+    Brand.objects.using(db_alias).update(branding_custom_css=css)
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_brands", "0007_brand_default_application"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="brand",
+            name="branding_custom_css",
+            field=models.TextField(blank=True, default=""),
+        ),
+        migrations.RunPython(migrate_custom_css),
+    ]

@@ -0,0 +1,18 @@
+# Generated by Django 5.0.13 on 2025-03-19 22:54
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_brands", "0008_brand_branding_custom_css"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="brand",
+            name="branding_default_flow_background",
+            field=models.TextField(default="/static/dist/assets/images/flow_background.jpg"),
+        ),
+    ]

@@ -33,6 +33,10 @@ class Brand(SerializerModel):

    branding_logo = models.TextField(default="/static/dist/assets/icons/icon_left_brand.svg")
    branding_favicon = models.TextField(default="/static/dist/assets/icons/icon.png")
+    branding_custom_css = models.TextField(default="", blank=True)
+    branding_default_flow_background = models.TextField(
+        default="/static/dist/assets/images/flow_background.jpg"
+    )

    flow_authentication = models.ForeignKey(
        Flow, null=True, on_delete=models.SET_NULL, related_name="brand_authentication"
@@ -84,6 +88,12 @@ class Brand(SerializerModel):
            return CONFIG.get("web.path", "/")[:-1] + self.branding_favicon
        return self.branding_favicon

+    def branding_default_flow_background_url(self) -> str:
+        """Get branding_default_flow_background with the correct prefix"""
+        if self.branding_default_flow_background.startswith("/static"):
+            return CONFIG.get("web.path", "/")[:-1] + self.branding_default_flow_background
+        return self.branding_default_flow_background
+
    @property
    def serializer(self) -> Serializer:
        from authentik.brands.api import BrandSerializer

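For context on the `CONFIG.get("web.path", "/")[:-1]` prefixing used by `branding_default_flow_background_url` above: the configured web path ends with a slash, so dropping its last character yields the prefix to put in front of rooted `/static/...` paths, while absolute URLs pass through untouched. A standalone sketch of that behavior (plain Python, not the real CONFIG object):

```python
def prefixed(path: str, web_path: str = "/") -> str:
    """Mimic the URL prefix logic: only rooted /static paths get the web.path prefix."""
    if path.startswith("/static"):
        # web_path always ends in "/": "/" -> "", "/authentik/" -> "/authentik"
        return web_path[:-1] + path
    return path

assert prefixed("/static/dist/assets/images/flow_background.jpg") == (
    "/static/dist/assets/images/flow_background.jpg"
)
assert prefixed("/static/dist/assets/images/flow_background.jpg", "/authentik/") == (
    "/authentik/static/dist/assets/images/flow_background.jpg"
)
assert prefixed("https://goauthentik.io/img/icon.png", "/authentik/") == (
    "https://goauthentik.io/img/icon.png"
)
```
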
@@ -24,6 +24,7 @@ class TestBrands(APITestCase):
                "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
                "branding_favicon": "/static/dist/assets/icons/icon.png",
                "branding_title": "authentik",
+                "branding_custom_css": "",
                "matched_domain": brand.domain,
                "ui_footer_links": [],
                "ui_theme": Themes.AUTOMATIC,
@@ -43,6 +44,7 @@ class TestBrands(APITestCase):
                "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
                "branding_favicon": "/static/dist/assets/icons/icon.png",
                "branding_title": "custom",
+                "branding_custom_css": "",
                "matched_domain": "bar.baz",
                "ui_footer_links": [],
                "ui_theme": Themes.AUTOMATIC,
@@ -59,6 +61,7 @@ class TestBrands(APITestCase):
                "branding_logo": "/static/dist/assets/icons/icon_left_brand.svg",
                "branding_favicon": "/static/dist/assets/icons/icon.png",
                "branding_title": "authentik",
+                "branding_custom_css": "",
                "matched_domain": "fallback",
                "ui_footer_links": [],
                "ui_theme": Themes.AUTOMATIC,
@@ -121,3 +124,27 @@ class TestBrands(APITestCase):
                "subject": None,
            },
        )
+
+    def test_branding_url(self):
+        """Test branding attributes return correct values"""
+        brand = create_test_brand()
+        brand.branding_default_flow_background = "https://goauthentik.io/img/icon.png"
+        brand.branding_favicon = "https://goauthentik.io/img/icon.png"
+        brand.branding_logo = "https://goauthentik.io/img/icon.png"
+        brand.save()
+        self.assertEqual(
+            brand.branding_default_flow_background_url(), "https://goauthentik.io/img/icon.png"
+        )
+        self.assertJSONEqual(
+            self.client.get(reverse("authentik_api:brand-current")).content.decode(),
+            {
+                "branding_logo": "https://goauthentik.io/img/icon.png",
+                "branding_favicon": "https://goauthentik.io/img/icon.png",
+                "branding_title": "authentik",
+                "branding_custom_css": "",
+                "matched_domain": brand.domain,
+                "ui_footer_links": [],
+                "ui_theme": Themes.AUTOMATIC,
+                "default_locale": "",
+            },
+        )

@@ -6,7 +6,7 @@ from django.utils.translation import gettext_lazy as _
 from django_filters.filters import BooleanFilter
 from django_filters.filterset import FilterSet
 from rest_framework import mixins
-from rest_framework.fields import ReadOnlyField, SerializerMethodField
+from rest_framework.fields import SerializerMethodField
 from rest_framework.viewsets import GenericViewSet

 from authentik.core.api.object_types import TypesMixin
@@ -18,10 +18,10 @@ from authentik.core.models import Provider
 class ProviderSerializer(ModelSerializer, MetaNameSerializer):
    """Provider Serializer"""

-    assigned_application_slug = ReadOnlyField(source="application.slug")
-    assigned_application_name = ReadOnlyField(source="application.name")
-    assigned_backchannel_application_slug = ReadOnlyField(source="backchannel_application.slug")
-    assigned_backchannel_application_name = ReadOnlyField(source="backchannel_application.name")
+    assigned_application_slug = SerializerMethodField()
+    assigned_application_name = SerializerMethodField()
+    assigned_backchannel_application_slug = SerializerMethodField()
+    assigned_backchannel_application_name = SerializerMethodField()

    component = SerializerMethodField()

@@ -31,6 +31,38 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
            return ""
        return obj.component

+    def get_assigned_application_slug(self, obj: Provider) -> str:
+        """Get application slug, return empty string if no application exists"""
+        try:
+            return obj.application.slug
+        except Provider.application.RelatedObjectDoesNotExist:
+            return ""
+
+    def get_assigned_application_name(self, obj: Provider) -> str:
+        """Get application name, return empty string if no application exists"""
+        try:
+            return obj.application.name
+        except Provider.application.RelatedObjectDoesNotExist:
+            return ""
+
+    def get_assigned_backchannel_application_slug(self, obj: Provider) -> str:
+        """Get backchannel application slug.
+
+        Returns an empty string if no backchannel application exists.
+        """
+        if not obj.backchannel_application:
+            return ""
+        return obj.backchannel_application.slug or ""
+
+    def get_assigned_backchannel_application_name(self, obj: Provider) -> str:
+        """Get backchannel application name.
+
+        Returns an empty string if no backchannel application exists.
+        """
+        if not obj.backchannel_application:
+            return ""
+        return obj.backchannel_application.name or ""
+
    class Meta:
        model = Provider
        fields = [

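Background on the `ReadOnlyField` to `SerializerMethodField` switch above: the reverse one-to-one accessor `provider.application` raises `RelatedObjectDoesNotExist` when no application is assigned, rather than returning `None`, so a dotted `source="application.slug"` fails for orphaned providers. A standalone sketch of the pattern (plain Python stand-ins, not the DRF classes):

```python
class RelatedObjectDoesNotExist(Exception):
    """Stand-in for Django's reverse one-to-one accessor error."""

class OrphanProvider:
    """Provider without an assigned application: attribute access raises."""
    @property
    def application(self):
        raise RelatedObjectDoesNotExist("Provider has no application.")

def assigned_application_slug(provider) -> str:
    # Mirrors the method-field approach: catch the error and fall back to "".
    try:
        return provider.application.slug
    except RelatedObjectDoesNotExist:
        return ""

print(repr(assigned_application_slug(OrphanProvider())))  # '' instead of an exception
```
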
@@ -5,6 +5,7 @@ from collections.abc import Iterable
 from drf_spectacular.utils import OpenApiResponse, extend_schema
 from rest_framework import mixins
 from rest_framework.decorators import action
+from rest_framework.exceptions import ValidationError
 from rest_framework.fields import CharField, ReadOnlyField, SerializerMethodField
 from rest_framework.parsers import MultiPartParser
 from rest_framework.request import Request
@@ -154,6 +155,17 @@ class SourceViewSet(
            matching_sources.append(source_settings.validated_data)
        return Response(matching_sources)

+    def destroy(self, request: Request, *args, **kwargs):
+        """Prevent deletion of built-in sources"""
+        instance: Source = self.get_object()
+
+        if instance.managed == Source.MANAGED_INBUILT:
+            raise ValidationError(
+                {"detail": "Built-in sources cannot be deleted"}, code="protected"
+            )
+
+        return super().destroy(request, *args, **kwargs)
+

 class UserSourceConnectionSerializer(SourceSerializer):
    """User source connection"""

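The `destroy()` override above turns deletion of the managed built-in source into a validation error instead of a 204. A hedged usage sketch of what an API client would see (assumes a reachable authentik instance and a token allowed to delete sources, and that the built-in source keeps its `authentik-built-in` slug; verify the endpoint path against your instance's API schema):

```python
import requests

AUTHENTIK_URL = "https://authentik.example.com"  # assumption: your instance
TOKEN = "<api-token>"                            # assumption: admin API token

resp = requests.delete(
    f"{AUTHENTIK_URL}/api/v3/sources/all/authentik-built-in/",
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
# Expected after this change: HTTP 400 with
# {"detail": "Built-in sources cannot be deleted"}; other sources still delete normally.
print(resp.status_code, resp.text)
```
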
@@ -32,5 +32,5 @@ class AuthentikCoreConfig(ManagedAppConfig):
                "name": "authentik Built-in",
                "slug": "authentik-built-in",
            },
-            managed="goauthentik.io/sources/inbuilt",
+            managed=Source.MANAGED_INBUILT,
        )

@@ -678,6 +678,8 @@ class SourceGroupMatchingModes(models.TextChoices):
 class Source(ManagedModel, SerializerModel, PolicyBindingModel):
    """Base Authentication source, i.e. an OAuth Provider, SAML Remote or LDAP Server"""

+    MANAGED_INBUILT = "goauthentik.io/sources/inbuilt"
+
    name = models.TextField(help_text=_("Source's display Name."))
    slug = models.SlugField(help_text=_("Internal source name, used in URLs."), unique=True)

@@ -48,6 +48,7 @@ LOGGER = get_logger()

 PLAN_CONTEXT_SOURCE_GROUPS = "source_groups"
 SESSION_KEY_SOURCE_FLOW_STAGES = "authentik/flows/source_flow_stages"
+SESSION_KEY_SOURCE_FLOW_CONTEXT = "authentik/flows/source_flow_context"
 SESSION_KEY_OVERRIDE_FLOW_TOKEN = "authentik/flows/source_override_flow_token"  # nosec

@@ -261,6 +262,7 @@ class SourceFlowManager:
            plan.append_stage(stage)
        for stage in self.request.session.get(SESSION_KEY_SOURCE_FLOW_STAGES, []):
            plan.append_stage(stage)
+        plan.context.update(self.request.session.get(SESSION_KEY_SOURCE_FLOW_CONTEXT, {}))
        return plan.to_redirect(self.request, flow)

    def handle_auth(

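The new `SESSION_KEY_SOURCE_FLOW_CONTEXT` above gives code that runs before the source flow is planned a place to stash extra plan context in the session; `SourceFlowManager` then merges it into `plan.context`. A hypothetical sketch of a caller using it (illustrative only, not part of this diff; the helper name and the `invitation_token` key are made up):

```python
from authentik.core.sources.flow_manager import SESSION_KEY_SOURCE_FLOW_CONTEXT

def remember_source_context(request, invitation_token: str) -> None:
    """Hypothetical helper: stash context before redirecting the user into a source login."""
    context = request.session.get(SESSION_KEY_SOURCE_FLOW_CONTEXT, {})
    context["invitation_token"] = invitation_token  # later available via plan.context
    request.session[SESSION_KEY_SOURCE_FLOW_CONTEXT] = context
```
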
@@ -16,7 +16,7 @@
    {% block head_before %}
    {% endblock %}
    <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
-    <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject>
+    <style>{{ brand.branding_custom_css }}</style>
    <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
    <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>
    {% block head %}

@@ -4,7 +4,7 @@
 {% load i18n %}

 {% block head_before %}
-<link rel="prefetch" href="{% static 'dist/assets/images/flow_background.jpg' %}" />
+<link rel="prefetch" href="{{ request.brand.branding_default_flow_background_url }}" />
 <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}">
 <link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
 {% include "base/header_js.html" %}
@@ -13,7 +13,7 @@
 {% block head %}
 <style>
 :root {
-    --ak-flow-background: url("{% static 'dist/assets/images/flow_background.jpg' %}");
+    --ak-flow-background: url("{{ request.brand.branding_default_flow_background_url }}");
    --pf-c-background-image--BackgroundImage: var(--ak-flow-background);
    --pf-c-background-image--BackgroundImage-2x: var(--ak-flow-background);
    --pf-c-background-image--BackgroundImage--sm: var(--ak-flow-background);

@@ -133,6 +133,8 @@ class TestApplicationsAPI(APITestCase):
                "provider_obj": {
                    "assigned_application_name": "allowed",
                    "assigned_application_slug": "allowed",
+                    "assigned_backchannel_application_name": "",
+                    "assigned_backchannel_application_slug": "",
                    "authentication_flow": None,
                    "invalidation_flow": None,
                    "authorization_flow": str(self.provider.authorization_flow.pk),
@@ -186,6 +188,8 @@ class TestApplicationsAPI(APITestCase):
                "provider_obj": {
                    "assigned_application_name": "allowed",
                    "assigned_application_slug": "allowed",
+                    "assigned_backchannel_application_name": "",
+                    "assigned_backchannel_application_slug": "",
                    "authentication_flow": None,
                    "invalidation_flow": None,
                    "authorization_flow": str(self.provider.authorization_flow.pk),

@@ -3,7 +3,8 @@
 from django.urls import reverse
 from rest_framework.test import APITestCase

-from authentik.core.models import PropertyMapping
+from authentik.core.api.providers import ProviderSerializer
+from authentik.core.models import Application, PropertyMapping, Provider
 from authentik.core.tests.utils import create_test_admin_user


@@ -24,3 +25,51 @@ class TestProvidersAPI(APITestCase):
             reverse("authentik_api:provider-types"),
         )
         self.assertEqual(response.status_code, 200)
+
+    def test_provider_serializer_without_application(self):
+        """Test that Provider serializer handles missing application gracefully"""
+        # Create a provider without an application
+        provider = Provider.objects.create(name="test-provider")
+
+        serializer = ProviderSerializer(instance=provider)
+        serialized_data = serializer.data
+
+        # Check that fields return empty strings when no application exists
+        self.assertEqual(serialized_data["assigned_application_slug"], "")
+        self.assertEqual(serialized_data["assigned_application_name"], "")
+        self.assertEqual(serialized_data["assigned_backchannel_application_slug"], "")
+        self.assertEqual(serialized_data["assigned_backchannel_application_name"], "")
+
+    def test_provider_serializer_with_application(self):
+        """Test that Provider serializer correctly includes application data"""
+        # Create an application
+        app = Application.objects.create(name="Test App", slug="test-app")
+
+        # Create a provider with an application
+        provider = Provider.objects.create(name="test-provider-with-app")
+        app.provider = provider
+        app.save()
+
+        serializer = ProviderSerializer(instance=provider)
+        serialized_data = serializer.data
+
+        # Check that fields return correct values when application exists
+        self.assertEqual(serialized_data["assigned_application_slug"], "test-app")
+        self.assertEqual(serialized_data["assigned_application_name"], "Test App")
+        self.assertEqual(serialized_data["assigned_backchannel_application_slug"], "")
+        self.assertEqual(serialized_data["assigned_backchannel_application_name"], "")
+
+    def test_provider_api_response(self):
+        """Test that the API response includes empty strings for missing applications"""
+        # Create a provider without an application
+        provider = Provider.objects.create(name="test-provider-api")
+
+        response = self.client.get(
+            reverse("authentik_api:provider-detail", kwargs={"pk": provider.pk}),
+        )
+
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(response.data["assigned_application_slug"], "")
+        self.assertEqual(response.data["assigned_application_name"], "")
+        self.assertEqual(response.data["assigned_backchannel_application_slug"], "")
+        self.assertEqual(response.data["assigned_backchannel_application_name"], "")
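These tests imply that the provider serializer's assigned-application fields now fall back to empty strings instead of raising when no application is attached. The actual serializer change is not shown in this diff; the following is only a minimal sketch of how such a guard could look, assuming method fields named after the API output above:

```python
from rest_framework.serializers import ModelSerializer, SerializerMethodField

from authentik.core.models import Application, Provider


class ProviderSerializerSketch(ModelSerializer):
    """Hypothetical illustration only; not the real ProviderSerializer."""

    assigned_application_slug = SerializerMethodField()

    class Meta:
        model = Provider
        fields = ["pk", "name", "assigned_application_slug"]

    def get_assigned_application_slug(self, instance: Provider) -> str:
        # Return "" instead of raising when the provider has no application.
        try:
            return instance.application.slug
        except Application.DoesNotExist:
            return ""
```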
@@ -11,13 +11,14 @@ from guardian.shortcuts import get_anonymous_user
 from authentik.core.models import Source, User
 from authentik.core.sources.flow_manager import (
     SESSION_KEY_OVERRIDE_FLOW_TOKEN,
+    SESSION_KEY_SOURCE_FLOW_CONTEXT,
     SESSION_KEY_SOURCE_FLOW_STAGES,
 )
 from authentik.core.types import UILoginButton
 from authentik.enterprise.stages.source.models import SourceStage
 from authentik.flows.challenge import Challenge, ChallengeResponse
 from authentik.flows.models import FlowToken, in_memory_stage
-from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED
+from authentik.flows.planner import PLAN_CONTEXT_IS_REDIRECTED, PLAN_CONTEXT_IS_RESTORED
 from authentik.flows.stage import ChallengeStageView, StageView
 from authentik.lib.utils.time import timedelta_from_string

@@ -53,6 +54,9 @@ class SourceStageView(ChallengeStageView):
         resume_token = self.create_flow_token()
         self.request.session[SESSION_KEY_OVERRIDE_FLOW_TOKEN] = resume_token
         self.request.session[SESSION_KEY_SOURCE_FLOW_STAGES] = [in_memory_stage(SourceStageFinal)]
+        self.request.session[SESSION_KEY_SOURCE_FLOW_CONTEXT] = {
+            PLAN_CONTEXT_IS_REDIRECTED: self.executor.flow,
+        }
         return self.login_button.challenge

     def create_flow_token(self) -> FlowToken:
@@ -50,7 +50,8 @@ class NotificationTransportSerializer(ModelSerializer):
             "mode",
             "mode_verbose",
             "webhook_url",
-            "webhook_mapping",
+            "webhook_mapping_body",
+            "webhook_mapping_headers",
             "send_once",
         ]

@@ -0,0 +1,43 @@
+# Generated by Django 5.0.13 on 2025-03-20 19:54
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_events", "0008_event_authentik_e_expires_8c73a8_idx_and_more"),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name="notificationtransport",
+            old_name="webhook_mapping",
+            new_name="webhook_mapping_body",
+        ),
+        migrations.AlterField(
+            model_name="notificationtransport",
+            name="webhook_mapping_body",
+            field=models.ForeignKey(
+                default=None,
+                help_text="Customize the body of the request. Mapping should return data that is JSON-serializable.",
+                null=True,
+                on_delete=django.db.models.deletion.SET_DEFAULT,
+                related_name="+",
+                to="authentik_events.notificationwebhookmapping",
+            ),
+        ),
+        migrations.AddField(
+            model_name="notificationtransport",
+            name="webhook_mapping_headers",
+            field=models.ForeignKey(
+                default=None,
+                help_text="Configure additional headers to be sent. Mapping should return a dictionary of key-value pairs",
+                null=True,
+                on_delete=django.db.models.deletion.SET_DEFAULT,
+                related_name="+",
+                to="authentik_events.notificationwebhookmapping",
+            ),
+        ),
+    ]
@@ -336,8 +336,27 @@ class NotificationTransport(SerializerModel):
     mode = models.TextField(choices=TransportMode.choices, default=TransportMode.LOCAL)

     webhook_url = models.TextField(blank=True, validators=[DomainlessURLValidator()])
-    webhook_mapping = models.ForeignKey(
-        "NotificationWebhookMapping", on_delete=models.SET_DEFAULT, null=True, default=None
+    webhook_mapping_body = models.ForeignKey(
+        "NotificationWebhookMapping",
+        on_delete=models.SET_DEFAULT,
+        null=True,
+        default=None,
+        related_name="+",
+        help_text=_(
+            "Customize the body of the request. "
+            "Mapping should return data that is JSON-serializable."
+        ),
+    )
+    webhook_mapping_headers = models.ForeignKey(
+        "NotificationWebhookMapping",
+        on_delete=models.SET_DEFAULT,
+        null=True,
+        default=None,
+        related_name="+",
+        help_text=_(
+            "Configure additional headers to be sent. "
+            "Mapping should return a dictionary of key-value pairs"
+        ),
     )
     send_once = models.BooleanField(
         default=False,
@@ -360,8 +379,8 @@ class NotificationTransport(SerializerModel):

     def send_local(self, notification: "Notification") -> list[str]:
         """Local notification delivery"""
-        if self.webhook_mapping:
-            self.webhook_mapping.evaluate(
+        if self.webhook_mapping_body:
+            self.webhook_mapping_body.evaluate(
                 user=notification.user,
                 request=None,
                 notification=notification,
@@ -380,9 +399,18 @@ class NotificationTransport(SerializerModel):
         if notification.event and notification.event.user:
             default_body["event_user_email"] = notification.event.user.get("email", None)
             default_body["event_user_username"] = notification.event.user.get("username", None)
-        if self.webhook_mapping:
+        headers = {}
+        if self.webhook_mapping_body:
             default_body = sanitize_item(
-                self.webhook_mapping.evaluate(
+                self.webhook_mapping_body.evaluate(
+                    user=notification.user,
+                    request=None,
+                    notification=notification,
+                )
+            )
+        if self.webhook_mapping_headers:
+            headers = sanitize_item(
+                self.webhook_mapping_headers.evaluate(
                     user=notification.user,
                     request=None,
                     notification=notification,
@@ -392,6 +420,7 @@ class NotificationTransport(SerializerModel):
             response = get_http_session().post(
                 self.webhook_url,
                 json=default_body,
+                headers=headers,
             )
             response.raise_for_status()
         except RequestException as exc:
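Taken together, the renamed `webhook_mapping_body` and the new `webhook_mapping_headers` let a webhook transport evaluate two property mappings: one producing the JSON body, one producing extra request headers. A minimal usage sketch, with object names, URL, and expression bodies purely illustrative (the expression context provides `user`, `request`, and `notification`, per the `evaluate()` calls above):

```python
from authentik.events.models import (
    NotificationTransport,
    NotificationWebhookMapping,
    TransportMode,
)

# Mapping for the request body: must return JSON-serializable data.
body_mapping = NotificationWebhookMapping.objects.create(
    name="webhook-body",
    expression='return {"text": f"{notification.severity}: {notification.body}"}',
)
# Mapping for extra headers: must return a dictionary of key-value pairs.
header_mapping = NotificationWebhookMapping.objects.create(
    name="webhook-headers",
    expression='return {"Authorization": "Bearer some-static-token"}',
)

NotificationTransport.objects.create(
    name="chat-webhook",
    mode=TransportMode.WEBHOOK,
    webhook_url="https://chat.example.com/hooks/incoming",
    webhook_mapping_body=body_mapping,
    webhook_mapping_headers=header_mapping,
)
```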
@@ -120,7 +120,7 @@ class TestEventsNotifications(APITestCase):
         )

         transport = NotificationTransport.objects.create(
-            name=generate_id(), webhook_mapping=mapping, mode=TransportMode.LOCAL
+            name=generate_id(), webhook_mapping_body=mapping, mode=TransportMode.LOCAL
         )
         NotificationRule.objects.filter(name__startswith="default").delete()
         trigger = NotificationRule.objects.create(name=generate_id(), group=self.group)
@@ -60,20 +60,25 @@ class TestEventTransports(TestCase):

     def test_transport_webhook_mapping(self):
         """Test webhook transport with custom mapping"""
-        mapping = NotificationWebhookMapping.objects.create(
+        mapping_body = NotificationWebhookMapping.objects.create(
             name=generate_id(), expression="return request.user"
         )
+        mapping_headers = NotificationWebhookMapping.objects.create(
+            name=generate_id(), expression="""return {"foo": "bar"}"""
+        )
         transport: NotificationTransport = NotificationTransport.objects.create(
             name=generate_id(),
             mode=TransportMode.WEBHOOK,
             webhook_url="http://localhost:1234/test",
-            webhook_mapping=mapping,
+            webhook_mapping_body=mapping_body,
+            webhook_mapping_headers=mapping_headers,
         )
         with Mocker() as mocker:
             mocker.post("http://localhost:1234/test")
             transport.send(self.notification)
             self.assertEqual(mocker.call_count, 1)
             self.assertEqual(mocker.request_history[0].method, "POST")
+            self.assertEqual(mocker.request_history[0].headers["foo"], "bar")
             self.assertJSONEqual(
                 mocker.request_history[0].body.decode(),
                 {"email": self.user.email, "pk": self.user.pk, "username": self.user.username},
@@ -6,6 +6,7 @@ from typing import TYPE_CHECKING
 from uuid import uuid4

 from django.db import models
+from django.http import HttpRequest
 from django.utils.translation import gettext_lazy as _
 from model_utils.managers import InheritanceManager
 from rest_framework.serializers import BaseSerializer
@@ -178,11 +179,12 @@ class Flow(SerializerModel, PolicyBindingModel):
         help_text=_("Required level of authentication and authorization to access a flow."),
     )

-    @property
-    def background_url(self) -> str:
+    def background_url(self, request: HttpRequest | None = None) -> str:
         """Get the URL to the background image. If the name is /static or starts with http
         it is returned as-is"""
         if not self.background:
+            if request:
+                return request.brand.branding_default_flow_background_url()
             return (
                 CONFIG.get("web.path", "/")[:-1] + "/static/dist/assets/images/flow_background.jpg"
             )
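Because `background_url` is now a method rather than a property, callers that have an `HttpRequest` can pass it through so that flows without an explicit background fall back to the brand's default flow background; callers without a request still get the bundled static image. A small usage sketch (the helper name is illustrative):

```python
from authentik.flows.models import Flow


def resolve_flow_background(flow: Flow, request=None) -> str:
    # With a request (e.g. inside a stage view), an unset flow background
    # resolves to request.brand.branding_default_flow_background_url();
    # without one, the static default image path is returned.
    return flow.background_url(request)
```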
@@ -184,7 +184,7 @@ class ChallengeStageView(StageView):
             flow_info = ContextualFlowInfo(
                 data={
                     "title": self.format_title(),
-                    "background": self.executor.flow.background_url,
+                    "background": self.executor.flow.background_url(self.request),
                     "cancel_url": reverse("authentik_flows:cancel"),
                     "layout": self.executor.flow.layout,
                 }
@@ -27,7 +27,6 @@ class FlowTestCase(APITestCase):
         self.assertIsNotNone(raw_response["component"])
         if flow:
             self.assertIn("flow_info", raw_response)
-            self.assertEqual(raw_response["flow_info"]["background"], flow.background_url)
             self.assertEqual(
                 raw_response["flow_info"]["cancel_url"], reverse("authentik_flows:cancel")
             )
@@ -1,9 +1,11 @@
 """API flow tests"""

+from json import loads
+
 from django.urls import reverse
 from rest_framework.test import APITestCase

-from authentik.core.tests.utils import create_test_admin_user
+from authentik.core.tests.utils import create_test_admin_user, create_test_flow
 from authentik.flows.api.stages import StageSerializer, StageViewSet
 from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding, Stage
 from authentik.lib.generators import generate_id
@@ -77,6 +79,22 @@ class TestFlowsAPI(APITestCase):
         self.assertEqual(response.status_code, 200)
         self.assertJSONEqual(response.content, {"diagram": DIAGRAM_EXPECTED})

+    def test_api_background(self):
+        """Test custom background"""
+        user = create_test_admin_user()
+        self.client.force_login(user)
+
+        flow = create_test_flow()
+        response = self.client.get(reverse("authentik_api:flow-detail", kwargs={"slug": flow.slug}))
+        body = loads(response.content.decode())
+        self.assertEqual(body["background"], "/static/dist/assets/images/flow_background.jpg")
+
+        flow.background = "https://goauthentik.io/img/icon.png"
+        flow.save()
+        response = self.client.get(reverse("authentik_api:flow-detail", kwargs={"slug": flow.slug}))
+        body = loads(response.content.decode())
+        self.assertEqual(body["background"], "https://goauthentik.io/img/icon.png")
+
     def test_api_diagram_no_stages(self):
         """Test flow diagram with no stages."""
         user = create_test_admin_user()
@@ -49,7 +49,7 @@ class TestFlowInspector(APITestCase):
                 "captcha_stage": None,
                 "component": "ak-stage-identification",
                 "flow_info": {
-                    "background": flow.background_url,
+                    "background": "/static/dist/assets/images/flow_background.jpg",
                     "cancel_url": reverse("authentik_flows:cancel"),
                     "title": flow.title,
                     "layout": "stacked",
@@ -1,5 +1,20 @@
-# update website/docs/install-config/configuration/configuration.mdx
-# This is the default configuration file
+# authentik configuration
+#
+# https://docs.goauthentik.io/docs/install-config/configuration/
+#
+# To override the settings in this file, run the following command from the repository root:
+#
+# ```shell
+# make gen-dev-config
+# ```
+#
+# You may edit the generated file to override the configuration below.
+#
+# When modifying the default configuration file,
+# ensure that the corresponding documentation is updated to match.
+#
+# @see {@link ../../website/docs/install-config/configuration/configuration.mdx Configuration documentation} for more information.
+
 postgresql:
   host: localhost
   name: authentik
@@ -45,6 +60,8 @@ redis:
   # url: ""
   # transport_options: ""

+http_timeout: 30
+
cache:
   # url: ""
   timeout: 300
@@ -16,7 +16,40 @@ def authentik_user_agent() -> str:
     return f"authentik@{get_full_version()}"


-class DebugSession(Session):
+class TimeoutSession(Session):
+    """Always set a default HTTP request timeout"""
+
+    def __init__(self, default_timeout=None):
+        super().__init__()
+        self.timeout = default_timeout
+
+    def send(
+        self,
+        request,
+        *,
+        stream=...,
+        verify=...,
+        proxies=...,
+        cert=...,
+        timeout=...,
+        allow_redirects=...,
+        **kwargs,
+    ):
+        if not timeout and self.timeout:
+            timeout = self.timeout
+        return super().send(
+            request,
+            stream=stream,
+            verify=verify,
+            proxies=proxies,
+            cert=cert,
+            timeout=timeout,
+            allow_redirects=allow_redirects,
+            **kwargs,
+        )
+
+
+class DebugSession(TimeoutSession):
     """requests session which logs http requests and responses"""

     def send(self, req: PreparedRequest, *args, **kwargs):
@@ -42,8 +75,9 @@ class DebugSession(Session):

 def get_http_session() -> Session:
     """Get a requests session with common headers"""
-    session = Session()
+    session = TimeoutSession()
     if CONFIG.get_bool("debug") or CONFIG.get("log_level") == "trace":
         session = DebugSession()
     session.headers["User-Agent"] = authentik_user_agent()
+    session.timeout = CONFIG.get_optional_int("http_timeout")
     return session
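The combined effect is that every session returned by `get_http_session()` now carries a default timeout sourced from the new `http_timeout` setting (30 seconds out of the box), while explicit per-request timeouts still take precedence because `send()` only substitutes the session default when no timeout was passed. A short usage sketch with placeholder URLs:

```python
from authentik.lib.utils.http import get_http_session

session = get_http_session()

# No explicit timeout: the configured http_timeout default applies.
session.get("https://webhook.example.com/health")

# An explicit timeout still wins over the session default.
session.get("https://webhook.example.com/slow", timeout=120)
```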
@@ -13,6 +13,7 @@ from paramiko.ssh_exception import SSHException
 from structlog.stdlib import get_logger
 from yaml import safe_dump

+from authentik import __version__
 from authentik.outposts.apps import MANAGED_OUTPOST
 from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException
 from authentik.outposts.docker_ssh import DockerInlineSSH, SSHManagedExternallyException
@@ -184,7 +185,7 @@ class DockerController(BaseController):
         try:
             self.client.images.pull(image)
         except DockerException:  # pragma: no cover
-            image = f"ghcr.io/goauthentik/{self.outpost.type}:latest"
+            image = f"ghcr.io/goauthentik/{self.outpost.type}:{__version__}"
             self.client.images.pull(image)
         return image

@@ -1,5 +1,6 @@
 """Base Kubernetes Reconciler"""

+import re
 from dataclasses import asdict
 from json import dumps
 from typing import TYPE_CHECKING, Generic, TypeVar
@@ -67,7 +68,8 @@ class KubernetesObjectReconciler(Generic[T]):
     @property
     def name(self) -> str:
         """Get the name of the object this reconciler manages"""
-        return (
+        base_name = (
             self.controller.outpost.config.object_naming_template
             % {
                 "name": slugify(self.controller.outpost.name),
@@ -75,6 +77,16 @@ class KubernetesObjectReconciler(Generic[T]):
             }
         ).lower()

+        formatted = slugify(base_name)
+        formatted = re.sub(r"[^a-z0-9-]", "-", formatted)
+        formatted = re.sub(r"-+", "-", formatted)
+        formatted = formatted[:63]
+
+        if not formatted:
+            formatted = f"outpost-{self.controller.outpost.uuid.hex}"[:63]
+
+        return formatted
+
     def get_patched_reference_object(self) -> T:
         """Get patched reference object"""
         reference = self.get_reference_object()
@@ -112,7 +124,6 @@ class KubernetesObjectReconciler(Generic[T]):
         try:
             current = self.retrieve()
         except (OpenApiException, HTTPError) as exc:
-
             if isinstance(exc, ApiException) and exc.status == HttpResponseNotFound.status_code:
                 self.logger.debug("Failed to get current, triggering recreate")
                 raise NeedsRecreate from exc
@@ -156,7 +167,6 @@ class KubernetesObjectReconciler(Generic[T]):
             self.delete(current)
             self.logger.debug("Removing")
         except (OpenApiException, HTTPError) as exc:
-
             if isinstance(exc, ApiException) and exc.status == HttpResponseNotFound.status_code:
                 self.logger.debug("Failed to get current, assuming non-existent")
                 return
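The reconciler name is now forced into a valid Kubernetes object name: slugified, restricted to lowercase alphanumerics and dashes, truncated to 63 characters, with an `outpost-<uuid>` fallback when nothing usable remains. A standalone sketch of that sanitisation, assuming Django's `slugify` is the one used by the reconciler (function and argument names here are illustrative):

```python
import re

from django.utils.text import slugify


def k8s_object_name(template_result: str, fallback_uuid_hex: str) -> str:
    """Mirror the reconciler's name sanitisation steps."""
    formatted = slugify(template_result.lower())
    formatted = re.sub(r"[^a-z0-9-]", "-", formatted)  # drop invalid characters
    formatted = re.sub(r"-+", "-", formatted)          # collapse runs of dashes
    formatted = formatted[:63]                         # Kubernetes name length limit
    if not formatted:
        formatted = f"outpost-{fallback_uuid_hex}"[:63]
    return formatted


# e.g. k8s_object_name("ak-outpost-My Proxy Outpost!", "abc123")
# -> "ak-outpost-my-proxy-outpost"
```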
@@ -61,9 +61,14 @@ class KubernetesController(BaseController):
     client: KubernetesClient
     connection: KubernetesServiceConnection

-    def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection) -> None:
+    def __init__(
+        self,
+        outpost: Outpost,
+        connection: KubernetesServiceConnection,
+        client: KubernetesClient | None = None,
+    ) -> None:
         super().__init__(outpost, connection)
-        self.client = KubernetesClient(connection)
+        self.client = client if client else KubernetesClient(connection)
         self.reconcilers = {
             SecretReconciler.reconciler_name(): SecretReconciler,
             DeploymentReconciler.reconciler_name(): DeploymentReconciler,
authentik/outposts/tests/test_controller_k8s.py (new file, 44 lines)
@@ -0,0 +1,44 @@
+"""Kubernetes controller tests"""
+
+from django.test import TestCase
+
+from authentik.blueprints.tests import reconcile_app
+from authentik.lib.generators import generate_id
+from authentik.outposts.apps import MANAGED_OUTPOST
+from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler
+from authentik.outposts.controllers.kubernetes import KubernetesController
+from authentik.outposts.models import KubernetesServiceConnection, Outpost, OutpostType
+
+
+class KubernetesControllerTests(TestCase):
+    """Kubernetes controller tests"""
+
+    @reconcile_app("authentik_outposts")
+    def setUp(self) -> None:
+        self.outpost = Outpost.objects.create(
+            name="test",
+            type=OutpostType.PROXY,
+        )
+        self.integration = KubernetesServiceConnection(name="test")
+
+    def test_gen_name(self):
+        """Ensure the generated name is valid"""
+        controller = KubernetesController(
+            Outpost.objects.filter(managed=MANAGED_OUTPOST).first(),
+            self.integration,
+            # Pass something not-none as client so we don't
+            # attempt to connect to K8s as that's not needed
+            client=self,
+        )
+        rec = DeploymentReconciler(controller)
+        self.assertEqual(rec.name, "ak-outpost-authentik-embedded-outpost")
+
+        controller.outpost.name = generate_id()
+        self.assertLess(len(rec.name), 64)
+
+        # Test custom naming template
+        _cfg = controller.outpost.config
+        _cfg.object_naming_template = ""
+        controller.outpost.config = _cfg
+        self.assertEqual(rec.name, f"outpost-{controller.outpost.uuid.hex}")
+        self.assertLess(len(rec.name), 64)
@@ -254,10 +254,10 @@ class OAuthAuthorizationParams:
             raise AuthorizeError(self.redirect_uri, "invalid_scope", self.grant_type, self.state)
         if SCOPE_OFFLINE_ACCESS in self.scope:
             # https://openid.net/specs/openid-connect-core-1_0.html#OfflineAccess
-            if PROMPT_CONSENT not in self.prompt:
-                # Instead of ignoring the `offline_access` scope when `prompt`
-                # isn't set to `consent`, we set override it ourselves
-                self.prompt.add(PROMPT_CONSENT)
+            # Don't explicitly request consent with offline_access, as the spec allows for
+            # "other conditions for processing the request permitting offline access to the
+            # requested resources are in place"
+            # which we interpret as "the admin picks an authorization flow with or without consent"
         if self.response_type not in [
             ResponseTypes.CODE,
             ResponseTypes.CODE_TOKEN,
@@ -1,9 +1,9 @@
 """RAC app config"""

-from django.apps import AppConfig
+from authentik.blueprints.apps import ManagedAppConfig


-class AuthentikProviderRAC(AppConfig):
+class AuthentikProviderRAC(ManagedAppConfig):
     """authentik rac app config"""

     name = "authentik.providers.rac"
@@ -4,8 +4,7 @@ from asgiref.sync import async_to_sync
 from channels.layers import get_channel_layer
 from django.contrib.auth.signals import user_logged_out
 from django.core.cache import cache
-from django.db.models import Model
-from django.db.models.signals import post_save, pre_delete
+from django.db.models.signals import post_delete, post_save, pre_delete
 from django.dispatch import receiver
 from django.http import HttpRequest

@@ -46,12 +45,8 @@ def pre_delete_connection_token_disconnect(sender, instance: ConnectionToken, **_):
     )


-@receiver(post_save, sender=Endpoint)
-def post_save_endpoint(sender: type[Model], instance, created: bool, **_):
-    """Clear user's endpoint cache upon endpoint creation"""
-    if not created:  # pragma: no cover
-        return
-
-    # Delete user endpoint cache
+@receiver([post_save, post_delete], sender=Endpoint)
+def post_save_post_delete_endpoint(**_):
+    """Clear user's endpoint cache upon endpoint creation or deletion"""
     keys = cache.keys(user_endpoint_cache_key("*"))
     cache.delete_many(keys)
@@ -74,6 +74,8 @@ class TestEndpointsAPI(APITestCase):
                     "component": "ak-provider-rac-form",
                     "assigned_application_slug": self.app.slug,
                     "assigned_application_name": self.app.name,
+                    "assigned_backchannel_application_slug": "",
+                    "assigned_backchannel_application_name": "",
                     "verbose_name": "RAC Provider",
                     "verbose_name_plural": "RAC Providers",
                     "meta_model_name": "authentik_providers_rac.racprovider",
@@ -124,6 +126,8 @@ class TestEndpointsAPI(APITestCase):
                     "component": "ak-provider-rac-form",
                     "assigned_application_slug": self.app.slug,
                     "assigned_application_name": self.app.name,
+                    "assigned_backchannel_application_slug": "",
+                    "assigned_backchannel_application_name": "",
                     "connection_expiry": "hours=8",
                     "delete_token_on_disconnect": False,
                     "verbose_name": "RAC Provider",
@@ -153,6 +157,8 @@ class TestEndpointsAPI(APITestCase):
                     "component": "ak-provider-rac-form",
                     "assigned_application_slug": self.app.slug,
                     "assigned_application_name": self.app.name,
+                    "assigned_backchannel_application_slug": "",
+                    "assigned_backchannel_application_name": "",
                     "connection_expiry": "hours=8",
                     "delete_token_on_disconnect": False,
                     "verbose_name": "RAC Provider",
|||||||
@ -180,6 +180,7 @@ class SAMLProviderSerializer(ProviderSerializer):
|
|||||||
"session_valid_not_on_or_after",
|
"session_valid_not_on_or_after",
|
||||||
"property_mappings",
|
"property_mappings",
|
||||||
"name_id_mapping",
|
"name_id_mapping",
|
||||||
|
"authn_context_class_ref_mapping",
|
||||||
"digest_algorithm",
|
"digest_algorithm",
|
||||||
"signature_algorithm",
|
"signature_algorithm",
|
||||||
"signing_kp",
|
"signing_kp",
|
||||||
|
|||||||
@@ -0,0 +1,28 @@
+# Generated by Django 5.0.13 on 2025-03-18 17:41
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_providers_saml", "0016_samlprovider_encryption_kp_and_more"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="samlprovider",
+            name="authn_context_class_ref_mapping",
+            field=models.ForeignKey(
+                blank=True,
+                default=None,
+                help_text="Configure how the AuthnContextClassRef value will be created. When left empty, the AuthnContextClassRef will be set based on which authentication methods the user used to authenticate.",
+                null=True,
+                on_delete=django.db.models.deletion.SET_DEFAULT,
+                related_name="+",
+                to="authentik_providers_saml.samlpropertymapping",
+                verbose_name="AuthnContextClassRef Property Mapping",
+            ),
+        ),
+    ]
@@ -71,6 +71,20 @@ class SAMLProvider(Provider):
             "the NameIDPolicy of the incoming request will be considered"
         ),
     )
+    authn_context_class_ref_mapping = models.ForeignKey(
+        "SAMLPropertyMapping",
+        default=None,
+        blank=True,
+        null=True,
+        on_delete=models.SET_DEFAULT,
+        verbose_name=_("AuthnContextClassRef Property Mapping"),
+        related_name="+",
+        help_text=_(
+            "Configure how the AuthnContextClassRef value will be created. When left empty, "
+            "the AuthnContextClassRef will be set based on which authentication methods the user "
+            "used to authenticate."
+        ),
+    )

     assertion_valid_not_before = models.TextField(
         default="minutes=-5",
@@ -170,7 +184,6 @@ class SAMLProvider(Provider):
     def launch_url(self) -> str | None:
         """Use IDP-Initiated SAML flow as launch URL"""
         try:
-
             return reverse(
                 "authentik_providers_saml:sso-init",
                 kwargs={"application_slug": self.application.slug},
@@ -1,5 +1,6 @@
 """SAML Assertion generator"""

+from datetime import datetime
 from hashlib import sha256
 from types import GeneratorType

@@ -52,6 +53,7 @@ class AssertionProcessor:
     _assertion_id: str
     _response_id: str

+    _auth_instant: str
     _valid_not_before: str
     _session_not_on_or_after: str
     _valid_not_on_or_after: str
@@ -65,6 +67,11 @@ class AssertionProcessor:
         self._assertion_id = get_random_id()
         self._response_id = get_random_id()

+        _login_event = get_login_event(self.http_request)
+        _login_time = datetime.now()
+        if _login_event:
+            _login_time = _login_event.created
+        self._auth_instant = get_time_string(_login_time)
         self._valid_not_before = get_time_string(
             timedelta_from_string(self.provider.assertion_valid_not_before)
         )
@@ -131,7 +138,7 @@ class AssertionProcessor:
     def get_assertion_auth_n_statement(self) -> Element:
         """Generate AuthnStatement with AuthnContext and ContextClassRef Elements."""
         auth_n_statement = Element(f"{{{NS_SAML_ASSERTION}}}AuthnStatement")
-        auth_n_statement.attrib["AuthnInstant"] = self._valid_not_before
+        auth_n_statement.attrib["AuthnInstant"] = self._auth_instant
         auth_n_statement.attrib["SessionIndex"] = sha256(
             self.http_request.session.session_key.encode("ascii")
         ).hexdigest()
@@ -158,6 +165,28 @@ class AssertionProcessor:
             auth_n_context_class_ref.text = (
                 "urn:oasis:names:tc:SAML:2.0:ac:classes:MobileOneFactorContract"
             )
+        if self.provider.authn_context_class_ref_mapping:
+            try:
+                value = self.provider.authn_context_class_ref_mapping.evaluate(
+                    user=self.http_request.user,
+                    request=self.http_request,
+                    provider=self.provider,
+                )
+                if value is not None:
+                    auth_n_context_class_ref.text = str(value)
+                return auth_n_statement
+            except PropertyMappingExpressionException as exc:
+                Event.new(
+                    EventAction.CONFIGURATION_ERROR,
+                    message=(
+                        "Failed to evaluate property-mapping: "
+                        f"'{self.provider.authn_context_class_ref_mapping.name}'"
+                    ),
+                    provider=self.provider,
+                    mapping=self.provider.authn_context_class_ref_mapping,
+                ).from_http(self.http_request)
+                LOGGER.warning("Failed to evaluate property mapping", exc=exc)
+                return auth_n_statement
         return auth_n_statement

     def get_assertion_conditions(self) -> Element:
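The new `authn_context_class_ref_mapping` lets a SAML property mapping compute the `AuthnContextClassRef` emitted in the assertion; per the `evaluate()` call above, the expression sees `user`, `request`, and `provider`. A hedged sketch of such a mapping, where the mapping name, the provider name, the `mfa_used` attribute, and the chosen class-ref URNs are all illustrative:

```python
from authentik.providers.saml.models import SAMLPropertyMapping, SAMLProvider

# The returned string becomes the AuthnContextClassRef of the assertion.
mapping = SAMLPropertyMapping.objects.create(
    name="custom-authn-context-class-ref",
    expression=(
        "if user.attributes.get('mfa_used', False):\n"
        "    return 'urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken'\n"
        "return 'urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport'"
    ),
)

provider = SAMLProvider.objects.get(name="my-saml-provider")  # placeholder name
provider.authn_context_class_ref_mapping = mapping
provider.save()
```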
@@ -294,6 +294,61 @@ class TestAuthNRequest(TestCase):
         self.assertEqual(parsed_request.id, "aws_LDxLGeubpc5lx12gxCgS6uPbix1yd5re")
         self.assertEqual(parsed_request.name_id_policy, SAML_NAME_ID_FORMAT_EMAIL)

+    def test_authn_context_class_ref_mapping(self):
+        """Test custom authn_context_class_ref"""
+        authn_context_class_ref = generate_id()
+        mapping = SAMLPropertyMapping.objects.create(
+            name=generate_id(), expression=f"""return '{authn_context_class_ref}'"""
+        )
+        self.provider.authn_context_class_ref_mapping = mapping
+        self.provider.save()
+        user = create_test_admin_user()
+        http_request = get_request("/", user=user)
+
+        # First create an AuthNRequest
+        request_proc = RequestProcessor(self.source, http_request, "test_state")
+        request = request_proc.build_auth_n()
+
+        # To get an assertion we need a parsed request (parsed by provider)
+        parsed_request = AuthNRequestParser(self.provider).parse(
+            b64encode(request.encode()).decode(), "test_state"
+        )
+        # Now create a response and convert it to string (provider)
+        response_proc = AssertionProcessor(self.provider, http_request, parsed_request)
+        response = response_proc.build_response()
+        self.assertIn(user.username, response)
+        self.assertIn(authn_context_class_ref, response)
+
+    def test_authn_context_class_ref_mapping_invalid(self):
+        """Test custom authn_context_class_ref (invalid)"""
+        mapping = SAMLPropertyMapping.objects.create(name=generate_id(), expression="q")
+        self.provider.authn_context_class_ref_mapping = mapping
+        self.provider.save()
+        user = create_test_admin_user()
+        http_request = get_request("/", user=user)
+
+        # First create an AuthNRequest
+        request_proc = RequestProcessor(self.source, http_request, "test_state")
+        request = request_proc.build_auth_n()
+
+        # To get an assertion we need a parsed request (parsed by provider)
+        parsed_request = AuthNRequestParser(self.provider).parse(
+            b64encode(request.encode()).decode(), "test_state"
+        )
+        # Now create a response and convert it to string (provider)
+        response_proc = AssertionProcessor(self.provider, http_request, parsed_request)
+        response = response_proc.build_response()
+        self.assertIn(user.username, response)
+
+        events = Event.objects.filter(
+            action=EventAction.CONFIGURATION_ERROR,
+        )
+        self.assertTrue(events.exists())
+        self.assertEqual(
+            events.first().context["message"],
+            f"Failed to evaluate property-mapping: '{mapping.name}'",
+        )
+
     def test_request_attributes(self):
         """Test full SAML Request/Response flow, fully signed"""
         user = create_test_admin_user()
@@ -321,8 +376,10 @@ class TestAuthNRequest(TestCase):
         request = request_proc.build_auth_n()

         # Create invalid PropertyMapping
-        scope = SAMLPropertyMapping.objects.create(name="test", saml_name="test", expression="q")
-        self.provider.property_mappings.add(scope)
+        mapping = SAMLPropertyMapping.objects.create(
+            name=generate_id(), saml_name="test", expression="q"
+        )
+        self.provider.property_mappings.add(mapping)

         # To get an assertion we need a parsed request (parsed by provider)
         parsed_request = AuthNRequestParser(self.provider).parse(
@@ -338,7 +395,7 @@ class TestAuthNRequest(TestCase):
         self.assertTrue(events.exists())
         self.assertEqual(
             events.first().context["message"],
-            "Failed to evaluate property-mapping: 'test'",
+            f"Failed to evaluate property-mapping: '{mapping.name}'",
         )

     def test_idp_initiated(self):
@@ -1,12 +1,16 @@
 """Time utilities"""

-import datetime
+from datetime import datetime, timedelta
+
+from django.utils.timezone import now


-def get_time_string(delta: datetime.timedelta | None = None) -> str:
+def get_time_string(delta: timedelta | datetime | None = None) -> str:
     """Get Data formatted in SAML format"""
     if delta is None:
-        delta = datetime.timedelta()
-    now = datetime.datetime.now()
-    final = now + delta
+        delta = timedelta()
+    if isinstance(delta, timedelta):
+        final = now() + delta
+    else:
+        final = delta
     return final.strftime("%Y-%m-%dT%H:%M:%SZ")
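`get_time_string` now accepts either a relative `timedelta` (as before, for validity windows) or an absolute `datetime` (used above for the login event's creation time as `AuthnInstant`). A small sketch of both call forms, assuming the module path of the SAML time utilities shown above:

```python
from datetime import datetime, timedelta

from authentik.providers.saml.utils.time import get_time_string  # assumed path

# Relative form: "five minutes from now", e.g. for assertion validity.
print(get_time_string(timedelta(minutes=5)))

# Absolute form: an already-known instant, e.g. the login event's created time.
print(get_time_string(datetime(2025, 3, 18, 12, 0, 0)))
```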
@@ -24,7 +24,9 @@ class SCIMProviderGroupSerializer(ModelSerializer):
             "group",
             "group_obj",
             "provider",
+            "attributes",
         ]
+        extra_kwargs = {"attributes": {"read_only": True}}


 class SCIMProviderGroupViewSet(
|||||||
@ -28,6 +28,7 @@ class SCIMProviderSerializer(ProviderSerializer):
|
|||||||
"url",
|
"url",
|
||||||
"verify_certificates",
|
"verify_certificates",
|
||||||
"token",
|
"token",
|
||||||
|
"compatibility_mode",
|
||||||
"exclude_users_service_account",
|
"exclude_users_service_account",
|
||||||
"filter_group",
|
"filter_group",
|
||||||
"dry_run",
|
"dry_run",
|
||||||
|
|||||||
@@ -24,7 +24,9 @@ class SCIMProviderUserSerializer(ModelSerializer):
             "user",
             "user_obj",
             "provider",
+            "attributes",
         ]
+        extra_kwargs = {"attributes": {"read_only": True}}


 class SCIMProviderUserViewSet(
@@ -22,7 +22,7 @@ from authentik.lib.sync.outgoing.exceptions import (
 from authentik.lib.utils.http import get_http_session
 from authentik.providers.scim.clients.exceptions import SCIMRequestException
 from authentik.providers.scim.clients.schema import ServiceProviderConfiguration
-from authentik.providers.scim.models import SCIMProvider
+from authentik.providers.scim.models import SCIMCompatibilityMode, SCIMProvider

 if TYPE_CHECKING:
     from django.db.models import Model
@@ -90,9 +90,14 @@ class SCIMClient[TModel: "Model", TConnection: "Model", TSchema: "BaseModel"](
         """Get Service provider config"""
         default_config = ServiceProviderConfiguration.default()
         try:
-            return ServiceProviderConfiguration.model_validate(
+            config = ServiceProviderConfiguration.model_validate(
                 self._request("GET", "/ServiceProviderConfig")
             )
+            if self.provider.compatibility_mode == SCIMCompatibilityMode.AWS:
+                config.patch.supported = False
+            if self.provider.compatibility_mode == SCIMCompatibilityMode.SLACK:
+                config.filter.supported = True
+            return config
         except (ValidationError, SCIMRequestException, NotFoundSyncException) as exc:
             self.logger.warning("failed to get ServiceProviderConfig", exc=exc)
             return default_config
@@ -102,7 +102,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
         if not scim_id or scim_id == "":
             raise StopSync("SCIM Response with missing or invalid `id`")
         connection = SCIMProviderGroup.objects.create(
-            provider=self.provider, group=group, scim_id=scim_id
+            provider=self.provider, group=group, scim_id=scim_id, attributes=response
         )
         users = list(group.users.order_by("id").values_list("id", flat=True))
         self._patch_add_users(connection, users)
@@ -243,9 +243,10 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
                 if user.value not in users_should:
                     users_to_remove.append(user.value)
         # Check users that should be in the group and add them
-        for user in users_should:
-            if len([x for x in current_group.members if x.value == user]) < 1:
-                users_to_add.append(user)
+        if current_group.members is not None:
+            for user in users_should:
+                if len([x for x in current_group.members if x.value == user]) < 1:
+                    users_to_add.append(user)
         # Only send request if we need to make changes
         if len(users_to_add) < 1 and len(users_to_remove) < 1:
             return
@ -1,10 +1,12 @@
|
|||||||
"""User client"""
|
"""User client"""
|
||||||
|
|
||||||
|
from django.db import transaction
|
||||||
|
from django.utils.http import urlencode
|
||||||
from pydantic import ValidationError
|
from pydantic import ValidationError
|
||||||
|
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
from authentik.lib.sync.mapper import PropertyMappingManager
|
from authentik.lib.sync.mapper import PropertyMappingManager
|
||||||
from authentik.lib.sync.outgoing.exceptions import StopSync
|
from authentik.lib.sync.outgoing.exceptions import ObjectExistsSyncException, StopSync
|
||||||
from authentik.policies.utils import delete_none_values
|
from authentik.policies.utils import delete_none_values
|
||||||
from authentik.providers.scim.clients.base import SCIMClient
|
from authentik.providers.scim.clients.base import SCIMClient
|
||||||
from authentik.providers.scim.clients.schema import SCIM_USER_SCHEMA
|
from authentik.providers.scim.clients.schema import SCIM_USER_SCHEMA
|
||||||
@@ -55,24 +57,44 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]):
     def create(self, user: User):
         """Create user from scratch and create a connection object"""
         scim_user = self.to_schema(user, None)
-        response = self._request(
-            "POST",
-            "/Users",
-            json=scim_user.model_dump(
-                mode="json",
-                exclude_unset=True,
-            ),
-        )
-        scim_id = response.get("id")
-        if not scim_id or scim_id == "":
-            raise StopSync("SCIM Response with missing or invalid `id`")
-        return SCIMProviderUser.objects.create(provider=self.provider, user=user, scim_id=scim_id)
+        with transaction.atomic():
+            try:
+                response = self._request(
+                    "POST",
+                    "/Users",
+                    json=scim_user.model_dump(
+                        mode="json",
+                        exclude_unset=True,
+                    ),
+                )
+            except ObjectExistsSyncException as exc:
+                if not self._config.filter.supported:
+                    raise exc
+                users = self._request(
+                    "GET", f"/Users?{urlencode({'filter': f'userName eq {scim_user.userName}'})}"
+                )
+                users_res = users.get("Resources", [])
+                if len(users_res) < 1:
+                    raise exc
+                return SCIMProviderUser.objects.create(
+                    provider=self.provider,
+                    user=user,
+                    scim_id=users_res[0]["id"],
+                    attributes=users_res[0],
+                )
+            else:
+                scim_id = response.get("id")
+                if not scim_id or scim_id == "":
+                    raise StopSync("SCIM Response with missing or invalid `id`")
+                return SCIMProviderUser.objects.create(
+                    provider=self.provider, user=user, scim_id=scim_id, attributes=response
+                )
 
     def update(self, user: User, connection: SCIMProviderUser):
         """Update existing user"""
         scim_user = self.to_schema(user, connection)
         scim_user.id = connection.scim_id
-        self._request(
+        response = self._request(
             "PUT",
             f"/Users/{connection.scim_id}",
             json=scim_user.model_dump(
@@ -80,3 +102,5 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]):
                 exclude_unset=True,
             ),
         )
+        connection.attributes = response
+        connection.save()
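For reference, a hypothetical sketch of the fallback lookup URL that the new create() path builds when the POST reports an existing object; urllib.parse.urlencode stands in here for django.utils.http.urlencode, and the userName value is an assumed example, not taken from the diff:

from urllib.parse import urlencode

user_name = "jane.doe"  # assumed example userName
query = urlencode({"filter": f"userName eq {user_name}"})
print(f"/Users?{query}")  # -> /Users?filter=userName+eq+jane.doe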
@@ -0,0 +1,24 @@
+# Generated by Django 5.0.12 on 2025-03-07 23:35
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_providers_scim", "0011_scimprovider_dry_run"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="scimprovider",
+            name="compatibility_mode",
+            field=models.CharField(
+                choices=[("default", "Default"), ("aws", "AWS"), ("slack", "Slack")],
+                default="default",
+                help_text="Alter authentik behavior for vendor-specific SCIM implementations.",
+                max_length=30,
+                verbose_name="SCIM Compatibility Mode",
+            ),
+        ),
+    ]
@@ -0,0 +1,23 @@
+# Generated by Django 5.0.13 on 2025-03-18 13:47
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_providers_scim", "0012_scimprovider_compatibility_mode"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="scimprovidergroup",
+            name="attributes",
+            field=models.JSONField(default=dict),
+        ),
+        migrations.AddField(
+            model_name="scimprovideruser",
+            name="attributes",
+            field=models.JSONField(default=dict),
+        ),
+    ]
@@ -22,6 +22,7 @@ class SCIMProviderUser(SerializerModel):
     scim_id = models.TextField()
     user = models.ForeignKey(User, on_delete=models.CASCADE)
     provider = models.ForeignKey("SCIMProvider", on_delete=models.CASCADE)
+    attributes = models.JSONField(default=dict)
 
     @property
     def serializer(self) -> type[Serializer]:
@@ -43,6 +44,7 @@ class SCIMProviderGroup(SerializerModel):
     scim_id = models.TextField()
     group = models.ForeignKey(Group, on_delete=models.CASCADE)
     provider = models.ForeignKey("SCIMProvider", on_delete=models.CASCADE)
+    attributes = models.JSONField(default=dict)
 
     @property
     def serializer(self) -> type[Serializer]:
@@ -57,6 +59,14 @@ class SCIMProviderGroup(SerializerModel):
         return f"SCIM Provider Group {self.group_id} to {self.provider_id}"
 
 
+class SCIMCompatibilityMode(models.TextChoices):
+    """SCIM compatibility mode"""
+
+    DEFAULT = "default", _("Default")
+    AWS = "aws", _("AWS")
+    SLACK = "slack", _("Slack")
+
+
 class SCIMProvider(OutgoingSyncProvider, BackchannelProvider):
     """SCIM 2.0 provider to create users and groups in external applications"""
 
@@ -77,6 +87,14 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider):
         help_text=_("Property mappings used for group creation/updating."),
     )
 
+    compatibility_mode = models.CharField(
+        max_length=30,
+        choices=SCIMCompatibilityMode.choices,
+        default=SCIMCompatibilityMode.DEFAULT,
+        verbose_name=_("SCIM Compatibility Mode"),
+        help_text=_("Alter authentik behavior for vendor-specific SCIM implementations."),
+    )
+
     @property
     def icon_url(self) -> str | None:
         return static("authentik/sources/scim.png")
@@ -68,8 +68,6 @@ class OAuth2Client(BaseOAuthClient):
             error_desc = self.get_request_arg("error_description", None)
             return {"error": error_desc or error or _("No token received.")}
         args = {
-            "client_id": self.get_client_id(),
-            "client_secret": self.get_client_secret(),
             "redirect_uri": callback,
             "code": code,
             "grant_type": "authorization_code",
@@ -28,7 +28,7 @@ def update_well_known_jwks(self: SystemTask):
             LOGGER.warning("Failed to update well_known", source=source, exc=exc, text=text)
             messages.append(f"Failed to update OIDC configuration for {source.slug}")
             continue
-        config = well_known_config.json()
+        config: dict = well_known_config.json()
         try:
             dirty = False
             source_attr_key = (
@@ -40,7 +40,9 @@ def update_well_known_jwks(self: SystemTask):
             for source_attr, config_key in source_attr_key:
                 # Check if we're actually changing anything to only
                 # save when something has changed
-                if getattr(source, source_attr, "") != config[config_key]:
+                if config_key not in config:
+                    continue
+                if getattr(source, source_attr, "") != config.get(config_key, ""):
                     dirty = True
                     setattr(source, source_attr, config[config_key])
         except (IndexError, KeyError) as exc:
@@ -25,8 +25,10 @@ class RedditOAuth2Client(UserprofileHeaderAuthClient):
 
     def get_access_token(self, **request_kwargs):
         "Fetch access token from callback request."
-        auth = HTTPBasicAuth(self.source.consumer_key, self.source.consumer_secret)
-        return super().get_access_token(auth=auth)
+        request_kwargs["auth"] = HTTPBasicAuth(
+            self.source.consumer_key, self.source.consumer_secret
+        )
+        return super().get_access_token(**request_kwargs)
 
 
 class RedditOAuth2Callback(OAuthCallback):
File diff suppressed because one or more lines are too long
@@ -0,0 +1,54 @@
+# Generated by Django 5.0.12 on 2025-02-27 04:32
+
+import authentik.lib.utils.time
+from authentik.lib.utils.time import timedelta_from_string
+from django.db import migrations, models
+
+
+def convert_integer_to_string_format(apps, schema_editor):
+    db_alias = schema_editor.connection.alias
+    EmailStage = apps.get_model("authentik_stages_email", "EmailStage")
+    for stage in EmailStage.objects.using(db_alias).all():
+        stage.token_expiry = f"minutes={stage.token_expiry}"
+        stage.save(using=db_alias)
+
+
+def convert_string_to_integer_format(apps, schema_editor):
+    db_alias = schema_editor.connection.alias
+    EmailStage = apps.get_model("authentik_stages_email", "EmailStage")
+    for stage in EmailStage.objects.using(db_alias).all():
+        # Check if token_expiry is a string
+        if isinstance(stage.token_expiry, str):
+            try:
+                # Use the timedelta_from_string utility to convert to timedelta
+                # then convert to minutes by dividing seconds by 60
+                td = timedelta_from_string(stage.token_expiry)
+                minutes_value = int(td.total_seconds() / 60)
+                stage.token_expiry = minutes_value
+                stage.save(using=db_alias)
+            except (ValueError, TypeError):
+                # If the string can't be parsed or converted properly, skip
+                pass
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_stages_email", "0004_emailstage_activate_user_on_success"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="emailstage",
+            name="token_expiry",
+            field=models.TextField(
+                default="minutes=30",
+                help_text="Time the token sent is valid (Format: hours=3,minutes=17,seconds=300).",
+                validators=[authentik.lib.utils.time.timedelta_string_validator],
+            ),
+        ),
+        migrations.RunPython(
+            convert_integer_to_string_format,
+            convert_string_to_integer_format,
+        ),
+    ]
@@ -14,6 +14,7 @@ from structlog.stdlib import get_logger
 
 from authentik.flows.models import Stage
 from authentik.lib.config import CONFIG
+from authentik.lib.utils.time import timedelta_string_validator
 
 LOGGER = get_logger()
 
@@ -74,8 +75,10 @@ class EmailStage(Stage):
         default=False, help_text=_("Activate users upon completion of stage.")
     )
 
-    token_expiry = models.IntegerField(
-        default=30, help_text=_("Time in minutes the token sent is valid.")
+    token_expiry = models.TextField(
+        default="minutes=30",
+        validators=[timedelta_string_validator],
+        help_text=_("Time the token sent is valid (Format: hours=3,minutes=17,seconds=300)."),
     )
     subject = models.TextField(default="authentik")
     template = models.TextField(default=EmailTemplates.PASSWORD_RESET)
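Note that token_expiry now stores a timedelta string instead of an integer number of minutes. Below is a minimal sketch of how the documented "hours=3,minutes=17,seconds=300" format maps onto datetime.timedelta; the real parser is authentik.lib.utils.time.timedelta_from_string, and this standalone version is only illustrative:

from datetime import timedelta

def parse_timedelta_sketch(expr: str) -> timedelta:
    """Parse e.g. 'minutes=30' or 'hours=3,minutes=17,seconds=300' into a timedelta."""
    kwargs = {}
    for pair in expr.split(","):
        key, _, value = pair.partition("=")
        kwargs[key.strip()] = float(value)  # assumes only valid timedelta keyword names
    return timedelta(**kwargs)

# The migration above rewrites the old integer default 30 as the string "minutes=30";
# both describe the same 30-minute validity window.
assert parse_timedelta_sketch("minutes=30") == timedelta(minutes=30)
assert parse_timedelta_sketch("hours=3,minutes=17,seconds=300") == timedelta(hours=3, minutes=17, seconds=300)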
@@ -22,6 +22,7 @@ from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, PLAN_CONTEXT_PENDING_USER
 from authentik.flows.stage import ChallengeStageView
 from authentik.flows.views.executor import QS_KEY_TOKEN, QS_QUERY
 from authentik.lib.utils.errors import exception_to_string
+from authentik.lib.utils.time import timedelta_from_string
 from authentik.stages.email.models import EmailStage
 from authentik.stages.email.tasks import send_mails
 from authentik.stages.email.utils import TemplateEmailMessage
@@ -73,8 +74,8 @@ class EmailStageView(ChallengeStageView):
         """Get token"""
         pending_user = self.get_pending_user()
         current_stage: EmailStage = self.executor.current_stage
-        valid_delta = timedelta(
-            minutes=current_stage.token_expiry + 1
+        valid_delta = timedelta_from_string(current_stage.token_expiry) + timedelta(
+            minutes=1
         )  # + 1 because django timesince always rounds down
         identifier = slugify(f"ak-email-stage-{current_stage.name}-{str(uuid4())}")
         # Don't check for validity here, we only care if the token exists
@@ -8,7 +8,7 @@ from django.core.mail.backends.locmem import EmailBackend
 from django.urls import reverse
 
 from authentik.core.models import User
-from authentik.core.tests.utils import create_test_admin_user, create_test_flow
+from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_user
 from authentik.events.models import Event, EventAction
 from authentik.flows.markers import StageMarker
 from authentik.flows.models import FlowDesignation, FlowStageBinding
@@ -67,6 +67,36 @@ class TestEmailStageSending(FlowTestCase):
         self.assertEqual(event.context["to_email"], [f"{self.user.name} <{self.user.email}>"])
         self.assertEqual(event.context["from_email"], "system@authentik.local")
 
+    def test_newlines_long_name(self):
+        """Test with pending user"""
+        plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
+        long_user = create_test_user()
+        long_user.name = "Test User\r\n Many Words\r\n"
+        long_user.save()
+        plan.context[PLAN_CONTEXT_PENDING_USER] = long_user
+        session = self.client.session
+        session[SESSION_KEY_PLAN] = plan
+        session.save()
+        Event.objects.filter(action=EventAction.EMAIL_SENT).delete()
+
+        url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
+        with patch(
+            "authentik.stages.email.models.EmailStage.backend_class",
+            PropertyMock(return_value=EmailBackend),
+        ):
+            response = self.client.post(url)
+            self.assertEqual(response.status_code, 200)
+            self.assertStageResponse(
+                response,
+                self.flow,
+                response_errors={
+                    "non_field_errors": [{"string": "email-sent", "code": "email-sent"}]
+                },
+            )
+            self.assertEqual(len(mail.outbox), 1)
+            self.assertEqual(mail.outbox[0].subject, "authentik")
+            self.assertEqual(mail.outbox[0].to, [f"Test User Many Words <{long_user.email}>"])
+
     def test_pending_fake_user(self):
         """Test with pending (fake) user"""
         self.flow.designation = FlowDesignation.RECOVERY
@@ -32,7 +32,14 @@ class TemplateEmailMessage(EmailMultiAlternatives):
         sanitized_to = []
         # Ensure that all recipients are valid
         for recipient_name, recipient_email in to:
-            sanitized_to.append(sanitize_address((recipient_name, recipient_email), "utf-8"))
+            # Remove any newline characters from name and email before sanitizing
+            clean_name = (
+                recipient_name.replace("\n", " ").replace("\r", " ") if recipient_name else ""
+            )
+            clean_email = (
+                recipient_email.replace("\n", "").replace("\r", "") if recipient_email else ""
+            )
+            sanitized_to.append(sanitize_address((clean_name, clean_email), "utf-8"))
         super().__init__(to=sanitized_to, **kwargs)
         if not template_name:
             return
@@ -142,38 +142,35 @@ class IdentificationChallengeResponse(ChallengeResponse):
             raise ValidationError("Failed to authenticate.")
         self.pre_user = pre_user
 
+        # Password check
+        if current_stage.password_stage:
+            password = attrs.get("password", None)
+            if not password:
+                self.stage.logger.warning("Password not set for ident+auth attempt")
+            try:
+                with start_span(
+                    op="authentik.stages.identification.authenticate",
+                    name="User authenticate call (combo stage)",
+                ):
+                    user = authenticate(
+                        self.stage.request,
+                        current_stage.password_stage.backends,
+                        current_stage,
+                        username=self.pre_user.username,
+                        password=password,
+                    )
+                if not user:
+                    raise ValidationError("Failed to authenticate.")
+                self.pre_user = user
+            except PermissionDenied as exc:
+                raise ValidationError(str(exc)) from exc
+
         # Captcha check
         if captcha_stage := current_stage.captcha_stage:
             captcha_token = attrs.get("captcha_token", None)
             if not captcha_token:
                 self.stage.logger.warning("Token not set for captcha attempt")
             verify_captcha_token(captcha_stage, captcha_token, client_ip)
-
-        # Password check
-        if not current_stage.password_stage:
-            # No password stage select, don't validate the password
-            return attrs
-
-        password = attrs.get("password", None)
-        if not password:
-            self.stage.logger.warning("Password not set for ident+auth attempt")
-        try:
-            with start_span(
-                op="authentik.stages.identification.authenticate",
-                name="User authenticate call (combo stage)",
-            ):
-                user = authenticate(
-                    self.stage.request,
-                    current_stage.password_stage.backends,
-                    current_stage,
-                    username=self.pre_user.username,
-                    password=password,
-                )
-            if not user:
-                raise ValidationError("Failed to authenticate.")
-            self.pre_user = user
-        except PermissionDenied as exc:
-            raise ValidationError(str(exc)) from exc
         return attrs
 
@@ -57,7 +57,7 @@ entries:
       use_ssl: false
       timeout: 10
      from_address: system@authentik.local
-      token_expiry: 30
+      token_expiry: minutes=30
      subject: authentik
      template: email/password_reset.html
      activate_user_on_success: true
@@ -2,7 +2,7 @@
     "$schema": "http://json-schema.org/draft-07/schema",
     "$id": "https://goauthentik.io/blueprints/schema.json",
     "type": "object",
-    "title": "authentik 2025.2.1 Blueprint schema",
+    "title": "authentik 2025.2.2 Blueprint schema",
     "required": [
         "version",
         "entries"
@@ -6462,6 +6462,11 @@
                     "title": "NameID Property Mapping",
                     "description": "Configure how the NameID value will be created. When left empty, the NameIDPolicy of the incoming request will be considered"
                 },
+                "authn_context_class_ref_mapping": {
+                    "type": "integer",
+                    "title": "AuthnContextClassRef Property Mapping",
+                    "description": "Configure how the AuthnContextClassRef value will be created. When left empty, the AuthnContextClassRef will be set based on which authentication methods the user used to authenticate."
+                },
                 "digest_algorithm": {
                     "type": "string",
                     "enum": [
@@ -6661,6 +6666,16 @@
                     "title": "Token",
                     "description": "Authentication token"
                 },
+                "compatibility_mode": {
+                    "type": "string",
+                    "enum": [
+                        "default",
+                        "aws",
+                        "slack"
+                    ],
+                    "title": "SCIM Compatibility Mode",
+                    "description": "Alter authentik behavior for vendor-specific SCIM implementations."
+                },
                 "exclude_users_service_account": {
                     "type": "boolean",
                     "title": "Exclude users service account"
@@ -11369,11 +11384,10 @@
                     "title": "From address"
                 },
                 "token_expiry": {
-                    "type": "integer",
-                    "minimum": -2147483648,
-                    "maximum": 2147483647,
+                    "type": "string",
+                    "minLength": 1,
                     "title": "Token expiry",
-                    "description": "Time in minutes the token sent is valid."
+                    "description": "Time the token sent is valid (Format: hours=3,minutes=17,seconds=300)."
                 },
                 "subject": {
                     "type": "string",
@@ -13002,6 +13016,15 @@
                     "minLength": 1,
                     "title": "Branding favicon"
                 },
+                "branding_custom_css": {
+                    "type": "string",
+                    "title": "Branding custom css"
+                },
+                "branding_default_flow_background": {
+                    "type": "string",
+                    "minLength": 1,
+                    "title": "Branding default flow background"
+                },
                 "flow_authentication": {
                     "type": "string",
                     "format": "uuid",
@@ -14883,9 +14906,15 @@
                     "type": "string",
                     "title": "Webhook url"
                 },
-                "webhook_mapping": {
+                "webhook_mapping_body": {
                     "type": "integer",
-                    "title": "Webhook mapping"
+                    "title": "Webhook mapping body",
+                    "description": "Customize the body of the request. Mapping should return data that is JSON-serializable."
+                },
+                "webhook_mapping_headers": {
+                    "type": "integer",
+                    "title": "Webhook mapping headers",
+                    "description": "Configure additional headers to be sent. Mapping should return a dictionary of key-value pairs"
+                },
                 "send_once": {
                     "type": "boolean",
@@ -31,7 +31,7 @@ services:
     volumes:
       - redis:/data
   server:
-    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.2.1}
+    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.2.2}
     restart: unless-stopped
     command: server
     environment:
@@ -54,7 +54,7 @@ services:
       redis:
         condition: service_healthy
   worker:
-    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.2.1}
+    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.2.2}
     restart: unless-stopped
     command: worker
     environment:
go.mod (14 changed lines)
@@ -6,12 +6,12 @@ toolchain go1.24.0
 
 require (
 	beryju.io/ldap v0.1.0
-	github.com/coreos/go-oidc/v3 v3.12.0
+	github.com/coreos/go-oidc/v3 v3.13.0
 	github.com/getsentry/sentry-go v0.31.1
 	github.com/go-http-utils/etag v0.0.0-20161124023236-513ea8f21eb1
 	github.com/go-ldap/ldap/v3 v3.4.10
 	github.com/go-openapi/runtime v0.28.0
-	github.com/golang-jwt/jwt/v5 v5.2.1
+	github.com/golang-jwt/jwt/v5 v5.2.2
 	github.com/google/uuid v1.6.0
 	github.com/gorilla/handlers v1.5.2
 	github.com/gorilla/mux v1.8.1
@@ -23,13 +23,13 @@ require (
 	github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484
 	github.com/pires/go-proxyproto v0.8.0
 	github.com/prometheus/client_golang v1.21.1
-	github.com/redis/go-redis/v9 v9.7.1
+	github.com/redis/go-redis/v9 v9.7.3
 	github.com/sethvargo/go-envconfig v1.1.1
 	github.com/sirupsen/logrus v1.9.3
 	github.com/spf13/cobra v1.9.1
 	github.com/stretchr/testify v1.10.0
 	github.com/wwt/guac v1.3.2
-	goauthentik.io/api/v3 v3.2025021.2
+	goauthentik.io/api/v3 v3.2025022.6
 	golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
 	golang.org/x/oauth2 v0.28.0
 	golang.org/x/sync v0.12.0
@@ -76,9 +76,9 @@ require (
 	go.opentelemetry.io/otel v1.24.0 // indirect
 	go.opentelemetry.io/otel/metric v1.24.0 // indirect
 	go.opentelemetry.io/otel/trace v1.24.0 // indirect
-	golang.org/x/crypto v0.32.0 // indirect
-	golang.org/x/sys v0.29.0 // indirect
-	golang.org/x/text v0.21.0 // indirect
+	golang.org/x/crypto v0.36.0 // indirect
+	golang.org/x/sys v0.31.0 // indirect
+	golang.org/x/text v0.23.0 // indirect
 	google.golang.org/protobuf v1.36.1 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
go.sum (30 changed lines)
@@ -55,8 +55,8 @@ github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5P
 github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
 github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
-github.com/coreos/go-oidc/v3 v3.12.0 h1:sJk+8G2qq94rDI6ehZ71Bol3oUHy63qNYmkiSjrc/Jo=
-github.com/coreos/go-oidc/v3 v3.12.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDhf0r5lltWI0=
+github.com/coreos/go-oidc/v3 v3.13.0 h1:M66zd0pcc5VxvBNM4pB331Wrsanby+QomQYjN8HamW8=
+github.com/coreos/go-oidc/v3 v3.13.0/go.mod h1:HaZ3szPaZ0e4r6ebqvsLWlk2Tn+aejfmrfah6hnSYEU=
 github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
@@ -113,8 +113,8 @@ github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+Gr
 github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
 github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58=
 github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ=
-github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
-github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
+github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
+github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
 github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
@@ -248,8 +248,8 @@ github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ
 github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I=
 github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
 github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
-github.com/redis/go-redis/v9 v9.7.1 h1:4LhKRCIduqXqtvCUlaq9c8bdHOkICjDMrr1+Zb3osAc=
-github.com/redis/go-redis/v9 v9.7.1/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw=
+github.com/redis/go-redis/v9 v9.7.3 h1:YpPyAayJV+XErNsatSElgRZZVCwXX9QzkKYNvO7x0wM=
+github.com/redis/go-redis/v9 v9.7.3/go.mod h1:bGUrSggJ9X9GUmZpZNEOQKaANxSGgOEBRltRTZHSvrA=
 github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
 github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
 github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
@@ -299,8 +299,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y
 go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
 go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
 go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
-goauthentik.io/api/v3 v3.2025021.2 h1:9y87piH47omtkWxQpKZaKai/+jh+cJdLxj5MC2Y/ZLI=
-goauthentik.io/api/v3 v3.2025021.2/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
+goauthentik.io/api/v3 v3.2025022.6 h1:M5M8Cd/1N7E8KLkvYYh7VdcdKz5nfzjKPFLK+YOtOVg=
+goauthentik.io/api/v3 v3.2025022.6/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@@ -313,8 +313,8 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY
 golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
 golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
 golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
-golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
-golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
+golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
+golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -386,8 +386,9 @@ golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
 golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
 golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
 golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
-golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
 golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
+golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c=
+golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -449,8 +450,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
-golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
+golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
 golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -471,8 +472,9 @@ golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
 golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
 golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
 golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
 golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
+golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
+golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
 golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -162,13 +162,14 @@ func (c *Config) parseScheme(rawVal string) string {
	if err != nil {
		return rawVal
	}
-	if u.Scheme == "env" {
+	switch u.Scheme {
+	case "env":
		e, ok := os.LookupEnv(u.Host)
		if ok {
			return e
		}
		return u.RawQuery
-	} else if u.Scheme == "file" {
+	case "file":
		d, err := os.ReadFile(u.Path)
		if err != nil {
			return u.RawQuery
@@ -10,7 +10,7 @@ import (
 )
 
 func TestConfigEnv(t *testing.T) {
-	os.Setenv("AUTHENTIK_SECRET_KEY", "bar")
+	assert.NoError(t, os.Setenv("AUTHENTIK_SECRET_KEY", "bar"))
	cfg = nil
	if err := Get().fromEnv(); err != nil {
		panic(err)
@@ -19,8 +19,8 @@ func TestConfigEnv(t *testing.T) {
 }
 
 func TestConfigEnv_Scheme(t *testing.T) {
-	os.Setenv("foo", "bar")
-	os.Setenv("AUTHENTIK_SECRET_KEY", "env://foo")
+	assert.NoError(t, os.Setenv("foo", "bar"))
+	assert.NoError(t, os.Setenv("AUTHENTIK_SECRET_KEY", "env://foo"))
	cfg = nil
	if err := Get().fromEnv(); err != nil {
		panic(err)
@@ -33,13 +33,15 @@ func TestConfigEnv_File(t *testing.T) {
	if err != nil {
		log.Fatal(err)
	}
-	defer os.Remove(file.Name())
+	defer func() {
+		assert.NoError(t, os.Remove(file.Name()))
+	}()
	_, err = file.Write([]byte("bar"))
	if err != nil {
		panic(err)
	}
 
-	os.Setenv("AUTHENTIK_SECRET_KEY", fmt.Sprintf("file://%s", file.Name()))
+	assert.NoError(t, os.Setenv("AUTHENTIK_SECRET_KEY", fmt.Sprintf("file://%s", file.Name())))
	cfg = nil
	if err := Get().fromEnv(); err != nil {
		panic(err)
@@ -29,4 +29,4 @@ func UserAgent() string {
	return fmt.Sprintf("authentik@%s", FullVersion())
 }
 
-const VERSION = "2025.2.1"
+const VERSION = "2025.2.2"
@@ -35,7 +35,7 @@ func EnableDebugServer() {
		if err != nil {
			return nil
		}
-		_, err = w.Write([]byte(fmt.Sprintf("<a href='%[1]s'>%[1]s</a><br>", tpl)))
+		_, err = fmt.Fprintf(w, "<a href='%[1]s'>%[1]s</a><br>", tpl)
		if err != nil {
			l.WithError(err).Warning("failed to write index")
			return nil
@@ -44,10 +44,11 @@ func New(healthcheck func() bool) *GoUnicorn {
	signal.Notify(c, syscall.SIGHUP, syscall.SIGUSR2)
	go func() {
		for sig := range c {
-			if sig == syscall.SIGHUP {
+			switch sig {
+			case syscall.SIGHUP:
				g.log.Info("SIGHUP received, forwarding to gunicorn")
				g.Reload()
-			} else if sig == syscall.SIGUSR2 {
+			case syscall.SIGUSR2:
				g.log.Info("SIGUSR2 received, restarting gunicorn")
				g.Restart()
			}
@@ -35,13 +35,19 @@ func Paginator[Tobj any, Treq any, Tres PaginatorResponse[Tobj]](
	req PaginatorRequest[Treq, Tres],
	opts PaginatorOptions,
 ) ([]Tobj, error) {
+	if opts.Logger == nil {
+		opts.Logger = log.NewEntry(log.StandardLogger())
+	}
	var bfreq, cfreq interface{}
	fetchOffset := func(page int32) (Tres, error) {
		bfreq = req.Page(page)
		cfreq = bfreq.(PaginatorRequest[Treq, Tres]).PageSize(int32(opts.PageSize))
-		res, _, err := cfreq.(PaginatorRequest[Treq, Tres]).Execute()
+		res, hres, err := cfreq.(PaginatorRequest[Treq, Tres]).Execute()
		if err != nil {
			opts.Logger.WithError(err).WithField("page", page).Warning("failed to fetch page")
+			if hres != nil && hres.StatusCode >= 400 && hres.StatusCode < 500 {
+				return res, err
+			}
		}
		return res, err
	}
@@ -51,6 +57,9 @@ func Paginator[Tobj any, Treq any, Tres PaginatorResponse[Tobj]](
	for {
		apiObjects, err := fetchOffset(page)
		if err != nil {
+			if page == 1 {
+				return objects, err
+			}
			errs = append(errs, err)
			continue
		}
@@ -1,5 +1,64 @@
 package ak
 
+import (
+	"errors"
+	"net/http"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"goauthentik.io/api/v3"
+)
+
+type fakeAPIType struct{}
+
+type fakeAPIResponse struct {
+	results    []fakeAPIType
+	pagination api.Pagination
+}
+
+func (fapi *fakeAPIResponse) GetResults() []fakeAPIType     { return fapi.results }
+func (fapi *fakeAPIResponse) GetPagination() api.Pagination { return fapi.pagination }
+
+type fakeAPIRequest struct {
+	res  *fakeAPIResponse
+	http *http.Response
+	err  error
+}
+
+func (fapi *fakeAPIRequest) Page(page int32) *fakeAPIRequest     { return fapi }
+func (fapi *fakeAPIRequest) PageSize(size int32) *fakeAPIRequest { return fapi }
+func (fapi *fakeAPIRequest) Execute() (*fakeAPIResponse, *http.Response, error) {
+	return fapi.res, fapi.http, fapi.err
+}
+
+func Test_Simple(t *testing.T) {
+	req := &fakeAPIRequest{
+		res: &fakeAPIResponse{
+			results: []fakeAPIType{
+				{},
+			},
+			pagination: api.Pagination{
+				TotalPages: 1,
+			},
+		},
+	}
+	res, err := Paginator(req, PaginatorOptions{})
+	assert.NoError(t, err)
+	assert.Len(t, res, 1)
+}
+
+func Test_BadRequest(t *testing.T) {
+	req := &fakeAPIRequest{
+		http: &http.Response{
+			StatusCode: 400,
+		},
+		err: errors.New("foo"),
+	}
+	res, err := Paginator(req, PaginatorOptions{})
+	assert.Error(t, err)
+	assert.Equal(t, []fakeAPIType{}, res)
+}
+
 // func Test_PaginatorCompile(t *testing.T) {
 // 	req := api.ApiCoreUsersListRequest{}
 // 	Paginator(req, PaginatorOptions{
@@ -148,7 +148,8 @@ func (ac *APIController) startWSHandler() {
			"outpost_type": ac.Server.Type(),
			"uuid":         ac.instanceUUID.String(),
		}).Set(1)
-		if wsMsg.Instruction == WebsocketInstructionTriggerUpdate {
+		switch wsMsg.Instruction {
+		case WebsocketInstructionTriggerUpdate:
			time.Sleep(ac.reloadOffset)
			logger.Debug("Got update trigger...")
			err := ac.OnRefresh()
@@ -163,7 +164,7 @@ func (ac *APIController) startWSHandler() {
					"build":   constants.BUILD(""),
				}).SetToCurrentTime()
			}
-		} else if wsMsg.Instruction == WebsocketInstructionProviderSpecific {
+		case WebsocketInstructionProviderSpecific:
			for _, h := range ac.wsHandlers {
				h(context.Background(), wsMsg.Args)
			}
@@ -66,7 +66,12 @@ func (ls *LDAPServer) StartLDAPServer() error {
		return err
	}
	proxyListener := &proxyproto.Listener{Listener: ln, ConnPolicy: utils.GetProxyConnectionPolicy()}
-	defer proxyListener.Close()
+	defer func() {
+		err := proxyListener.Close()
+		if err != nil {
+			ls.log.WithError(err).Warning("failed to close proxy listener")
+		}
+	}()
 
	ls.log.WithField("listen", listen).Info("Starting LDAP server")
	err = ls.s.Serve(proxyListener)
@@ -49,7 +49,12 @@ func (ls *LDAPServer) StartLDAPTLSServer() error {
	}
 
	proxyListener := &proxyproto.Listener{Listener: ln, ConnPolicy: utils.GetProxyConnectionPolicy()}
-	defer proxyListener.Close()
+	defer func() {
+		err := proxyListener.Close()
+		if err != nil {
+			ls.log.WithError(err).Warning("failed to close proxy listener")
+		}
+	}()
 
	tln := tls.NewListener(proxyListener, tlsConfig)
 
@@ -98,7 +98,7 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
 
	entries := make([]*ldap.Entry, 0)
 
-	scope := req.SearchRequest.Scope
+	scope := req.Scope
	needUsers, needGroups := ms.si.GetNeededObjects(scope, req.BaseDN, req.FilterObjectClass)
 
	if scope >= 0 && strings.EqualFold(req.BaseDN, baseDN) {
@@ -56,7 +56,7 @@ func GetOIDCEndpoint(p api.ProxyOutpostConfig, authentikHost string, embedded bool) OIDCEndpoint {
	if !embedded && hostBrowser == "" {
		return ep
	}
-	var newHost *url.URL = aku
+	var newHost = aku
	var newBrowserHost *url.URL
	if embedded {
		if authentikHost == "" {
@@ -130,7 +130,12 @@ func (ps *ProxyServer) ServeHTTP() {
		return
	}
	proxyListener := &proxyproto.Listener{Listener: listener, ConnPolicy: utils.GetProxyConnectionPolicy()}
-	defer proxyListener.Close()
+	defer func() {
+		err := proxyListener.Close()
+		if err != nil {
+			ps.log.WithError(err).Warning("failed to close proxy listener")
+		}
+	}()
 
	ps.log.WithField("listen", listenAddress).Info("Starting HTTP server")
	ps.serve(proxyListener)
@@ -149,7 +154,12 @@ func (ps *ProxyServer) ServeHTTPS() {
		return
	}
	proxyListener := &proxyproto.Listener{Listener: web.TCPKeepAliveListener{TCPListener: ln.(*net.TCPListener)}, ConnPolicy: utils.GetProxyConnectionPolicy()}
-	defer proxyListener.Close()
+	defer func() {
+		err := proxyListener.Close()
+		if err != nil {
+			ps.log.WithError(err).Warning("failed to close proxy listener")
+		}
+	}()
 
	tlsListener := tls.NewListener(proxyListener, tlsConfig)
	ps.log.WithField("listen", listenAddress).Info("Starting HTTPS server")
@@ -72,11 +72,13 @@ func (s *RedisStore) New(r *http.Request, name string) (*sessions.Session, error) {
	session.ID = c.Value
 
	err = s.load(r.Context(), session)
-	if err == nil {
-		session.IsNew = false
-	} else if err == redis.Nil {
-		err = nil // no data stored
+	if err != nil {
+		if errors.Is(err, redis.Nil) {
+			return session, nil
+		}
+		return session, err
	}
+	session.IsNew = false
	return session, err
 }
 
@@ -8,7 +8,6 @@
     <link rel="shortcut icon" type="image/png" href="/outpost.goauthentik.io/static/dist/assets/icons/icon.png">
     <link rel="stylesheet" type="text/css" href="/outpost.goauthentik.io/static/dist/patternfly.min.css">
     <link rel="stylesheet" type="text/css" href="/outpost.goauthentik.io/static/dist/authentik.css">
-    <link rel="stylesheet" type="text/css" href="/outpost.goauthentik.io/static/dist/custom.css">
     <link rel="prefetch" href="/outpost.goauthentik.io/static/dist/assets/images/flow_background.jpg" />
     <style>
       .pf-c-background-image::before {
@@ -156,7 +156,12 @@ func (ws *WebServer) listenPlain() {
		return
	}
	proxyListener := &proxyproto.Listener{Listener: ln, ConnPolicy: utils.GetProxyConnectionPolicy()}
-	defer proxyListener.Close()
+	defer func() {
+		err := proxyListener.Close()
+		if err != nil {
+			ws.log.WithError(err).Warning("failed to close proxy listener")
+		}
+	}()
 
	ws.log.WithField("listen", config.Get().Listen.HTTP).Info("Starting HTTP server")
	ws.serve(proxyListener)
Some files were not shown because too many files have changed in this diff.