Compare commits
206 Commits
enterprise
...
policies/p
| Author | SHA1 | Date | |
|---|---|---|---|
| b3883f7fbf | |||
| 87c6b0128a | |||
| b243c97916 | |||
| 3f66527521 | |||
| 2f7c258657 | |||
| 917c90374f | |||
| e9c944c0d5 | |||
| b865e97973 | |||
| 24a364bd6b | |||
| 65579c0a2b | |||
| de20897321 | |||
| 39f7bc8e9b | |||
| 4ade549ce2 | |||
| a4d87ef011 | |||
| b851c3daaf | |||
| 198af84b3b | |||
| 69ced3ae02 | |||
| 4a2f58561b | |||
| 8becaf3418 | |||
| bcfbc46839 | |||
| af287ee7b0 | |||
| ebf3d12874 | |||
| 7fbdd0452e | |||
| 18298a856f | |||
| ef6836207a | |||
| 5ad176adf2 | |||
| 011afc8b2f | |||
| 4c32c1503b | |||
| 774a8e6eeb | |||
| 297d7f100a | |||
| 0d3692a619 | |||
| ba20748b07 | |||
| 3fc296ad0b | |||
| 0aba428787 | |||
| 4a88e29de6 | |||
| 0d6fced7d8 | |||
| 29c6c1e33b | |||
| e2e8b7c114 | |||
| bf2e854f12 | |||
| 3fbc059f2d | |||
| e051e8ebd8 | |||
| 880a99efe5 | |||
| 27d5063d16 | |||
| e130bca344 | |||
| 325d590679 | |||
| f40a4b5076 | |||
| 89a19f6e4c | |||
| 9bc51c683e | |||
| 3d2bd4d8dd | |||
| 46a968d1dd | |||
| 49cc70eb96 | |||
| 143b02b51a | |||
| 5904fae80b | |||
| 6f9479a085 | |||
| ce10dbfa4e | |||
| 394881dcd3 | |||
| a6e322507c | |||
| 755e2f1507 | |||
| d41c9eb442 | |||
| dea48e6ac7 | |||
| 1614f3174f | |||
| d18950f7bb | |||
| 4fe533a92f | |||
| 82d4e8aa4e | |||
| 98129d3e9a | |||
| 98f3b9ae97 | |||
| bd69dbc0e1 | |||
| ac4d6ae9f6 | |||
| cdc0d0a857 | |||
| 3656c38aa0 | |||
| fe4e364492 | |||
| ce86cbe2a0 | |||
| 8f0e9ff534 | |||
| ff60607851 | |||
| b6cf27b421 | |||
| 9457c80d62 | |||
| 409035b692 | |||
| 7798d16e01 | |||
| 8f16a182aa | |||
| 50c68df0a1 | |||
| 556248c7c9 | |||
| ed2e2380cc | |||
| 1f79b5acb7 | |||
| 6185e7cdc7 | |||
| aedce2a6a1 | |||
| fefa189ff4 | |||
| b5bdad6804 | |||
| 1d03f92dee | |||
| 01b20153ca | |||
| 83a2728500 | |||
| c57f17bff8 | |||
| 5533f7dd7a | |||
| daebeb1192 | |||
| 26a08fcaac | |||
| 330fc8cee3 | |||
| 205c01038f | |||
| 23eb93c981 | |||
| 5679352c15 | |||
| fb7d637da1 | |||
| cee48909e9 | |||
| 6549b303d5 | |||
| e2d6d3860c | |||
| 91155f9ce3 | |||
| bdcd1059dd | |||
| e4b6df3f27 | |||
| 7a6d7919c8 | |||
| fda9b137a7 | |||
| 7686d12f1b | |||
| 34ee29227a | |||
| 334e2c466f | |||
| 7c944b954c | |||
| 427a8c91c8 | |||
| 22d6dd3098 | |||
| 36c81a30ad | |||
| f7dc7faea5 | |||
| 62720e6c51 | |||
| 64dfe7e3c2 | |||
| c803b4da51 | |||
| 3568cd601f | |||
| 8cad66536c | |||
| 220e79e668 | |||
| 316f43e6eb | |||
| b7053dfffd | |||
| fccdaaf210 | |||
| cf530c6f31 | |||
| 94d84ae1dc | |||
| de1bb03619 | |||
| e41d86bd2a | |||
| a10e6b7fd7 | |||
| 92d6d74c2d | |||
| 773c57b8d7 | |||
| 692a6be07f | |||
| 645323cd02 | |||
| 06d57a7574 | |||
| 102c7e4c5c | |||
| 7e7ed83dfe | |||
| 141ced8317 | |||
| 5109af0ab4 | |||
| 1a1912e391 | |||
| 6702652824 | |||
| b04ff5bbee | |||
| 3daa39080a | |||
| d3d6040e23 | |||
| e08ccf4ca0 | |||
| 0e346c6e7c | |||
| 62187e60d4 | |||
| 467b1fcd14 | |||
| 9e2fccb045 | |||
| 39d8b41357 | |||
| 0a0f8433c6 | |||
| 3b61e08d3d | |||
| 921e1923b0 | |||
| a666c20c40 | |||
| 1ed96fd5a5 | |||
| f245dada2c | |||
| 7d8094d9c4 | |||
| d63cba0a9d | |||
| fdc3de8646 | |||
| 7163d333dc | |||
| 02bdf093e0 | |||
| 1ce3dfd17f | |||
| ce7e539f59 | |||
| 12e6282316 | |||
| 3253de73ec | |||
| afe8ab7850 | |||
| f2e3199050 | |||
| 04148e08a7 | |||
| 656b296d6e | |||
| f76014710c | |||
| 04517d46b0 | |||
| 365e9c9ca3 | |||
| 5b01f44333 | |||
| 388b29ef87 | |||
| 7659afdd30 | |||
| faab182404 | |||
| 90a85abf9d | |||
| 4d061e1af9 | |||
| 0720b3db3c | |||
| 236455fc45 | |||
| ac08805d73 | |||
| 656beebd63 | |||
| 6430cdcd68 | |||
| b8c97eb7c1 | |||
| 9eef9ee230 | |||
| 84cc2b4f11 | |||
| e988799e12 | |||
| 7c71f9fcac | |||
| 1eeb85a4e7 | |||
| 4182ead0b9 | |||
| dc45e8c08c | |||
| d111740f6b | |||
| 4597ee45f8 | |||
| 735f48981d | |||
| f35457492b | |||
| af9ba83529 | |||
| 3c6cb9dbad | |||
| 1d63359077 | |||
| 33121d86f2 | |||
| 0c235909a2 | |||
| 91ef8c2c8d | |||
| 4ee45bb5cc | |||
| b4ae3ba390 | |||
| f3834016dc | |||
| 661a966e23 | |||
| 813273338e | |||
| 99639a9ed0 |
@ -1,5 +1,5 @@
|
|||||||
[bumpversion]
|
[bumpversion]
|
||||||
current_version = 2024.12.2
|
current_version = 2024.12.3
|
||||||
tag = True
|
tag = True
|
||||||
commit = True
|
commit = True
|
||||||
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
|
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
|
||||||
@ -31,4 +31,4 @@ optional_value = final
|
|||||||
|
|
||||||
[bumpversion:file:web/src/common/constants.ts]
|
[bumpversion:file:web/src/common/constants.ts]
|
||||||
|
|
||||||
[bumpversion:file:website/docs/install-config/install/aws/template.yaml]
|
[bumpversion:file:lifecycle/aws/template.yaml]
|
||||||
|
|||||||
@ -35,14 +35,6 @@ runs:
|
|||||||
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
|
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
|
||||||
```
|
```
|
||||||
|
|
||||||
For arm64, use these values:
|
|
||||||
|
|
||||||
```shell
|
|
||||||
AUTHENTIK_IMAGE=ghcr.io/goauthentik/dev-server
|
|
||||||
AUTHENTIK_TAG=${{ inputs.tag }}-arm64
|
|
||||||
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
|
|
||||||
```
|
|
||||||
|
|
||||||
Afterwards, run the upgrade commands from the latest release notes.
|
Afterwards, run the upgrade commands from the latest release notes.
|
||||||
</details>
|
</details>
|
||||||
<details>
|
<details>
|
||||||
@ -60,18 +52,6 @@ runs:
|
|||||||
tag: ${{ inputs.tag }}
|
tag: ${{ inputs.tag }}
|
||||||
```
|
```
|
||||||
|
|
||||||
For arm64, use these values:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
authentik:
|
|
||||||
outposts:
|
|
||||||
container_image_base: ghcr.io/goauthentik/dev-%(type)s:gh-%(build_hash)s
|
|
||||||
global:
|
|
||||||
image:
|
|
||||||
repository: ghcr.io/goauthentik/dev-server
|
|
||||||
tag: ${{ inputs.tag }}-arm64
|
|
||||||
```
|
|
||||||
|
|
||||||
Afterwards, run the upgrade commands from the latest release notes.
|
Afterwards, run the upgrade commands from the latest release notes.
|
||||||
</details>
|
</details>
|
||||||
edit-mode: replace
|
edit-mode: replace
|
||||||
|
|||||||
14
.github/actions/docker-push-variables/action.yml
vendored
14
.github/actions/docker-push-variables/action.yml
vendored
@ -9,6 +9,9 @@ inputs:
|
|||||||
image-arch:
|
image-arch:
|
||||||
required: false
|
required: false
|
||||||
description: "Docker image arch"
|
description: "Docker image arch"
|
||||||
|
release:
|
||||||
|
required: true
|
||||||
|
description: "True if this is a release build, false if this is a dev/PR build"
|
||||||
|
|
||||||
outputs:
|
outputs:
|
||||||
shouldPush:
|
shouldPush:
|
||||||
@ -29,15 +32,24 @@ outputs:
|
|||||||
imageTags:
|
imageTags:
|
||||||
description: "Docker image tags"
|
description: "Docker image tags"
|
||||||
value: ${{ steps.ev.outputs.imageTags }}
|
value: ${{ steps.ev.outputs.imageTags }}
|
||||||
|
imageTagsJSON:
|
||||||
|
description: "Docker image tags, as a JSON array"
|
||||||
|
value: ${{ steps.ev.outputs.imageTagsJSON }}
|
||||||
attestImageNames:
|
attestImageNames:
|
||||||
description: "Docker image names used for attestation"
|
description: "Docker image names used for attestation"
|
||||||
value: ${{ steps.ev.outputs.attestImageNames }}
|
value: ${{ steps.ev.outputs.attestImageNames }}
|
||||||
|
cacheTo:
|
||||||
|
description: "cache-to value for the docker build step"
|
||||||
|
value: ${{ steps.ev.outputs.cacheTo }}
|
||||||
imageMainTag:
|
imageMainTag:
|
||||||
description: "Docker image main tag"
|
description: "Docker image main tag"
|
||||||
value: ${{ steps.ev.outputs.imageMainTag }}
|
value: ${{ steps.ev.outputs.imageMainTag }}
|
||||||
imageMainName:
|
imageMainName:
|
||||||
description: "Docker image main name"
|
description: "Docker image main name"
|
||||||
value: ${{ steps.ev.outputs.imageMainName }}
|
value: ${{ steps.ev.outputs.imageMainName }}
|
||||||
|
imageBuildArgs:
|
||||||
|
description: "Docker image build args"
|
||||||
|
value: ${{ steps.ev.outputs.imageBuildArgs }}
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
@ -48,6 +60,8 @@ runs:
|
|||||||
env:
|
env:
|
||||||
IMAGE_NAME: ${{ inputs.image-name }}
|
IMAGE_NAME: ${{ inputs.image-name }}
|
||||||
IMAGE_ARCH: ${{ inputs.image-arch }}
|
IMAGE_ARCH: ${{ inputs.image-arch }}
|
||||||
|
RELEASE: ${{ inputs.release }}
|
||||||
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
|
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
|
||||||
|
REF: ${{ github.ref }}
|
||||||
run: |
|
run: |
|
||||||
python3 ${{ github.action_path }}/push_vars.py
|
python3 ${{ github.action_path }}/push_vars.py
|
||||||
|
|||||||
@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
import configparser
|
import configparser
|
||||||
import os
|
import os
|
||||||
|
from json import dumps
|
||||||
from time import time
|
from time import time
|
||||||
|
|
||||||
parser = configparser.ConfigParser()
|
parser = configparser.ConfigParser()
|
||||||
@ -48,7 +49,7 @@ if is_release:
|
|||||||
]
|
]
|
||||||
else:
|
else:
|
||||||
suffix = ""
|
suffix = ""
|
||||||
if image_arch and image_arch != "amd64":
|
if image_arch:
|
||||||
suffix = f"-{image_arch}"
|
suffix = f"-{image_arch}"
|
||||||
for name in image_names:
|
for name in image_names:
|
||||||
image_tags += [
|
image_tags += [
|
||||||
@ -70,12 +71,31 @@ def get_attest_image_names(image_with_tags: list[str]):
|
|||||||
return ",".join(set(image_tags))
|
return ",".join(set(image_tags))
|
||||||
|
|
||||||
|
|
||||||
|
# Generate `cache-to` param
|
||||||
|
cache_to = ""
|
||||||
|
if should_push:
|
||||||
|
_cache_tag = "buildcache"
|
||||||
|
if image_arch:
|
||||||
|
_cache_tag += f"-{image_arch}"
|
||||||
|
cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
|
||||||
|
|
||||||
|
|
||||||
|
image_build_args = []
|
||||||
|
if os.getenv("RELEASE", "false").lower() == "true":
|
||||||
|
image_build_args = [f"VERSION={os.getenv('REF')}"]
|
||||||
|
else:
|
||||||
|
image_build_args = [f"GIT_BUILD_HASH={sha}"]
|
||||||
|
image_build_args = "\n".join(image_build_args)
|
||||||
|
|
||||||
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
|
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
|
||||||
print(f"shouldPush={str(should_push).lower()}", file=_output)
|
print(f"shouldPush={str(should_push).lower()}", file=_output)
|
||||||
print(f"sha={sha}", file=_output)
|
print(f"sha={sha}", file=_output)
|
||||||
print(f"version={version}", file=_output)
|
print(f"version={version}", file=_output)
|
||||||
print(f"prerelease={prerelease}", file=_output)
|
print(f"prerelease={prerelease}", file=_output)
|
||||||
print(f"imageTags={','.join(image_tags)}", file=_output)
|
print(f"imageTags={','.join(image_tags)}", file=_output)
|
||||||
|
print(f"imageTagsJSON={dumps(image_tags)}", file=_output)
|
||||||
print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output)
|
print(f"attestImageNames={get_attest_image_names(image_tags)}", file=_output)
|
||||||
print(f"imageMainTag={image_main_tag}", file=_output)
|
print(f"imageMainTag={image_main_tag}", file=_output)
|
||||||
print(f"imageMainName={image_tags[0]}", file=_output)
|
print(f"imageMainName={image_tags[0]}", file=_output)
|
||||||
|
print(f"cacheTo={cache_to}", file=_output)
|
||||||
|
print(f"imageBuildArgs={image_build_args}", file=_output)
|
||||||
|
|||||||
11
.github/actions/docker-push-variables/test.sh
vendored
11
.github/actions/docker-push-variables/test.sh
vendored
@ -1,7 +1,18 @@
|
|||||||
#!/bin/bash -x
|
#!/bin/bash -x
|
||||||
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
|
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
|
||||||
|
# Non-pushing PR
|
||||||
GITHUB_OUTPUT=/dev/stdout \
|
GITHUB_OUTPUT=/dev/stdout \
|
||||||
GITHUB_REF=ref \
|
GITHUB_REF=ref \
|
||||||
GITHUB_SHA=sha \
|
GITHUB_SHA=sha \
|
||||||
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
|
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
|
||||||
|
GITHUB_REPOSITORY=goauthentik/authentik \
|
||||||
|
python $SCRIPT_DIR/push_vars.py
|
||||||
|
|
||||||
|
# Pushing PR/main
|
||||||
|
GITHUB_OUTPUT=/dev/stdout \
|
||||||
|
GITHUB_REF=ref \
|
||||||
|
GITHUB_SHA=sha \
|
||||||
|
IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
|
||||||
|
GITHUB_REPOSITORY=goauthentik/authentik \
|
||||||
|
DOCKER_USERNAME=foo \
|
||||||
python $SCRIPT_DIR/push_vars.py
|
python $SCRIPT_DIR/push_vars.py
|
||||||
|
|||||||
10
.github/dependabot.yml
vendored
10
.github/dependabot.yml
vendored
@ -82,6 +82,16 @@ updates:
|
|||||||
docusaurus:
|
docusaurus:
|
||||||
patterns:
|
patterns:
|
||||||
- "@docusaurus/*"
|
- "@docusaurus/*"
|
||||||
|
- package-ecosystem: npm
|
||||||
|
directory: "/lifecycle/aws"
|
||||||
|
schedule:
|
||||||
|
interval: daily
|
||||||
|
time: "04:00"
|
||||||
|
open-pull-requests-limit: 10
|
||||||
|
commit-message:
|
||||||
|
prefix: "lifecycle/aws:"
|
||||||
|
labels:
|
||||||
|
- dependencies
|
||||||
- package-ecosystem: pip
|
- package-ecosystem: pip
|
||||||
directory: "/"
|
directory: "/"
|
||||||
schedule:
|
schedule:
|
||||||
|
|||||||
96
.github/workflows/_reusable-docker-build-single.yaml
vendored
Normal file
96
.github/workflows/_reusable-docker-build-single.yaml
vendored
Normal file
@ -0,0 +1,96 @@
|
|||||||
|
# Re-usable workflow for a single-architecture build
|
||||||
|
name: Single-arch Container build
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
image_name:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
image_arch:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
runs-on:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
registry_dockerhub:
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
registry_ghcr:
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
release:
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
outputs:
|
||||||
|
image-digest:
|
||||||
|
value: ${{ jobs.build.outputs.image-digest }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
name: Build ${{ inputs.image_arch }}
|
||||||
|
runs-on: ${{ inputs.runs-on }}
|
||||||
|
outputs:
|
||||||
|
image-digest: ${{ steps.push.outputs.digest }}
|
||||||
|
permissions:
|
||||||
|
# Needed to upload container images to ghcr.io
|
||||||
|
packages: write
|
||||||
|
# Needed for attestation
|
||||||
|
id-token: write
|
||||||
|
attestations: write
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: docker/setup-qemu-action@v3.4.0
|
||||||
|
- uses: docker/setup-buildx-action@v3
|
||||||
|
- name: prepare variables
|
||||||
|
uses: ./.github/actions/docker-push-variables
|
||||||
|
id: ev
|
||||||
|
env:
|
||||||
|
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||||
|
with:
|
||||||
|
image-name: ${{ inputs.image_name }}
|
||||||
|
image-arch: ${{ inputs.image_arch }}
|
||||||
|
release: ${{ inputs.release }}
|
||||||
|
- name: Login to Docker Hub
|
||||||
|
if: ${{ inputs.registry_dockerhub }}
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKER_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
|
- name: Login to GitHub Container Registry
|
||||||
|
if: ${{ inputs.registry_ghcr }}
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.repository_owner }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- name: make empty clients
|
||||||
|
if: ${{ inputs.release }}
|
||||||
|
run: |
|
||||||
|
mkdir -p ./gen-ts-api
|
||||||
|
mkdir -p ./gen-go-api
|
||||||
|
- name: generate ts client
|
||||||
|
if: ${{ !inputs.release }}
|
||||||
|
run: make gen-client-ts
|
||||||
|
- name: Build Docker Image
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
id: push
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
|
||||||
|
secrets: |
|
||||||
|
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
||||||
|
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
||||||
|
build-args: |
|
||||||
|
${{ steps.ev.outputs.imageBuildArgs }}
|
||||||
|
tags: ${{ steps.ev.outputs.imageTags }}
|
||||||
|
platforms: linux/${{ inputs.image_arch }}
|
||||||
|
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
|
||||||
|
cache-to: ${{ steps.ev.outputs.cacheTo }}
|
||||||
|
- uses: actions/attest-build-provenance@v2
|
||||||
|
id: attest
|
||||||
|
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
|
||||||
|
with:
|
||||||
|
subject-name: ${{ steps.ev.outputs.attestImageNames }}
|
||||||
|
subject-digest: ${{ steps.push.outputs.digest }}
|
||||||
|
push-to-registry: true
|
||||||
104
.github/workflows/_reusable-docker-build.yaml
vendored
Normal file
104
.github/workflows/_reusable-docker-build.yaml
vendored
Normal file
@ -0,0 +1,104 @@
|
|||||||
|
# Re-usable workflow for a multi-architecture build
|
||||||
|
name: Multi-arch container build
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
image_name:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
registry_dockerhub:
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
registry_ghcr:
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
|
release:
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
outputs: {}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-server-amd64:
|
||||||
|
uses: ./.github/workflows/_reusable-docker-build-single.yaml
|
||||||
|
secrets: inherit
|
||||||
|
with:
|
||||||
|
image_name: ${{ inputs.image_name }}
|
||||||
|
image_arch: amd64
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
registry_dockerhub: ${{ inputs.registry_dockerhub }}
|
||||||
|
registry_ghcr: ${{ inputs.registry_ghcr }}
|
||||||
|
release: ${{ inputs.release }}
|
||||||
|
build-server-arm64:
|
||||||
|
uses: ./.github/workflows/_reusable-docker-build-single.yaml
|
||||||
|
secrets: inherit
|
||||||
|
with:
|
||||||
|
image_name: ${{ inputs.image_name }}
|
||||||
|
image_arch: arm64
|
||||||
|
runs-on: ubuntu-22.04-arm
|
||||||
|
registry_dockerhub: ${{ inputs.registry_dockerhub }}
|
||||||
|
registry_ghcr: ${{ inputs.registry_ghcr }}
|
||||||
|
release: ${{ inputs.release }}
|
||||||
|
get-tags:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- build-server-amd64
|
||||||
|
- build-server-arm64
|
||||||
|
outputs:
|
||||||
|
tags: ${{ steps.ev.outputs.imageTagsJSON }}
|
||||||
|
shouldPush: ${{ steps.ev.outputs.shouldPush }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: prepare variables
|
||||||
|
uses: ./.github/actions/docker-push-variables
|
||||||
|
id: ev
|
||||||
|
env:
|
||||||
|
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||||
|
with:
|
||||||
|
image-name: ${{ inputs.image_name }}
|
||||||
|
merge-server:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: ${{ needs.get-tags.outputs.shouldPush == 'true' }}
|
||||||
|
needs:
|
||||||
|
- get-tags
|
||||||
|
- build-server-amd64
|
||||||
|
- build-server-arm64
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
tag: ${{ fromJson(needs.get-tags.outputs.tags) }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: prepare variables
|
||||||
|
uses: ./.github/actions/docker-push-variables
|
||||||
|
id: ev
|
||||||
|
env:
|
||||||
|
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||||
|
with:
|
||||||
|
image-name: ${{ inputs.image_name }}
|
||||||
|
- name: Login to Docker Hub
|
||||||
|
if: ${{ inputs.registry_dockerhub }}
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKER_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
|
- name: Login to GitHub Container Registry
|
||||||
|
if: ${{ inputs.registry_ghcr }}
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.repository_owner }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- uses: int128/docker-manifest-create-action@v2
|
||||||
|
id: build
|
||||||
|
with:
|
||||||
|
tags: ${{ matrix.tag }}
|
||||||
|
sources: |
|
||||||
|
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
|
||||||
|
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
|
||||||
|
- uses: actions/attest-build-provenance@v2
|
||||||
|
id: attest
|
||||||
|
with:
|
||||||
|
subject-name: ${{ steps.ev.outputs.attestImageNames }}
|
||||||
|
subject-digest: ${{ steps.build.outputs.digest }}
|
||||||
|
push-to-registry: true
|
||||||
6
.github/workflows/ci-aws-cfn.yml
vendored
6
.github/workflows/ci-aws-cfn.yml
vendored
@ -25,10 +25,10 @@ jobs:
|
|||||||
uses: ./.github/actions/setup
|
uses: ./.github/actions/setup
|
||||||
- uses: actions/setup-node@v4
|
- uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version-file: website/package.json
|
node-version-file: lifecycle/aws/package.json
|
||||||
cache: "npm"
|
cache: "npm"
|
||||||
cache-dependency-path: website/package-lock.json
|
cache-dependency-path: lifecycle/aws/package-lock.json
|
||||||
- working-directory: website/
|
- working-directory: lifecycle/aws/
|
||||||
run: |
|
run: |
|
||||||
npm ci
|
npm ci
|
||||||
- name: Check changes have been applied
|
- name: Check changes have been applied
|
||||||
|
|||||||
28
.github/workflows/ci-main-daily.yml
vendored
Normal file
28
.github/workflows/ci-main-daily.yml
vendored
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
---
|
||||||
|
name: authentik-ci-main-daily
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
schedule:
|
||||||
|
# Every night at 3am
|
||||||
|
- cron: "0 3 * * *"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test-container:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
version:
|
||||||
|
- docs
|
||||||
|
- version-2024-12
|
||||||
|
- version-2024-10
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- run: |
|
||||||
|
current="$(pwd)"
|
||||||
|
dir="/tmp/authentik/${{ matrix.version }}"
|
||||||
|
mkdir -p $dir
|
||||||
|
cd $dir
|
||||||
|
wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
|
||||||
|
${current}/scripts/test_docker.sh
|
||||||
97
.github/workflows/ci-main.yml
vendored
97
.github/workflows/ci-main.yml
vendored
@ -43,15 +43,26 @@ jobs:
|
|||||||
uses: ./.github/actions/setup
|
uses: ./.github/actions/setup
|
||||||
- name: run migrations
|
- name: run migrations
|
||||||
run: poetry run python -m lifecycle.migrate
|
run: poetry run python -m lifecycle.migrate
|
||||||
test-migrations-from-stable:
|
test-make-seed:
|
||||||
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- id: seed
|
||||||
|
run: |
|
||||||
|
echo "seed=$(printf "%d\n" "0x$(openssl rand -hex 4)")" >> "$GITHUB_OUTPUT"
|
||||||
|
outputs:
|
||||||
|
seed: ${{ steps.seed.outputs.seed }}
|
||||||
|
test-migrations-from-stable:
|
||||||
|
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 20
|
||||||
|
needs: test-make-seed
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
psql:
|
psql:
|
||||||
- 15-alpine
|
- 15-alpine
|
||||||
- 16-alpine
|
- 16-alpine
|
||||||
|
run_id: [1, 2, 3, 4, 5]
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
@ -93,18 +104,23 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
# Test in the main database that we just migrated from the previous stable version
|
# Test in the main database that we just migrated from the previous stable version
|
||||||
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
|
AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
|
||||||
|
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
|
||||||
|
CI_RUN_ID: ${{ matrix.run_id }}
|
||||||
|
CI_TOTAL_RUNS: "5"
|
||||||
run: |
|
run: |
|
||||||
poetry run make test
|
poetry run make ci-test
|
||||||
test-unittest:
|
test-unittest:
|
||||||
name: test-unittest - PostgreSQL ${{ matrix.psql }}
|
name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 20
|
||||||
|
needs: test-make-seed
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
psql:
|
psql:
|
||||||
- 15-alpine
|
- 15-alpine
|
||||||
- 16-alpine
|
- 16-alpine
|
||||||
|
run_id: [1, 2, 3, 4, 5]
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Setup authentik env
|
- name: Setup authentik env
|
||||||
@ -112,9 +128,12 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
postgresql_version: ${{ matrix.psql }}
|
postgresql_version: ${{ matrix.psql }}
|
||||||
- name: run unittest
|
- name: run unittest
|
||||||
|
env:
|
||||||
|
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
|
||||||
|
CI_RUN_ID: ${{ matrix.run_id }}
|
||||||
|
CI_TOTAL_RUNS: "5"
|
||||||
run: |
|
run: |
|
||||||
poetry run make test
|
poetry run make ci-test
|
||||||
poetry run coverage xml
|
|
||||||
- if: ${{ always() }}
|
- if: ${{ always() }}
|
||||||
uses: codecov/codecov-action@v5
|
uses: codecov/codecov-action@v5
|
||||||
with:
|
with:
|
||||||
@ -223,68 +242,18 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
jobs: ${{ toJSON(needs) }}
|
jobs: ${{ toJSON(needs) }}
|
||||||
build:
|
build:
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
arch:
|
|
||||||
- amd64
|
|
||||||
- arm64
|
|
||||||
needs: ci-core-mark
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
permissions:
|
||||||
# Needed to upload contianer images to ghcr.io
|
# Needed to upload container images to ghcr.io
|
||||||
packages: write
|
packages: write
|
||||||
# Needed for attestation
|
# Needed for attestation
|
||||||
id-token: write
|
id-token: write
|
||||||
attestations: write
|
attestations: write
|
||||||
timeout-minutes: 120
|
needs: ci-core-mark
|
||||||
steps:
|
uses: ./.github/workflows/_reusable-docker-build.yaml
|
||||||
- uses: actions/checkout@v4
|
secrets: inherit
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
image_name: ghcr.io/goauthentik/dev-server
|
||||||
- name: Set up QEMU
|
release: false
|
||||||
uses: docker/setup-qemu-action@v3.3.0
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
- name: prepare variables
|
|
||||||
uses: ./.github/actions/docker-push-variables
|
|
||||||
id: ev
|
|
||||||
env:
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
|
||||||
with:
|
|
||||||
image-name: ghcr.io/goauthentik/dev-server
|
|
||||||
image-arch: ${{ matrix.arch }}
|
|
||||||
- name: Login to Container Registry
|
|
||||||
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.repository_owner }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
- name: generate ts client
|
|
||||||
run: make gen-client-ts
|
|
||||||
- name: Build Docker Image
|
|
||||||
uses: docker/build-push-action@v6
|
|
||||||
id: push
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
secrets: |
|
|
||||||
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
|
||||||
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
|
||||||
tags: ${{ steps.ev.outputs.imageTags }}
|
|
||||||
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
|
|
||||||
build-args: |
|
|
||||||
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
|
|
||||||
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache
|
|
||||||
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-server:buildcache,mode=max' || '' }}
|
|
||||||
platforms: linux/${{ matrix.arch }}
|
|
||||||
- uses: actions/attest-build-provenance@v2
|
|
||||||
id: attest
|
|
||||||
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
|
|
||||||
with:
|
|
||||||
subject-name: ${{ steps.ev.outputs.attestImageNames }}
|
|
||||||
subject-digest: ${{ steps.push.outputs.digest }}
|
|
||||||
push-to-registry: true
|
|
||||||
pr-comment:
|
pr-comment:
|
||||||
needs:
|
needs:
|
||||||
- build
|
- build
|
||||||
|
|||||||
4
.github/workflows/ci-outpost.yml
vendored
4
.github/workflows/ci-outpost.yml
vendored
@ -72,7 +72,7 @@ jobs:
|
|||||||
- rac
|
- rac
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
# Needed to upload contianer images to ghcr.io
|
# Needed to upload container images to ghcr.io
|
||||||
packages: write
|
packages: write
|
||||||
# Needed for attestation
|
# Needed for attestation
|
||||||
id-token: write
|
id-token: write
|
||||||
@ -82,7 +82,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v3.3.0
|
uses: docker/setup-qemu-action@v3.4.0
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
- name: prepare variables
|
- name: prepare variables
|
||||||
|
|||||||
65
.github/workflows/release-publish.yml
vendored
65
.github/workflows/release-publish.yml
vendored
@ -7,64 +7,23 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-server:
|
build-server:
|
||||||
runs-on: ubuntu-latest
|
uses: ./.github/workflows/_reusable-docker-build.yaml
|
||||||
|
secrets: inherit
|
||||||
permissions:
|
permissions:
|
||||||
# Needed to upload contianer images to ghcr.io
|
# Needed to upload container images to ghcr.io
|
||||||
packages: write
|
packages: write
|
||||||
# Needed for attestation
|
# Needed for attestation
|
||||||
id-token: write
|
id-token: write
|
||||||
attestations: write
|
attestations: write
|
||||||
steps:
|
with:
|
||||||
- uses: actions/checkout@v4
|
image_name: ghcr.io/goauthentik/server,beryju/authentik
|
||||||
- name: Set up QEMU
|
release: true
|
||||||
uses: docker/setup-qemu-action@v3.3.0
|
registry_dockerhub: true
|
||||||
- name: Set up Docker Buildx
|
registry_ghcr: true
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
- name: prepare variables
|
|
||||||
uses: ./.github/actions/docker-push-variables
|
|
||||||
id: ev
|
|
||||||
env:
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
|
||||||
with:
|
|
||||||
image-name: ghcr.io/goauthentik/server,beryju/authentik
|
|
||||||
- name: Docker Login Registry
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
- name: Login to GitHub Container Registry
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.repository_owner }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
- name: make empty clients
|
|
||||||
run: |
|
|
||||||
mkdir -p ./gen-ts-api
|
|
||||||
mkdir -p ./gen-go-api
|
|
||||||
- name: Build Docker Image
|
|
||||||
uses: docker/build-push-action@v6
|
|
||||||
id: push
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
push: true
|
|
||||||
secrets: |
|
|
||||||
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
|
|
||||||
GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
|
|
||||||
build-args: |
|
|
||||||
VERSION=${{ github.ref }}
|
|
||||||
tags: ${{ steps.ev.outputs.imageTags }}
|
|
||||||
platforms: linux/amd64,linux/arm64
|
|
||||||
- uses: actions/attest-build-provenance@v2
|
|
||||||
id: attest
|
|
||||||
with:
|
|
||||||
subject-name: ${{ steps.ev.outputs.attestImageNames }}
|
|
||||||
subject-digest: ${{ steps.push.outputs.digest }}
|
|
||||||
push-to-registry: true
|
|
||||||
build-outpost:
|
build-outpost:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
# Needed to upload contianer images to ghcr.io
|
# Needed to upload container images to ghcr.io
|
||||||
packages: write
|
packages: write
|
||||||
# Needed for attestation
|
# Needed for attestation
|
||||||
id-token: write
|
id-token: write
|
||||||
@ -83,7 +42,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
go-version-file: "go.mod"
|
go-version-file: "go.mod"
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v3.3.0
|
uses: docker/setup-qemu-action@v3.4.0
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
- name: prepare variables
|
- name: prepare variables
|
||||||
@ -188,8 +147,8 @@ jobs:
|
|||||||
aws-region: ${{ env.AWS_REGION }}
|
aws-region: ${{ env.AWS_REGION }}
|
||||||
- name: Upload template
|
- name: Upload template
|
||||||
run: |
|
run: |
|
||||||
aws s3 cp --acl=public-read website/docs/install-config/install/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
|
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.${{ github.ref }}.yaml
|
||||||
aws s3 cp --acl=public-read website/docs/install-config/install/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
|
aws s3 cp --acl=public-read lifecycle/aws/template.yaml s3://authentik-cloudformation-templates/authentik.ecs.latest.yaml
|
||||||
test-release:
|
test-release:
|
||||||
needs:
|
needs:
|
||||||
- build-server
|
- build-server
|
||||||
|
|||||||
11
.github/workflows/release-tag.yml
vendored
11
.github/workflows/release-tag.yml
vendored
@ -14,16 +14,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- name: Pre-release test
|
- name: Pre-release test
|
||||||
run: |
|
run: |
|
||||||
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
|
make test-docker
|
||||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
|
|
||||||
docker buildx install
|
|
||||||
mkdir -p ./gen-ts-api
|
|
||||||
docker build -t testing:latest .
|
|
||||||
echo "AUTHENTIK_IMAGE=testing" >> .env
|
|
||||||
echo "AUTHENTIK_TAG=latest" >> .env
|
|
||||||
docker compose up --no-start
|
|
||||||
docker compose start postgresql redis
|
|
||||||
docker compose run -u root server test-all
|
|
||||||
- id: generate_token
|
- id: generate_token
|
||||||
uses: tibdex/github-app-token@v2
|
uses: tibdex/github-app-token@v2
|
||||||
with:
|
with:
|
||||||
|
|||||||
6
.github/workflows/repo-stale.yml
vendored
6
.github/workflows/repo-stale.yml
vendored
@ -1,8 +1,8 @@
|
|||||||
name: 'authentik-repo-stale'
|
name: "authentik-repo-stale"
|
||||||
|
|
||||||
on:
|
on:
|
||||||
schedule:
|
schedule:
|
||||||
- cron: '30 1 * * *'
|
- cron: "30 1 * * *"
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
@ -25,7 +25,7 @@ jobs:
|
|||||||
days-before-stale: 60
|
days-before-stale: 60
|
||||||
days-before-close: 7
|
days-before-close: 7
|
||||||
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
|
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question,status/reviewing
|
||||||
stale-issue-label: wontfix
|
stale-issue-label: status/stale
|
||||||
stale-issue-message: >
|
stale-issue-message: >
|
||||||
This issue has been automatically marked as stale because it has not had
|
This issue has been automatically marked as stale because it has not had
|
||||||
recent activity. It will be closed if no further activity occurs. Thank you
|
recent activity. It will be closed if no further activity occurs. Thank you
|
||||||
|
|||||||
3
.gitignore
vendored
3
.gitignore
vendored
@ -209,3 +209,6 @@ source_docs/
|
|||||||
|
|
||||||
### Golang ###
|
### Golang ###
|
||||||
/vendor/
|
/vendor/
|
||||||
|
|
||||||
|
### Docker ###
|
||||||
|
docker-compose.override.yml
|
||||||
|
|||||||
7
.vscode/extensions.json
vendored
7
.vscode/extensions.json
vendored
@ -2,6 +2,7 @@
|
|||||||
"recommendations": [
|
"recommendations": [
|
||||||
"bashmish.es6-string-css",
|
"bashmish.es6-string-css",
|
||||||
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
|
"bpruitt-goddard.mermaid-markdown-syntax-highlighting",
|
||||||
|
"charliermarsh.ruff",
|
||||||
"dbaeumer.vscode-eslint",
|
"dbaeumer.vscode-eslint",
|
||||||
"EditorConfig.EditorConfig",
|
"EditorConfig.EditorConfig",
|
||||||
"esbenp.prettier-vscode",
|
"esbenp.prettier-vscode",
|
||||||
@ -10,12 +11,12 @@
|
|||||||
"Gruntfuggly.todo-tree",
|
"Gruntfuggly.todo-tree",
|
||||||
"mechatroner.rainbow-csv",
|
"mechatroner.rainbow-csv",
|
||||||
"ms-python.black-formatter",
|
"ms-python.black-formatter",
|
||||||
"charliermarsh.ruff",
|
"ms-python.black-formatter",
|
||||||
|
"ms-python.debugpy",
|
||||||
"ms-python.python",
|
"ms-python.python",
|
||||||
"ms-python.vscode-pylance",
|
"ms-python.vscode-pylance",
|
||||||
"ms-python.black-formatter",
|
|
||||||
"redhat.vscode-yaml",
|
"redhat.vscode-yaml",
|
||||||
"Tobermory.es6-string-html",
|
"Tobermory.es6-string-html",
|
||||||
"unifiedjs.vscode-mdx"
|
"unifiedjs.vscode-mdx",
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
66
.vscode/launch.json
vendored
66
.vscode/launch.json
vendored
@ -2,26 +2,76 @@
|
|||||||
"version": "0.2.0",
|
"version": "0.2.0",
|
||||||
"configurations": [
|
"configurations": [
|
||||||
{
|
{
|
||||||
"name": "Python: PDB attach Server",
|
"name": "Debug: Attach Server Core",
|
||||||
"type": "python",
|
"type": "debugpy",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
"connect": {
|
"connect": {
|
||||||
"host": "localhost",
|
"host": "localhost",
|
||||||
"port": 6800
|
"port": 9901
|
||||||
},
|
},
|
||||||
"justMyCode": true,
|
"pathMappings": [
|
||||||
|
{
|
||||||
|
"localRoot": "${workspaceFolder}",
|
||||||
|
"remoteRoot": "."
|
||||||
|
}
|
||||||
|
],
|
||||||
"django": true
|
"django": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "Python: PDB attach Worker",
|
"name": "Debug: Attach Worker",
|
||||||
"type": "python",
|
"type": "debugpy",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
"connect": {
|
"connect": {
|
||||||
"host": "localhost",
|
"host": "localhost",
|
||||||
"port": 6900
|
"port": 9901
|
||||||
},
|
},
|
||||||
"justMyCode": true,
|
"pathMappings": [
|
||||||
|
{
|
||||||
|
"localRoot": "${workspaceFolder}",
|
||||||
|
"remoteRoot": "."
|
||||||
|
}
|
||||||
|
],
|
||||||
"django": true
|
"django": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug: Start Server Router",
|
||||||
|
"type": "go",
|
||||||
|
"request": "launch",
|
||||||
|
"mode": "auto",
|
||||||
|
"program": "${workspaceFolder}/cmd/server",
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug: Start LDAP Outpost",
|
||||||
|
"type": "go",
|
||||||
|
"request": "launch",
|
||||||
|
"mode": "auto",
|
||||||
|
"program": "${workspaceFolder}/cmd/ldap",
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug: Start Proxy Outpost",
|
||||||
|
"type": "go",
|
||||||
|
"request": "launch",
|
||||||
|
"mode": "auto",
|
||||||
|
"program": "${workspaceFolder}/cmd/proxy",
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug: Start RAC Outpost",
|
||||||
|
"type": "go",
|
||||||
|
"request": "launch",
|
||||||
|
"mode": "auto",
|
||||||
|
"program": "${workspaceFolder}/cmd/rac",
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug: Start Radius Outpost",
|
||||||
|
"type": "go",
|
||||||
|
"request": "launch",
|
||||||
|
"mode": "auto",
|
||||||
|
"program": "${workspaceFolder}/cmd/radius",
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@ -15,6 +15,7 @@ go.mod @goauthentik/backend
|
|||||||
go.sum @goauthentik/backend
|
go.sum @goauthentik/backend
|
||||||
# Infrastructure
|
# Infrastructure
|
||||||
.github/ @goauthentik/infrastructure
|
.github/ @goauthentik/infrastructure
|
||||||
|
lifecycle/aws/ @goauthentik/infrastructure
|
||||||
Dockerfile @goauthentik/infrastructure
|
Dockerfile @goauthentik/infrastructure
|
||||||
*Dockerfile @goauthentik/infrastructure
|
*Dockerfile @goauthentik/infrastructure
|
||||||
.dockerignore @goauthentik/infrastructure
|
.dockerignore @goauthentik/infrastructure
|
||||||
|
|||||||
34
Dockerfile
34
Dockerfile
@ -94,7 +94,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
|
|||||||
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
|
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
|
||||||
|
|
||||||
# Stage 5: Python dependencies
|
# Stage 5: Python dependencies
|
||||||
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS python-deps
|
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS python-deps
|
||||||
|
|
||||||
ARG TARGETARCH
|
ARG TARGETARCH
|
||||||
ARG TARGETVARIANT
|
ARG TARGETVARIANT
|
||||||
@ -116,15 +116,30 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
|
|||||||
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
|
--mount=type=bind,target=./poetry.lock,src=./poetry.lock \
|
||||||
--mount=type=cache,target=/root/.cache/pip \
|
--mount=type=cache,target=/root/.cache/pip \
|
||||||
--mount=type=cache,target=/root/.cache/pypoetry \
|
--mount=type=cache,target=/root/.cache/pypoetry \
|
||||||
|
pip install --no-cache cffi && \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
build-essential libffi-dev \
|
||||||
|
# Required for cryptography
|
||||||
|
curl pkg-config \
|
||||||
|
# Required for lxml
|
||||||
|
libxslt-dev zlib1g-dev \
|
||||||
|
# Required for xmlsec
|
||||||
|
libltdl-dev \
|
||||||
|
# Required for kadmin
|
||||||
|
sccache clang && \
|
||||||
|
curl https://sh.rustup.rs -sSf | sh -s -- -y && \
|
||||||
|
. "$HOME/.cargo/env" && \
|
||||||
python -m venv /ak-root/venv/ && \
|
python -m venv /ak-root/venv/ && \
|
||||||
bash -c "source ${VENV_PATH}/bin/activate && \
|
bash -c "source ${VENV_PATH}/bin/activate && \
|
||||||
pip3 install --upgrade pip && \
|
pip3 install --upgrade pip poetry && \
|
||||||
pip3 install poetry && \
|
poetry config --local installer.no-binary cryptography,xmlsec,lxml,python-kadmin-rs && \
|
||||||
poetry install --only=main --no-ansi --no-interaction --no-root && \
|
poetry install --only=main --no-ansi --no-interaction --no-root && \
|
||||||
pip install --force-reinstall /wheels/*"
|
pip uninstall cryptography -y && \
|
||||||
|
poetry install --only=main --no-ansi --no-interaction --no-root"
|
||||||
|
|
||||||
# Stage 6: Run
|
# Stage 6: Run
|
||||||
FROM ghcr.io/goauthentik/fips-python:3.12.7-slim-bookworm-fips-full AS final-image
|
FROM ghcr.io/goauthentik/fips-python:3.12.8-slim-bookworm-fips AS final-image
|
||||||
|
|
||||||
ARG VERSION
|
ARG VERSION
|
||||||
ARG GIT_BUILD_HASH
|
ARG GIT_BUILD_HASH
|
||||||
@ -140,10 +155,12 @@ WORKDIR /
|
|||||||
|
|
||||||
# We cannot cache this layer otherwise we'll end up with a bigger image
|
# We cannot cache this layer otherwise we'll end up with a bigger image
|
||||||
RUN apt-get update && \
|
RUN apt-get update && \
|
||||||
|
apt-get upgrade -y && \
|
||||||
# Required for runtime
|
# Required for runtime
|
||||||
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 && \
|
apt-get install -y --no-install-recommends libpq5 libmaxminddb0 ca-certificates libkrb5-3 libkadm5clnt-mit12 libkdb5-10 libltdl7 libxslt1.1 && \
|
||||||
# Required for bootstrap & healtcheck
|
# Required for bootstrap & healtcheck
|
||||||
apt-get install -y --no-install-recommends runit && \
|
apt-get install -y --no-install-recommends runit && \
|
||||||
|
pip3 install --no-cache-dir --upgrade pip && \
|
||||||
apt-get clean && \
|
apt-get clean && \
|
||||||
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
|
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
|
||||||
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
|
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
|
||||||
@ -176,9 +193,8 @@ ENV TMPDIR=/dev/shm/ \
|
|||||||
PYTHONUNBUFFERED=1 \
|
PYTHONUNBUFFERED=1 \
|
||||||
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
|
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
|
||||||
VENV_PATH="/ak-root/venv" \
|
VENV_PATH="/ak-root/venv" \
|
||||||
POETRY_VIRTUALENVS_CREATE=false
|
POETRY_VIRTUALENVS_CREATE=false \
|
||||||
|
GOFIPS=1
|
||||||
ENV GOFIPS=1
|
|
||||||
|
|
||||||
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
|
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
|
||||||
|
|
||||||
|
|||||||
40
Makefile
40
Makefile
@ -5,7 +5,9 @@ PWD = $(shell pwd)
|
|||||||
UID = $(shell id -u)
|
UID = $(shell id -u)
|
||||||
GID = $(shell id -g)
|
GID = $(shell id -g)
|
||||||
NPM_VERSION = $(shell python -m scripts.npm_version)
|
NPM_VERSION = $(shell python -m scripts.npm_version)
|
||||||
PY_SOURCES = authentik tests scripts lifecycle .github website/docs/install-config/install/aws
|
PY_SOURCES = authentik tests scripts lifecycle .github
|
||||||
|
GO_SOURCES = cmd internal
|
||||||
|
WEB_SOURCES = web/src web/packages
|
||||||
DOCKER_IMAGE ?= "authentik:test"
|
DOCKER_IMAGE ?= "authentik:test"
|
||||||
|
|
||||||
GEN_API_TS = "gen-ts-api"
|
GEN_API_TS = "gen-ts-api"
|
||||||
@ -19,11 +21,12 @@ pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
|
|||||||
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
|
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
|
||||||
-I .github/codespell-words.txt \
|
-I .github/codespell-words.txt \
|
||||||
-S 'web/src/locales/**' \
|
-S 'web/src/locales/**' \
|
||||||
-S 'website/docs/developer-docs/api/reference/**' \
|
-S 'website/developer-docs/api/reference/**' \
|
||||||
authentik \
|
-S '**/node_modules/**' \
|
||||||
internal \
|
-S '**/dist/**' \
|
||||||
cmd \
|
$(PY_SOURCES) \
|
||||||
web/src \
|
$(GO_SOURCES) \
|
||||||
|
$(WEB_SOURCES) \
|
||||||
website/src \
|
website/src \
|
||||||
website/blog \
|
website/blog \
|
||||||
website/docs \
|
website/docs \
|
||||||
@ -45,15 +48,6 @@ help: ## Show this help
|
|||||||
go-test:
|
go-test:
|
||||||
go test -timeout 0 -v -race -cover ./...
|
go test -timeout 0 -v -race -cover ./...
|
||||||
|
|
||||||
test-docker: ## Run all tests in a docker-compose
|
|
||||||
echo "PG_PASS=$(shell openssl rand 32 | base64 -w 0)" >> .env
|
|
||||||
echo "AUTHENTIK_SECRET_KEY=$(shell openssl rand 32 | base64 -w 0)" >> .env
|
|
||||||
docker compose pull -q
|
|
||||||
docker compose up --no-start
|
|
||||||
docker compose start postgresql redis
|
|
||||||
docker compose run -u root server test-all
|
|
||||||
rm -f .env
|
|
||||||
|
|
||||||
test: ## Run the server tests and produce a coverage report (locally)
|
test: ## Run the server tests and produce a coverage report (locally)
|
||||||
coverage run manage.py test --keepdb authentik
|
coverage run manage.py test --keepdb authentik
|
||||||
coverage html
|
coverage html
|
||||||
@ -78,6 +72,9 @@ migrate: ## Run the Authentik Django server's migrations
|
|||||||
|
|
||||||
i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
|
i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
|
||||||
|
|
||||||
|
aws-cfn:
|
||||||
|
cd lifecycle/aws && npm run aws-cfn
|
||||||
|
|
||||||
core-i18n-extract:
|
core-i18n-extract:
|
||||||
ak makemessages \
|
ak makemessages \
|
||||||
--add-location file \
|
--add-location file \
|
||||||
@ -149,7 +146,7 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
|
|||||||
docker run \
|
docker run \
|
||||||
--rm -v ${PWD}:/local \
|
--rm -v ${PWD}:/local \
|
||||||
--user ${UID}:${GID} \
|
--user ${UID}:${GID} \
|
||||||
docker.io/openapitools/openapi-generator-cli:v6.5.0 generate \
|
docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
|
||||||
-i /local/schema.yml \
|
-i /local/schema.yml \
|
||||||
-g typescript-fetch \
|
-g typescript-fetch \
|
||||||
-o /local/${GEN_API_TS} \
|
-o /local/${GEN_API_TS} \
|
||||||
@ -252,9 +249,6 @@ website-build:
|
|||||||
website-watch: ## Build and watch the documentation website, updating automatically
|
website-watch: ## Build and watch the documentation website, updating automatically
|
||||||
cd website && npm run watch
|
cd website && npm run watch
|
||||||
|
|
||||||
aws-cfn:
|
|
||||||
cd website && npm run aws-cfn
|
|
||||||
|
|
||||||
#########################
|
#########################
|
||||||
## Docker
|
## Docker
|
||||||
#########################
|
#########################
|
||||||
@ -263,6 +257,9 @@ docker: ## Build a docker image of the current source tree
|
|||||||
mkdir -p ${GEN_API_TS}
|
mkdir -p ${GEN_API_TS}
|
||||||
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
|
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
|
||||||
|
|
||||||
|
test-docker:
|
||||||
|
BUILD=true ./scripts/test_docker.sh
|
||||||
|
|
||||||
#########################
|
#########################
|
||||||
## CI
|
## CI
|
||||||
#########################
|
#########################
|
||||||
@ -287,3 +284,8 @@ ci-bandit: ci--meta-debug
|
|||||||
|
|
||||||
ci-pending-migrations: ci--meta-debug
|
ci-pending-migrations: ci--meta-debug
|
||||||
ak makemigrations --check
|
ak makemigrations --check
|
||||||
|
|
||||||
|
ci-test: ci--meta-debug
|
||||||
|
coverage run manage.py test --keepdb --randomly-seed ${CI_TEST_SEED} authentik
|
||||||
|
coverage report
|
||||||
|
coverage xml
|
||||||
|
|||||||
@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
from os import environ
|
from os import environ
|
||||||
|
|
||||||
__version__ = "2024.12.2"
|
__version__ = "2024.12.3"
|
||||||
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
|
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -51,6 +51,7 @@ from authentik.enterprise.providers.microsoft_entra.models import (
|
|||||||
MicrosoftEntraProviderUser,
|
MicrosoftEntraProviderUser,
|
||||||
)
|
)
|
||||||
from authentik.enterprise.providers.rac.models import ConnectionToken
|
from authentik.enterprise.providers.rac.models import ConnectionToken
|
||||||
|
from authentik.enterprise.providers.ssf.models import StreamEvent
|
||||||
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
|
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
|
||||||
EndpointDevice,
|
EndpointDevice,
|
||||||
EndpointDeviceConnection,
|
EndpointDeviceConnection,
|
||||||
@ -131,6 +132,7 @@ def excluded_models() -> list[type[Model]]:
|
|||||||
EndpointDevice,
|
EndpointDevice,
|
||||||
EndpointDeviceConnection,
|
EndpointDeviceConnection,
|
||||||
DeviceToken,
|
DeviceToken,
|
||||||
|
StreamEvent,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -3,6 +3,7 @@
|
|||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from drf_spectacular.types import OpenApiTypes
|
from drf_spectacular.types import OpenApiTypes
|
||||||
from drf_spectacular.utils import OpenApiParameter, extend_schema
|
from drf_spectacular.utils import OpenApiParameter, extend_schema
|
||||||
|
from guardian.shortcuts import get_objects_for_user
|
||||||
from rest_framework.fields import (
|
from rest_framework.fields import (
|
||||||
BooleanField,
|
BooleanField,
|
||||||
CharField,
|
CharField,
|
||||||
@ -16,7 +17,6 @@ from rest_framework.viewsets import ViewSet
|
|||||||
|
|
||||||
from authentik.core.api.utils import MetaNameSerializer
|
from authentik.core.api.utils import MetaNameSerializer
|
||||||
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
|
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
|
||||||
from authentik.rbac.decorators import permission_required
|
|
||||||
from authentik.stages.authenticator import device_classes, devices_for_user
|
from authentik.stages.authenticator import device_classes, devices_for_user
|
||||||
from authentik.stages.authenticator.models import Device
|
from authentik.stages.authenticator.models import Device
|
||||||
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
|
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
|
||||||
@ -73,7 +73,9 @@ class AdminDeviceViewSet(ViewSet):
|
|||||||
def get_devices(self, **kwargs):
|
def get_devices(self, **kwargs):
|
||||||
"""Get all devices in all child classes"""
|
"""Get all devices in all child classes"""
|
||||||
for model in device_classes():
|
for model in device_classes():
|
||||||
device_set = model.objects.filter(**kwargs)
|
device_set = get_objects_for_user(
|
||||||
|
self.request.user, f"{model._meta.app_label}.view_{model._meta.model_name}", model
|
||||||
|
).filter(**kwargs)
|
||||||
yield from device_set
|
yield from device_set
|
||||||
|
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
@ -86,10 +88,6 @@ class AdminDeviceViewSet(ViewSet):
|
|||||||
],
|
],
|
||||||
responses={200: DeviceSerializer(many=True)},
|
responses={200: DeviceSerializer(many=True)},
|
||||||
)
|
)
|
||||||
@permission_required(
|
|
||||||
None,
|
|
||||||
[f"{model._meta.app_label}.view_{model._meta.model_name}" for model in device_classes()],
|
|
||||||
)
|
|
||||||
def list(self, request: Request) -> Response:
|
def list(self, request: Request) -> Response:
|
||||||
"""Get all devices for current user"""
|
"""Get all devices for current user"""
|
||||||
kwargs = {}
|
kwargs = {}
|
||||||
|
|||||||
@ -85,7 +85,7 @@ class SourceViewSet(
|
|||||||
serializer_class = SourceSerializer
|
serializer_class = SourceSerializer
|
||||||
lookup_field = "slug"
|
lookup_field = "slug"
|
||||||
search_fields = ["slug", "name"]
|
search_fields = ["slug", "name"]
|
||||||
filterset_fields = ["slug", "name", "managed"]
|
filterset_fields = ["slug", "name", "managed", "pbm_uuid"]
|
||||||
|
|
||||||
def get_queryset(self): # pragma: no cover
|
def get_queryset(self): # pragma: no cover
|
||||||
return Source.objects.select_subclasses()
|
return Source.objects.select_subclasses()
|
||||||
|
|||||||
@ -236,9 +236,11 @@ class UserSerializer(ModelSerializer):
|
|||||||
"path",
|
"path",
|
||||||
"type",
|
"type",
|
||||||
"uuid",
|
"uuid",
|
||||||
|
"password_change_date",
|
||||||
]
|
]
|
||||||
extra_kwargs = {
|
extra_kwargs = {
|
||||||
"name": {"allow_blank": True},
|
"name": {"allow_blank": True},
|
||||||
|
"password_change_date": {"read_only": True},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -427,7 +429,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
|||||||
queryset = User.objects.none()
|
queryset = User.objects.none()
|
||||||
ordering = ["username"]
|
ordering = ["username"]
|
||||||
serializer_class = UserSerializer
|
serializer_class = UserSerializer
|
||||||
search_fields = ["username", "name", "is_active", "email", "uuid"]
|
search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"]
|
||||||
filterset_class = UsersFilter
|
filterset_class = UsersFilter
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
|
|||||||
@ -5,6 +5,7 @@ from typing import TextIO
|
|||||||
from daphne.management.commands.runserver import Command as RunServer
|
from daphne.management.commands.runserver import Command as RunServer
|
||||||
from daphne.server import Server
|
from daphne.server import Server
|
||||||
|
|
||||||
|
from authentik.lib.debug import start_debug_server
|
||||||
from authentik.root.signals import post_startup, pre_startup, startup
|
from authentik.root.signals import post_startup, pre_startup, startup
|
||||||
|
|
||||||
|
|
||||||
@ -13,6 +14,7 @@ class SignalServer(Server):
|
|||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
start_debug_server()
|
||||||
|
|
||||||
def ready_callable():
|
def ready_callable():
|
||||||
pre_startup.send(sender=self)
|
pre_startup.send(sender=self)
|
||||||
|
|||||||
@ -9,6 +9,7 @@ from django.db import close_old_connections
|
|||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
from authentik.lib.config import CONFIG
|
from authentik.lib.config import CONFIG
|
||||||
|
from authentik.lib.debug import start_debug_server
|
||||||
from authentik.root.celery import CELERY_APP
|
from authentik.root.celery import CELERY_APP
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
@ -28,10 +29,7 @@ class Command(BaseCommand):
|
|||||||
def handle(self, **options):
|
def handle(self, **options):
|
||||||
LOGGER.debug("Celery options", **options)
|
LOGGER.debug("Celery options", **options)
|
||||||
close_old_connections()
|
close_old_connections()
|
||||||
if CONFIG.get_bool("remote_debug"):
|
start_debug_server()
|
||||||
import debugpy
|
|
||||||
|
|
||||||
debugpy.listen(("0.0.0.0", 6900)) # nosec
|
|
||||||
worker: Worker = CELERY_APP.Worker(
|
worker: Worker = CELERY_APP.Worker(
|
||||||
no_color=False,
|
no_color=False,
|
||||||
quiet=True,
|
quiet=True,
|
||||||
|
|||||||
@ -599,6 +599,14 @@ class Application(SerializerModel, PolicyBindingModel):
|
|||||||
return None
|
return None
|
||||||
return candidates[-1]
|
return candidates[-1]
|
||||||
|
|
||||||
|
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
|
||||||
|
"""Get Backchannel provider for a specific type"""
|
||||||
|
providers = self.backchannel_providers.filter(
|
||||||
|
**{f"{provider_type._meta.model_name}__isnull": False},
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
return getattr(providers.first(), provider_type._meta.model_name)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return str(self.name)
|
return str(self.name)
|
||||||
|
|
||||||
|
|||||||
0
authentik/enterprise/providers/ssf/__init__.py
Normal file
0
authentik/enterprise/providers/ssf/__init__.py
Normal file
0
authentik/enterprise/providers/ssf/api/__init__.py
Normal file
0
authentik/enterprise/providers/ssf/api/__init__.py
Normal file
64
authentik/enterprise/providers/ssf/api/providers.py
Normal file
64
authentik/enterprise/providers/ssf/api/providers.py
Normal file
@ -0,0 +1,64 @@
|
|||||||
|
"""SSF Provider API Views"""
|
||||||
|
|
||||||
|
from django.urls import reverse
|
||||||
|
from rest_framework.fields import SerializerMethodField
|
||||||
|
from rest_framework.request import Request
|
||||||
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.providers import ProviderSerializer
|
||||||
|
from authentik.core.api.tokens import TokenSerializer
|
||||||
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
|
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||||
|
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||||
|
|
||||||
|
|
||||||
|
class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
|
||||||
|
"""SSFProvider Serializer"""
|
||||||
|
|
||||||
|
ssf_url = SerializerMethodField()
|
||||||
|
token_obj = TokenSerializer(source="token", required=False, read_only=True)
|
||||||
|
|
||||||
|
def get_ssf_url(self, instance: SSFProvider) -> str | None:
|
||||||
|
request: Request = self._context.get("request")
|
||||||
|
if not request:
|
||||||
|
return None
|
||||||
|
if not instance.backchannel_application:
|
||||||
|
return None
|
||||||
|
return request.build_absolute_uri(
|
||||||
|
reverse(
|
||||||
|
"authentik_providers_ssf:configuration",
|
||||||
|
kwargs={
|
||||||
|
"application_slug": instance.backchannel_application.slug,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = SSFProvider
|
||||||
|
fields = [
|
||||||
|
"pk",
|
||||||
|
"name",
|
||||||
|
"component",
|
||||||
|
"verbose_name",
|
||||||
|
"verbose_name_plural",
|
||||||
|
"meta_model_name",
|
||||||
|
"signing_key",
|
||||||
|
"token_obj",
|
||||||
|
"oidc_auth_providers",
|
||||||
|
"ssf_url",
|
||||||
|
"event_retention",
|
||||||
|
]
|
||||||
|
extra_kwargs = {}
|
||||||
|
|
||||||
|
|
||||||
|
class SSFProviderViewSet(UsedByMixin, ModelViewSet):
|
||||||
|
"""SSFProvider Viewset"""
|
||||||
|
|
||||||
|
queryset = SSFProvider.objects.all()
|
||||||
|
serializer_class = SSFProviderSerializer
|
||||||
|
filterset_fields = {
|
||||||
|
"application": ["isnull"],
|
||||||
|
"name": ["iexact"],
|
||||||
|
}
|
||||||
|
search_fields = ["name"]
|
||||||
|
ordering = ["name"]
|
||||||
37
authentik/enterprise/providers/ssf/api/streams.py
Normal file
37
authentik/enterprise/providers/ssf/api/streams.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
"""SSF Stream API Views"""
|
||||||
|
|
||||||
|
from rest_framework.viewsets import ReadOnlyModelViewSet
|
||||||
|
|
||||||
|
from authentik.core.api.utils import ModelSerializer
|
||||||
|
from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer
|
||||||
|
from authentik.enterprise.providers.ssf.models import Stream
|
||||||
|
|
||||||
|
|
||||||
|
class SSFStreamSerializer(ModelSerializer):
|
||||||
|
"""SSFStream Serializer"""
|
||||||
|
|
||||||
|
provider_obj = SSFProviderSerializer(source="provider", read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Stream
|
||||||
|
fields = [
|
||||||
|
"pk",
|
||||||
|
"provider",
|
||||||
|
"provider_obj",
|
||||||
|
"delivery_method",
|
||||||
|
"endpoint_url",
|
||||||
|
"events_requested",
|
||||||
|
"format",
|
||||||
|
"aud",
|
||||||
|
"iss",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class SSFStreamViewSet(ReadOnlyModelViewSet):
|
||||||
|
"""SSFStream Viewset"""
|
||||||
|
|
||||||
|
queryset = Stream.objects.all()
|
||||||
|
serializer_class = SSFStreamSerializer
|
||||||
|
filterset_fields = ["provider", "endpoint_url", "delivery_method"]
|
||||||
|
search_fields = ["provider__name", "endpoint_url"]
|
||||||
|
ordering = ["provider", "uuid"]
|
||||||
13
authentik/enterprise/providers/ssf/apps.py
Normal file
13
authentik/enterprise/providers/ssf/apps.py
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
"""SSF app config"""
|
||||||
|
|
||||||
|
from authentik.enterprise.apps import EnterpriseConfig
|
||||||
|
|
||||||
|
|
||||||
|
class AuthentikEnterpriseProviderSSF(EnterpriseConfig):
|
||||||
|
"""authentik enterprise ssf app config"""
|
||||||
|
|
||||||
|
name = "authentik.enterprise.providers.ssf"
|
||||||
|
label = "authentik_providers_ssf"
|
||||||
|
verbose_name = "authentik Enterprise.Providers.SSF"
|
||||||
|
default = True
|
||||||
|
mountpoint = ""
|
||||||
201
authentik/enterprise/providers/ssf/migrations/0001_initial.py
Normal file
201
authentik/enterprise/providers/ssf/migrations/0001_initial.py
Normal file
@ -0,0 +1,201 @@
|
|||||||
|
# Generated by Django 5.0.11 on 2025-02-05 16:20
|
||||||
|
|
||||||
|
import authentik.lib.utils.time
|
||||||
|
import django.contrib.postgres.fields
|
||||||
|
import django.db.models.deletion
|
||||||
|
import uuid
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
    # Auto-generated initial migration (Django 5.0.11) for the SSF provider app.
    # Creates SSFProvider (subclass of authentik_core.Provider), Stream and
    # StreamEvent. Do not hand-edit field definitions here; they must match
    # the generated state.

    initial = True

    dependencies = [
        ("authentik_core", "0042_authenticatedsession_authentik_c_expires_08251d_idx_and_more"),
        ("authentik_crypto", "0004_alter_certificatekeypair_name"),
        ("authentik_providers_oauth2", "0027_accesstoken_authentik_p_expires_9f24a5_idx_and_more"),
    ]

    operations = [
        migrations.CreateModel(
            name="SSFProvider",
            fields=[
                (
                    "provider_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.provider",
                    ),
                ),
                (
                    "event_retention",
                    models.TextField(
                        default="days=30",
                        validators=[authentik.lib.utils.time.timedelta_string_validator],
                    ),
                ),
                (
                    "oidc_auth_providers",
                    models.ManyToManyField(
                        blank=True, default=None, to="authentik_providers_oauth2.oauth2provider"
                    ),
                ),
                (
                    "signing_key",
                    models.ForeignKey(
                        help_text="Key used to sign the SSF Events.",
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_crypto.certificatekeypair",
                        verbose_name="Signing Key",
                    ),
                ),
                (
                    "token",
                    models.ForeignKey(
                        default=None,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_core.token",
                    ),
                ),
            ],
            options={
                "verbose_name": "Shared Signals Framework Provider",
                "verbose_name_plural": "Shared Signals Framework Providers",
                "permissions": [("add_stream", "Add stream to SSF provider")],
            },
            bases=("authentik_core.provider",),
        ),
        migrations.CreateModel(
            name="Stream",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                (
                    "delivery_method",
                    models.TextField(
                        choices=[
                            (
                                "https://schemas.openid.net/secevent/risc/delivery-method/push",
                                "Risc Push",
                            ),
                            (
                                "https://schemas.openid.net/secevent/risc/delivery-method/poll",
                                "Risc Poll",
                            ),
                        ]
                    ),
                ),
                ("endpoint_url", models.TextField(null=True)),
                (
                    "events_requested",
                    django.contrib.postgres.fields.ArrayField(
                        base_field=models.TextField(
                            choices=[
                                (
                                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                                    "Caep Session Revoked",
                                ),
                                (
                                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                                    "Caep Credential Change",
                                ),
                                (
                                    "https://schemas.openid.net/secevent/ssf/event-type/verification",
                                    "Set Verification",
                                ),
                            ]
                        ),
                        default=list,
                        size=None,
                    ),
                ),
                ("format", models.TextField()),
                (
                    "aud",
                    django.contrib.postgres.fields.ArrayField(
                        base_field=models.TextField(), default=list, size=None
                    ),
                ),
                ("iss", models.TextField()),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_ssf.ssfprovider",
                    ),
                ),
            ],
            options={
                "verbose_name": "SSF Stream",
                "verbose_name_plural": "SSF Streams",
                "default_permissions": ["change", "delete", "view"],
            },
        ),
        migrations.CreateModel(
            name="StreamEvent",
            fields=[
                ("created", models.DateTimeField(auto_now_add=True)),
                ("last_updated", models.DateTimeField(auto_now=True)),
                ("expires", models.DateTimeField(default=None, null=True)),
                ("expiring", models.BooleanField(default=True)),
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False
                    ),
                ),
                (
                    "status",
                    models.TextField(
                        choices=[
                            ("pending_new", "Pending New"),
                            ("pending_failed", "Pending Failed"),
                            ("sent", "Sent"),
                        ]
                    ),
                ),
                (
                    "type",
                    models.TextField(
                        choices=[
                            (
                                "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                                "Caep Session Revoked",
                            ),
                            (
                                "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                                "Caep Credential Change",
                            ),
                            (
                                "https://schemas.openid.net/secevent/ssf/event-type/verification",
                                "Set Verification",
                            ),
                        ]
                    ),
                ),
                ("payload", models.JSONField(default=dict)),
                (
                    "stream",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_providers_ssf.stream",
                    ),
                ),
            ],
            options={
                "verbose_name": "SSF Stream Event",
                "verbose_name_plural": "SSF Stream Events",
                "ordering": ("-created",),
            },
        ),
    ]
|
||||||
178
authentik/enterprise/providers/ssf/models.py
Normal file
178
authentik/enterprise/providers/ssf/models.py
Normal file
@ -0,0 +1,178 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from functools import cached_property
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
|
||||||
|
from django.contrib.postgres.fields import ArrayField
|
||||||
|
from django.db import models
|
||||||
|
from django.templatetags.static import static
|
||||||
|
from django.utils.timezone import now
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from jwt import encode
|
||||||
|
|
||||||
|
from authentik.core.models import BackchannelProvider, ExpiringModel, Token
|
||||||
|
from authentik.crypto.models import CertificateKeyPair
|
||||||
|
from authentik.lib.models import CreatedUpdatedModel
|
||||||
|
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
|
||||||
|
from authentik.providers.oauth2.models import JWTAlgorithms, OAuth2Provider
|
||||||
|
|
||||||
|
|
||||||
|
class EventTypes(models.TextChoices):
    """SSF Event types supported by authentik"""

    # Values are the spec-defined event type URIs (CAEP / SSF)
    CAEP_SESSION_REVOKED = "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
    CAEP_CREDENTIAL_CHANGE = "https://schemas.openid.net/secevent/caep/event-type/credential-change"
    SET_VERIFICATION = "https://schemas.openid.net/secevent/ssf/event-type/verification"
|
||||||
|
|
||||||
|
|
||||||
|
class DeliveryMethods(models.TextChoices):
    """SSF Delivery methods"""

    # Push: authentik POSTs events to the receiver; Poll: receiver fetches them
    RISC_PUSH = "https://schemas.openid.net/secevent/risc/delivery-method/push"
    RISC_POLL = "https://schemas.openid.net/secevent/risc/delivery-method/poll"
|
||||||
|
|
||||||
|
|
||||||
|
class SSFEventStatus(models.TextChoices):
    """SSF Event status"""

    # pending_new: created, not yet delivered
    PENDING_NEW = "pending_new"
    # pending_failed: delivery attempted and failed; will be retried
    PENDING_FAILED = "pending_failed"
    # sent: delivered successfully
    SENT = "sent"
|
||||||
|
|
||||||
|
|
||||||
|
class SSFProvider(BackchannelProvider):
    """Shared Signals Framework provider to allow applications to
    receive user events from authentik."""

    # Certificate/key pair used to sign outgoing Security Event Tokens
    signing_key = models.ForeignKey(
        CertificateKeyPair,
        verbose_name=_("Signing Key"),
        on_delete=models.CASCADE,
        help_text=_("Key used to sign the SSF Events."),
    )

    oidc_auth_providers = models.ManyToManyField(OAuth2Provider, blank=True, default=None)

    # API token of the provider's service account; set by the post_save signal
    token = models.ForeignKey(Token, on_delete=models.CASCADE, null=True, default=None)

    # Timedelta string (e.g. "days=30") controlling how long events are kept
    event_retention = models.TextField(
        default="days=30",
        validators=[timedelta_string_validator],
    )

    @cached_property
    def jwt_key(self) -> tuple[PrivateKeyTypes, str]:
        """Get either the configured certificate or the client secret"""
        key: CertificateKeyPair = self.signing_key
        private_key = key.private_key
        if isinstance(private_key, RSAPrivateKey):
            return private_key, JWTAlgorithms.RS256
        if isinstance(private_key, EllipticCurvePrivateKey):
            return private_key, JWTAlgorithms.ES256
        # Only RSA and EC keys are supported for signing SSF events
        raise ValueError(f"Invalid private key type: {type(private_key)}")

    @property
    def service_account_identifier(self) -> str:
        # Username/identifier of the per-provider service account
        return f"ak-providers-ssf-{self.pk}"

    @property
    def serializer(self):
        # Imported lazily to avoid a circular import with the API module
        from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer

        return SSFProviderSerializer

    @property
    def icon_url(self) -> str | None:
        return static("authentik/sources/ssf.svg")

    @property
    def component(self) -> str:
        # Web component used by the admin UI to render the provider form
        return "ak-provider-ssf-form"

    class Meta:
        verbose_name = _("Shared Signals Framework Provider")
        verbose_name_plural = _("Shared Signals Framework Providers")
        permissions = [
            # This overrides the default "add_stream" permission of the Stream object,
            # as the user requesting to add a stream must have the permission on the provider
            ("add_stream", _("Add stream to SSF provider")),
        ]
|
||||||
|
|
||||||
|
|
||||||
|
class Stream(models.Model):
    """SSF Stream"""

    uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)
    provider = models.ForeignKey(SSFProvider, on_delete=models.CASCADE)

    delivery_method = models.TextField(choices=DeliveryMethods.choices)
    # Only used for push delivery; nullable for poll streams — TODO confirm
    endpoint_url = models.TextField(null=True)

    # Event type URIs the receiver subscribed to
    events_requested = ArrayField(models.TextField(choices=EventTypes.choices), default=list)
    format = models.TextField()
    # JWT audience(s) used in event payloads
    aud = ArrayField(models.TextField(), default=list)

    iss = models.TextField()

    class Meta:
        verbose_name = _("SSF Stream")
        verbose_name_plural = _("SSF Streams")
        # "add" is deliberately excluded; stream creation is gated by the
        # provider-level "add_stream" permission instead
        default_permissions = ["change", "delete", "view"]

    def __str__(self) -> str:
        return "SSF Stream"

    def prepare_event_payload(self, type: EventTypes, event_data: dict, **kwargs) -> dict:
        """Build the kwargs for a StreamEvent for this stream, including the
        SET (Security Event Token) claims under "payload".

        Extra keyword arguments are merged into the SET payload.
        """
        jti = uuid4()
        _now = now()
        return {
            # jti doubles as the StreamEvent primary key
            "uuid": jti,
            "stream_id": str(self.pk),
            "type": type,
            "expiring": True,
            "status": SSFEventStatus.PENDING_NEW,
            # Expiry derived from the provider's configured retention
            "expires": _now + timedelta_from_string(self.provider.event_retention),
            "payload": {
                "jti": jti.hex,
                "aud": self.aud,
                "iat": int(datetime.now().timestamp()),
                "iss": self.iss,
                "events": {type: event_data},
                **kwargs,
            },
        }

    def encode(self, data: dict) -> str:
        """Sign `data` as a JWT with the provider's signing key; the key id
        is placed in the JOSE header when a signing key is configured."""
        headers = {}
        if self.provider.signing_key:
            headers["kid"] = self.provider.signing_key.kid
        key, alg = self.provider.jwt_key
        return encode(data, key, algorithm=alg, headers=headers)
|
||||||
|
|
||||||
|
|
||||||
|
class StreamEvent(CreatedUpdatedModel, ExpiringModel):
    """Single stream event to be sent"""

    uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)

    stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
    status = models.TextField(choices=SSFEventStatus.choices)

    type = models.TextField(choices=EventTypes.choices)
    # Full SET claims dict as produced by Stream.prepare_event_payload
    payload = models.JSONField(default=dict)

    def expire_action(self, *args, **kwargs):
        """Only allow automatic cleanup of successfully sent event"""
        # Unsent/failed events are kept so they can still be delivered
        if self.status != SSFEventStatus.SENT:
            return
        return super().expire_action(*args, **kwargs)

    def __str__(self):
        return f"Stream event {self.type}"

    class Meta:
        verbose_name = _("SSF Stream Event")
        verbose_name_plural = _("SSF Stream Events")
        ordering = ("-created",)
|
||||||
193
authentik/enterprise/providers/ssf/signals.py
Normal file
193
authentik/enterprise/providers/ssf/signals.py
Normal file
@ -0,0 +1,193 @@
|
|||||||
|
from hashlib import sha256
|
||||||
|
|
||||||
|
from django.contrib.auth.signals import user_logged_out
|
||||||
|
from django.db.models import Model
|
||||||
|
from django.db.models.signals import post_delete, post_save, pre_delete
|
||||||
|
from django.dispatch import receiver
|
||||||
|
from django.http.request import HttpRequest
|
||||||
|
from guardian.shortcuts import assign_perm
|
||||||
|
|
||||||
|
from authentik.core.models import (
|
||||||
|
USER_PATH_SYSTEM_PREFIX,
|
||||||
|
AuthenticatedSession,
|
||||||
|
Token,
|
||||||
|
TokenIntents,
|
||||||
|
User,
|
||||||
|
UserTypes,
|
||||||
|
)
|
||||||
|
from authentik.core.signals import password_changed
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
EventTypes,
|
||||||
|
SSFProvider,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
|
||||||
|
from authentik.events.middleware import audit_ignore
|
||||||
|
from authentik.stages.authenticator.models import Device
|
||||||
|
from authentik.stages.authenticator_duo.models import DuoDevice
|
||||||
|
from authentik.stages.authenticator_static.models import StaticDevice
|
||||||
|
from authentik.stages.authenticator_totp.models import TOTPDevice
|
||||||
|
from authentik.stages.authenticator_webauthn.models import (
|
||||||
|
UNKNOWN_DEVICE_TYPE_AAGUID,
|
||||||
|
WebAuthnDevice,
|
||||||
|
)
|
||||||
|
|
||||||
|
USER_PATH_PROVIDERS_SSF = USER_PATH_SYSTEM_PREFIX + "/providers/ssf"
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(post_save, sender=SSFProvider)
def ssf_providers_post_save(sender: type[Model], instance: SSFProvider, created: bool, **_):
    """Create service account before provider is saved"""
    identifier = instance.service_account_identifier
    # Idempotent: update_or_create keeps the service account in sync on re-save
    user, _ = User.objects.update_or_create(
        username=identifier,
        defaults={
            "name": f"SSF Provider {instance.name} Service-Account",
            "type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
            "path": USER_PATH_PROVIDERS_SSF,
        },
    )
    # Object-level permission so the service account may create streams on
    # this specific provider
    assign_perm("add_stream", user, instance)
    token, token_created = Token.objects.update_or_create(
        identifier=identifier,
        defaults={
            "user": user,
            "intent": TokenIntents.INTENT_API,
            "expiring": False,
            "managed": f"goauthentik.io/providers/ssf/{instance.pk}",
        },
    )
    if created or token_created:
        # audit_ignore: avoid emitting an audit event for this internal re-save;
        # note the save() re-triggers this handler with created=False
        with audit_ignore():
            instance.token = token
            instance.save()
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(user_logged_out)
def ssf_user_logged_out_session_revoked(sender, request: HttpRequest, user: User, **_):
    """Session revoked trigger (user logged out)"""
    # Anonymous/sessionless logouts carry no revocable session
    if not request.session or not request.session.session_key or not user:
        return
    send_ssf_event(
        EventTypes.CAEP_SESSION_REVOKED,
        {
            "initiating_entity": "user",
        },
        sub_id={
            "format": "complex",
            "session": {
                "format": "opaque",
                # Hash the session key so the raw session identifier is never
                # exposed to the event receiver
                "id": sha256(request.session.session_key.encode("ascii")).hexdigest(),
            },
            "user": {
                "format": "email",
                "email": user.email,
            },
        },
        request=request,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(pre_delete, sender=AuthenticatedSession)
def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSession, **_):
    """Session revoked trigger (users' session has been deleted)

    As this signal is also triggered with a regular logout, we can't be sure
    if the session has been deleted by an admin or by the user themselves."""
    send_ssf_event(
        EventTypes.CAEP_SESSION_REVOKED,
        {
            "initiating_entity": "user",
        },
        sub_id={
            "format": "complex",
            "session": {
                "format": "opaque",
                # Hashed to match the identifier emitted by the logout trigger
                "id": sha256(instance.session_key.encode("ascii")).hexdigest(),
            },
            "user": {
                "format": "email",
                "email": instance.user.email,
            },
        },
    )
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(password_changed)
def ssf_password_changed_cred_change(sender, user: User, password: str | None, **_):
    """Credential change trigger (password changed)"""
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        {
            "credential_type": "password",
            # A password of None signals that the credential was revoked
            # rather than updated
            "change_type": "revoke" if password is None else "update",
        },
        sub_id={
            "format": "complex",
            "user": {
                "format": "email",
                "email": user.email,
            },
        },
    )
|
||||||
|
|
||||||
|
|
||||||
|
# Maps authenticator device classes to the `credential_type` value reported
# in CAEP credential-change events. Device classes not listed here resolve to
# None via .get() in the handlers below — TODO confirm that is intended.
device_type_map = {
    StaticDevice: "pin",
    TOTPDevice: "pin",
    WebAuthnDevice: "fido-u2f",
    DuoDevice: "app",
}
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(post_save)
def ssf_device_post_save(sender: type[Model], instance: Device, created: bool, **_):
    # Credential change trigger: an authenticator device was created/updated.
    # Registered without a sender, so it fires on every model save; filter first.
    if not isinstance(instance, Device):
        return
    # Ignore devices still pending confirmation
    if not instance.confirmed:
        return
    # May be None for device classes not present in device_type_map
    device_type = device_type_map.get(instance.__class__)
    data = {
        "credential_type": device_type,
        "change_type": "create" if created else "update",
        "friendly_name": instance.name,
    }
    # Include the authenticator's AAGUID when it is a known WebAuthn device type
    if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
        data["fido2_aaguid"] = instance.aaguid
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        data,
        sub_id={
            "format": "complex",
            "user": {
                "format": "email",
                "email": instance.user.email,
            },
        },
    )
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(post_delete)
def ssf_device_post_delete(sender: type[Model], instance: Device, **_):
    # Credential change trigger: an authenticator device was deleted.
    # Registered without a sender, so it fires on every model delete; filter first.
    if not isinstance(instance, Device):
        return
    # Ignore devices that were never confirmed
    if not instance.confirmed:
        return
    # May be None for device classes not present in device_type_map
    device_type = device_type_map.get(instance.__class__)
    data = {
        "credential_type": device_type,
        "change_type": "delete",
        "friendly_name": instance.name,
    }
    if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
        data["fido2_aaguid"] = instance.aaguid
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        data,
        sub_id={
            "format": "complex",
            "user": {
                "format": "email",
                "email": instance.user.email,
            },
        },
    )
|
||||||
136
authentik/enterprise/providers/ssf/tasks.py
Normal file
136
authentik/enterprise/providers/ssf/tasks.py
Normal file
@ -0,0 +1,136 @@
|
|||||||
|
from celery import group
|
||||||
|
from django.http import HttpRequest
|
||||||
|
from django.utils.timezone import now
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from requests.exceptions import RequestException
|
||||||
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
|
from authentik.core.models import User
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
DeliveryMethods,
|
||||||
|
EventTypes,
|
||||||
|
SSFEventStatus,
|
||||||
|
Stream,
|
||||||
|
StreamEvent,
|
||||||
|
)
|
||||||
|
from authentik.events.logs import LogEvent
|
||||||
|
from authentik.events.models import TaskStatus
|
||||||
|
from authentik.events.system_tasks import SystemTask
|
||||||
|
from authentik.lib.utils.http import get_http_session
|
||||||
|
from authentik.lib.utils.time import timedelta_from_string
|
||||||
|
from authentik.policies.engine import PolicyEngine
|
||||||
|
from authentik.root.celery import CELERY_APP
|
||||||
|
|
||||||
|
session = get_http_session()
|
||||||
|
LOGGER = get_logger()
|
||||||
|
|
||||||
|
|
||||||
|
def send_ssf_event(
    event_type: EventTypes,
    data: dict,
    stream_filter: dict | None = None,
    request: HttpRequest | None = None,
    **extra_data,
):
    """Wrapper to send an SSF event to multiple streams.

    Selects all streams matching `stream_filter` (plus the implicit
    requirement that the stream subscribed to `event_type`), prepares a
    per-stream event payload, and dispatches delivery to a celery task.

    Returns the AsyncResult of the dispatched `_send_ssf_event` task.
    """
    payload = []
    # Work on a copy so the caller's filter dict is never mutated
    # (the original implementation added the contains-lookup in place).
    stream_filter = dict(stream_filter) if stream_filter else {}
    stream_filter["events_requested__contains"] = [event_type]
    if request and hasattr(request, "request_id"):
        # Propagate the request id as the SET "txn" claim for correlation
        extra_data.setdefault("txn", request.request_id)
    for stream in Stream.objects.filter(**stream_filter):
        event_data = stream.prepare_event_payload(event_type, data, **extra_data)
        payload.append((str(stream.uuid), event_data))
    return _send_ssf_event.delay(payload)
|
||||||
|
|
||||||
|
|
||||||
|
def _check_app_access(stream_uuid: str, event_data: dict) -> bool:
    """Check if event is related to user and if so, check
    if the user has access to the application"""
    stream = Stream.objects.filter(pk=stream_uuid).first()
    if not stream:
        # Stream was deleted between dispatch and execution; drop the event
        return False
    # `event_data` is a dict version of a StreamEvent
    sub_id = event_data.get("payload", {}).get("sub_id", {})
    email = sub_id.get("user", {}).get("email", None)
    if not email:
        # Event is not bound to a user; deliver unconditionally
        return True
    user = User.objects.filter(email=email).first()
    if not user:
        # Unknown user (e.g. already deleted); deliver unconditionally
        return True
    engine = PolicyEngine(stream.provider.backchannel_application, user)
    # Disable caching so the check reflects the current policy state
    engine.use_cache = False
    engine.build()
    return engine.passing
|
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task()
def _send_ssf_event(event_data: list[tuple[str, dict]]):
    # `event_data` is a list of (stream uuid, StreamEvent kwargs) tuples as
    # produced by send_ssf_event. Persists each permitted event and runs the
    # resulting delivery task signatures as a celery group.
    tasks = []
    for stream, data in event_data:
        # Skip events whose subject user does not pass the application's
        # access policies
        if not _check_app_access(stream, data):
            continue
        event = StreamEvent.objects.create(**data)
        tasks.extend(send_single_ssf_event(stream, str(event.uuid)))
    main_task = group(*tasks)
    main_task()
|
||||||
|
|
||||||
|
|
||||||
|
def send_single_ssf_event(stream_id: str, evt_id: str):
    """Build the list of celery task signatures required to deliver one event.

    Always returns a list (possibly empty): the caller extends a task list
    with the result, so returning bare `None` (as the early-exit paths
    previously did) raised `TypeError: 'NoneType' object is not iterable`.
    """
    stream = Stream.objects.filter(pk=stream_id).first()
    if not stream:
        return []
    event = StreamEvent.objects.filter(pk=evt_id).first()
    if not event:
        return []
    # Already delivered; nothing to schedule
    if event.status == SSFEventStatus.SENT:
        return []
    if stream.delivery_method == DeliveryMethods.RISC_PUSH:
        return [ssf_push_event.si(str(event.pk))]
    # Poll streams have no active delivery task; the receiver fetches events
    return []
|
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task(bind=True, base=SystemTask)
def ssf_push_event(self: SystemTask, event_id: str):
    """Deliver a single StreamEvent to its stream's push endpoint as a
    signed secevent JWT. On failure, records the error and marks the event
    as pending_failed with a refreshed expiry so it can be retried."""
    self.save_on_success = False
    event = StreamEvent.objects.filter(pk=event_id).first()
    if not event:
        return
    self.set_uid(event_id)
    if event.status == SSFEventStatus.SENT:
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    try:
        response = session.post(
            event.stream.endpoint_url,
            data=event.stream.encode(event.payload),
            headers={"Content-Type": "application/secevent+jwt", "Accept": "application/json"},
        )
        response.raise_for_status()
        event.status = SSFEventStatus.SENT
        event.save()
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    except RequestException as exc:
        LOGGER.warning("Failed to send SSF event", exc=exc)
        self.set_status(TaskStatus.ERROR)
        attrs = {}
        # `requests.Response.__bool__` is False for 4xx/5xx status codes, so a
        # plain truthiness check (`if exc.response:`) skipped recording exactly
        # the error responses this branch exists to capture. Compare to None.
        if exc.response is not None:
            attrs["response"] = {
                "content": exc.response.text,
                "status": exc.response.status_code,
            }
        self.set_error(
            exc,
            LogEvent(
                _("Failed to send request"),
                log_level="warning",
                logger=self.__name__,
                attributes=attrs,
            ),
        )
        # Re-up the expiry of the stream event
        event.expires = now() + timedelta_from_string(event.stream.provider.event_retention)
        event.status = SSFEventStatus.PENDING_FAILED
        event.save()
|
||||||
46
authentik/enterprise/providers/ssf/tests/test_config.py
Normal file
46
authentik/enterprise/providers/ssf/tests/test_config.py
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
import json
|
||||||
|
|
||||||
|
from django.urls import reverse
|
||||||
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.core.tests.utils import create_test_cert
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
SSFProvider,
|
||||||
|
)
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
|
||||||
|
|
||||||
|
class TestConfiguration(APITestCase):
    # Tests for the SSF well-known configuration endpoint.

    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        # Provider's post_save signal creates the service account + token
        # used for the authenticated request below
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )

    def test_config_fetch(self):
        """test SSF configuration (unauthenticated)"""
        res = self.client.get(
            reverse(
                "authentik_providers_ssf:configuration",
                kwargs={"application_slug": self.application.slug},
            ),
        )
        self.assertEqual(res.status_code, 200)
        content = json.loads(res.content)
        self.assertEqual(content["spec_version"], "1_0-ID2")

    def test_config_fetch_authenticated(self):
        """test SSF configuration (authenticated)"""
        res = self.client.get(
            reverse(
                "authentik_providers_ssf:configuration",
                kwargs={"application_slug": self.application.slug},
            ),
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 200)
        content = json.loads(res.content)
        self.assertEqual(content["spec_version"], "1_0-ID2")
|
||||||
51
authentik/enterprise/providers/ssf/tests/test_jwks.py
Normal file
51
authentik/enterprise/providers/ssf/tests/test_jwks.py
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
"""JWKS tests"""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
|
||||||
|
from cryptography.hazmat.backends import default_backend
|
||||||
|
from cryptography.x509 import load_der_x509_certificate
|
||||||
|
from django.test import TestCase
|
||||||
|
from django.urls.base import reverse
|
||||||
|
from jwt import PyJWKSet
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.core.tests.utils import create_test_cert
|
||||||
|
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
|
||||||
|
|
||||||
|
class TestJWKS(TestCase):
    """Test JWKS view"""

    def test_rs256(self):
        """Test JWKS request with RS256"""
        provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
        )
        app = Application.objects.create(name=generate_id(), slug=generate_id())
        app.backchannel_providers.add(provider)
        response = self.client.get(
            reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
        )
        body = json.loads(response.content.decode())
        self.assertEqual(len(body["keys"]), 1)
        # Ensure the document parses as a valid JWK Set
        PyJWKSet.from_dict(body)
        key = body["keys"][0]
        # x5c entries must be valid base64-encoded DER certificates
        load_der_x509_certificate(base64.b64decode(key["x5c"][0]), default_backend()).public_key()

    def test_es256(self):
        """Test JWKS request with ES256"""
        # NOTE(review): uses create_test_cert() with defaults, same as the
        # RS256 test — confirm this actually yields an EC key for ES256
        provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
        )
        app = Application.objects.create(name=generate_id(), slug=generate_id())
        app.backchannel_providers.add(provider)
        response = self.client.get(
            reverse("authentik_providers_ssf:jwks", kwargs={"application_slug": app.slug})
        )
        body = json.loads(response.content.decode())
        self.assertEqual(len(body["keys"]), 1)
        PyJWKSet.from_dict(body)
|
||||||
168
authentik/enterprise/providers/ssf/tests/test_signals.py
Normal file
168
authentik/enterprise/providers/ssf/tests/test_signals.py
Normal file
@ -0,0 +1,168 @@
|
|||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from django.urls import reverse
|
||||||
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
|
from authentik.core.models import Application, Group
|
||||||
|
from authentik.core.tests.utils import (
|
||||||
|
create_test_cert,
|
||||||
|
create_test_user,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
EventTypes,
|
||||||
|
SSFEventStatus,
|
||||||
|
SSFProvider,
|
||||||
|
Stream,
|
||||||
|
StreamEvent,
|
||||||
|
)
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
from authentik.policies.models import PolicyBinding
|
||||||
|
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
|
||||||
|
|
||||||
|
|
||||||
|
class TestSignals(APITestCase):
    """Test individual SSF Signals"""

    def setUp(self):
        # A backchannel SSF provider attached to an application; registering the
        # stream below is what subscribes this client to the CAEP events that the
        # individual tests assert on.
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )
        res = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(res.status_code, 201, res.content)

    def test_signal_logout(self):
        """Test user logout"""
        user = create_test_user()
        self.client.force_login(user)
        self.client.logout()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        # NOTE(review): the push endpoint is not reachable from tests, so the event
        # presumably stays in the pending/failed state — confirm against tasks.py.
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
        ]
        self.assertEqual(event_payload["initiating_entity"], "user")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["session"]["format"], "opaque")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_password_change(self):
        """Test user password change"""
        user = create_test_user()
        self.client.force_login(user)
        user.set_password(generate_id())
        user.save()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "update")
        self.assertEqual(event_payload["credential_type"], "password")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_authenticator_added(self):
        """Test authenticator creation signal"""
        user = create_test_user()
        self.client.force_login(user)
        dev = WebAuthnDevice.objects.create(
            user=user,
            name=generate_id(),
            credential_id=generate_id(),
            public_key=generate_id(),
            aaguid=str(uuid4()),
        )

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        # FIX: dropped a no-argument `.exclude()` call here; `.exclude()` without
        # arguments is a no-op and looked like a leftover of an intended filter.
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "create")
        self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
        self.assertEqual(event_payload["friendly_name"], dev.name)
        self.assertEqual(event_payload["credential_type"], "fido-u2f")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_authenticator_deleted(self):
        """Test authenticator deletion signal"""
        user = create_test_user()
        self.client.force_login(user)
        dev = WebAuthnDevice.objects.create(
            user=user,
            name=generate_id(),
            credential_id=generate_id(),
            public_key=generate_id(),
            aaguid=str(uuid4()),
        )
        dev.delete()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        # FIX: dropped a no-argument `.exclude()` call (no-op), same as above.
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        event_payload = event.payload["events"][
            "https://schemas.openid.net/secevent/caep/event-type/credential-change"
        ]
        self.assertEqual(event_payload["change_type"], "delete")
        self.assertEqual(event_payload["fido2_aaguid"], dev.aaguid)
        self.assertEqual(event_payload["friendly_name"], dev.name)
        self.assertEqual(event_payload["credential_type"], "fido-u2f")
        self.assertEqual(event.payload["sub_id"]["format"], "complex")
        self.assertEqual(event.payload["sub_id"]["user"]["format"], "email")
        self.assertEqual(event.payload["sub_id"]["user"]["email"], user.email)

    def test_signal_policy_ignore(self):
        """Test event not being created for user that doesn't have access to the application"""
        # Bind an (empty) group to the application so no user passes the policy check.
        PolicyBinding.objects.create(
            target=self.application, group=Group.objects.create(name=generate_id()), order=0
        )
        user = create_test_user()
        self.client.force_login(user)
        user.set_password(generate_id())
        user.save()

        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(
            stream=stream, type=EventTypes.CAEP_CREDENTIAL_CHANGE
        ).first()
        self.assertIsNone(event)
|
||||||
154
authentik/enterprise/providers/ssf/tests/test_stream.py
Normal file
154
authentik/enterprise/providers/ssf/tests/test_stream.py
Normal file
@ -0,0 +1,154 @@
|
|||||||
|
import json
|
||||||
|
from dataclasses import asdict
|
||||||
|
|
||||||
|
from django.urls import reverse
|
||||||
|
from django.utils import timezone
|
||||||
|
from rest_framework.test import APITestCase
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
SSFEventStatus,
|
||||||
|
SSFProvider,
|
||||||
|
Stream,
|
||||||
|
StreamEvent,
|
||||||
|
)
|
||||||
|
from authentik.lib.generators import generate_id
|
||||||
|
from authentik.providers.oauth2.id_token import IDToken
|
||||||
|
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
|
||||||
|
|
||||||
|
|
||||||
|
class TestStream(APITestCase):
    """Test SSF stream registration and deletion."""

    def setUp(self):
        self.application = Application.objects.create(name=generate_id(), slug=generate_id())
        self.provider = SSFProvider.objects.create(
            name=generate_id(),
            signing_key=create_test_cert(),
            backchannel_application=self.application,
        )

    def test_stream_add_token(self):
        """test stream add (token auth)"""
        response = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(response.status_code, 201)
        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        # NOTE(review): push delivery cannot succeed in tests, so the verification
        # event presumably remains pending/failed — confirm against tasks.py.
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        self.assertEqual(
            event.payload["events"],
            {"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
        )

    def test_stream_add_poll(self):
        """test stream add - poll method"""
        response = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/poll",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        # Poll delivery is rejected by the serializer.
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {"delivery": {"method": ["Polling for SSF events is not currently supported."]}},
        )

    def test_stream_add_oidc(self):
        """test stream add (oidc auth)"""
        provider = OAuth2Provider.objects.create(
            name=generate_id(),
            authorization_flow=create_test_flow(),
        )
        self.application.provider = provider
        self.application.save()
        user = create_test_admin_user()
        token = AccessToken.objects.create(
            provider=provider,
            user=user,
            token=generate_id(),
            auth_time=timezone.now(),
            _scope="openid user profile",
            _id_token=json.dumps(
                asdict(
                    IDToken("foo", "bar"),
                )
            ),
        )

        response = self.client.post(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            data={
                "iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
                "aud": ["https://app.authentik.company"],
                "delivery": {
                    "method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
                    "endpoint_url": "https://app.authentik.company",
                },
                "events_requested": [
                    "https://schemas.openid.net/secevent/caep/event-type/credential-change",
                    "https://schemas.openid.net/secevent/caep/event-type/session-revoked",
                ],
                "format": "iss_sub",
            },
            HTTP_AUTHORIZATION=f"Bearer {token.token}",
        )
        self.assertEqual(response.status_code, 201)
        stream = Stream.objects.filter(provider=self.provider).first()
        self.assertIsNotNone(stream)
        event = StreamEvent.objects.filter(stream=stream).first()
        self.assertIsNotNone(event)
        self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
        self.assertEqual(
            event.payload["events"],
            {"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
        )

    def test_stream_delete(self):
        """delete stream"""
        stream = Stream.objects.create(provider=self.provider)
        response = self.client.delete(
            reverse(
                "authentik_providers_ssf:stream",
                kwargs={"application_slug": self.application.slug},
            ),
            HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
        )
        self.assertEqual(response.status_code, 204)
        self.assertFalse(Stream.objects.filter(pk=stream.pk).exists())
|
||||||
32
authentik/enterprise/providers/ssf/urls.py
Normal file
32
authentik/enterprise/providers/ssf/urls.py
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
"""SSF provider URLs"""
|
||||||
|
|
||||||
|
from django.urls import path
|
||||||
|
|
||||||
|
from authentik.enterprise.providers.ssf.api.providers import SSFProviderViewSet
|
||||||
|
from authentik.enterprise.providers.ssf.api.streams import SSFStreamViewSet
|
||||||
|
from authentik.enterprise.providers.ssf.views.configuration import ConfigurationView
|
||||||
|
from authentik.enterprise.providers.ssf.views.jwks import JWKSview
|
||||||
|
from authentik.enterprise.providers.ssf.views.stream import StreamView
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path(
|
||||||
|
"application/ssf/<slug:application_slug>/ssf-jwks/",
|
||||||
|
JWKSview.as_view(),
|
||||||
|
name="jwks",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
".well-known/ssf-configuration/<slug:application_slug>",
|
||||||
|
ConfigurationView.as_view(),
|
||||||
|
name="configuration",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"application/ssf/<slug:application_slug>/stream/",
|
||||||
|
StreamView.as_view(),
|
||||||
|
name="stream",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
api_urlpatterns = [
|
||||||
|
("providers/ssf", SSFProviderViewSet),
|
||||||
|
("ssf/streams", SSFStreamViewSet),
|
||||||
|
]
|
||||||
66
authentik/enterprise/providers/ssf/views/auth.py
Normal file
66
authentik/enterprise/providers/ssf/views/auth.py
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
"""SSF Token auth"""
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING, Any
|
||||||
|
|
||||||
|
from django.db.models import Q
|
||||||
|
from rest_framework.authentication import BaseAuthentication, get_authorization_header
|
||||||
|
from rest_framework.request import Request
|
||||||
|
|
||||||
|
from authentik.core.models import Token, TokenIntents, User
|
||||||
|
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||||
|
from authentik.providers.oauth2.models import AccessToken
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from authentik.enterprise.providers.ssf.views.base import SSFView
|
||||||
|
|
||||||
|
|
||||||
|
class SSFTokenAuth(BaseAuthentication):
    """Bearer-token authentication for SSF endpoints.

    Accepts either an authentik API token that belongs to an SSF provider, or an
    OAuth2 access token issued by a provider associated with the SSF provider.
    On success, the resolved provider and its backchannel application are
    attached to the calling view as a side effect.
    """

    view: "SSFView"

    def __init__(self, view: "SSFView") -> None:
        super().__init__()
        self.view = view

    def check_token(self, key: str) -> Token | None:
        """Check that a token exists, is not expired, and is assigned to the correct provider"""
        token = Token.filter_not_expired(key=key, intent=TokenIntents.INTENT_API).first()
        if not token:
            return None
        provider: SSFProvider = token.ssfprovider_set.first()
        if not provider:
            return None
        self.view.application = provider.backchannel_application
        self.view.provider = provider
        return token

    def check_jwt(self, jwt: str) -> AccessToken | None:
        """Check JWT-based authentication, this supports tokens issued either by providers
        configured directly in the provider, and by providers assigned to the application
        that the SSF provider is a backchannel provider of."""
        token = AccessToken.filter_not_expired(token=jwt, revoked=False).first()
        if not token:
            return None
        ssf_provider = SSFProvider.objects.filter(
            Q(oidc_auth_providers__in=[token.provider])
            | Q(backchannel_application__provider__in=[token.provider]),
        ).first()
        if not ssf_provider:
            return None
        self.view.application = ssf_provider.backchannel_application
        self.view.provider = ssf_provider
        return token

    def authenticate(self, request: Request) -> tuple[User, Any] | None:
        """Resolve the Bearer credential to a (user, token) pair, or None."""
        auth = get_authorization_header(request).decode()
        auth_type, _, key = auth.partition(" ")
        if auth_type != "Bearer":
            return None
        # API-token auth takes precedence over JWT auth.
        token = self.check_token(key)
        if token:
            return (token.user, token)
        jwt_token = self.check_jwt(key)
        if jwt_token:
            # BUG FIX: this previously returned `token`, which is always falsy at
            # this point (the branch above already returned otherwise), leaving
            # request.auth as None for JWT-authenticated requests.
            return (jwt_token.user, jwt_token)
        return None
|
||||||
23
authentik/enterprise/providers/ssf/views/base.py
Normal file
23
authentik/enterprise/providers/ssf/views/base.py
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
from django.http import HttpRequest
|
||||||
|
from rest_framework.permissions import IsAuthenticated
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
from structlog.stdlib import BoundLogger, get_logger
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||||
|
from authentik.enterprise.providers.ssf.views.auth import SSFTokenAuth
|
||||||
|
|
||||||
|
|
||||||
|
class SSFView(APIView):
    """Base class for authenticated SSF endpoints.

    `application` and `provider` are populated by SSFTokenAuth during
    authentication, not by this class itself.
    """

    application: Application
    provider: SSFProvider
    logger: BoundLogger

    permission_classes = [IsAuthenticated]

    def setup(self, request: HttpRequest, *args, **kwargs) -> None:
        self.logger = get_logger().bind()
        super().setup(request, *args, **kwargs)

    def get_authenticators(self):
        # The authenticator is given a reference back to this view so it can
        # attach the resolved provider/application (see SSFTokenAuth).
        return [SSFTokenAuth(self)]
|
||||||
55
authentik/enterprise/providers/ssf/views/configuration.py
Normal file
55
authentik/enterprise/providers/ssf/views/configuration.py
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
from django.http import Http404, HttpRequest, HttpResponse, JsonResponse
|
||||||
|
from django.shortcuts import get_object_or_404
|
||||||
|
from django.urls import reverse
|
||||||
|
from rest_framework.permissions import AllowAny
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.enterprise.providers.ssf.models import DeliveryMethods, SSFProvider
|
||||||
|
from authentik.enterprise.providers.ssf.views.base import SSFView
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigurationView(SSFView):
    """SSF configuration endpoint"""

    # The transmitter metadata document is public.
    permission_classes = [AllowAny]

    def get_authenticators(self):
        return []

    def get(self, request: HttpRequest, application_slug: str, *args, **kwargs) -> HttpResponse:
        """Return the SSF transmitter metadata for the given application."""
        application = get_object_or_404(Application, slug=application_slug)
        provider = application.backchannel_provider_for(SSFProvider)
        if not provider:
            raise Http404

        def absolute(view_name: str) -> str:
            # Fully-qualified URL for one of this application's SSF views.
            return self.request.build_absolute_uri(
                reverse(view_name, kwargs={"application_slug": application.slug})
            )

        data = {
            "spec_version": "1_0-ID2",
            "issuer": absolute("authentik_providers_ssf:configuration"),
            "jwks_uri": absolute("authentik_providers_ssf:jwks"),
            "configuration_endpoint": absolute("authentik_providers_ssf:stream"),
            "delivery_methods_supported": [
                DeliveryMethods.RISC_PUSH,
            ],
            "authorization_schemes": [{"spec_urn": "urn:ietf:rfc:6749"}],
        }
        return JsonResponse(data)
|
||||||
31
authentik/enterprise/providers/ssf/views/jwks.py
Normal file
31
authentik/enterprise/providers/ssf/views/jwks.py
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
from django.http import Http404, HttpRequest, HttpResponse, JsonResponse
|
||||||
|
from django.shortcuts import get_object_or_404
|
||||||
|
from django.views import View
|
||||||
|
|
||||||
|
from authentik.core.models import Application
|
||||||
|
from authentik.crypto.models import CertificateKeyPair
|
||||||
|
from authentik.enterprise.providers.ssf.models import SSFProvider
|
||||||
|
from authentik.providers.oauth2.views.jwks import JWKSView as OAuthJWKSView
|
||||||
|
|
||||||
|
|
||||||
|
class JWKSview(View):
    """SSF JWKS endpoint, similar to the OAuth2 provider's endpoint"""

    def get(self, request: HttpRequest, application_slug: str) -> HttpResponse:
        """Show JWK Key data for Provider"""
        application = get_object_or_404(Application, slug=application_slug)
        provider = application.backchannel_provider_for(SSFProvider)
        if not provider:
            raise Http404
        signing_key: CertificateKeyPair = provider.signing_key

        # Empty JWK set when no JWK can be derived from the signing key.
        response_data = {}
        jwk = OAuthJWKSView.get_jwk_for_key(signing_key, "sig")
        if jwk:
            response_data["keys"] = [jwk]

        response = JsonResponse(response_data)
        # The JWKS document must be fetchable cross-origin.
        response["Access-Control-Allow-Origin"] = "*"
        return response
|
||||||
130
authentik/enterprise/providers/ssf/views/stream.py
Normal file
130
authentik/enterprise/providers/ssf/views/stream.py
Normal file
@ -0,0 +1,130 @@
|
|||||||
|
from django.http import HttpRequest
|
||||||
|
from django.urls import reverse
|
||||||
|
from rest_framework.exceptions import PermissionDenied, ValidationError
|
||||||
|
from rest_framework.fields import CharField, ChoiceField, ListField, SerializerMethodField
|
||||||
|
from rest_framework.request import Request
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.serializers import ModelSerializer
|
||||||
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
|
from authentik.core.api.utils import PassiveSerializer
|
||||||
|
from authentik.enterprise.providers.ssf.models import (
|
||||||
|
DeliveryMethods,
|
||||||
|
EventTypes,
|
||||||
|
SSFProvider,
|
||||||
|
Stream,
|
||||||
|
)
|
||||||
|
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
|
||||||
|
from authentik.enterprise.providers.ssf.views.base import SSFView
|
||||||
|
|
||||||
|
LOGGER = get_logger()
|
||||||
|
|
||||||
|
|
||||||
|
class StreamDeliverySerializer(PassiveSerializer):
    """Delivery configuration of a stream: method plus optional push endpoint."""

    method = ChoiceField(choices=[(x.value, x.value) for x in DeliveryMethods])
    endpoint_url = CharField(required=False)

    def validate_method(self, method: DeliveryMethods):
        """Currently only push is supported"""
        if method == DeliveryMethods.RISC_POLL:
            raise ValidationError("Polling for SSF events is not currently supported.")
        return method

    def validate(self, attrs: dict) -> dict:
        # Push delivery needs somewhere to push to.
        if attrs["method"] == DeliveryMethods.RISC_PUSH and not attrs.get("endpoint_url"):
            raise ValidationError("Endpoint URL is required when using push.")
        return attrs
|
||||||
|
|
||||||
|
|
||||||
|
class StreamSerializer(ModelSerializer):
    """Serializer for stream-registration requests."""

    delivery = StreamDeliverySerializer()
    events_requested = ListField(
        child=ChoiceField(choices=[(x.value, x.value) for x in EventTypes])
    )
    format = CharField()
    aud = ListField(child=CharField())

    def create(self, validated_data):
        provider: SSFProvider = validated_data["provider"]
        request: HttpRequest = self.context["request"]
        # The issuer is this provider's own SSF configuration URL.
        iss = request.build_absolute_uri(
            reverse(
                "authentik_providers_ssf:configuration",
                kwargs={"application_slug": provider.backchannel_application.slug},
            )
        )
        # Ensure that streams always get SET verification events sent to them
        validated_data["events_requested"].append(EventTypes.SET_VERIFICATION)
        delivery = validated_data["delivery"]
        # Flatten the nested delivery object into the model's fields.
        return super().create(
            {
                "provider": provider,
                "iss": iss,
                "aud": validated_data["aud"],
                "format": validated_data["format"],
                "events_requested": validated_data["events_requested"],
                "delivery_method": delivery["method"],
                "endpoint_url": delivery.get("endpoint_url"),
            }
        )

    class Meta:
        model = Stream
        fields = [
            "delivery",
            "events_requested",
            "format",
            "aud",
        ]
|
||||||
|
|
||||||
|
|
||||||
|
class StreamResponseSerializer(PassiveSerializer):
    """Response shape returned after a stream was registered."""

    stream_id = CharField(source="pk")
    iss = CharField()
    aud = ListField(child=CharField())
    delivery = SerializerMethodField()
    format = CharField()

    events_requested = ListField(child=CharField())
    events_supported = SerializerMethodField()
    events_delivered = ListField(child=CharField(), source="events_requested")

    def get_delivery(self, instance: Stream) -> StreamDeliverySerializer:
        """Re-assemble the nested delivery object from the flat model fields."""
        return {
            "method": instance.delivery_method,
            "endpoint_url": instance.endpoint_url,
        }

    def get_events_supported(self, instance: Stream) -> list[str]:
        """All event types this transmitter can emit."""
        return [x.value for x in EventTypes]
|
||||||
|
|
||||||
|
|
||||||
|
class StreamView(SSFView):
    """Stream management endpoint (create / delete)."""

    def post(self, request: Request, *args, **kwargs) -> Response:
        """Register a new stream and immediately queue a SET verification event."""
        stream = StreamSerializer(data=request.data, context={"request": request})
        stream.is_valid(raise_exception=True)
        if not request.user.has_perm("authentik_providers_ssf.add_stream", self.provider):
            raise PermissionDenied(
                "User does not have permission to create stream for this provider."
            )
        instance: Stream = stream.save(provider=self.provider)
        send_ssf_event(
            EventTypes.SET_VERIFICATION,
            {
                "state": None,
            },
            stream_filter={"pk": instance.uuid},
            sub_id={"format": "opaque", "id": str(instance.uuid)},
        )
        response = StreamResponseSerializer(instance=instance, context={"request": request}).data
        return Response(response, status=201)

    def delete(self, request: Request, *args, **kwargs) -> Response:
        """Delete this provider's stream(s), optionally narrowed by `stream_id`."""
        streams = Stream.objects.filter(provider=self.provider)
        # Technically this parameter is required by the spec...
        if "stream_id" in request.query_params:
            # NOTE(review): this filters on a `stream_id` model field — confirm the
            # Stream model exposes it (post() above addresses streams via `uuid`).
            streams = streams.filter(stream_id=request.query_params["stream_id"])
        streams.delete()
        return Response(status=204)
|
||||||
@ -17,6 +17,7 @@ TENANT_APPS = [
|
|||||||
"authentik.enterprise.providers.google_workspace",
|
"authentik.enterprise.providers.google_workspace",
|
||||||
"authentik.enterprise.providers.microsoft_entra",
|
"authentik.enterprise.providers.microsoft_entra",
|
||||||
"authentik.enterprise.providers.rac",
|
"authentik.enterprise.providers.rac",
|
||||||
|
"authentik.enterprise.providers.ssf",
|
||||||
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
|
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
|
||||||
"authentik.enterprise.stages.source",
|
"authentik.enterprise.stages.source",
|
||||||
]
|
]
|
||||||
|
|||||||
@ -53,12 +53,13 @@ class SystemTask(TenantTask):
|
|||||||
if not isinstance(msg, LogEvent):
|
if not isinstance(msg, LogEvent):
|
||||||
self._messages[idx] = LogEvent(msg, logger=self.__name__, log_level="info")
|
self._messages[idx] = LogEvent(msg, logger=self.__name__, log_level="info")
|
||||||
|
|
||||||
def set_error(self, exception: Exception):
|
def set_error(self, exception: Exception, *messages: LogEvent):
|
||||||
"""Set result to error and save exception"""
|
"""Set result to error and save exception"""
|
||||||
self._status = TaskStatus.ERROR
|
self._status = TaskStatus.ERROR
|
||||||
self._messages = [
|
self._messages = list(messages)
|
||||||
LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error")
|
self._messages.extend(
|
||||||
]
|
[LogEvent(exception_to_string(exception), logger=self.__name__, log_level="error")]
|
||||||
|
)
|
||||||
|
|
||||||
def before_start(self, task_id, args, kwargs):
|
def before_start(self, task_id, args, kwargs):
|
||||||
self._start_precise = perf_counter()
|
self._start_precise = perf_counter()
|
||||||
|
|||||||
@ -3,6 +3,7 @@
|
|||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from django.contrib.messages import INFO, add_message
|
||||||
from django.http.request import HttpRequest
|
from django.http.request import HttpRequest
|
||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
@ -61,6 +62,8 @@ class ReevaluateMarker(StageMarker):
|
|||||||
engine.request.context.update(plan.context)
|
engine.request.context.update(plan.context)
|
||||||
engine.build()
|
engine.build()
|
||||||
result = engine.result
|
result = engine.result
|
||||||
|
for message in result.messages:
|
||||||
|
add_message(http_request, INFO, message)
|
||||||
if result.passing:
|
if result.passing:
|
||||||
return binding
|
return binding
|
||||||
LOGGER.warning(
|
LOGGER.warning(
|
||||||
|
|||||||
@ -109,6 +109,8 @@ class FlowPlan:
|
|||||||
|
|
||||||
def pop(self):
|
def pop(self):
|
||||||
"""Pop next pending stage from bottom of list"""
|
"""Pop next pending stage from bottom of list"""
|
||||||
|
if not self.markers and not self.bindings:
|
||||||
|
return
|
||||||
self.markers.pop(0)
|
self.markers.pop(0)
|
||||||
self.bindings.pop(0)
|
self.bindings.pop(0)
|
||||||
|
|
||||||
@ -156,8 +158,13 @@ class FlowPlan:
|
|||||||
final_stage: type[StageView] = self.bindings[-1].stage.view
|
final_stage: type[StageView] = self.bindings[-1].stage.view
|
||||||
temp_exec = FlowExecutorView(flow=flow, request=request, plan=self)
|
temp_exec = FlowExecutorView(flow=flow, request=request, plan=self)
|
||||||
temp_exec.current_stage = self.bindings[-1].stage
|
temp_exec.current_stage = self.bindings[-1].stage
|
||||||
|
temp_exec.current_stage_view = final_stage
|
||||||
|
temp_exec.setup(request, flow.slug)
|
||||||
stage = final_stage(request=request, executor=temp_exec)
|
stage = final_stage(request=request, executor=temp_exec)
|
||||||
return stage.dispatch(request)
|
response = stage.dispatch(request)
|
||||||
|
# Ensure we clean the flow state we have in the session before we redirect away
|
||||||
|
temp_exec.stage_ok()
|
||||||
|
return response
|
||||||
|
|
||||||
get_qs = request.GET.copy()
|
get_qs = request.GET.copy()
|
||||||
if request.user.is_authenticated and (
|
if request.user.is_authenticated and (
|
||||||
|
|||||||
@ -103,7 +103,7 @@ class FlowExecutorView(APIView):
|
|||||||
|
|
||||||
permission_classes = [AllowAny]
|
permission_classes = [AllowAny]
|
||||||
|
|
||||||
flow: Flow
|
flow: Flow = None
|
||||||
|
|
||||||
plan: FlowPlan | None = None
|
plan: FlowPlan | None = None
|
||||||
current_binding: FlowStageBinding | None = None
|
current_binding: FlowStageBinding | None = None
|
||||||
@ -114,7 +114,8 @@ class FlowExecutorView(APIView):
|
|||||||
|
|
||||||
def setup(self, request: HttpRequest, flow_slug: str):
|
def setup(self, request: HttpRequest, flow_slug: str):
|
||||||
super().setup(request, flow_slug=flow_slug)
|
super().setup(request, flow_slug=flow_slug)
|
||||||
self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
|
if not self.flow:
|
||||||
|
self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
|
||||||
self._logger = get_logger().bind(flow_slug=flow_slug)
|
self._logger = get_logger().bind(flow_slug=flow_slug)
|
||||||
set_tag("authentik.flow", self.flow.slug)
|
set_tag("authentik.flow", self.flow.slug)
|
||||||
|
|
||||||
|
|||||||
@ -283,12 +283,15 @@ class ConfigLoader:
|
|||||||
def get_optional_int(self, path: str, default=None) -> int | None:
|
def get_optional_int(self, path: str, default=None) -> int | None:
|
||||||
"""Wrapper for get that converts value into int or None if set"""
|
"""Wrapper for get that converts value into int or None if set"""
|
||||||
value = self.get(path, default)
|
value = self.get(path, default)
|
||||||
|
if value is UNSET:
|
||||||
|
return default
|
||||||
try:
|
try:
|
||||||
return int(value)
|
return int(value)
|
||||||
except (ValueError, TypeError) as exc:
|
except (ValueError, TypeError) as exc:
|
||||||
if value is None or (isinstance(value, str) and value.lower() == "null"):
|
if value is None or (isinstance(value, str) and value.lower() == "null"):
|
||||||
return None
|
return default
|
||||||
|
if value is UNSET:
|
||||||
|
return default
|
||||||
self.log("warning", "Failed to parse config as int", path=path, exc=str(exc))
|
self.log("warning", "Failed to parse config as int", path=path, exc=str(exc))
|
||||||
return default
|
return default
|
||||||
|
|
||||||
@ -421,4 +424,4 @@ if __name__ == "__main__":
|
|||||||
if len(argv) < 2: # noqa: PLR2004
|
if len(argv) < 2: # noqa: PLR2004
|
||||||
print(dumps(CONFIG.raw, indent=4, cls=AttrEncoder))
|
print(dumps(CONFIG.raw, indent=4, cls=AttrEncoder))
|
||||||
else:
|
else:
|
||||||
print(CONFIG.get(argv[1]))
|
print(CONFIG.get(argv[-1]))
|
||||||
|
|||||||
26
authentik/lib/debug.py
Normal file
26
authentik/lib/debug.py
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
|
from authentik.lib.config import CONFIG
|
||||||
|
|
||||||
|
LOGGER = get_logger()
|
||||||
|
|
||||||
|
|
||||||
|
def start_debug_server(**kwargs) -> bool:
|
||||||
|
"""Attempt to start a debugpy server in the current process.
|
||||||
|
Returns true if the server was started successfully, otherwise false"""
|
||||||
|
if not CONFIG.get_bool("debug") and not CONFIG.get_bool("debugger"):
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
import debugpy
|
||||||
|
except ImportError:
|
||||||
|
LOGGER.warning(
|
||||||
|
"Failed to import debugpy. debugpy is not included "
|
||||||
|
"in the default release dependencies and must be installed manually"
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
listen: str = CONFIG.get("listen.listen_debug_py", "127.0.0.1:9901")
|
||||||
|
host, _, port = listen.rpartition(":")
|
||||||
|
debugpy.listen((host, int(port)), **kwargs) # nosec
|
||||||
|
LOGGER.debug("Starting debug server", host=host, port=port)
|
||||||
|
return True
|
||||||
@ -8,6 +8,7 @@ postgresql:
|
|||||||
password: "env://POSTGRES_PASSWORD"
|
password: "env://POSTGRES_PASSWORD"
|
||||||
test:
|
test:
|
||||||
name: test_authentik
|
name: test_authentik
|
||||||
|
default_schema: public
|
||||||
read_replicas: {}
|
read_replicas: {}
|
||||||
# For example
|
# For example
|
||||||
# 0:
|
# 0:
|
||||||
@ -21,6 +22,7 @@ listen:
|
|||||||
listen_radius: 0.0.0.0:1812
|
listen_radius: 0.0.0.0:1812
|
||||||
listen_metrics: 0.0.0.0:9300
|
listen_metrics: 0.0.0.0:9300
|
||||||
listen_debug: 0.0.0.0:9900
|
listen_debug: 0.0.0.0:9900
|
||||||
|
listen_debug_py: 0.0.0.0:9901
|
||||||
trusted_proxy_cidrs:
|
trusted_proxy_cidrs:
|
||||||
- 127.0.0.0/8
|
- 127.0.0.0/8
|
||||||
- 10.0.0.0/8
|
- 10.0.0.0/8
|
||||||
@ -57,7 +59,7 @@ cache:
|
|||||||
# transport_options: ""
|
# transport_options: ""
|
||||||
|
|
||||||
debug: false
|
debug: false
|
||||||
remote_debug: false
|
debugger: false
|
||||||
|
|
||||||
log_level: info
|
log_level: info
|
||||||
|
|
||||||
|
|||||||
@ -22,9 +22,9 @@ class OutgoingSyncProvider(Model):
|
|||||||
class Meta:
|
class Meta:
|
||||||
abstract = True
|
abstract = True
|
||||||
|
|
||||||
def client_for_model[
|
def client_for_model[T: User | Group](
|
||||||
T: User | Group
|
self, model: type[T]
|
||||||
](self, model: type[T]) -> BaseOutgoingSyncClient[T, Any, Any, Self]:
|
) -> BaseOutgoingSyncClient[T, Any, Any, Self]:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def get_object_qs[T: User | Group](self, type: type[T]) -> QuerySet[T]:
|
def get_object_qs[T: User | Group](self, type: type[T]) -> QuerySet[T]:
|
||||||
|
|||||||
@ -42,6 +42,8 @@ class DebugSession(Session):
|
|||||||
|
|
||||||
def get_http_session() -> Session:
|
def get_http_session() -> Session:
|
||||||
"""Get a requests session with common headers"""
|
"""Get a requests session with common headers"""
|
||||||
session = DebugSession() if CONFIG.get_bool("debug") else Session()
|
session = Session()
|
||||||
|
if CONFIG.get_bool("debug") or CONFIG.get("log_level") == "trace":
|
||||||
|
session = DebugSession()
|
||||||
session.headers["User-Agent"] = authentik_user_agent()
|
session.headers["User-Agent"] = authentik_user_agent()
|
||||||
return session
|
return session
|
||||||
|
|||||||
@ -1,11 +1,26 @@
|
|||||||
"""Expression Policy API"""
|
"""Expression Policy API"""
|
||||||
|
|
||||||
|
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||||
|
from guardian.shortcuts import get_objects_for_user
|
||||||
|
from rest_framework.decorators import action
|
||||||
|
from rest_framework.fields import CharField
|
||||||
|
from rest_framework.request import Request
|
||||||
|
from rest_framework.response import Response
|
||||||
from rest_framework.viewsets import ModelViewSet
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
from authentik.core.api.used_by import UsedByMixin
|
from authentik.core.api.used_by import UsedByMixin
|
||||||
|
from authentik.events.logs import LogEventSerializer, capture_logs
|
||||||
|
from authentik.policies.api.exec import PolicyTestResultSerializer, PolicyTestSerializer
|
||||||
from authentik.policies.api.policies import PolicySerializer
|
from authentik.policies.api.policies import PolicySerializer
|
||||||
from authentik.policies.expression.evaluator import PolicyEvaluator
|
from authentik.policies.expression.evaluator import PolicyEvaluator
|
||||||
from authentik.policies.expression.models import ExpressionPolicy
|
from authentik.policies.expression.models import ExpressionPolicy
|
||||||
|
from authentik.policies.models import PolicyBinding
|
||||||
|
from authentik.policies.process import PolicyProcess
|
||||||
|
from authentik.policies.types import PolicyRequest
|
||||||
|
from authentik.rbac.decorators import permission_required
|
||||||
|
|
||||||
|
LOGGER = get_logger()
|
||||||
|
|
||||||
|
|
||||||
class ExpressionPolicySerializer(PolicySerializer):
|
class ExpressionPolicySerializer(PolicySerializer):
|
||||||
@ -30,3 +45,50 @@ class ExpressionPolicyViewSet(UsedByMixin, ModelViewSet):
|
|||||||
filterset_fields = "__all__"
|
filterset_fields = "__all__"
|
||||||
ordering = ["name"]
|
ordering = ["name"]
|
||||||
search_fields = ["name"]
|
search_fields = ["name"]
|
||||||
|
|
||||||
|
class ExpressionPolicyTestSerializer(PolicyTestSerializer):
|
||||||
|
"""Expression policy test serializer"""
|
||||||
|
|
||||||
|
expression = CharField()
|
||||||
|
|
||||||
|
@permission_required("authentik_policies.view_policy")
|
||||||
|
@extend_schema(
|
||||||
|
request=ExpressionPolicyTestSerializer(),
|
||||||
|
responses={
|
||||||
|
200: PolicyTestResultSerializer(),
|
||||||
|
400: OpenApiResponse(description="Invalid parameters"),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
|
||||||
|
def test(self, request: Request, pk: str) -> Response:
|
||||||
|
"""Test policy"""
|
||||||
|
policy = self.get_object()
|
||||||
|
test_params = self.ExpressionPolicyTestSerializer(data=request.data)
|
||||||
|
if not test_params.is_valid():
|
||||||
|
return Response(test_params.errors, status=400)
|
||||||
|
|
||||||
|
# User permission check, only allow policy testing for users that are readable
|
||||||
|
users = get_objects_for_user(request.user, "authentik_core.view_user").filter(
|
||||||
|
pk=test_params.validated_data["user"].pk
|
||||||
|
)
|
||||||
|
if not users.exists():
|
||||||
|
return Response(status=400)
|
||||||
|
|
||||||
|
policy.expression = test_params.validated_data["expression"]
|
||||||
|
|
||||||
|
p_request = PolicyRequest(users.first())
|
||||||
|
p_request.debug = True
|
||||||
|
p_request.set_http_request(self.request)
|
||||||
|
p_request.context = test_params.validated_data.get("context", {})
|
||||||
|
|
||||||
|
proc = PolicyProcess(PolicyBinding(policy=policy), p_request, None)
|
||||||
|
with capture_logs() as logs:
|
||||||
|
result = proc.execute()
|
||||||
|
log_messages = []
|
||||||
|
for log in logs:
|
||||||
|
if log.attributes.get("process", "") == "PolicyProcess":
|
||||||
|
continue
|
||||||
|
log_messages.append(LogEventSerializer(log).data)
|
||||||
|
result.log_messages = log_messages
|
||||||
|
response = PolicyTestResultSerializer(result)
|
||||||
|
return Response(response.data)
|
||||||
|
|||||||
@ -281,7 +281,6 @@ class OAuth2Provider(WebfingerProvider, Provider):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
return request.build_absolute_uri(url)
|
return request.build_absolute_uri(url)
|
||||||
|
|
||||||
except Provider.application.RelatedObjectDoesNotExist:
|
except Provider.application.RelatedObjectDoesNotExist:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|||||||
@ -1,9 +1,10 @@
|
|||||||
from django.contrib.auth.signals import user_logged_out
|
from django.contrib.auth.signals import user_logged_out
|
||||||
|
from django.db.models.signals import post_save
|
||||||
from django.dispatch import receiver
|
from django.dispatch import receiver
|
||||||
from django.http import HttpRequest
|
from django.http import HttpRequest
|
||||||
|
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
from authentik.providers.oauth2.models import AccessToken
|
from authentik.providers.oauth2.models import AccessToken, DeviceToken, RefreshToken
|
||||||
|
|
||||||
|
|
||||||
@receiver(user_logged_out)
|
@receiver(user_logged_out)
|
||||||
@ -12,3 +13,13 @@ def user_logged_out_oauth_access_token(sender, request: HttpRequest, user: User,
|
|||||||
if not request.session or not request.session.session_key:
|
if not request.session or not request.session.session_key:
|
||||||
return
|
return
|
||||||
AccessToken.objects.filter(user=user, session__session_key=request.session.session_key).delete()
|
AccessToken.objects.filter(user=user, session__session_key=request.session.session_key).delete()
|
||||||
|
|
||||||
|
|
||||||
|
@receiver(post_save, sender=User)
|
||||||
|
def user_deactivated(sender, instance: User, **_):
|
||||||
|
"""Remove user tokens when deactivated"""
|
||||||
|
if instance.is_active:
|
||||||
|
return
|
||||||
|
AccessToken.objects.filter(session__user=instance).delete()
|
||||||
|
RefreshToken.objects.filter(session__user=instance).delete()
|
||||||
|
DeviceToken.objects.filter(session__user=instance).delete()
|
||||||
|
|||||||
@ -12,6 +12,7 @@ from authentik.core.tests.utils import create_test_admin_user, create_test_cert,
|
|||||||
from authentik.lib.generators import generate_id
|
from authentik.lib.generators import generate_id
|
||||||
from authentik.providers.oauth2.models import (
|
from authentik.providers.oauth2.models import (
|
||||||
AccessToken,
|
AccessToken,
|
||||||
|
ClientTypes,
|
||||||
IDToken,
|
IDToken,
|
||||||
OAuth2Provider,
|
OAuth2Provider,
|
||||||
RedirectURI,
|
RedirectURI,
|
||||||
@ -108,3 +109,29 @@ class TesOAuth2Revoke(OAuthTestCase):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.assertEqual(res.status_code, 401)
|
self.assertEqual(res.status_code, 401)
|
||||||
|
|
||||||
|
def test_revoke_public(self):
|
||||||
|
"""Test revoke public client"""
|
||||||
|
self.provider.client_type = ClientTypes.PUBLIC
|
||||||
|
self.provider.save()
|
||||||
|
token: AccessToken = AccessToken.objects.create(
|
||||||
|
provider=self.provider,
|
||||||
|
user=self.user,
|
||||||
|
token=generate_id(),
|
||||||
|
auth_time=timezone.now(),
|
||||||
|
_scope="openid user profile",
|
||||||
|
_id_token=json.dumps(
|
||||||
|
asdict(
|
||||||
|
IDToken("foo", "bar"),
|
||||||
|
)
|
||||||
|
),
|
||||||
|
)
|
||||||
|
auth_public = b64encode(f"{self.provider.client_id}:{generate_id()}".encode()).decode()
|
||||||
|
res = self.client.post(
|
||||||
|
reverse("authentik_providers_oauth2:token-revoke"),
|
||||||
|
HTTP_AUTHORIZATION=f"Basic {auth_public}",
|
||||||
|
data={
|
||||||
|
"token": token.token,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
self.assertEqual(res.status_code, 200)
|
||||||
|
|||||||
@ -150,6 +150,7 @@ class TestToken(OAuthTestCase):
|
|||||||
"id_token": provider.encode(
|
"id_token": provider.encode(
|
||||||
access.id_token.to_dict(),
|
access.id_token.to_dict(),
|
||||||
),
|
),
|
||||||
|
"scope": "",
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.validate_jwt(access, provider)
|
self.validate_jwt(access, provider)
|
||||||
@ -242,6 +243,7 @@ class TestToken(OAuthTestCase):
|
|||||||
"id_token": provider.encode(
|
"id_token": provider.encode(
|
||||||
access.id_token.to_dict(),
|
access.id_token.to_dict(),
|
||||||
),
|
),
|
||||||
|
"scope": "offline_access",
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.validate_jwt(access, provider)
|
self.validate_jwt(access, provider)
|
||||||
@ -301,6 +303,7 @@ class TestToken(OAuthTestCase):
|
|||||||
"id_token": provider.encode(
|
"id_token": provider.encode(
|
||||||
access.id_token.to_dict(),
|
access.id_token.to_dict(),
|
||||||
),
|
),
|
||||||
|
"scope": "offline_access",
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@ -178,12 +178,18 @@ def protected_resource_view(scopes: list[str]):
|
|||||||
return wrapper
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
def authenticate_provider(request: HttpRequest) -> OAuth2Provider | None:
|
def provider_from_request(request: HttpRequest) -> tuple[OAuth2Provider | None, str, str]:
|
||||||
"""Attempt to authenticate via Basic auth of client_id:client_secret"""
|
"""Get provider from Basic auth of client_id:client_secret. Does not perform authentication"""
|
||||||
client_id, client_secret = extract_client_auth(request)
|
client_id, client_secret = extract_client_auth(request)
|
||||||
if client_id == client_secret == "":
|
if client_id == client_secret == "":
|
||||||
return None
|
return None, "", ""
|
||||||
provider: OAuth2Provider | None = OAuth2Provider.objects.filter(client_id=client_id).first()
|
provider: OAuth2Provider | None = OAuth2Provider.objects.filter(client_id=client_id).first()
|
||||||
|
return provider, client_id, client_secret
|
||||||
|
|
||||||
|
|
||||||
|
def authenticate_provider(request: HttpRequest) -> OAuth2Provider | None:
|
||||||
|
"""Attempt to authenticate via Basic auth of client_id:client_secret"""
|
||||||
|
provider, client_id, client_secret = provider_from_request(request)
|
||||||
if not provider:
|
if not provider:
|
||||||
return None
|
return None
|
||||||
if client_id != provider.client_id or client_secret != provider.client_secret:
|
if client_id != provider.client_id or client_secret != provider.client_secret:
|
||||||
|
|||||||
@ -499,11 +499,11 @@ class OAuthFulfillmentStage(StageView):
|
|||||||
)
|
)
|
||||||
|
|
||||||
challenge.is_valid()
|
challenge.is_valid()
|
||||||
|
self.executor.stage_ok()
|
||||||
return HttpChallengeResponse(
|
return HttpChallengeResponse(
|
||||||
challenge=challenge,
|
challenge=challenge,
|
||||||
)
|
)
|
||||||
|
self.executor.stage_ok()
|
||||||
return HttpResponseRedirectScheme(uri, allowed_schemes=[parsed.scheme])
|
return HttpResponseRedirectScheme(uri, allowed_schemes=[parsed.scheme])
|
||||||
|
|
||||||
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||||
|
|||||||
@ -64,7 +64,8 @@ def to_base64url_uint(val: int, min_length: int = 0) -> bytes:
|
|||||||
class JWKSView(View):
|
class JWKSView(View):
|
||||||
"""Show RSA Key data for Provider"""
|
"""Show RSA Key data for Provider"""
|
||||||
|
|
||||||
def get_jwk_for_key(self, key: CertificateKeyPair, use: str) -> dict | None:
|
@staticmethod
|
||||||
|
def get_jwk_for_key(key: CertificateKeyPair, use: str) -> dict | None:
|
||||||
"""Convert a certificate-key pair into JWK"""
|
"""Convert a certificate-key pair into JWK"""
|
||||||
private_key = key.private_key
|
private_key = key.private_key
|
||||||
key_data = None
|
key_data = None
|
||||||
@ -123,12 +124,12 @@ class JWKSView(View):
|
|||||||
response_data = {}
|
response_data = {}
|
||||||
|
|
||||||
if signing_key := provider.signing_key:
|
if signing_key := provider.signing_key:
|
||||||
jwk = self.get_jwk_for_key(signing_key, "sig")
|
jwk = JWKSView.get_jwk_for_key(signing_key, "sig")
|
||||||
if jwk:
|
if jwk:
|
||||||
response_data.setdefault("keys", [])
|
response_data.setdefault("keys", [])
|
||||||
response_data["keys"].append(jwk)
|
response_data["keys"].append(jwk)
|
||||||
if encryption_key := provider.encryption_key:
|
if encryption_key := provider.encryption_key:
|
||||||
jwk = self.get_jwk_for_key(encryption_key, "enc")
|
jwk = JWKSView.get_jwk_for_key(encryption_key, "enc")
|
||||||
if jwk:
|
if jwk:
|
||||||
response_data.setdefault("keys", [])
|
response_data.setdefault("keys", [])
|
||||||
response_data["keys"].append(jwk)
|
response_data["keys"].append(jwk)
|
||||||
|
|||||||
@ -627,6 +627,7 @@ class TokenView(View):
|
|||||||
response = {
|
response = {
|
||||||
"access_token": access_token.token,
|
"access_token": access_token.token,
|
||||||
"token_type": TOKEN_TYPE,
|
"token_type": TOKEN_TYPE,
|
||||||
|
"scope": " ".join(access_token.scope),
|
||||||
"expires_in": int(
|
"expires_in": int(
|
||||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||||
),
|
),
|
||||||
@ -710,6 +711,7 @@ class TokenView(View):
|
|||||||
"access_token": access_token.token,
|
"access_token": access_token.token,
|
||||||
"refresh_token": refresh_token.token,
|
"refresh_token": refresh_token.token,
|
||||||
"token_type": TOKEN_TYPE,
|
"token_type": TOKEN_TYPE,
|
||||||
|
"scope": " ".join(access_token.scope),
|
||||||
"expires_in": int(
|
"expires_in": int(
|
||||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||||
),
|
),
|
||||||
@ -736,6 +738,7 @@ class TokenView(View):
|
|||||||
return {
|
return {
|
||||||
"access_token": access_token.token,
|
"access_token": access_token.token,
|
||||||
"token_type": TOKEN_TYPE,
|
"token_type": TOKEN_TYPE,
|
||||||
|
"scope": " ".join(access_token.scope),
|
||||||
"expires_in": int(
|
"expires_in": int(
|
||||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||||
),
|
),
|
||||||
@ -767,6 +770,7 @@ class TokenView(View):
|
|||||||
response = {
|
response = {
|
||||||
"access_token": access_token.token,
|
"access_token": access_token.token,
|
||||||
"token_type": TOKEN_TYPE,
|
"token_type": TOKEN_TYPE,
|
||||||
|
"scope": " ".join(access_token.scope),
|
||||||
"expires_in": int(
|
"expires_in": int(
|
||||||
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
timedelta_from_string(self.provider.access_token_validity).total_seconds()
|
||||||
),
|
),
|
||||||
|
|||||||
@ -9,8 +9,12 @@ from django.views.decorators.csrf import csrf_exempt
|
|||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
from authentik.providers.oauth2.errors import TokenRevocationError
|
from authentik.providers.oauth2.errors import TokenRevocationError
|
||||||
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider, RefreshToken
|
from authentik.providers.oauth2.models import AccessToken, ClientTypes, OAuth2Provider, RefreshToken
|
||||||
from authentik.providers.oauth2.utils import TokenResponse, authenticate_provider
|
from authentik.providers.oauth2.utils import (
|
||||||
|
TokenResponse,
|
||||||
|
authenticate_provider,
|
||||||
|
provider_from_request,
|
||||||
|
)
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
|
|
||||||
@ -27,7 +31,9 @@ class TokenRevocationParams:
|
|||||||
"""Extract required Parameters from HTTP Request"""
|
"""Extract required Parameters from HTTP Request"""
|
||||||
raw_token = request.POST.get("token")
|
raw_token = request.POST.get("token")
|
||||||
|
|
||||||
provider = authenticate_provider(request)
|
provider, _, _ = provider_from_request(request)
|
||||||
|
if provider and provider.client_type == ClientTypes.CONFIDENTIAL:
|
||||||
|
provider = authenticate_provider(request)
|
||||||
if not provider:
|
if not provider:
|
||||||
raise TokenRevocationError("invalid_client")
|
raise TokenRevocationError("invalid_client")
|
||||||
|
|
||||||
|
|||||||
@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
from django.apps import apps
|
from django.apps import apps
|
||||||
from django.contrib.auth.models import Permission
|
from django.contrib.auth.models import Permission
|
||||||
from django.db.models import QuerySet
|
from django.db.models import Q, QuerySet
|
||||||
from django_filters.filters import ModelChoiceFilter
|
from django_filters.filters import ModelChoiceFilter
|
||||||
from django_filters.filterset import FilterSet
|
from django_filters.filterset import FilterSet
|
||||||
from django_filters.rest_framework import DjangoFilterBackend
|
from django_filters.rest_framework import DjangoFilterBackend
|
||||||
@ -18,6 +18,7 @@ from rest_framework.filters import OrderingFilter, SearchFilter
|
|||||||
from rest_framework.permissions import IsAuthenticated
|
from rest_framework.permissions import IsAuthenticated
|
||||||
from rest_framework.viewsets import ReadOnlyModelViewSet
|
from rest_framework.viewsets import ReadOnlyModelViewSet
|
||||||
|
|
||||||
|
from authentik.blueprints.v1.importer import excluded_models
|
||||||
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
|
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
from authentik.lib.validators import RequiredTogetherValidator
|
from authentik.lib.validators import RequiredTogetherValidator
|
||||||
@ -105,13 +106,13 @@ class RBACPermissionViewSet(ReadOnlyModelViewSet):
|
|||||||
]
|
]
|
||||||
|
|
||||||
def get_queryset(self) -> QuerySet:
|
def get_queryset(self) -> QuerySet:
|
||||||
return (
|
query = Q()
|
||||||
Permission.objects.all()
|
for model in excluded_models():
|
||||||
.select_related("content_type")
|
query |= Q(
|
||||||
.filter(
|
content_type__app_label=model._meta.app_label,
|
||||||
content_type__app_label__startswith="authentik",
|
content_type__model=model._meta.model_name,
|
||||||
)
|
)
|
||||||
)
|
return Permission.objects.all().select_related("content_type").exclude(query)
|
||||||
|
|
||||||
|
|
||||||
class PermissionAssignSerializer(PassiveSerializer):
|
class PermissionAssignSerializer(PassiveSerializer):
|
||||||
|
|||||||
@ -7,7 +7,12 @@ from psycopg import connect
|
|||||||
|
|
||||||
from authentik.lib.config import CONFIG
|
from authentik.lib.config import CONFIG
|
||||||
|
|
||||||
QUERY = """SELECT id FROM public.authentik_install_id ORDER BY id LIMIT 1;"""
|
# We need to string format the query as tables and schemas can't be set by parameters
|
||||||
|
# not a security issue as the config value is set by the person installing authentik
|
||||||
|
# which also has postgres credentials etc
|
||||||
|
QUERY = """SELECT id FROM {}.authentik_install_id ORDER BY id LIMIT 1;""".format( # nosec
|
||||||
|
CONFIG.get("postgresql.default_schema")
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@lru_cache
|
@lru_cache
|
||||||
|
|||||||
@ -129,6 +129,7 @@ TENANT_DOMAIN_MODEL = "authentik_tenants.Domain"
|
|||||||
|
|
||||||
TENANT_CREATION_FAKES_MIGRATIONS = True
|
TENANT_CREATION_FAKES_MIGRATIONS = True
|
||||||
TENANT_BASE_SCHEMA = "template"
|
TENANT_BASE_SCHEMA = "template"
|
||||||
|
PUBLIC_SCHEMA_NAME = CONFIG.get("postgresql.default_schema")
|
||||||
|
|
||||||
GUARDIAN_MONKEY_PATCH = False
|
GUARDIAN_MONKEY_PATCH = False
|
||||||
|
|
||||||
|
|||||||
@ -1,3 +1,4 @@
|
|||||||
|
import math
|
||||||
from os import environ
|
from os import environ
|
||||||
from ssl import OPENSSL_VERSION
|
from ssl import OPENSSL_VERSION
|
||||||
|
|
||||||
@ -24,3 +25,20 @@ def pytest_report_header(*_, **__):
|
|||||||
f"authentik version: {get_full_version()}",
|
f"authentik version: {get_full_version()}",
|
||||||
f"OpenSSL version: {OPENSSL_VERSION}, FIPS: {backend._fips_enabled}",
|
f"OpenSSL version: {OPENSSL_VERSION}, FIPS: {backend._fips_enabled}",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]) -> None:
|
||||||
|
current_id = int(environ.get("CI_RUN_ID", 0)) - 1
|
||||||
|
total_ids = int(environ.get("CI_TOTAL_RUNS", 0))
|
||||||
|
|
||||||
|
if total_ids:
|
||||||
|
num_tests = len(items)
|
||||||
|
matrix_size = math.ceil(num_tests / total_ids)
|
||||||
|
|
||||||
|
start = current_id * matrix_size
|
||||||
|
end = (current_id + 1) * matrix_size
|
||||||
|
|
||||||
|
deselected_items = items[:start] + items[end:]
|
||||||
|
config.hook.pytest_deselected(items=deselected_items)
|
||||||
|
items[:] = items[start:end]
|
||||||
|
print(f" Executing {start} - {end} tests")
|
||||||
|
|||||||
@ -66,6 +66,7 @@ class KerberosSourceViewSet(UsedByMixin, ModelViewSet):
|
|||||||
serializer_class = KerberosSourceSerializer
|
serializer_class = KerberosSourceSerializer
|
||||||
lookup_field = "slug"
|
lookup_field = "slug"
|
||||||
filterset_fields = [
|
filterset_fields = [
|
||||||
|
"pbm_uuid",
|
||||||
"name",
|
"name",
|
||||||
"slug",
|
"slug",
|
||||||
"enabled",
|
"enabled",
|
||||||
|
|||||||
@ -12,6 +12,7 @@ from django.db.models.fields import b64decode
|
|||||||
from django.http import HttpRequest
|
from django.http import HttpRequest
|
||||||
from django.shortcuts import reverse
|
from django.shortcuts import reverse
|
||||||
from django.templatetags.static import static
|
from django.templatetags.static import static
|
||||||
|
from django.utils.timezone import now
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from kadmin import KAdmin, KAdminApiVersion
|
from kadmin import KAdmin, KAdminApiVersion
|
||||||
from kadmin.exceptions import PyKAdminException
|
from kadmin.exceptions import PyKAdminException
|
||||||
@ -173,12 +174,18 @@ class KerberosSource(Source):
|
|||||||
def get_base_user_properties(self, principal: str, **kwargs):
|
def get_base_user_properties(self, principal: str, **kwargs):
|
||||||
localpart, _ = principal.rsplit("@", 1)
|
localpart, _ = principal.rsplit("@", 1)
|
||||||
|
|
||||||
return {
|
properties = {
|
||||||
"username": localpart,
|
"username": localpart,
|
||||||
"type": UserTypes.INTERNAL,
|
"type": UserTypes.INTERNAL,
|
||||||
"path": self.get_user_path(),
|
"path": self.get_user_path(),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if "principal_obj" in kwargs:
|
||||||
|
princ_expiry = kwargs["principal_obj"].expire_time
|
||||||
|
properties["is_active"] = princ_expiry is None or princ_expiry > now()
|
||||||
|
|
||||||
|
return properties
|
||||||
|
|
||||||
def get_base_group_properties(self, group_id: str, **kwargs):
|
def get_base_group_properties(self, group_id: str, **kwargs):
|
||||||
return {
|
return {
|
||||||
"name": group_id,
|
"name": group_id,
|
||||||
|
|||||||
@ -110,6 +110,7 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
|
|||||||
serializer_class = LDAPSourceSerializer
|
serializer_class = LDAPSourceSerializer
|
||||||
lookup_field = "slug"
|
lookup_field = "slug"
|
||||||
filterset_fields = [
|
filterset_fields = [
|
||||||
|
"pbm_uuid",
|
||||||
"name",
|
"name",
|
||||||
"slug",
|
"slug",
|
||||||
"enabled",
|
"enabled",
|
||||||
|
|||||||
@ -152,6 +152,7 @@ class OAuthSourceFilter(FilterSet):
|
|||||||
class Meta:
|
class Meta:
|
||||||
model = OAuthSource
|
model = OAuthSource
|
||||||
fields = [
|
fields = [
|
||||||
|
"pbm_uuid",
|
||||||
"name",
|
"name",
|
||||||
"slug",
|
"slug",
|
||||||
"enabled",
|
"enabled",
|
||||||
|
|||||||
@ -81,7 +81,12 @@ class OAuth2Client(BaseOAuthClient):
|
|||||||
if self.source.source_type.urls_customizable and self.source.access_token_url:
|
if self.source.source_type.urls_customizable and self.source.access_token_url:
|
||||||
access_token_url = self.source.access_token_url
|
access_token_url = self.source.access_token_url
|
||||||
response = self.do_request(
|
response = self.do_request(
|
||||||
"post", access_token_url, data=args, headers=self._default_headers, **request_kwargs
|
"post",
|
||||||
|
access_token_url,
|
||||||
|
auth=(self.get_client_id(), self.get_client_secret()),
|
||||||
|
data=args,
|
||||||
|
headers=self._default_headers,
|
||||||
|
**request_kwargs,
|
||||||
)
|
)
|
||||||
response.raise_for_status()
|
response.raise_for_status()
|
||||||
except RequestException as exc:
|
except RequestException as exc:
|
||||||
|
|||||||
@ -52,6 +52,7 @@ class PlexSourceViewSet(UsedByMixin, ModelViewSet):
|
|||||||
serializer_class = PlexSourceSerializer
|
serializer_class = PlexSourceSerializer
|
||||||
lookup_field = "slug"
|
lookup_field = "slug"
|
||||||
filterset_fields = [
|
filterset_fields = [
|
||||||
|
"pbm_uuid",
|
||||||
"name",
|
"name",
|
||||||
"slug",
|
"slug",
|
||||||
"enabled",
|
"enabled",
|
||||||
|
|||||||
@ -44,6 +44,7 @@ class SAMLSourceViewSet(UsedByMixin, ModelViewSet):
|
|||||||
serializer_class = SAMLSourceSerializer
|
serializer_class = SAMLSourceSerializer
|
||||||
lookup_field = "slug"
|
lookup_field = "slug"
|
||||||
filterset_fields = [
|
filterset_fields = [
|
||||||
|
"pbm_uuid",
|
||||||
"name",
|
"name",
|
||||||
"slug",
|
"slug",
|
||||||
"enabled",
|
"enabled",
|
||||||
|
|||||||
@ -53,6 +53,6 @@ class SCIMSourceViewSet(UsedByMixin, ModelViewSet):
|
|||||||
queryset = SCIMSource.objects.all()
|
queryset = SCIMSource.objects.all()
|
||||||
serializer_class = SCIMSourceSerializer
|
serializer_class = SCIMSourceSerializer
|
||||||
lookup_field = "slug"
|
lookup_field = "slug"
|
||||||
filterset_fields = ["name", "slug"]
|
filterset_fields = ["pbm_uuid", "name", "slug"]
|
||||||
search_fields = ["name", "slug", "token__identifier", "token__user__username"]
|
search_fields = ["name", "slug", "token__identifier", "token__user__username"]
|
||||||
ordering = ["name"]
|
ordering = ["name"]
|
||||||
|
|||||||
@ -114,7 +114,7 @@ class SCIMView(APIView):
|
|||||||
|
|
||||||
|
|
||||||
class SCIMObjectView(SCIMView):
|
class SCIMObjectView(SCIMView):
|
||||||
"""Base SCIM View for object management"""
|
"""Base SCIM View for object management"""
|
||||||
|
|
||||||
mapper: SourceMapper
|
mapper: SourceMapper
|
||||||
manager: PropertyMappingManager
|
manager: PropertyMappingManager
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
@ -5,6 +5,7 @@ from email.policy import Policy
|
|||||||
from types import MethodType
|
from types import MethodType
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
|
from django.contrib.messages import INFO, add_message
|
||||||
from django.db.models.query import QuerySet
|
from django.db.models.query import QuerySet
|
||||||
from django.http import HttpRequest, HttpResponse
|
from django.http import HttpRequest, HttpResponse
|
||||||
from django.http.request import QueryDict
|
from django.http.request import QueryDict
|
||||||
@ -147,6 +148,9 @@ class PromptChallengeResponse(ChallengeResponse):
|
|||||||
result = engine.result
|
result = engine.result
|
||||||
if not result.passing:
|
if not result.passing:
|
||||||
raise ValidationError(list(result.messages))
|
raise ValidationError(list(result.messages))
|
||||||
|
else:
|
||||||
|
for msg in result.messages:
|
||||||
|
add_message(self.request, INFO, msg)
|
||||||
return attrs
|
return attrs
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -20,7 +20,7 @@ from authentik.flows.planner import (
|
|||||||
FlowPlanner,
|
FlowPlanner,
|
||||||
)
|
)
|
||||||
from authentik.flows.stage import ChallengeStageView
|
from authentik.flows.stage import ChallengeStageView
|
||||||
from authentik.flows.views.executor import SESSION_KEY_PLAN, InvalidStageError
|
from authentik.flows.views.executor import SESSION_KEY_GET, SESSION_KEY_PLAN, InvalidStageError
|
||||||
from authentik.lib.utils.urls import reverse_with_qs
|
from authentik.lib.utils.urls import reverse_with_qs
|
||||||
from authentik.stages.redirect.models import RedirectMode, RedirectStage
|
from authentik.stages.redirect.models import RedirectMode, RedirectStage
|
||||||
|
|
||||||
@ -72,7 +72,9 @@ class RedirectStageView(ChallengeStageView):
|
|||||||
self.request.session[SESSION_KEY_PLAN] = plan
|
self.request.session[SESSION_KEY_PLAN] = plan
|
||||||
kwargs = self.executor.kwargs
|
kwargs = self.executor.kwargs
|
||||||
kwargs.update({"flow_slug": flow.slug})
|
kwargs.update({"flow_slug": flow.slug})
|
||||||
return reverse_with_qs("authentik_core:if-flow", self.request.GET, kwargs=kwargs)
|
return reverse_with_qs(
|
||||||
|
"authentik_core:if-flow", self.request.session[SESSION_KEY_GET], kwargs=kwargs
|
||||||
|
)
|
||||||
|
|
||||||
def get_challenge(self, *args, **kwargs) -> Challenge:
|
def get_challenge(self, *args, **kwargs) -> Challenge:
|
||||||
"""Get the redirect target. Prioritize `redirect_stage_target` if present."""
|
"""Get the redirect target. Prioritize `redirect_stage_target` if present."""
|
||||||
@ -83,7 +85,7 @@ class RedirectStageView(ChallengeStageView):
|
|||||||
target_url_override = self.executor.plan.context.get(PLAN_CONTEXT_REDIRECT_STAGE_TARGET, "")
|
target_url_override = self.executor.plan.context.get(PLAN_CONTEXT_REDIRECT_STAGE_TARGET, "")
|
||||||
if target_url_override:
|
if target_url_override:
|
||||||
target = self.parse_target(target_url_override)
|
target = self.parse_target(target_url_override)
|
||||||
# `target` is falsy if the override was to a Flow but that Flow doesn't exist.
|
# `target` is false if the override was to a Flow but that Flow doesn't exist.
|
||||||
if not target:
|
if not target:
|
||||||
if current_stage.mode == RedirectMode.STATIC:
|
if current_stage.mode == RedirectMode.STATIC:
|
||||||
target = current_stage.target_static
|
target = current_stage.target_static
|
||||||
|
|||||||
@ -1,5 +1,7 @@
|
|||||||
"""Test Redirect stage"""
|
"""Test Redirect stage"""
|
||||||
|
|
||||||
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
from django.urls.base import reverse
|
from django.urls.base import reverse
|
||||||
from rest_framework.exceptions import ValidationError
|
from rest_framework.exceptions import ValidationError
|
||||||
|
|
||||||
@ -58,6 +60,23 @@ class TestRedirectStage(FlowTestCase):
|
|||||||
response, reverse("authentik_core:if-flow", kwargs={"flow_slug": self.target_flow.slug})
|
response, reverse("authentik_core:if-flow", kwargs={"flow_slug": self.target_flow.slug})
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def test_flow_query(self):
|
||||||
|
self.stage.mode = RedirectMode.FLOW
|
||||||
|
self.stage.save()
|
||||||
|
|
||||||
|
response = self.client.get(
|
||||||
|
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
|
||||||
|
+ "?"
|
||||||
|
+ urlencode({"query": urlencode({"test": "foo"})})
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertStageRedirects(
|
||||||
|
response,
|
||||||
|
reverse("authentik_core:if-flow", kwargs={"flow_slug": self.target_flow.slug})
|
||||||
|
+ "?"
|
||||||
|
+ urlencode({"test": "foo"}),
|
||||||
|
)
|
||||||
|
|
||||||
def test_override_static(self):
|
def test_override_static(self):
|
||||||
policy = ExpressionPolicy.objects.create(
|
policy = ExpressionPolicy.objects.create(
|
||||||
name=generate_id(),
|
name=generate_id(),
|
||||||
|
|||||||
@ -2,7 +2,7 @@
|
|||||||
"$schema": "http://json-schema.org/draft-07/schema",
|
"$schema": "http://json-schema.org/draft-07/schema",
|
||||||
"$id": "https://goauthentik.io/blueprints/schema.json",
|
"$id": "https://goauthentik.io/blueprints/schema.json",
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"title": "authentik 2024.12.2 Blueprint schema",
|
"title": "authentik 2024.12.3 Blueprint schema",
|
||||||
"required": [
|
"required": [
|
||||||
"version",
|
"version",
|
||||||
"entries"
|
"entries"
|
||||||
@ -3601,6 +3601,46 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"model",
|
||||||
|
"identifiers"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"model": {
|
||||||
|
"const": "authentik_providers_ssf.ssfprovider"
|
||||||
|
},
|
||||||
|
"id": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"state": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"absent",
|
||||||
|
"present",
|
||||||
|
"created",
|
||||||
|
"must_created"
|
||||||
|
],
|
||||||
|
"default": "present"
|
||||||
|
},
|
||||||
|
"conditions": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "boolean"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"permissions": {
|
||||||
|
"$ref": "#/$defs/model_authentik_providers_ssf.ssfprovider_permissions"
|
||||||
|
},
|
||||||
|
"attrs": {
|
||||||
|
"$ref": "#/$defs/model_authentik_providers_ssf.ssfprovider"
|
||||||
|
},
|
||||||
|
"identifiers": {
|
||||||
|
"$ref": "#/$defs/model_authentik_providers_ssf.ssfprovider"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"required": [
|
"required": [
|
||||||
@ -4583,6 +4623,7 @@
|
|||||||
"authentik.enterprise.providers.google_workspace",
|
"authentik.enterprise.providers.google_workspace",
|
||||||
"authentik.enterprise.providers.microsoft_entra",
|
"authentik.enterprise.providers.microsoft_entra",
|
||||||
"authentik.enterprise.providers.rac",
|
"authentik.enterprise.providers.rac",
|
||||||
|
"authentik.enterprise.providers.ssf",
|
||||||
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
|
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
|
||||||
"authentik.enterprise.stages.source",
|
"authentik.enterprise.stages.source",
|
||||||
"authentik.events"
|
"authentik.events"
|
||||||
@ -4686,6 +4727,7 @@
|
|||||||
"authentik_providers_rac.racprovider",
|
"authentik_providers_rac.racprovider",
|
||||||
"authentik_providers_rac.endpoint",
|
"authentik_providers_rac.endpoint",
|
||||||
"authentik_providers_rac.racpropertymapping",
|
"authentik_providers_rac.racpropertymapping",
|
||||||
|
"authentik_providers_ssf.ssfprovider",
|
||||||
"authentik_stages_authenticator_endpoint_gdtc.authenticatorendpointgdtcstage",
|
"authentik_stages_authenticator_endpoint_gdtc.authenticatorendpointgdtcstage",
|
||||||
"authentik_stages_source.sourcestage",
|
"authentik_stages_source.sourcestage",
|
||||||
"authentik_events.event",
|
"authentik_events.event",
|
||||||
@ -6687,6 +6729,18 @@
|
|||||||
"authentik_providers_scim.view_scimprovider",
|
"authentik_providers_scim.view_scimprovider",
|
||||||
"authentik_providers_scim.view_scimprovidergroup",
|
"authentik_providers_scim.view_scimprovidergroup",
|
||||||
"authentik_providers_scim.view_scimprovideruser",
|
"authentik_providers_scim.view_scimprovideruser",
|
||||||
|
"authentik_providers_ssf.add_ssfprovider",
|
||||||
|
"authentik_providers_ssf.add_stream",
|
||||||
|
"authentik_providers_ssf.add_streamevent",
|
||||||
|
"authentik_providers_ssf.change_ssfprovider",
|
||||||
|
"authentik_providers_ssf.change_stream",
|
||||||
|
"authentik_providers_ssf.change_streamevent",
|
||||||
|
"authentik_providers_ssf.delete_ssfprovider",
|
||||||
|
"authentik_providers_ssf.delete_stream",
|
||||||
|
"authentik_providers_ssf.delete_streamevent",
|
||||||
|
"authentik_providers_ssf.view_ssfprovider",
|
||||||
|
"authentik_providers_ssf.view_stream",
|
||||||
|
"authentik_providers_ssf.view_streamevent",
|
||||||
"authentik_rbac.access_admin_interface",
|
"authentik_rbac.access_admin_interface",
|
||||||
"authentik_rbac.add_role",
|
"authentik_rbac.add_role",
|
||||||
"authentik_rbac.assign_role_permissions",
|
"authentik_rbac.assign_role_permissions",
|
||||||
@ -12936,6 +12990,18 @@
|
|||||||
"authentik_providers_scim.view_scimprovider",
|
"authentik_providers_scim.view_scimprovider",
|
||||||
"authentik_providers_scim.view_scimprovidergroup",
|
"authentik_providers_scim.view_scimprovidergroup",
|
||||||
"authentik_providers_scim.view_scimprovideruser",
|
"authentik_providers_scim.view_scimprovideruser",
|
||||||
|
"authentik_providers_ssf.add_ssfprovider",
|
||||||
|
"authentik_providers_ssf.add_stream",
|
||||||
|
"authentik_providers_ssf.add_streamevent",
|
||||||
|
"authentik_providers_ssf.change_ssfprovider",
|
||||||
|
"authentik_providers_ssf.change_stream",
|
||||||
|
"authentik_providers_ssf.change_streamevent",
|
||||||
|
"authentik_providers_ssf.delete_ssfprovider",
|
||||||
|
"authentik_providers_ssf.delete_stream",
|
||||||
|
"authentik_providers_ssf.delete_streamevent",
|
||||||
|
"authentik_providers_ssf.view_ssfprovider",
|
||||||
|
"authentik_providers_ssf.view_stream",
|
||||||
|
"authentik_providers_ssf.view_streamevent",
|
||||||
"authentik_rbac.access_admin_interface",
|
"authentik_rbac.access_admin_interface",
|
||||||
"authentik_rbac.add_role",
|
"authentik_rbac.add_role",
|
||||||
"authentik_rbac.assign_role_permissions",
|
"authentik_rbac.assign_role_permissions",
|
||||||
@ -13988,6 +14054,62 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"model_authentik_providers_ssf.ssfprovider": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"minLength": 1,
|
||||||
|
"title": "Name"
|
||||||
|
},
|
||||||
|
"signing_key": {
|
||||||
|
"type": "string",
|
||||||
|
"format": "uuid",
|
||||||
|
"title": "Signing Key",
|
||||||
|
"description": "Key used to sign the SSF Events."
|
||||||
|
},
|
||||||
|
"oidc_auth_providers": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"title": "Oidc auth providers"
|
||||||
|
},
|
||||||
|
"event_retention": {
|
||||||
|
"type": "string",
|
||||||
|
"minLength": 1,
|
||||||
|
"title": "Event retention"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": []
|
||||||
|
},
|
||||||
|
"model_authentik_providers_ssf.ssfprovider_permissions": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"permission"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"permission": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"add_stream",
|
||||||
|
"add_ssfprovider",
|
||||||
|
"change_ssfprovider",
|
||||||
|
"delete_ssfprovider",
|
||||||
|
"view_ssfprovider"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"user": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"role": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"model_authentik_stages_authenticator_endpoint_gdtc.authenticatorendpointgdtcstage": {
|
"model_authentik_stages_authenticator_endpoint_gdtc.authenticatorendpointgdtcstage": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
|
|||||||
@ -31,7 +31,7 @@ services:
|
|||||||
volumes:
|
volumes:
|
||||||
- redis:/data
|
- redis:/data
|
||||||
server:
|
server:
|
||||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.12.2}
|
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.12.3}
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
command: server
|
command: server
|
||||||
environment:
|
environment:
|
||||||
@ -54,7 +54,7 @@ services:
|
|||||||
redis:
|
redis:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
worker:
|
worker:
|
||||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.12.2}
|
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2024.12.3}
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
command: worker
|
command: worker
|
||||||
environment:
|
environment:
|
||||||
|
|||||||
8
go.mod
8
go.mod
@ -24,15 +24,15 @@ require (
|
|||||||
github.com/pires/go-proxyproto v0.8.0
|
github.com/pires/go-proxyproto v0.8.0
|
||||||
github.com/prometheus/client_golang v1.20.5
|
github.com/prometheus/client_golang v1.20.5
|
||||||
github.com/redis/go-redis/v9 v9.7.0
|
github.com/redis/go-redis/v9 v9.7.0
|
||||||
github.com/sethvargo/go-envconfig v1.1.0
|
github.com/sethvargo/go-envconfig v1.1.1
|
||||||
github.com/sirupsen/logrus v1.9.3
|
github.com/sirupsen/logrus v1.9.3
|
||||||
github.com/spf13/cobra v1.8.1
|
github.com/spf13/cobra v1.8.1
|
||||||
github.com/stretchr/testify v1.10.0
|
github.com/stretchr/testify v1.10.0
|
||||||
github.com/wwt/guac v1.3.2
|
github.com/wwt/guac v1.3.2
|
||||||
goauthentik.io/api/v3 v3.2024122.2
|
goauthentik.io/api/v3 v3.2024123.4
|
||||||
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
|
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
|
||||||
golang.org/x/oauth2 v0.25.0
|
golang.org/x/oauth2 v0.26.0
|
||||||
golang.org/x/sync v0.10.0
|
golang.org/x/sync v0.11.0
|
||||||
gopkg.in/yaml.v2 v2.4.0
|
gopkg.in/yaml.v2 v2.4.0
|
||||||
layeh.com/radius v0.0.0-20210819152912-ad72663a72ab
|
layeh.com/radius v0.0.0-20210819152912-ad72663a72ab
|
||||||
)
|
)
|
||||||
|
|||||||
15
go.sum
15
go.sum
@ -254,8 +254,8 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
|
|||||||
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
|
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
|
||||||
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
|
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
|
||||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
github.com/sethvargo/go-envconfig v1.1.0 h1:cWZiJxeTm7AlCvzGXrEXaSTCNgip5oJepekh/BOQuog=
|
github.com/sethvargo/go-envconfig v1.1.1 h1:JDu8Q9baIzJf47NPkzhIB6aLYL0vQ+pPypoYrejS9QY=
|
||||||
github.com/sethvargo/go-envconfig v1.1.0/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/6nRidxI8YvGiHw=
|
github.com/sethvargo/go-envconfig v1.1.1/go.mod h1:JLd0KFWQYzyENqnEPWWZ49i4vzZo/6nRidxI8YvGiHw=
|
||||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||||
@ -299,8 +299,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y
|
|||||||
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
||||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||||
goauthentik.io/api/v3 v3.2024122.2 h1:QC+ZQ+AxlPwl9OG1X/Z62EVepmTGyfvJUxhUdFjs+4s=
|
goauthentik.io/api/v3 v3.2024123.4 h1:JYLsUjkJ7kT+jHO72DyFTXFwKEGAcOOlLh36SRG9BDw=
|
||||||
goauthentik.io/api/v3 v3.2024122.2/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
|
goauthentik.io/api/v3 v3.2024123.4/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
@ -393,8 +393,8 @@ golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4Iltr
|
|||||||
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||||
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||||
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
@ -408,8 +408,9 @@ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
|||||||
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
|
|
||||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
|
golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
|
||||||
|
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
|||||||
@ -29,4 +29,4 @@ func UserAgent() string {
|
|||||||
return fmt.Sprintf("authentik@%s", FullVersion())
|
return fmt.Sprintf("authentik@%s", FullVersion())
|
||||||
}
|
}
|
||||||
|
|
||||||
const VERSION = "2024.12.2"
|
const VERSION = "2024.12.3"
|
||||||
|
|||||||
@ -15,7 +15,6 @@ import (
|
|||||||
func EnableDebugServer() {
|
func EnableDebugServer() {
|
||||||
l := log.WithField("logger", "authentik.go_debugger")
|
l := log.WithField("logger", "authentik.go_debugger")
|
||||||
if !config.Get().Debug {
|
if !config.Get().Debug {
|
||||||
l.Info("not enabling debug server, set `AUTHENTIK_DEBUG` to `true` to enable it.")
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
h := mux.NewRouter()
|
h := mux.NewRouter()
|
||||||
|
|||||||
@ -43,6 +43,11 @@ LABEL org.opencontainers.image.source=https://github.com/goauthentik/authentik
|
|||||||
LABEL org.opencontainers.image.version=${VERSION}
|
LABEL org.opencontainers.image.version=${VERSION}
|
||||||
LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH}
|
LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH}
|
||||||
|
|
||||||
|
RUN apt-get update && \
|
||||||
|
apt-get upgrade -y && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf /tmp/* /var/lib/apt/lists/*
|
||||||
|
|
||||||
COPY --from=builder /go/ldap /
|
COPY --from=builder /go/ldap /
|
||||||
|
|
||||||
HEALTHCHECK --interval=5s --retries=20 --start-period=3s CMD [ "/ldap", "healthcheck" ]
|
HEALTHCHECK --interval=5s --retries=20 --start-period=3s CMD [ "/ldap", "healthcheck" ]
|
||||||
|
|||||||
12
lifecycle/ak
12
lifecycle/ak
@ -1,4 +1,5 @@
|
|||||||
#!/usr/bin/env -S bash -e
|
#!/usr/bin/env -S bash
|
||||||
|
set -e -o pipefail
|
||||||
MODE_FILE="${TMPDIR}/authentik-mode"
|
MODE_FILE="${TMPDIR}/authentik-mode"
|
||||||
|
|
||||||
function log {
|
function log {
|
||||||
@ -54,6 +55,10 @@ function cleanup {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function prepare_debug {
|
function prepare_debug {
|
||||||
|
# Only attempt to install debug dependencies if we're running in a container
|
||||||
|
if [ ! -d /ak-root ]; then
|
||||||
|
return
|
||||||
|
fi
|
||||||
export DEBIAN_FRONTEND=noninteractive
|
export DEBIAN_FRONTEND=noninteractive
|
||||||
apt-get update
|
apt-get update
|
||||||
apt-get install -y --no-install-recommends krb5-kdc krb5-user krb5-admin-server libkrb5-dev gcc
|
apt-get install -y --no-install-recommends krb5-kdc krb5-user krb5-admin-server libkrb5-dev gcc
|
||||||
@ -62,7 +67,7 @@ function prepare_debug {
|
|||||||
chown authentik:authentik /unittest.xml
|
chown authentik:authentik /unittest.xml
|
||||||
}
|
}
|
||||||
|
|
||||||
if [[ "${AUTHENTIK_REMOTE_DEBUG}" == "true" ]]; then
|
if [[ "$(python -m authentik.lib.config debugger 2> /dev/null)" == "True" ]]; then
|
||||||
prepare_debug
|
prepare_debug
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@ -87,12 +92,11 @@ elif [[ "$1" == "bash" ]]; then
|
|||||||
elif [[ "$1" == "test-all" ]]; then
|
elif [[ "$1" == "test-all" ]]; then
|
||||||
prepare_debug
|
prepare_debug
|
||||||
chmod 777 /root
|
chmod 777 /root
|
||||||
pip install --force-reinstall /wheels/*
|
|
||||||
check_if_root "python -m manage test authentik"
|
check_if_root "python -m manage test authentik"
|
||||||
elif [[ "$1" == "healthcheck" ]]; then
|
elif [[ "$1" == "healthcheck" ]]; then
|
||||||
run_authentik healthcheck $(cat $MODE_FILE)
|
run_authentik healthcheck $(cat $MODE_FILE)
|
||||||
elif [[ "$1" == "dump_config" ]]; then
|
elif [[ "$1" == "dump_config" ]]; then
|
||||||
exec python -m authentik.lib.config
|
exec python -m authentik.lib.config $@
|
||||||
elif [[ "$1" == "debug" ]]; then
|
elif [[ "$1" == "debug" ]]; then
|
||||||
exec sleep infinity
|
exec sleep infinity
|
||||||
else
|
else
|
||||||
|
|||||||
111
lifecycle/aws/.gitignore
vendored
Normal file
111
lifecycle/aws/.gitignore
vendored
Normal file
@ -0,0 +1,111 @@
|
|||||||
|
|
||||||
|
# Created by https://www.gitignore.io/api/node
|
||||||
|
# Edit at https://www.gitignore.io/?templates=node
|
||||||
|
|
||||||
|
### Node ###
|
||||||
|
# Logs
|
||||||
|
logs
|
||||||
|
*.log
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
||||||
|
lerna-debug.log*
|
||||||
|
|
||||||
|
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||||
|
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||||
|
|
||||||
|
# Runtime data
|
||||||
|
pids
|
||||||
|
*.pid
|
||||||
|
*.seed
|
||||||
|
*.pid.lock
|
||||||
|
|
||||||
|
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||||
|
lib-cov
|
||||||
|
|
||||||
|
# Coverage directory used by tools like istanbul
|
||||||
|
coverage
|
||||||
|
*.lcov
|
||||||
|
|
||||||
|
# nyc test coverage
|
||||||
|
.nyc_output
|
||||||
|
|
||||||
|
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||||
|
.grunt
|
||||||
|
|
||||||
|
# Bower dependency directory (https://bower.io/)
|
||||||
|
bower_components
|
||||||
|
|
||||||
|
# node-waf configuration
|
||||||
|
.lock-wscript
|
||||||
|
|
||||||
|
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||||
|
build/Release
|
||||||
|
|
||||||
|
# Dependency directories
|
||||||
|
node_modules/
|
||||||
|
jspm_packages/
|
||||||
|
|
||||||
|
# TypeScript v1 declaration files
|
||||||
|
typings/
|
||||||
|
|
||||||
|
# TypeScript cache
|
||||||
|
*.tsbuildinfo
|
||||||
|
|
||||||
|
# Optional npm cache directory
|
||||||
|
.npm
|
||||||
|
|
||||||
|
# Optional eslint cache
|
||||||
|
.eslintcache
|
||||||
|
|
||||||
|
# Optional REPL history
|
||||||
|
.node_repl_history
|
||||||
|
|
||||||
|
# Output of 'npm pack'
|
||||||
|
*.tgz
|
||||||
|
|
||||||
|
# Yarn Integrity file
|
||||||
|
.yarn-integrity
|
||||||
|
|
||||||
|
# dotenv environment variables file
|
||||||
|
.env
|
||||||
|
.env.test
|
||||||
|
|
||||||
|
# parcel-bundler cache (https://parceljs.org/)
|
||||||
|
.cache
|
||||||
|
|
||||||
|
# next.js build output
|
||||||
|
.next
|
||||||
|
|
||||||
|
# nuxt.js build output
|
||||||
|
.nuxt
|
||||||
|
dist
|
||||||
|
|
||||||
|
# Uncomment the public line if your project uses Gatsby
|
||||||
|
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||||
|
# https://create-react-app.dev/docs/using-the-public-folder/#docsNav
|
||||||
|
# public
|
||||||
|
|
||||||
|
# Storybook build outputs
|
||||||
|
.out
|
||||||
|
.storybook-out
|
||||||
|
|
||||||
|
# vuepress build output
|
||||||
|
.vuepress/dist
|
||||||
|
|
||||||
|
# Serverless directories
|
||||||
|
.serverless/
|
||||||
|
|
||||||
|
# FuseBox cache
|
||||||
|
.fusebox/
|
||||||
|
|
||||||
|
# DynamoDB Local files
|
||||||
|
.dynamodb/
|
||||||
|
|
||||||
|
# Temporary folders
|
||||||
|
tmp/
|
||||||
|
temp/
|
||||||
|
|
||||||
|
# End of https://www.gitignore.io/api/node
|
||||||
|
|
||||||
|
cdk.out
|
||||||
@ -6,6 +6,7 @@ from aws_cdk import (
|
|||||||
App,
|
App,
|
||||||
CfnOutput,
|
CfnOutput,
|
||||||
CfnParameter,
|
CfnParameter,
|
||||||
|
DefaultStackSynthesizer,
|
||||||
Duration,
|
Duration,
|
||||||
RemovalPolicy,
|
RemovalPolicy,
|
||||||
Stack,
|
Stack,
|
||||||
@ -38,7 +39,7 @@ from authentik import __version__
|
|||||||
|
|
||||||
class AuthentikStack(Stack):
|
class AuthentikStack(Stack):
|
||||||
def __init__(self, scope: Construct, id: str, **kwargs):
|
def __init__(self, scope: Construct, id: str, **kwargs):
|
||||||
super().__init__(scope, id, *kwargs)
|
super().__init__(scope, id, **kwargs)
|
||||||
|
|
||||||
### Inputs
|
### Inputs
|
||||||
|
|
||||||
@ -327,6 +328,7 @@ class AuthentikStack(Stack):
|
|||||||
security_groups=[authentik_security_group],
|
security_groups=[authentik_security_group],
|
||||||
vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PRIVATE_WITH_EGRESS),
|
vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PRIVATE_WITH_EGRESS),
|
||||||
enable_execute_command=True,
|
enable_execute_command=True,
|
||||||
|
min_healthy_percent=50,
|
||||||
)
|
)
|
||||||
|
|
||||||
worker_task = ecs.FargateTaskDefinition(
|
worker_task = ecs.FargateTaskDefinition(
|
||||||
@ -376,6 +378,7 @@ class AuthentikStack(Stack):
|
|||||||
security_groups=[authentik_security_group],
|
security_groups=[authentik_security_group],
|
||||||
vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PRIVATE_WITH_EGRESS),
|
vpc_subnets=ec2.SubnetSelection(subnet_type=ec2.SubnetType.PRIVATE_WITH_EGRESS),
|
||||||
enable_execute_command=True,
|
enable_execute_command=True,
|
||||||
|
min_healthy_percent=50,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Load balancer
|
# Load balancer
|
||||||
@ -417,5 +420,9 @@ class AuthentikStack(Stack):
|
|||||||
|
|
||||||
|
|
||||||
app = App()
|
app = App()
|
||||||
AuthentikStack(app, "AuthentikStack")
|
AuthentikStack(
|
||||||
|
app,
|
||||||
|
"AuthentikStack",
|
||||||
|
synthesizer=DefaultStackSynthesizer(generate_bootstrap_version_rule=False),
|
||||||
|
)
|
||||||
app.synth()
|
app.synth()
|
||||||
141
lifecycle/aws/package-lock.json
generated
Normal file
141
lifecycle/aws/package-lock.json
generated
Normal file
@ -0,0 +1,141 @@
|
|||||||
|
{
|
||||||
|
"name": "@goauthentik/lifecycle-aws",
|
||||||
|
"version": "0.0.0",
|
||||||
|
"lockfileVersion": 3,
|
||||||
|
"requires": true,
|
||||||
|
"packages": {
|
||||||
|
"": {
|
||||||
|
"name": "@goauthentik/lifecycle-aws",
|
||||||
|
"version": "0.0.0",
|
||||||
|
"license": "MIT",
|
||||||
|
"devDependencies": {
|
||||||
|
"aws-cdk": "^2.178.2",
|
||||||
|
"cross-env": "^7.0.3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=20"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/aws-cdk": {
|
||||||
|
"version": "2.178.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.178.2.tgz",
|
||||||
|
"integrity": "sha512-ojMCMnBGinvDUD6+BOOlUOB9pjsYXoQdFVbf4bvi3dy3nwn557r0j6qDUcJMeikzPJ6YWzfAdL0fYxBZg4xcOg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"bin": {
|
||||||
|
"cdk": "bin/cdk"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 14.15.0"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"fsevents": "2.3.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/cross-env": {
|
||||||
|
"version": "7.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz",
|
||||||
|
"integrity": "sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"cross-spawn": "^7.0.1"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"cross-env": "src/bin/cross-env.js",
|
||||||
|
"cross-env-shell": "src/bin/cross-env-shell.js"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10.14",
|
||||||
|
"npm": ">=6",
|
||||||
|
"yarn": ">=1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/cross-spawn": {
|
||||||
|
"version": "7.0.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
|
||||||
|
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"path-key": "^3.1.0",
|
||||||
|
"shebang-command": "^2.0.0",
|
||||||
|
"which": "^2.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/fsevents": {
|
||||||
|
"version": "2.3.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
|
||||||
|
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
|
||||||
|
"dev": true,
|
||||||
|
"hasInstallScript": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
],
|
||||||
|
"engines": {
|
||||||
|
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/isexe": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
|
"node_modules/path-key": {
|
||||||
|
"version": "3.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
|
||||||
|
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/shebang-command": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"shebang-regex": "^3.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/shebang-regex": {
|
||||||
|
"version": "3.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
|
||||||
|
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/which": {
|
||||||
|
"version": "2.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
|
||||||
|
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"isexe": "^2.0.0"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"node-which": "bin/node-which"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 8"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user