Compare commits

5 commits · sources/ld...blog-ent

| Author | SHA1 | Date |
|---|---|---|
|  | 8888f80642 |  |
|  | 0b7ca0abc9 |  |
|  | efaa61a8ff |  |
|  | 90b149cf0a |  |
|  | 818a03b3b1 |  |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2023.8.3
+current_version = 2023.8.1
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
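The compare is between release 2023.8.3 (base) and 2023.8.1 (head), so throughout this view the newer state appears as removals. A quick standalone check (not part of the repo) that the declared `parse` pattern matches both versions involved:

```python
# Verify the bumpversion parse pattern against the two versions in this compare.
import re

parse = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)")
for version in ("2023.8.3", "2023.8.1"):
    match = parse.fullmatch(version)
    assert match is not None
    assert match.group("major") == "2023"
```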
@@ -1,11 +1,10 @@
 env
 htmlcov
 *.env.yml
 **/node_modules
 dist/**
 build/**
 build_docs/**
-*Dockerfile
-authentik/enterprise
+Dockerfile
 blueprints/local
 .git
-!gen-ts-api/node_modules
-!gen-ts-api/dist/**
.github/workflows/ci-main.yml (vendored) — 42 changes

@@ -33,7 +33,7 @@ jobs:
           - ruff
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: run job
@@ -41,7 +41,7 @@ jobs:
   test-migrations:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: run migrations
@@ -50,7 +50,7 @@ jobs:
     runs-on: ubuntu-latest
     continue-on-error: true
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 0
       - name: Setup authentik env
@@ -91,7 +91,7 @@ jobs:
           - 12-alpine
           - 15-alpine
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Setup authentik env
         uses: ./.github/actions/setup
         with:
@@ -108,7 +108,7 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 30
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: Create k8s Kind Cluster
@@ -144,7 +144,7 @@ jobs:
           - name: flows
             glob: tests/e2e/test_flows*
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: Setup e2e env (chrome, etc)
@@ -186,31 +186,28 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 120
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
       - name: prepare variables
         uses: ./.github/actions/docker-push-variables
         id: ev
         env:
           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
       - name: Login to Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
         if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: generate ts client
         run: make gen-client-ts
       - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
         with:
           context: .
           secrets: |
             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
@@ -223,8 +220,6 @@ jobs:
             GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
-            VERSION=${{ steps.ev.outputs.version }}
-            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
           cache-from: type=gha
           cache-to: type=gha,mode=max
       - name: Comment on PR
         if: github.event_name == 'pull_request'
         continue-on-error: true
@@ -236,31 +231,28 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 120
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
       - name: prepare variables
         uses: ./.github/actions/docker-push-variables
         id: ev
         env:
           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
       - name: Login to Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
         if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: generate ts client
         run: make gen-client-ts
       - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
         with:
           context: .
           secrets: |
             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
             GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
@@ -274,5 +266,3 @@ jobs:
-            VERSION=${{ steps.ev.outputs.version }}
-            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
           platforms: linux/arm64
           cache-from: type=gha
           cache-to: type=gha,mode=max
.github/workflows/ci-outpost.yml (vendored) — 24 changes

@@ -14,7 +14,7 @@ jobs:
   lint-golint:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
@@ -31,16 +31,14 @@ jobs:
         with:
           version: v1.52.2
           args: --timeout 5000s --verbose
-          skip-cache: true
+          skip-pkg-cache: true
   test-unittest:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: Generate API
         run: make gen-client-go
       - name: Go unittests
@@ -66,20 +64,20 @@ jobs:
           - radius
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
       - name: prepare variables
         uses: ./.github/actions/docker-push-variables
         id: ev
         env:
           DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
       - name: Login to Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
         if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
         with:
           registry: ghcr.io
@@ -88,7 +86,7 @@ jobs:
       - name: Generate API
         run: make gen-client-go
       - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
         with:
           push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
           tags: |
@@ -101,8 +99,6 @@ jobs:
             VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
           platforms: linux/amd64,linux/arm64
           context: .
           cache-from: type=gha
           cache-to: type=gha,mode=max
   build-binary:
     timeout-minutes: 120
     needs:
@@ -118,13 +114,13 @@ jobs:
         goos: [linux]
         goarch: [amd64, arm64]
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v3.8.1
         with:
           node-version: "20"
           cache: "npm"
.github/workflows/ci-web.yml (vendored) — 20 changes

@@ -14,8 +14,8 @@ jobs:
   lint-eslint:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.8.1
         with:
           node-version: "20"
           cache: "npm"
@@ -30,8 +30,8 @@ jobs:
   lint-build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.8.1
         with:
           node-version: "20"
           cache: "npm"
@@ -46,8 +46,8 @@ jobs:
   lint-prettier:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.8.1
         with:
           node-version: "20"
           cache: "npm"
@@ -62,8 +62,8 @@ jobs:
   lint-lit-analyse:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.8.1
         with:
           node-version: "20"
           cache: "npm"
@@ -94,8 +94,8 @@ jobs:
       - ci-web-mark
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.8.1
         with:
           node-version: "20"
           cache: "npm"
.github/workflows/ci-website.yml (vendored) — 12 changes

@@ -14,8 +14,8 @@ jobs:
   lint-prettier:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.8.1
         with:
           node-version: "20"
           cache: "npm"
@@ -28,8 +28,8 @@ jobs:
   test:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.8.1
         with:
           node-version: "20"
           cache: "npm"
@@ -49,8 +49,8 @@ jobs:
       - build
       - build-docs-only
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v3
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.8.1
         with:
           node-version: "20"
           cache: "npm"
.github/workflows/codeql-analysis.yml (vendored) — 2 changes

@@ -23,7 +23,7 @@ jobs:
         language: ["go", "javascript", "python"]
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: Initialize CodeQL
.github/workflows/gha-cache-cleanup.yml (vendored) — 2 changes

@@ -11,7 +11,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3

       - name: Cleanup
         run: |
.github/workflows/ghcr-retention.yml (vendored) — 2 changes

@@ -11,7 +11,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
.github/workflows/image-compress.yml (vendored) — 4 changes

@@ -29,11 +29,11 @@ jobs:
       github.event.pull_request.head.repo.full_name == github.repository)
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           token: ${{ steps.generate_token.outputs.token }}
       - name: Compress images
.github/workflows/publish-source-docs.yml (vendored) — 2 changes

@@ -15,7 +15,7 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 120
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: generate docs
.github/workflows/release-next-branch.yml (vendored) — 2 changes

@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-latest
     environment: internal-production
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           ref: main
      - run: |
.github/workflows/release-publish.yml (vendored) — 35 changes

@@ -8,31 +8,28 @@ jobs:
   build-server:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
       - name: prepare variables
         uses: ./.github/actions/docker-push-variables
         id: ev
       - name: Docker Login Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_PASSWORD }}
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: make empty ts client
         run: mkdir -p ./gen-ts-client
       - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
         with:
           context: .
           push: ${{ github.event_name == 'release' }}
           secrets: |
             GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
@@ -58,30 +55,30 @@ jobs:
           - ldap
           - radius
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
       - name: prepare variables
         uses: ./.github/actions/docker-push-variables
         id: ev
       - name: Docker Login Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_PASSWORD }}
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
         with:
           push: ${{ github.event_name == 'release' }}
           tags: |
@@ -109,11 +106,11 @@ jobs:
         goos: [linux, darwin]
         goarch: [amd64, arm64]
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v3.8.1
         with:
           node-version: "20"
           cache: "npm"
@@ -144,7 +141,7 @@ jobs:
       - build-outpost-binary
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Run test suite in final docker images
         run: |
           echo "PG_PASS=$(openssl rand -base64 32)" >> .env
@@ -160,7 +157,7 @@ jobs:
       - build-outpost-binary
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: prepare variables
         uses: ./.github/actions/docker-push-variables
         id: ev
.github/workflows/release-tag.yml (vendored) — 4 changes

@@ -10,7 +10,7 @@ jobs:
     name: Create Release from Tag
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Pre-release test
         run: |
           echo "PG_PASS=$(openssl rand -base64 32)" >> .env
@@ -23,7 +23,7 @@ jobs:
           docker-compose start postgresql redis
           docker-compose run -u root server test-all
       - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
.github/workflows/repo-stale.yml (vendored) — 2 changes

@@ -14,7 +14,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
.github/workflows/translation-compile.yml (vendored) — 4 changes

@@ -16,11 +16,11 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           token: ${{ steps.generate_token.outputs.token }}
       - name: Setup authentik env
.github/workflows/translation-rename.yml (vendored) — 2 changes

@@ -12,7 +12,7 @@ jobs:
     if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
.github/workflows/web-api-publish.yml (vendored) — 6 changes

@@ -10,14 +10,14 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           token: ${{ steps.generate_token.outputs.token }}
-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v3.8.1
         with:
           node-version: "20"
           registry-url: "https://registry.npmjs.org"
CODEOWNERS — 25 changes

@@ -1,23 +1,2 @@
-# Fallback
-* @goauthentik/backend @goauthentik/frontend
-# Backend
-authentik/ @goauthentik/backend
-blueprints/ @goauthentik/backend
-cmd/ @goauthentik/backend
-internal/ @goauthentik/backend
-lifecycle/ @goauthentik/backend
-schemas/ @goauthentik/backend
-scripts/ @goauthentik/backend
-tests/ @goauthentik/backend
-# Infrastructure
-.github/ @goauthentik/infrastructure
-Dockerfile @goauthentik/infrastructure
-*Dockerfile @goauthentik/infrastructure
-.dockerignore @goauthentik/infrastructure
-docker-compose.yml @goauthentik/infrastructure
-# Web
-web/ @goauthentik/frontend
-# Docs & Website
-website/ @goauthentik/docs
-# Security
-website/docs/security/ @goauthentik/security
+* @goauthentik/core
+website/docs/security/** @goauthentik/security
Dockerfile — 122 changes

@@ -1,65 +1,53 @@
 # Stage 1: Build website
 FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder
 
-ENV NODE_ENV=production
-
-WORKDIR /work/website
-
-RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
-    --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
-    --mount=type=cache,target=/root/.npm \
-    npm ci --include=dev
-
 COPY ./website /work/website/
 COPY ./blueprints /work/blueprints/
 COPY ./SECURITY.md /work/
 
-RUN npm run build-docs-only
+ENV NODE_ENV=production
+WORKDIR /work/website
+RUN npm ci --include=dev && npm run build-docs-only
 
 # Stage 2: Build webui
 FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder
 
-ENV NODE_ENV=production
-
-WORKDIR /work/web
-
-RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
-    --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
-    --mount=type=cache,target=/root/.npm \
-    npm ci --include=dev
-
 COPY ./web /work/web/
 COPY ./website /work/website/
 COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
 
-RUN npm run build
+ENV NODE_ENV=production
+WORKDIR /work/web
+RUN npm ci --include=dev && npm run build
 
-# Stage 3: Build go proxy
-FROM docker.io/golang:1.21.1-bookworm AS go-builder
+# Stage 3: Poetry to requirements.txt export
+FROM docker.io/python:3.11.5-slim-bookworm AS poetry-locker
 
-WORKDIR /go/src/goauthentik.io
+WORKDIR /work
+COPY ./pyproject.toml /work
+COPY ./poetry.lock /work
 
-RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
-    --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \
-    --mount=type=cache,target=/go/pkg/mod \
-    go mod download
+RUN pip install --no-cache-dir poetry && \
+    poetry export -f requirements.txt --output requirements.txt && \
+    poetry export -f requirements.txt --dev --output requirements-dev.txt
 
-COPY ./cmd /go/src/goauthentik.io/cmd
-COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib
-COPY ./web/static.go /go/src/goauthentik.io/web/static.go
-COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
-COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
-COPY ./internal /go/src/goauthentik.io/internal
-COPY ./go.mod /go/src/goauthentik.io/go.mod
-COPY ./go.sum /go/src/goauthentik.io/go.sum
+# Stage 4: Build go proxy
+FROM docker.io/golang:1.21.0-bookworm AS go-builder
 
 ENV CGO_ENABLED=0
+WORKDIR /work
 
-RUN --mount=type=cache,target=/go/pkg/mod \
-    --mount=type=cache,target=/root/.cache/go-build \
-    go build -o /go/authentik ./cmd/server
+COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
+COPY --from=web-builder /work/web/security.txt /work/web/security.txt
 
-# Stage 4: MaxMind GeoIP
+COPY ./cmd /work/cmd
+COPY ./authentik/lib /work/authentik/lib
+COPY ./web/static.go /work/web/static.go
+COPY ./internal /work/internal
+COPY ./go.mod /work/go.mod
+COPY ./go.sum /work/go.sum
+
+RUN go build -o /work/bin/authentik ./cmd/server/
+
+# Stage 5: MaxMind GeoIP
 FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip
 
 ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
@@ -73,29 +61,6 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     mkdir -p /usr/share/GeoIP && \
     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
 
-# Stage 5: Python dependencies
-FROM docker.io/python:3.11.5-bookworm AS python-deps
-
-WORKDIR /ak-root/poetry
-
-ENV VENV_PATH="/ak-root/venv" \
-    POETRY_VIRTUALENVS_CREATE=false \
-    PATH="/ak-root/venv/bin:$PATH"
-
-RUN --mount=type=cache,target=/var/cache/apt \
-    apt-get update && \
-    # Required for installing pip packages
-    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev
-
-RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
-    --mount=type=bind,target=./poetry.lock,src=./poetry.lock \
-    --mount=type=cache,target=/root/.cache/pip \
-    --mount=type=cache,target=/root/.cache/pypoetry \
-    python -m venv /ak-root/venv/ && \
-    pip3 install --upgrade pip && \
-    pip3 install poetry && \
-    poetry install --only=main --no-ansi --no-interaction
-
 # Stage 6: Run
 FROM docker.io/python:3.11.5-slim-bookworm AS final-image
@@ -111,45 +76,46 @@ LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH}
 
 WORKDIR /
 
+# We cannot cache this layer otherwise we'll end up with a bigger image
+COPY --from=poetry-locker /work/requirements.txt /
+COPY --from=poetry-locker /work/requirements-dev.txt /
+COPY --from=geoip /usr/share/GeoIP /geoip
+
 RUN apt-get update && \
+    # Required for installing pip packages
+    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev python3-dev && \
     # Required for runtime
     apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 && \
     # Required for bootstrap & healtcheck
     apt-get install -y --no-install-recommends runit && \
+    pip install --no-cache-dir -r /requirements.txt && \
+    apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev libpq-dev python3-dev && \
+    apt-get autoremove --purge -y && \
     apt-get clean && \
    rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
    adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
    mkdir -p /certs /media /blueprints && \
    mkdir -p /authentik/.ssh && \
-    mkdir -p /ak-root && \
-    chown authentik:authentik /certs /media /authentik/.ssh /ak-root
+    chown authentik:authentik /certs /media /authentik/.ssh
 
 COPY ./authentik/ /authentik
 COPY ./pyproject.toml /
 COPY ./poetry.lock /
 COPY ./schemas /schemas
 COPY ./locale /locale
 COPY ./tests /tests
 COPY ./manage.py /
 COPY ./blueprints /blueprints
 COPY ./lifecycle/ /lifecycle
-COPY --from=go-builder /go/authentik /bin/authentik
-COPY --from=python-deps /ak-root/venv /ak-root/venv
+COPY --from=go-builder /work/bin/authentik /bin/authentik
 COPY --from=web-builder /work/web/dist/ /web/dist/
 COPY --from=web-builder /work/web/authentik/ /web/authentik/
 COPY --from=website-builder /work/website/help/ /website/help/
-COPY --from=geoip /usr/share/GeoIP /geoip
 
 USER 1000
 
-ENV TMPDIR=/dev/shm/ \
-    PYTHONDONTWRITEBYTECODE=1 \
-    PYTHONUNBUFFERED=1 \
-    PATH="/ak-root/venv/bin:$PATH" \
-    VENV_PATH="/ak-root/venv" \
-    POETRY_VIRTUALENVS_CREATE=false
+ENV TMPDIR /dev/shm/
+ENV PYTHONUNBUFFERED 1
+ENV PATH "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/lifecycle"
 
 HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "/lifecycle/ak", "healthcheck" ]
 
-ENTRYPOINT [ "dumb-init", "--", "/lifecycle/ak" ]
+ENTRYPOINT [ "/usr/local/bin/dumb-init", "--", "/lifecycle/ak" ]
Makefile — 94 changes

@@ -1,16 +1,9 @@
 .PHONY: gen dev-reset all clean test web website
 
-.SHELLFLAGS += ${SHELLFLAGS} -e
+.SHELLFLAGS += -x -e
 PWD = $(shell pwd)
 UID = $(shell id -u)
 GID = $(shell id -g)
 NPM_VERSION = $(shell python -m scripts.npm_version)
 PY_SOURCES = authentik tests scripts lifecycle
-DOCKER_IMAGE ?= "authentik:test"
-
-pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
-pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
-pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
 
 CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
     -I .github/codespell-words.txt \
@@ -26,78 +19,57 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
     website/integrations \
     website/src
 
-all: lint-fix lint test gen web  ## Lint, build, and test everything
-
-help: ## Show this help
-    @echo "\nSpecify a command. The choices are:\n"
-    @grep -E '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
-        awk 'BEGIN {FS = ":.*?## "}; {printf "  \033[0;36m%-24s\033[m %s\n", $$1, $$2}' | \
-        sort
-    @echo ""
+all: lint-fix lint test gen web
 
 test-go:
     go test -timeout 0 -v -race -cover ./...
 
-test-docker: ## Run all tests in a docker-compose
+test-docker:
     echo "PG_PASS=$(openssl rand -base64 32)" >> .env
     echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
     docker-compose pull -q
    docker-compose up --no-start
    docker-compose start postgresql redis
-    docker-compose run -u root server test-all
+    docker-compose run -u root server test
    rm -f .env
 
-test: ## Run the server tests and produce a coverage report (locally)
+test:
    coverage run manage.py test --keepdb authentik
    coverage html
    coverage report
 
-lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
+lint-fix:
    isort authentik $(PY_SOURCES)
    black authentik $(PY_SOURCES)
    ruff authentik $(PY_SOURCES)
    codespell -w $(CODESPELL_ARGS)
 
-lint: ## Lint the python and golang sources
+lint:
    pylint $(PY_SOURCES)
    bandit -r $(PY_SOURCES) -x node_modules
    golangci-lint run -v
 
-migrate: ## Run the Authentik Django server's migrations
+migrate:
    python -m lifecycle.migrate
 
-i18n-extract: i18n-extract-core web-i18n-extract  ## Extract strings that require translation into files to send to a translation service
+i18n-extract: i18n-extract-core web-i18n-extract
 
 i18n-extract-core:
    ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
 
-install: web-install website-install  ## Install all requires dependencies for `web`, `website` and `core`
-    poetry install
-
-dev-drop-db:
-    echo dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
-    # Also remove the test-db if it exists
-    dropdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true
-    echo redis-cli -n 0 flushall
-
-dev-create-db:
-    createdb -U ${pg_user} -h ${pg_host} ${pg_name}
-
-dev-reset: dev-drop-db dev-create-db migrate  ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
-
 #########################
 ## API Schema
 #########################
 
-gen-build: ## Extract the schema from the database
+gen-build:
    AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
    AUTHENTIK_DEBUG=true ak spectacular --file schema.yml
 
-gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
+gen-changelog:
    git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
    npx prettier --write changelog.md
 
-gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag
+gen-diff:
    git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
    docker run \
        --rm -v ${PWD}:/local \
@@ -112,7 +84,7 @@ gen-clean:
    rm -rf web/api/src/
    rm -rf api/
 
-gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
+gen-client-ts:
    docker run \
        --rm -v ${PWD}:/local \
        --user ${UID}:${GID} \
@@ -128,7 +100,7 @@ gen-client-ts: ## Build and install the authentik API for Typescript into the a
    cd gen-ts-api && npm i
    \cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api
 
-gen-client-go: ## Build and install the authentik API for Golang
+gen-client-go:
    mkdir -p ./gen-go-api ./gen-go-api/templates
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache
@@ -145,7 +117,7 @@ gen-client-go: ## Build and install the authentik API for Golang
    go mod edit -replace goauthentik.io/api/v3=./gen-go-api
    rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/
 
-gen-dev-config: ## Generate a local development config file
+gen-dev-config:
    python -m scripts.generate_config
 
 gen: gen-build gen-clean gen-client-ts
@@ -154,21 +126,21 @@ gen: gen-build gen-clean gen-client-ts
 ## Web
 #########################
 
-web-build: web-install ## Build the Authentik UI
+web-build: web-install
    cd web && npm run build
 
-web: web-lint-fix web-lint web-check-compile web-i18n-extract ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
+web: web-lint-fix web-lint web-check-compile
 
-web-install: ## Install the necessary libraries to build the Authentik UI
+web-install:
    cd web && npm ci
 
-web-watch: ## Build and watch the Authentik UI for changes, updating automatically
+web-watch:
    rm -rf web/dist/
    mkdir web/dist/
    touch web/dist/.gitkeep
    cd web && npm run watch
 
-web-storybook-watch: ## Build and run the storybook documentation server
+web-storybook-watch:
    cd web && npm run storybook
 
 web-lint-fix:
@@ -188,7 +160,7 @@ web-i18n-extract:
 ## Website
 #########################
 
-website: website-lint-fix website-build ## Automatically fix formatting issues in the Authentik website/docs source code, lint the code, and compile it
+website: website-lint-fix website-build
 
 website-install:
    cd website && npm ci
@@ -199,22 +171,11 @@ website-lint-fix:
 website-build:
    cd website && npm run build
 
-website-watch: ## Build and watch the documentation website, updating automatically
+website-watch:
    cd website && npm run watch
 
-#########################
-## Docker
-#########################
-
-docker: ## Build a docker image of the current source tree
-    DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
-
 #########################
 ## CI
 #########################
 # These targets are use by GitHub actions to allow usage of matrix
 # which makes the YAML File a lot smaller
 
 ci--meta-debug:
    python -V
    node --version
@@ -242,3 +203,14 @@ ci-pyright: ci--meta-debug
 
 ci-pending-migrations: ci--meta-debug
    ak makemigrations --check
+
+install: web-install website-install
+    poetry install
+
+dev-reset:
+    dropdb -U postgres -h localhost authentik
+    # Also remove the test-db if it exists
+    dropdb -U postgres -h localhost test_authentik || true
+    createdb -U postgres -h localhost authentik
+    redis-cli -n 0 flushall
+    make migrate
README.md — 12 changes

@@ -41,3 +41,15 @@ See [SECURITY.md](SECURITY.md)
 ## Adoption and Contributions
 
 Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).
+
+## Sponsors
+
+This project is proudly sponsored by:
+
+<p>
+    <a href="https://www.digitalocean.com/?utm_medium=opensource&utm_source=goauthentik.io">
+        <img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="201px">
+    </a>
+</p>
+
+DigitalOcean provides development and testing resources for authentik.
@@ -2,7 +2,7 @@
 from os import environ
 from typing import Optional
 
-__version__ = "2023.8.3"
+__version__ = "2023.8.1"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -49,7 +49,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
         if content == "":
             return content
         context = self.instance.context if self.instance else {}
-        valid, logs = Importer.from_string(content, context).validate()
+        valid, logs = Importer(content, context).validate()
         if not valid:
             text_logs = "\n".join([x["event"] for x in logs])
             raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))
@@ -18,7 +18,7 @@ class Command(BaseCommand):
         """Apply all blueprints in order, abort when one fails to import"""
         for blueprint_path in options.get("blueprints", []):
             content = BlueprintInstance(path=blueprint_path).retrieve()
-            importer = Importer.from_string(content)
+            importer = Importer(content)
             valid, _ = importer.validate()
             if not valid:
                 self.stderr.write("blueprint invalid")
@@ -9,7 +9,6 @@ from rest_framework.fields import Field, JSONField, UUIDField
 from rest_framework.serializers import Serializer
 from structlog.stdlib import get_logger
 
-from authentik.blueprints.v1.common import BlueprintEntryDesiredState
 from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
 from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
 from authentik.lib.models import SerializerModel
@@ -111,7 +110,7 @@ class Command(BaseCommand):
                 "id": {"type": "string"},
                 "state": {
                     "type": "string",
-                    "enum": [s.value for s in BlueprintEntryDesiredState],
+                    "enum": ["absent", "present", "created"],
                     "default": "present",
                 },
                 "conditions": {"type": "array", "items": {"type": "boolean"}},
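The two hunks above belong together: the newer revision derives the schema's allowed `state` values from `BlueprintEntryDesiredState` (which by then had grown a fourth member, removed again further below), while the older revision hard-codes the three values its enum had. A standalone sketch of the difference, not authentik code:

```python
# Deriving the JSON-schema "enum" from the Python Enum keeps the two in sync;
# hard-coding the list can silently drift when the Enum gains members.
from enum import Enum


class BlueprintEntryDesiredState(Enum):
    ABSENT = "absent"
    PRESENT = "present"
    CREATED = "created"
    MUST_CREATED = "must_created"  # the member this compare removes again


derived = [s.value for s in BlueprintEntryDesiredState]
assert derived == ["absent", "present", "created", "must_created"]
# The hard-coded list in the older command covers one state fewer:
assert derived[:3] == ["absent", "present", "created"]
```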
@@ -20,7 +20,7 @@ def apply_blueprint(*files: str):
         def wrapper(*args, **kwargs):
             for file in files:
                 content = BlueprintInstance(path=file).retrieve()
-                Importer.from_string(content).apply()
+                Importer(content).apply()
             return func(*args, **kwargs)
 
         return wrapper
@@ -25,7 +25,7 @@ def blueprint_tester(file_name: Path) -> Callable:
     def tester(self: TestPackaged):
         base = Path("blueprints/")
         rel_path = Path(file_name).relative_to(base)
-        importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve())
+        importer = Importer(BlueprintInstance(path=str(rel_path)).retrieve())
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
@@ -21,14 +21,14 @@ class TestBlueprintsV1(TransactionTestCase):
 
     def test_blueprint_invalid_format(self):
         """Test blueprint with invalid format"""
-        importer = Importer.from_string('{"version": 3}')
+        importer = Importer('{"version": 3}')
         self.assertFalse(importer.validate()[0])
-        importer = Importer.from_string(
+        importer = Importer(
             '{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
             '"model": "authentik_core.User"}]}'
         )
         self.assertFalse(importer.validate()[0])
-        importer = Importer.from_string(
+        importer = Importer(
             '{"version": 1, "entries": [{"attrs": {"name": "test"}, '
             '"identifiers": {}, '
             '"model": "authentik_core.Group"}]}'
@@ -54,7 +54,7 @@ class TestBlueprintsV1(TransactionTestCase):
             },
         )
 
-        importer = Importer.from_string(
+        importer = Importer(
             '{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
             '{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
             '["other_value"]}}, "model": "authentik_core.Group"}]}'
@@ -103,7 +103,7 @@ class TestBlueprintsV1(TransactionTestCase):
         self.assertEqual(len(export.entries), 3)
         export_yaml = exporter.export_to_string()
 
-        importer = Importer.from_string(export_yaml)
+        importer = Importer(export_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
 
@@ -113,14 +113,14 @@ class TestBlueprintsV1(TransactionTestCase):
         """Test export and import it twice"""
         count_initial = Prompt.objects.filter(field_key="username").count()
 
-        importer = Importer.from_string(load_fixture("fixtures/static_prompt_export.yaml"))
+        importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
 
         count_before = Prompt.objects.filter(field_key="username").count()
         self.assertEqual(count_initial + 1, count_before)
 
-        importer = Importer.from_string(load_fixture("fixtures/static_prompt_export.yaml"))
+        importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
         self.assertTrue(importer.apply())
 
         self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
@@ -130,7 +130,7 @@ class TestBlueprintsV1(TransactionTestCase):
         ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
         Group.objects.filter(name="test").delete()
         environ["foo"] = generate_id()
-        importer = Importer.from_string(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
+        importer = Importer(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
@@ -248,7 +248,7 @@ class TestBlueprintsV1(TransactionTestCase):
         exporter = FlowExporter(flow)
         export_yaml = exporter.export_to_string()
 
-        importer = Importer.from_string(export_yaml)
+        importer = Importer(export_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         self.assertTrue(UserLoginStage.objects.filter(name=stage_name).exists())
@@ -297,7 +297,7 @@ class TestBlueprintsV1(TransactionTestCase):
         exporter = FlowExporter(flow)
         export_yaml = exporter.export_to_string()
 
-        importer = Importer.from_string(export_yaml)
+        importer = Importer(export_yaml)
 
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
@@ -18,7 +18,7 @@ class TestBlueprintsV1ConditionalFields(TransactionTestCase):
         self.uid = generate_id()
         import_yaml = load_fixture("fixtures/conditional_fields.yaml", uid=self.uid, user=user.pk)
 
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
@@ -18,7 +18,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
             "fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
         )
 
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure objects exist
@@ -35,7 +35,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
             "fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
         )
 
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure objects do not exist
@@ -15,7 +15,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         flow_slug = generate_id()
         import_yaml = load_fixture("fixtures/state_present.yaml", id=flow_slug)
 
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure object exists
@@ -30,7 +30,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         self.assertEqual(flow.title, "bar")
 
         # Ensure importer updates it
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         flow: Flow = Flow.objects.filter(slug=flow_slug).first()
@@ -41,7 +41,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         flow_slug = generate_id()
         import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
 
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure object exists
@@ -56,7 +56,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         self.assertEqual(flow.title, "bar")
 
         # Ensure importer doesn't update it
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         flow: Flow = Flow.objects.filter(slug=flow_slug).first()
@@ -67,7 +67,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         flow_slug = generate_id()
         import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)
 
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure object exists
@@ -75,7 +75,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         self.assertEqual(flow.slug, flow_slug)
 
         import_yaml = load_fixture("fixtures/state_absent.yaml", id=flow_slug)
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         flow: Flow = Flow.objects.filter(slug=flow_slug).first()
@@ -12,7 +12,6 @@ from uuid import UUID
 from deepmerge import always_merger
 from django.apps import apps
 from django.db.models import Model, Q
-from rest_framework.exceptions import ValidationError
 from rest_framework.fields import Field
 from rest_framework.serializers import Serializer
 from yaml import SafeDumper, SafeLoader, ScalarNode, SequenceNode
@@ -53,7 +52,6 @@ class BlueprintEntryDesiredState(Enum):
     ABSENT = "absent"
     PRESENT = "present"
     CREATED = "created"
-    MUST_CREATED = "must_created"
 
 
 @dataclass
@@ -208,8 +206,8 @@ class KeyOf(YAMLTag):
             ):
                 return _entry._state.instance.pbm_uuid
             return _entry._state.instance.pk
-        raise EntryInvalidError.from_entry(
-            f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance", entry
+        raise EntryInvalidError(
+            f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance"
         )
@@ -280,7 +278,7 @@ class Format(YAMLTag):
         try:
             return self.format_string % tuple(args)
         except TypeError as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError(exc)
 
 
 class Find(YAMLTag):
@@ -357,15 +355,13 @@ class Condition(YAMLTag):
             args.append(arg)
 
         if not args:
-            raise EntryInvalidError.from_entry(
-                "At least one value is required after mode selection.", entry
-            )
+            raise EntryInvalidError("At least one value is required after mode selection.")
 
         try:
             comparator = self._COMPARATORS[self.mode.upper()]
             return comparator(tuple(bool(x) for x in args))
         except (TypeError, KeyError) as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError(exc)
 
 
 class If(YAMLTag):
@@ -397,7 +393,7 @@ class If(YAMLTag):
                 blueprint,
             )
         except TypeError as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError(exc)
 
 
 class Enumerate(YAMLTag, YAMLTagContext):
@@ -429,10 +425,9 @@ class Enumerate(YAMLTag, YAMLTagContext):
 
     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
         if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
-            raise EntryInvalidError.from_entry(
+            raise EntryInvalidError(
                 f"{self.__class__.__name__} tag's iterable references this tag's context. "
-                "This is a noop. Check you are setting depth bigger than 0.",
-                entry,
+                "This is a noop. Check you are setting depth bigger than 0."
             )
 
         if isinstance(self.iterable, YAMLTag):
@@ -441,10 +436,9 @@ class Enumerate(YAMLTag, YAMLTagContext):
             iterable = self.iterable
 
         if not isinstance(iterable, Iterable):
-            raise EntryInvalidError.from_entry(
+            raise EntryInvalidError(
                 f"{self.__class__.__name__}'s iterable must be an iterable "
-                "such as a sequence or a mapping",
-                entry,
+                "such as a sequence or a mapping"
             )
 
         if isinstance(iterable, Mapping):
@@ -455,7 +449,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
         try:
             output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
         except KeyError as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError(exc)
 
         result = output_class()
 
@@ -467,8 +461,8 @@ class Enumerate(YAMLTag, YAMLTagContext):
                 resolved_body = entry.tag_resolver(self.item_body, blueprint)
                 result = add_fn(result, resolved_body)
                 if not isinstance(result, output_class):
-                    raise EntryInvalidError.from_entry(
-                        f"Invalid {self.__class__.__name__} item found: {resolved_body}", entry
+                    raise EntryInvalidError(
+                        f"Invalid {self.__class__.__name__} item found: {resolved_body}"
                     )
         finally:
             self.__current_context = tuple()
@@ -495,13 +489,12 @@ class EnumeratedItem(YAMLTag):
             )
         except ValueError as exc:
             if self.depth == 0:
-                raise EntryInvalidError.from_entry(
+                raise EntryInvalidError(
                     f"{self.__class__.__name__} tags are only usable "
-                    f"inside an {Enumerate.__name__} tag",
-                    entry,
+                    f"inside an {Enumerate.__name__} tag"
                 )
 
-            raise EntryInvalidError.from_entry(f"{self.__class__.__name__} tag: {exc}", entry)
+            raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")
 
         return context_tag.get_context(entry, blueprint)
 
@@ -515,7 +508,7 @@ class Index(EnumeratedItem):
         try:
             return context[0]
         except IndexError:  # pragma: no cover
-            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
+            raise EntryInvalidError(f"Empty/invalid context: {context}")
 
 
 class Value(EnumeratedItem):
@@ -527,7 +520,7 @@ class Value(EnumeratedItem):
         try:
             return context[1]
         except IndexError:  # pragma: no cover
-            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
+            raise EntryInvalidError(f"Empty/invalid context: {context}")
 
 
 class BlueprintDumper(SafeDumper):
@@ -581,26 +574,8 @@ class BlueprintLoader(SafeLoader):
 class EntryInvalidError(SentryIgnoredException):
     """Error raised when an entry is invalid"""
 
-    entry_model: Optional[str]
-    entry_id: Optional[str]
-    validation_error: Optional[ValidationError]
+    serializer_errors: Optional[dict]
 
-    def __init__(self, *args: object, validation_error: Optional[ValidationError] = None) -> None:
+    def __init__(self, *args: object, serializer_errors: Optional[dict] = None) -> None:
         super().__init__(*args)
-        self.entry_model = None
-        self.entry_id = None
-        self.validation_error = validation_error
-
-    @staticmethod
-    def from_entry(
-        msg_or_exc: str | Exception, entry: BlueprintEntry, *args, **kwargs
-    ) -> "EntryInvalidError":
-        """Create EntryInvalidError with the context of an entry"""
-        error = EntryInvalidError(msg_or_exc, *args, **kwargs)
-        if isinstance(msg_or_exc, ValidationError):
-            error.validation_error = msg_or_exc
-        # Make sure the model and id are strings, depending where the error happens
-        # they might still be YAMLTag instances
-        error.entry_model = str(entry.model)
-        error.entry_id = str(entry.id)
-        return error
+        self.serializer_errors = serializer_errors
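The recurring `EntryInvalidError.from_entry(...)` to `EntryInvalidError(...)` replacements throughout this file all revert the same refactor: the newer error type records which blueprint entry failed. A condensed sketch of that newer pattern (simplified from the hunks above; the real class also stores the underlying `ValidationError`):

```python
# Sketch only: attach the failing entry's model and id to the error so the
# caller can report *which* blueprint entry was invalid.
from typing import Optional, Union


class EntryInvalidError(Exception):
    """Error raised when a blueprint entry is invalid"""

    def __init__(self, *args: object) -> None:
        super().__init__(*args)
        self.entry_model: Optional[str] = None
        self.entry_id: Optional[str] = None

    @staticmethod
    def from_entry(msg_or_exc: Union[str, Exception], entry) -> "EntryInvalidError":
        """Create the error with the entry's context attached"""
        error = EntryInvalidError(msg_or_exc)
        # model/id may still be YAMLTag instances at this point, so coerce to str
        error.entry_model = str(entry.model)
        error.entry_id = str(entry.id)
        return error
```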
@ -8,9 +8,9 @@ from dacite.core import from_dict
|
||||
from dacite.exceptions import DaciteError
|
||||
from deepmerge import always_merger
|
||||
from django.core.exceptions import FieldError
|
||||
from django.db import transaction
|
||||
from django.db.models import Model
|
||||
from django.db.models.query_utils import Q
|
||||
from django.db.transaction import atomic
|
||||
from django.db.utils import IntegrityError
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.serializers import BaseSerializer, Serializer
|
||||
@ -38,7 +38,6 @@ from authentik.core.models import (
|
||||
from authentik.events.utils import cleanse_dict
|
||||
from authentik.flows.models import FlowToken, Stage
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.outposts.models import OutpostServiceConnection
|
||||
from authentik.policies.models import Policy, PolicyBindingModel
|
||||
|
||||
@ -73,53 +72,41 @@ def is_model_allowed(model: type[Model]) -> bool:
|
||||
return model not in excluded_models and issubclass(model, (SerializerModel, BaseMetaModel))
|
||||
|
||||
|
||||
class DoRollback(SentryIgnoredException):
|
||||
"""Exception to trigger a rollback"""
|
||||
|
||||
|
||||
@contextmanager
|
||||
def transaction_rollback():
|
||||
"""Enters an atomic transaction and always triggers a rollback at the end of the block."""
|
||||
try:
|
||||
with atomic():
|
||||
yield
|
||||
raise DoRollback()
|
||||
except DoRollback:
|
||||
pass
|
||||
atomic = transaction.atomic()
|
||||
# pylint: disable=unnecessary-dunder-call
|
||||
atomic.__enter__()
|
||||
yield
|
||||
atomic.__exit__(IntegrityError, None, None)
|
||||
|
||||
|
||||
class Importer:
    """Import Blueprint from raw dict or YAML/JSON"""
    """Import Blueprint from YAML"""

    logger: BoundLogger
    _import: Blueprint

    def __init__(self, blueprint: Blueprint, context: Optional[dict] = None):
    def __init__(self, yaml_input: str, context: Optional[dict] = None):
        self.__pk_map: dict[Any, Model] = {}
        self._import = blueprint
        self.logger = get_logger()
        ctx = {}
        always_merger.merge(ctx, self._import.context)
        if context:
            always_merger.merge(ctx, context)
        self._import.context = ctx

    @staticmethod
    def from_string(yaml_input: str, context: dict | None = None) -> "Importer":
        """Parse YAML string and create blueprint importer from it"""
        import_dict = load(yaml_input, BlueprintLoader)
        try:
            _import = from_dict(
            self.__import = from_dict(
                Blueprint, import_dict, config=Config(cast=[BlueprintEntryDesiredState])
            )
        except DaciteError as exc:
            raise EntryInvalidError from exc
        return Importer(_import, context)
        ctx = {}
        always_merger.merge(ctx, self.__import.context)
        if context:
            always_merger.merge(ctx, context)
        self.__import.context = ctx

    @property
    def blueprint(self) -> Blueprint:
        """Get imported blueprint"""
        return self._import
        return self.__import

    def __update_pks_for_attrs(self, attrs: dict[str, Any]) -> dict[str, Any]:
        """Replace any value if it is a known primary key of an other object"""
@ -165,19 +152,19 @@ class Importer:
    # pylint: disable-msg=too-many-locals
    def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]:
        """Validate a single entry"""
        if not entry.check_all_conditions_match(self._import):
        if not entry.check_all_conditions_match(self.__import):
            self.logger.debug("One or more conditions of this entry are not fulfilled, skipping")
            return None

        model_app_label, model_name = entry.get_model(self._import).split(".")
        model_app_label, model_name = entry.get_model(self.__import).split(".")
        model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
        # Don't use isinstance since we don't want to check for inheritance
        if not is_model_allowed(model):
            raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry)
            raise EntryInvalidError(f"Model {model} not allowed")
        if issubclass(model, BaseMetaModel):
            serializer_class: type[Serializer] = model.serializer()
            serializer = serializer_class(
                data=entry.get_attrs(self._import),
                data=entry.get_attrs(self.__import),
                context={
                    SERIALIZER_CONTEXT_BLUEPRINT: entry,
                },
@ -185,10 +172,8 @@ class Importer:
            try:
                serializer.is_valid(raise_exception=True)
            except ValidationError as exc:
                raise EntryInvalidError.from_entry(
                    f"Serializer errors {serializer.errors}",
                    validation_error=exc,
                    entry=entry,
                raise EntryInvalidError(
                    f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
                ) from exc
            return serializer

@ -197,7 +182,7 @@ class Importer:
        # the full serializer for later usage
        # Because a model might have multiple unique columns, we chain all identifiers together
        # to create an OR query.
        updated_identifiers = self.__update_pks_for_attrs(entry.get_identifiers(self._import))
        updated_identifiers = self.__update_pks_for_attrs(entry.get_identifiers(self.__import))
        for key, value in list(updated_identifiers.items()):
            if isinstance(value, dict) and "pk" in value:
                del updated_identifiers[key]
@ -205,12 +190,12 @@ class Importer:

        query = self.__query_from_identifier(updated_identifiers)
        if not query:
            raise EntryInvalidError.from_entry("No or invalid identifiers", entry)
            raise EntryInvalidError("No or invalid identifiers")

        try:
            existing_models = model.objects.filter(query)
        except FieldError as exc:
            raise EntryInvalidError.from_entry(f"Invalid identifier field: {exc}", entry) from exc
            raise EntryInvalidError(f"Invalid identifier field: {exc}") from exc

        serializer_kwargs = {}
        model_instance = existing_models.first()
@ -223,14 +208,6 @@ class Importer:
            )
            serializer_kwargs["instance"] = model_instance
            serializer_kwargs["partial"] = True
        elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
            raise EntryInvalidError.from_entry(
                (
                    f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} "
                    "and object exists already",
                ),
                entry,
            )
        else:
            self.logger.debug(
                "initialised new serializer instance",
@ -243,9 +220,9 @@ class Importer:
            model_instance.pk = updated_identifiers["pk"]
            serializer_kwargs["instance"] = model_instance
        try:
            full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import))
            full_data = self.__update_pks_for_attrs(entry.get_attrs(self.__import))
        except ValueError as exc:
            raise EntryInvalidError.from_entry(exc, entry) from exc
            raise EntryInvalidError(exc) from exc
        always_merger.merge(full_data, updated_identifiers)
        serializer_kwargs["data"] = full_data

@ -258,17 +235,15 @@ class Importer:
        try:
            serializer.is_valid(raise_exception=True)
        except ValidationError as exc:
            raise EntryInvalidError.from_entry(
                f"Serializer errors {serializer.errors}",
                validation_error=exc,
                entry=entry,
            raise EntryInvalidError(
                f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
            ) from exc
        return serializer

    def apply(self) -> bool:
        """Apply (create/update) models yaml, in database transaction"""
        try:
            with atomic():
            with transaction.atomic():
                if not self._apply_models():
                    self.logger.debug("Reverting changes due to error")
                    raise IntegrityError
@ -277,11 +252,11 @@ class Importer:
            self.logger.debug("Committing changes")
            return True

    def _apply_models(self, raise_errors=False) -> bool:
    def _apply_models(self) -> bool:
        """Apply (create/update) models yaml"""
        self.__pk_map = {}
        for entry in self._import.entries:
            model_app_label, model_name = entry.get_model(self._import).split(".")
        for entry in self.__import.entries:
            model_app_label, model_name = entry.get_model(self.__import).split(".")
            try:
                model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
            except LookupError:
@ -294,21 +269,15 @@ class Importer:
                serializer = self._validate_single(entry)
            except EntryInvalidError as exc:
                # For deleting objects we don't need the serializer to be valid
                if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
                if entry.get_state(self.__import) == BlueprintEntryDesiredState.ABSENT:
                    continue
                self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
                if raise_errors:
                    raise exc
                return False
            if not serializer:
                continue

            state = entry.get_state(self._import)
            if state in [
                BlueprintEntryDesiredState.PRESENT,
                BlueprintEntryDesiredState.CREATED,
                BlueprintEntryDesiredState.MUST_CREATED,
            ]:
            state = entry.get_state(self.__import)
            if state in [BlueprintEntryDesiredState.PRESENT, BlueprintEntryDesiredState.CREATED]:
                instance = serializer.instance
                if (
                    instance
@ -336,23 +305,23 @@ class Importer:
                self.logger.debug("entry to delete with no instance, skipping")
        return True

    def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]:
    def validate(self) -> tuple[bool, list[EventDict]]:
        """Validate loaded blueprint export, ensure all models are allowed
        and serializers have no errors"""
        self.logger.debug("Starting blueprint import validation")
        orig_import = deepcopy(self._import)
        if self._import.version != 1:
        orig_import = deepcopy(self.__import)
        if self.__import.version != 1:
            self.logger.warning("Invalid blueprint version")
            return False, [{"event": "Invalid blueprint version"}]
        with (
            transaction_rollback(),
            capture_logs() as logs,
        ):
            successful = self._apply_models(raise_errors=raise_validation_errors)
            successful = self._apply_models()
            if not successful:
                self.logger.debug("Blueprint validation failed")
        for log in logs:
            getattr(self.logger, log.get("log_level"))(**log)
        self.logger.debug("Finished blueprint import validation")
        self._import = orig_import
        self.__import = orig_import
        return successful, logs

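Note: taken together, the hunks above change the entry point (`Importer.from_string` on the newer side versus passing raw YAML to the constructor on the older side) and make `validate()` a rollback-wrapped dry run of `_apply_models()`. A hedged usage sketch (assumes a configured authentik environment; the YAML content is illustrative):

```python
from authentik.blueprints.v1.importer import Importer

yaml_input = "version: 1\nentries: []\n"

importer = Importer.from_string(yaml_input, context={"tenant": "default"})  # newer API
# importer = Importer(yaml_input, {"tenant": "default"})                    # older API

valid, logs = importer.validate()  # dry run: applied in a transaction, then rolled back
if valid:
    importer.apply()  # real transaction, committed when _apply_models() succeeds
```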
@ -190,7 +190,7 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
        self.set_uid(slugify(instance.name))
        blueprint_content = instance.retrieve()
        file_hash = sha512(blueprint_content.encode()).hexdigest()
        importer = Importer.from_string(blueprint_content, instance.context)
        importer = Importer(blueprint_content, instance.context)
        if importer.blueprint.metadata:
            instance.metadata = asdict(importer.blueprint.metadata)
        valid, logs = importer.validate()

@ -1,4 +1,6 @@
"""Authenticator Devices API Views"""
from django_otp import device_classes, devices_for_user
from django_otp.models import Device
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from rest_framework.fields import BooleanField, CharField, IntegerField, SerializerMethodField
@ -8,8 +10,6 @@ from rest_framework.response import Response
from rest_framework.viewsets import ViewSet

from authentik.core.api.utils import MetaNameSerializer
from authentik.stages.authenticator import device_classes, devices_for_user
from authentik.stages.authenticator.models import Device


class DeviceSerializer(MetaNameSerializer):

@ -1,139 +0,0 @@
"""transactional application and provider creation"""
from django.apps import apps
from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_field
from rest_framework.exceptions import ValidationError
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, ListField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from yaml import ScalarNode

from authentik.blueprints.v1.common import (
    Blueprint,
    BlueprintEntry,
    BlueprintEntryDesiredState,
    EntryInvalidError,
    KeyOf,
)
from authentik.blueprints.v1.importer import Importer
from authentik.core.api.applications import ApplicationSerializer
from authentik.core.api.utils import PassiveSerializer
from authentik.core.models import Provider
from authentik.lib.utils.reflection import all_subclasses


def get_provider_serializer_mapping():
    """Get a mapping of all providers' model names and their serializers"""
    mapping = {}
    for model in all_subclasses(Provider):
        if model._meta.abstract:
            continue
        mapping[f"{model._meta.app_label}.{model._meta.model_name}"] = model().serializer
    return mapping


@extend_schema_field(
    PolymorphicProxySerializer(
        component_name="model",
        serializers=get_provider_serializer_mapping,
        resource_type_field_name="provider_model",
    )
)
class TransactionProviderField(DictField):
    """Dictionary field which can hold provider creation data"""


class TransactionApplicationSerializer(PassiveSerializer):
    """Serializer for creating a provider and an application in one transaction"""

    app = ApplicationSerializer()
    provider_model = ChoiceField(choices=list(get_provider_serializer_mapping().keys()))
    provider = TransactionProviderField()

    _provider_model: type[Provider] = None

    def validate_provider_model(self, fq_model_name: str) -> str:
        """Validate that the model exists and is a provider"""
        if "." not in fq_model_name:
            raise ValidationError("Invalid provider model")
        try:
            app, _, model_name = fq_model_name.partition(".")
            model = apps.get_model(app, model_name)
            if not issubclass(model, Provider):
                raise ValidationError("Invalid provider model")
            self._provider_model = model
        except LookupError:
            raise ValidationError("Invalid provider model")
        return fq_model_name

    def validate(self, attrs: dict) -> dict:
        blueprint = Blueprint()
        blueprint.entries.append(
            BlueprintEntry(
                model=attrs["provider_model"],
                state=BlueprintEntryDesiredState.MUST_CREATED,
                identifiers={
                    "name": attrs["provider"]["name"],
                },
                # Must match the name of the field on `self`
                id="provider",
                attrs=attrs["provider"],
            )
        )
        app_data = attrs["app"]
        app_data["provider"] = KeyOf(None, ScalarNode(tag="", value="provider"))
        blueprint.entries.append(
            BlueprintEntry(
                model="authentik_core.application",
                state=BlueprintEntryDesiredState.MUST_CREATED,
                identifiers={
                    "slug": attrs["app"]["slug"],
                },
                attrs=app_data,
                # Must match the name of the field on `self`
                id="app",
            )
        )
        importer = Importer(blueprint, {})
        try:
            valid, _ = importer.validate(raise_validation_errors=True)
            if not valid:
                raise ValidationError("Invalid blueprint")
        except EntryInvalidError as exc:
            raise ValidationError(
                {
                    exc.entry_id: exc.validation_error.detail,
                }
            )
        return blueprint


class TransactionApplicationResponseSerializer(PassiveSerializer):
    """Transactional creation response"""

    applied = BooleanField()
    logs = ListField(child=CharField())


class TransactionalApplicationView(APIView):
    """Create provider and application and attach them in a single transaction"""

    permission_classes = [IsAdminUser]

    @extend_schema(
        request=TransactionApplicationSerializer(),
        responses={
            200: TransactionApplicationResponseSerializer(),
        },
    )
    def put(self, request: Request) -> Response:
        """Convert data into a blueprint, validate it and apply it"""
        data = TransactionApplicationSerializer(data=request.data)
        data.is_valid(raise_exception=True)

        importer = Importer(data.validated_data, {})
        applied = importer.apply()
        response = {"applied": False, "logs": []}
        response["applied"] = applied
        return Response(response, status=200)
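Note: a hedged example of how the view deleted above is exercised, mirroring the payload used in the tests further down (values are illustrative; the URL assumes authentik's `/api/v3` API root):

```python
from rest_framework.test import APIClient

client = APIClient()
# client.force_login(admin_user)  # the endpoint requires an admin user
response = client.put(
    "/api/v3/core/transactional/applications/",
    data={
        "app": {"name": "my-app", "slug": "my-app"},
        "provider_model": "authentik_providers_oauth2.oauth2provider",
        "provider": {"name": "my-app", "authorization_flow": "<flow-pk>"},
    },
    format="json",
)
# On success the response body is {"applied": true, "logs": []}
```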
@ -616,10 +616,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        if not request.user.has_perm("impersonate"):
            LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
            return Response(status=401)

        user_to_be = self.get_object()
        if user_to_be.pk == self.request.user.pk:
            LOGGER.debug("User attempted to impersonate themselves", user=request.user)
            return Response(status=401)

        request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
        request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be

@ -1,9 +0,0 @@
"""custom runserver command"""
from daphne.management.commands.runserver import Command as RunServer


class Command(RunServer):
    """custom runserver command, which doesn't show the misleading django startup message"""

    def on_bind(self, server_port):
        pass
@ -16,9 +16,6 @@ LOGGER = get_logger()
class Command(BaseCommand):
    """Run worker"""

    def add_arguments(self, parser):
        parser.add_argument("-b", "--beat", action="store_true")

    def handle(self, **options):
        close_old_connections()
        if CONFIG.get_bool("remote_debug"):
@ -29,9 +26,10 @@ class Command(BaseCommand):
            no_color=False,
            quiet=True,
            optimization="fair",
            autoscale=(CONFIG.get_int("worker.concurrency"), 1),
            max_tasks_per_child=1,
            autoscale=(3, 1),
            task_events=True,
            beat=options.get("beat", True),
            beat=True,
            schedule_filename=f"{tempdir}/celerybeat-schedule",
            queues=["authentik", "authentik_scheduled", "authentik_events"],
        )

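Note: the newer side reads the worker process count from configuration instead of hardcoding it. A small sketch of the lookup (assumes authentik's `CONFIG` loader; the value 2 matches the default config shown later in this diff):

```python
from authentik.lib.config import CONFIG

max_workers = CONFIG.get_int("worker.concurrency")  # 2 in the default config
autoscale = (max_workers, 1)  # celery scales between 1 and max_workers processes
```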
@ -1,41 +0,0 @@
# Generated by Django 4.2.5 on 2023-09-27 10:44

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("authentik_core", "0031_alter_user_type"),
    ]

    operations = [
        migrations.CreateModel(
            name="GroupSourceConnection",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("created", models.DateTimeField(auto_now_add=True)),
                ("last_updated", models.DateTimeField(auto_now=True)),
                (
                    "group",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="authentik_core.group"
                    ),
                ),
                (
                    "source",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="authentik_core.source"
                    ),
                ),
            ],
            options={
                "unique_together": {("group", "source")},
            },
        ),
    ]
@ -575,23 +575,6 @@ class UserSourceConnection(SerializerModel, CreatedUpdatedModel):
        unique_together = (("user", "source"),)


class GroupSourceConnection(SerializerModel, CreatedUpdatedModel):
    """Connection between Group and Source."""

    group = models.ForeignKey(Group, on_delete=models.CASCADE)
    source = models.ForeignKey(Source, on_delete=models.CASCADE)

    objects = InheritanceManager()

    @property
    def serializer(self) -> type[Serializer]:
        """Get serializer for this model"""
        raise NotImplementedError

    class Meta:
        unique_together = (("group", "source"),)


class ExpiringModel(models.Model):
    """Base Model which can expire, and is automatically cleaned up."""


@ -48,7 +48,7 @@ class Action(Enum):
class MessageStage(StageView):
    """Show a pre-configured message after the flow is done"""

    def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        """Show a pre-configured message after the flow is done"""
        message = getattr(self.executor.current_stage, "message", "")
        level = getattr(self.executor.current_stage, "level", messages.SUCCESS)
@ -59,6 +59,10 @@ class MessageStage(StageView):
        )
        return self.executor.stage_ok()

    def post(self, request: HttpRequest) -> HttpResponse:
        """Wrapper for post requests"""
        return self.get(request)


class SourceFlowManager:
    """Help sources decide what they should do after authorization. Based on source settings and

@ -13,7 +13,7 @@ class PostUserEnrollmentStage(StageView):
    """Dynamically injected stage which saves the Connection after
    the user has been enrolled."""

    def dispatch(self, request: HttpRequest) -> HttpResponse:
    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        """Stage used after the user has been enrolled"""
        connection: UserSourceConnection = self.executor.plan.context[
            PLAN_CONTEXT_SOURCES_CONNECTION
@ -27,3 +27,7 @@ class PostUserEnrollmentStage(StageView):
            source=connection.source,
        ).from_http(self.request)
        return self.executor.stage_ok()

    def post(self, request: HttpRequest) -> HttpResponse:
        """Wrapper for post requests"""
        return self.get(request)

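Note: both stages above move from overriding `dispatch()` to implementing `get()` plus a thin `post()` wrapper, so the flow executor can call the HTTP-verb methods directly. A generic sketch of the wrapper pattern (plain Django; class name is illustrative):

```python
from django.http import HttpRequest, HttpResponse
from django.views import View


class ExampleStage(View):
    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        # all of the stage's real logic lives in get()
        return HttpResponse("ok")

    def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        """Wrapper for post requests"""
        return self.get(request, *args, **kwargs)
```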
@ -6,7 +6,6 @@ from rest_framework.test import APITestCase

from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.config import CONFIG


class TestImpersonation(APITestCase):
@ -47,42 +46,12 @@ class TestImpersonation(APITestCase):
        """test impersonation without permissions"""
        self.client.force_login(self.other_user)

        response = self.client.post(
            reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
        )
        self.assertEqual(response.status_code, 403)
        self.client.get(reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk}))

        response = self.client.get(reverse("authentik_api:user-me"))
        response_body = loads(response.content.decode())
        self.assertEqual(response_body["user"]["username"], self.other_user.username)

    @CONFIG.patch("impersonation", False)
    def test_impersonate_disabled(self):
        """test impersonation that is disabled"""
        self.client.force_login(self.user)

        response = self.client.post(
            reverse("authentik_api:user-impersonate", kwargs={"pk": self.other_user.pk})
        )
        self.assertEqual(response.status_code, 401)

        response = self.client.get(reverse("authentik_api:user-me"))
        response_body = loads(response.content.decode())
        self.assertEqual(response_body["user"]["username"], self.user.username)

    def test_impersonate_self(self):
        """test impersonation that user can't impersonate themselves"""
        self.client.force_login(self.user)

        response = self.client.post(
            reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
        )
        self.assertEqual(response.status_code, 401)

        response = self.client.get(reverse("authentik_api:user-me"))
        response_body = loads(response.content.decode())
        self.assertEqual(response_body["user"]["username"], self.user.username)

    def test_un_impersonate_empty(self):
        """test un-impersonation without impersonating first"""
        self.client.force_login(self.other_user)

@ -1,64 +0,0 @@
"""Test Transactional API"""
from django.urls import reverse
from rest_framework.test import APITestCase

from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.models import OAuth2Provider


class TestTransactionalApplicationsAPI(APITestCase):
    """Test Transactional API"""

    def setUp(self) -> None:
        self.user = create_test_admin_user()

    def test_create_transactional(self):
        """Test transactional Application + provider creation"""
        self.client.force_login(self.user)
        uid = generate_id()
        authorization_flow = create_test_flow()
        response = self.client.put(
            reverse("authentik_api:core-transactional-application"),
            data={
                "app": {
                    "name": uid,
                    "slug": uid,
                },
                "provider_model": "authentik_providers_oauth2.oauth2provider",
                "provider": {
                    "name": uid,
                    "authorization_flow": str(authorization_flow.pk),
                },
            },
        )
        self.assertJSONEqual(response.content.decode(), {"applied": True, "logs": []})
        provider = OAuth2Provider.objects.filter(name=uid).first()
        self.assertIsNotNone(provider)
        app = Application.objects.filter(slug=uid).first()
        self.assertIsNotNone(app)
        self.assertEqual(app.provider.pk, provider.pk)

    def test_create_transactional_invalid(self):
        """Test transactional Application + provider creation"""
        self.client.force_login(self.user)
        uid = generate_id()
        response = self.client.put(
            reverse("authentik_api:core-transactional-application"),
            data={
                "app": {
                    "name": uid,
                    "slug": uid,
                },
                "provider_model": "authentik_providers_oauth2.oauth2provider",
                "provider": {
                    "name": uid,
                    "authorization_flow": "",
                },
            },
        )
        self.assertJSONEqual(
            response.content.decode(),
            {"provider": {"authorization_flow": ["This field may not be null."]}},
        )
@ -25,10 +25,10 @@ def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
    """Generate a test-admin user"""
    uid = generate_id(20) if not name else name
    group = Group.objects.create(name=uid, is_superuser=True)
    kwargs.setdefault("email", f"{uid}@goauthentik.io")
    kwargs.setdefault("username", uid)
    user: User = User.objects.create(
        username=uid,
        name=uid,
        email=f"{uid}@goauthentik.io",
        **kwargs,
    )
    user.set_password(uid)

@ -15,7 +15,6 @@ from authentik.core.api.propertymappings import PropertyMappingViewSet
from authentik.core.api.providers import ProviderViewSet
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
from authentik.core.api.tokens import TokenViewSet
from authentik.core.api.transactional_applications import TransactionalApplicationView
from authentik.core.api.users import UserViewSet
from authentik.core.views import apps
from authentik.core.views.debug import AccessDeniedView
@ -71,11 +70,6 @@ urlpatterns = [
api_urlpatterns = [
    ("core/authenticated_sessions", AuthenticatedSessionViewSet),
    ("core/applications", ApplicationViewSet),
    path(
        "core/transactional/applications/",
        TransactionalApplicationView.as_view(),
        name="core-transactional-application",
    ),
    ("core/groups", GroupViewSet),
    ("core/users", UserViewSet),
    ("core/tokens", TokenViewSet),

@ -189,8 +189,6 @@ class CertificateKeyPairFilter(FilterSet):

    def filter_has_key(self, queryset, name, value):  # pragma: no cover
        """Only return certificate-key pairs with keys"""
        if not value:
            return queryset
        return queryset.exclude(key_data__exact="")

    class Meta:

@ -128,26 +128,8 @@ class TestCrypto(APITestCase):
        response = self.client.get(
            reverse(
                "authentik_api:certificatekeypair-list",
            ),
            data={"name": cert.name},
        )
        self.assertEqual(200, response.status_code)
        body = loads(response.content.decode())
        api_cert = [x for x in body["results"] if x["name"] == cert.name][0]
        self.assertEqual(api_cert["fingerprint_sha1"], cert.fingerprint_sha1)
        self.assertEqual(api_cert["fingerprint_sha256"], cert.fingerprint_sha256)

    def test_list_has_key_false(self):
        """Test API List with has_key set to false"""
        cert = create_test_cert()
        cert.key_data = ""
        cert.save()
        self.client.force_login(create_test_admin_user())
        response = self.client.get(
            reverse(
                "authentik_api:certificatekeypair-list",
            ),
            data={"name": cert.name, "has_key": False},
        )
            + f"?name={cert.name}"
        )
        self.assertEqual(200, response.status_code)
        body = loads(response.content.decode())
@ -162,8 +144,8 @@ class TestCrypto(APITestCase):
        response = self.client.get(
            reverse(
                "authentik_api:certificatekeypair-list",
            ),
            data={"name": cert.name, "include_details": False},
        )
            + f"?name={cert.name}&include_details=false"
        )
        self.assertEqual(200, response.status_code)
        body = loads(response.content.decode())
@ -186,8 +168,8 @@ class TestCrypto(APITestCase):
            reverse(
                "authentik_api:certificatekeypair-view-certificate",
                kwargs={"pk": keypair.pk},
            ),
            data={"download": True},
        )
            + "?download",
        )
        self.assertEqual(200, response.status_code)
        self.assertIn("Content-Disposition", response)
@ -207,8 +189,8 @@ class TestCrypto(APITestCase):
            reverse(
                "authentik_api:certificatekeypair-view-private-key",
                kwargs={"pk": keypair.pk},
            ),
            data={"download": True},
        )
            + "?download",
        )
        self.assertEqual(200, response.status_code)
        self.assertIn("Content-Disposition", response)
@ -218,7 +200,7 @@ class TestCrypto(APITestCase):
        self.client.force_login(create_test_admin_user())
        keypair = create_test_cert()
        provider = OAuth2Provider.objects.create(
            name=generate_id(),
            name="test",
            client_id="test",
            client_secret=generate_key(),
            authorization_flow=create_test_flow(),

@ -1,30 +1,44 @@
"""Enterprise license policies"""
from typing import Optional

from rest_framework.serializers import BaseSerializer

from authentik.core.models import User, UserTypes
from authentik.enterprise.models import LicenseKey
from authentik.policies.models import Policy
from authentik.policies.types import PolicyRequest, PolicyResult
from authentik.policies.views import PolicyAccessView


class EnterprisePolicy(Policy):
    """Check that a user is correctly licensed for the request"""

    @property
    def component(self) -> str:
        return ""

    @property
    def serializer(self) -> type[BaseSerializer]:
        raise NotImplementedError

    def passes(self, request: PolicyRequest) -> PolicyResult:
        if not LicenseKey.get_total().is_valid():
            return PolicyResult(False)
        if request.user.type != UserTypes.INTERNAL:
            return PolicyResult(False)
        return PolicyResult(True)


class EnterprisePolicyAccessView(PolicyAccessView):
    """PolicyAccessView which also checks enterprise licensing"""

    def check_license(self):
        """Check license"""
        if not LicenseKey.get_total().is_valid():
            return False
        if self.request.user.type != UserTypes.INTERNAL:
            return False
        return True

    def user_has_access(self, user: Optional[User] = None) -> PolicyResult:
        user = user or self.request.user
        request = PolicyRequest(user)
        request.http_request = self.request
        result = super().user_has_access(user)
        enterprise_result = self.check_license()
        if not enterprise_result:
        enterprise_result = EnterprisePolicy().passes(request)
        if not enterprise_result.passing:
            return enterprise_result
        return result

@ -9,6 +9,7 @@ from django.core.exceptions import SuspiciousOperation
from django.db.models import Model
from django.db.models.signals import m2m_changed, post_save, pre_delete
from django.http import HttpRequest, HttpResponse
from django_otp.plugins.otp_static.models import StaticToken
from guardian.models import UserObjectPermission

from authentik.core.models import (
@ -29,7 +30,6 @@ from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
from authentik.providers.scim.models import SCIMGroup, SCIMUser
from authentik.stages.authenticator_static.models import StaticToken

IGNORED_MODELS = (
    Event,

@ -181,7 +181,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
        if not file:
            return Response(data=import_response.initial_data, status=400)

        importer = Importer.from_string(file.read().decode())
        importer = Importer(file.read().decode())
        valid, logs = importer.validate()
        import_response.initial_data["logs"] = [sanitize_dict(log) for log in logs]
        import_response.initial_data["success"] = valid

@ -26,8 +26,3 @@ class EmptyFlowException(SentryIgnoredException):

class FlowSkipStageException(SentryIgnoredException):
    """Exception to skip a stage"""


class StageInvalidException(SentryIgnoredException):
    """Exception can be thrown in a `Challenge` or `ChallengeResponse` serializer's
    validation to trigger a `executor.stage_invalid()` response"""

@ -23,7 +23,6 @@ from authentik.flows.challenge import (
    RedirectChallenge,
    WithUserInfoChallenge,
)
from authentik.flows.exceptions import StageInvalidException
from authentik.flows.models import InvalidResponseAction
from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, PLAN_CONTEXT_PENDING_USER
from authentik.lib.avatars import DEFAULT_AVATAR
@ -101,14 +100,8 @@ class ChallengeStageView(StageView):

    def post(self, request: Request, *args, **kwargs) -> HttpResponse:
        """Handle challenge response"""
        valid = False
        try:
            challenge: ChallengeResponse = self.get_response_instance(data=request.data)
            valid = challenge.is_valid()
        except StageInvalidException as exc:
            self.logger.debug("Got StageInvalidException", exc=exc)
            return self.executor.stage_invalid()
        if not valid:
        challenge: ChallengeResponse = self.get_response_instance(data=request.data)
        if not challenge.is_valid():
            if self.executor.current_binding.invalid_response_action in [
                InvalidResponseAction.RESTART,
                InvalidResponseAction.RESTART_WITH_CONTEXT,

@ -21,9 +21,8 @@ def view_tester_factory(view_class: type[StageView]) -> Callable:

    def tester(self: TestViews):
        model_class = view_class(self.exec)
        if not hasattr(model_class, "dispatch"):
            self.assertIsNotNone(model_class.post)
            self.assertIsNotNone(model_class.get)
        self.assertIsNotNone(model_class.post)
        self.assertIsNotNone(model_class.get)

    return tester


@ -42,7 +42,6 @@ from authentik.flows.models import (
|
||||
FlowDesignation,
|
||||
FlowStageBinding,
|
||||
FlowToken,
|
||||
InvalidResponseAction,
|
||||
Stage,
|
||||
)
|
||||
from authentik.flows.planner import (
|
||||
@ -74,23 +73,40 @@ QS_QUERY = "query"
|
||||
|
||||
|
||||
def challenge_types():
|
||||
"""This function returns a mapping which contains all subclasses of challenges
|
||||
"""This is a workaround for PolymorphicProxySerializer not accepting a callable for
|
||||
`serializers`. This function returns a class which is an iterator, which returns the
|
||||
subclasses of Challenge, and Challenge itself."""
|
||||
mapping = {}
|
||||
for cls in all_subclasses(Challenge):
|
||||
if cls == WithUserInfoChallenge:
|
||||
continue
|
||||
mapping[cls().fields["component"].default] = cls
|
||||
return mapping
|
||||
|
||||
class Inner(dict):
|
||||
"""dummy class with custom callback on .items()"""
|
||||
|
||||
def items(self):
|
||||
mapping = {}
|
||||
classes = all_subclasses(Challenge)
|
||||
classes.remove(WithUserInfoChallenge)
|
||||
for cls in classes:
|
||||
mapping[cls().fields["component"].default] = cls
|
||||
return mapping.items()
|
||||
|
||||
return Inner()
|
||||
|
||||
|
||||
def challenge_response_types():
|
||||
"""This function returns a mapping which contains all subclasses of challenges
|
||||
"""This is a workaround for PolymorphicProxySerializer not accepting a callable for
|
||||
`serializers`. This function returns a class which is an iterator, which returns the
|
||||
subclasses of Challenge, and Challenge itself."""
|
||||
mapping = {}
|
||||
for cls in all_subclasses(ChallengeResponse):
|
||||
mapping[cls(stage=None).fields["component"].default] = cls
|
||||
return mapping
|
||||
|
||||
class Inner(dict):
|
||||
"""dummy class with custom callback on .items()"""
|
||||
|
||||
def items(self):
|
||||
mapping = {}
|
||||
classes = all_subclasses(ChallengeResponse)
|
||||
for cls in classes:
|
||||
mapping[cls(stage=None).fields["component"].default] = cls
|
||||
return mapping.items()
|
||||
|
||||
return Inner()
|
||||
|
||||
|
||||
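Note: the older variant wraps the mapping in a dict subclass whose `items()` rebuilds the mapping on every call, deferring subclass discovery until drf-spectacular actually iterates the serializers; the newer variant returns a plain mapping and passes the function itself as the callable. A minimal runnable sketch of the lazy-dict trick:

```python
def lazy_mapping(build):
    """Return a dict-like object whose items() re-runs build() on each access."""

    class Inner(dict):
        def items(self):
            return build().items()

    return Inner()


registry = lazy_mapping(lambda: {"component-a": object})
print(list(registry.items()))  # mapping is built here, not at definition time
```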
class InvalidStageError(SentryIgnoredException):
@ -106,7 +122,7 @@ class FlowExecutorView(APIView):
    flow: Flow

    plan: Optional[FlowPlan] = None
    current_binding: Optional[FlowStageBinding] = None
    current_binding: FlowStageBinding
    current_stage: Stage
    current_stage_view: View

@ -248,7 +264,7 @@ class FlowExecutorView(APIView):
        responses={
            200: PolymorphicProxySerializer(
                component_name="ChallengeTypes",
                serializers=challenge_types,
                serializers=challenge_types(),
                resource_type_field_name="component",
            ),
        },
@ -279,7 +295,7 @@ class FlowExecutorView(APIView):
            span.set_data("Method", "GET")
            span.set_data("authentik Stage", self.current_stage_view)
            span.set_data("authentik Flow", self.flow.slug)
            stage_response = self.current_stage_view.dispatch(request)
            stage_response = self.current_stage_view.get(request, *args, **kwargs)
            return to_stage_response(request, stage_response)
        except Exception as exc:  # pylint: disable=broad-except
            return self.handle_exception(exc)
@ -288,13 +304,13 @@ class FlowExecutorView(APIView):
        responses={
            200: PolymorphicProxySerializer(
                component_name="ChallengeTypes",
                serializers=challenge_types,
                serializers=challenge_types(),
                resource_type_field_name="component",
            ),
        },
        request=PolymorphicProxySerializer(
            component_name="FlowChallengeResponse",
            serializers=challenge_response_types,
            serializers=challenge_response_types(),
            resource_type_field_name="component",
        ),
        parameters=[
@ -323,7 +339,7 @@ class FlowExecutorView(APIView):
            span.set_data("Method", "POST")
            span.set_data("authentik Stage", self.current_stage_view)
            span.set_data("authentik Flow", self.flow.slug)
            stage_response = self.current_stage_view.dispatch(request)
            stage_response = self.current_stage_view.post(request, *args, **kwargs)
            return to_stage_response(request, stage_response)
        except Exception as exc:  # pylint: disable=broad-except
            return self.handle_exception(exc)
@ -346,15 +362,10 @@ class FlowExecutorView(APIView):
    def restart_flow(self, keep_context=False) -> HttpResponse:
        """Restart the currently active flow, optionally keeping the current context"""
        planner = FlowPlanner(self.flow)
        planner.use_cache = False
        default_context = None
        if keep_context:
            default_context = self.plan.context
        try:
            plan = planner.plan(self.request, default_context)
        except FlowNonApplicableException as exc:
            self._logger.warning("f(exec): Flow restart not applicable to current user", exc=exc)
            return self.handle_invalid_flow(exc)
        plan = planner.plan(self.request, default_context)
        self.request.session[SESSION_KEY_PLAN] = plan
        kwargs = self.kwargs
        kwargs.update({"flow_slug": self.flow.slug})
@ -412,19 +423,6 @@ class FlowExecutorView(APIView):
        Optionally, an exception can be passed, which will be shown if the current user
        is a superuser."""
        self._logger.debug("f(exec): Stage invalid")
        if self.current_binding and self.current_binding.invalid_response_action in [
            InvalidResponseAction.RESTART,
            InvalidResponseAction.RESTART_WITH_CONTEXT,
        ]:
            keep_context = (
                self.current_binding.invalid_response_action
                == InvalidResponseAction.RESTART_WITH_CONTEXT
            )
            self._logger.debug(
                "f(exec): Invalid response, restarting flow",
                keep_context=keep_context,
            )
            return self.restart_flow(keep_context)
        self.cancel()
        challenge_view = AccessDeniedChallengeView(self, error_message)
        challenge_view.request = self.request

@ -1,4 +1,4 @@
# update website/docs/installation/configuration.mdx
# update website/docs/installation/configuration.md
# This is the default configuration file
postgresql:
    host: localhost
@ -7,7 +7,6 @@ postgresql:
    port: 5432
    password: "env://POSTGRES_PASSWORD"
    use_pgbouncer: false
    use_pgpool: false

listen:
    listen_http: 0.0.0.0:9000
@ -111,6 +110,3 @@ web:
    # No default here as it's set dynamically
    # workers: 2
    threads: 4

worker:
    concurrency: 2

@ -7,6 +7,7 @@ from typing import Any, Iterable, Optional

from cachetools import TLRUCache, cached
from django.core.exceptions import FieldError
from django_otp import devices_for_user
from guardian.shortcuts import get_anonymous_user
from rest_framework.serializers import ValidationError
from sentry_sdk.hub import Hub
@ -19,7 +20,6 @@ from authentik.lib.utils.http import get_http_session
from authentik.policies.models import Policy, PolicyBinding
from authentik.policies.process import PolicyProcess
from authentik.policies.types import PolicyRequest, PolicyResult
from authentik.stages.authenticator import devices_for_user

LOGGER = get_logger()


@ -1,112 +1,7 @@
"""logging helpers"""
import logging
from logging import Logger
from os import getpid

import structlog

from authentik.lib.config import CONFIG

LOG_PRE_CHAIN = [
    # Add the log level and a timestamp to the event_dict if the log entry
    # is not from structlog.
    structlog.stdlib.add_log_level,
    structlog.stdlib.add_logger_name,
    structlog.processors.TimeStamper(),
    structlog.processors.StackInfoRenderer(),
]


def get_log_level():
    """Get log level, clamp trace to debug"""
    level = CONFIG.get("log_level").upper()
    # We could add a custom level to stdlib logging and structlog, but it's not easy or clean
    # https://stackoverflow.com/questions/54505487/custom-log-level-not-working-with-structlog
    # Additionally, the entire code uses debug as highest level
    # so that would have to be re-written too
    if level == "TRACE":
        level = "DEBUG"
    return level


def structlog_configure():
    """Configure structlog itself"""
    structlog.configure_once(
        processors=[
            structlog.stdlib.add_log_level,
            structlog.stdlib.add_logger_name,
            structlog.contextvars.merge_contextvars,
            add_process_id,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso", utc=False),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.dict_tracebacks,
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.make_filtering_bound_logger(
            getattr(logging, get_log_level(), logging.WARNING)
        ),
        cache_logger_on_first_use=True,
    )


def get_logger_config():
    """Configure python stdlib's logging"""
    debug = CONFIG.get_bool("debug")
    global_level = get_log_level()
    base_config = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "json": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": structlog.processors.JSONRenderer(sort_keys=True),
                "foreign_pre_chain": LOG_PRE_CHAIN + [structlog.processors.dict_tracebacks],
            },
            "console": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": structlog.dev.ConsoleRenderer(colors=debug),
                "foreign_pre_chain": LOG_PRE_CHAIN,
            },
        },
        "handlers": {
            "console": {
                "level": "DEBUG",
                "class": "logging.StreamHandler",
                "formatter": "console" if debug else "json",
            },
        },
        "loggers": {},
    }

    handler_level_map = {
        "": global_level,
        "authentik": global_level,
        "django": "WARNING",
        "django.request": "ERROR",
        "celery": "WARNING",
        "selenium": "WARNING",
        "docker": "WARNING",
        "urllib3": "WARNING",
        "websockets": "WARNING",
        "daphne": "WARNING",
        "kubernetes": "INFO",
        "asyncio": "WARNING",
        "redis": "WARNING",
        "silk": "INFO",
        "fsevents": "WARNING",
        "uvicorn": "WARNING",
        "gunicorn": "INFO",
    }
    for handler_name, level in handler_level_map.items():
        base_config["loggers"][handler_name] = {
            "handlers": ["console"],
            "level": level,
            "propagate": False,
        }
    return base_config


def add_process_id(logger: Logger, method_name: str, event_dict):
    """Add the current process ID"""

@ -77,7 +77,6 @@ class PolicyBindingSerializer(ModelSerializer):
            "enabled",
            "order",
            "timeout",
            "failure_result",
        ]

    def validate(self, attrs: OrderedDict) -> OrderedDict:

@ -1,26 +0,0 @@
# Generated by Django 4.2.5 on 2023-09-13 18:07

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("authentik_policies", "0010_alter_policy_name"),
    ]

    operations = [
        migrations.AddField(
            model_name="policybinding",
            name="failure_result",
            field=models.BooleanField(
                default=False, help_text="Result if the Policy execution fails."
            ),
        ),
        migrations.AlterField(
            model_name="policybinding",
            name="timeout",
            field=models.PositiveIntegerField(
                default=30, help_text="Timeout after which Policy execution is terminated."
            ),
        ),
    ]
@ -85,12 +85,9 @@ class PolicyBinding(SerializerModel):
        default=False,
        help_text=_("Negates the outcome of the policy. Messages are unaffected."),
    )
    timeout = models.PositiveIntegerField(
    timeout = models.IntegerField(
        default=30, help_text=_("Timeout after which Policy execution is terminated.")
    )
    failure_result = models.BooleanField(
        default=False, help_text=_("Result if the Policy execution fails.")
    )

    order = models.IntegerField()


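Note: the `failure_result` field removed above controls what a binding evaluates to when its policy raises; with it gone, an exception always fails the binding (see the PolicyProcess hunk just below). A sketch grounded in the test removed further down (assumes an authentik test environment; `pbm`, `policy_raises` and `user` are set up as in that test):

```python
from authentik.policies.engine import PolicyEngine
from authentik.policies.models import PolicyBinding

PolicyBinding.objects.create(
    target=pbm, policy=policy_raises, order=0, failure_result=True
)
result = PolicyEngine(pbm, user).build().result
assert result.passing is True                    # the error counts as a pass
assert result.messages == ("division by zero",)  # but the message is still surfaced
```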
@ -98,8 +98,8 @@ class PolicyProcess(PROCESS_CLASS):
            # Create policy exception event, only when we're not debugging
            if not self.request.debug:
                self.create_event(EventAction.POLICY_EXCEPTION, message=error_string)
            LOGGER.debug("P_ENG(proc): error, using failure result", exc=src_exc)
            policy_result = PolicyResult(self.binding.failure_result, str(src_exc))
            LOGGER.debug("P_ENG(proc): error", exc=src_exc)
            policy_result = PolicyResult(False, str(src_exc))
        policy_result.source_binding = self.binding
        should_cache = self.request.should_cache
        if should_cache:

@ -1,7 +1,5 @@
"""Reputation policy API Views"""
from django.utils.translation import gettext_lazy as _
from rest_framework import mixins
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import GenericViewSet, ModelViewSet

@ -13,11 +11,6 @@ from authentik.policies.reputation.models import Reputation, ReputationPolicy
class ReputationPolicySerializer(PolicySerializer):
    """Reputation Policy Serializer"""

    def validate(self, attrs: dict) -> dict:
        if not attrs.get("check_ip", False) and not attrs.get("check_username", False):
            raise ValidationError(_("Either IP or Username must be checked"))
        return super().validate(attrs)

    class Meta:
        model = ReputationPolicy
        fields = PolicySerializer.Meta.fields + [

@ -3,8 +3,6 @@ from django.core.cache import cache
from django.test import RequestFactory, TestCase

from authentik.core.models import User
from authentik.lib.generators import generate_id
from authentik.policies.reputation.api import ReputationPolicySerializer
from authentik.policies.reputation.models import CACHE_KEY_PREFIX, Reputation, ReputationPolicy
from authentik.policies.reputation.tasks import save_reputation
from authentik.policies.types import PolicyRequest
@ -63,8 +61,3 @@ class TestReputationPolicy(TestCase):
            name="reputation-test", threshold=0
        )
        self.assertTrue(policy.passes(request).passing)

    def test_api(self):
        """Test API Validation"""
        no_toggle = ReputationPolicySerializer(data={"name": generate_id(), "threshold": -5})
        self.assertFalse(no_toggle.is_valid())

@ -97,17 +97,6 @@ class TestPolicyEngine(TestCase):
        self.assertEqual(result.passing, False)
        self.assertEqual(result.messages, ("division by zero",))

    def test_engine_policy_error_failure(self):
        """Test policy raising an error flag"""
        pbm = PolicyBindingModel.objects.create()
        PolicyBinding.objects.create(
            target=pbm, policy=self.policy_raises, order=0, failure_result=True
        )
        engine = PolicyEngine(pbm, self.user)
        result = engine.build().result
        self.assertEqual(result.passing, True)
        self.assertEqual(result.messages, ("division by zero",))

    def test_engine_policy_type(self):
        """Test invalid policy type"""
        pbm = PolicyBindingModel.objects.create()

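Note: the serializer-level `validate()` removed above is DRF's standard hook for cross-field validation. A generic, runnable sketch of the same check (field names follow the ReputationPolicy serializer; the class itself is illustrative):

```python
from rest_framework import serializers


class ExamplePolicySerializer(serializers.Serializer):
    check_ip = serializers.BooleanField(default=False)
    check_username = serializers.BooleanField(default=False)

    def validate(self, attrs: dict) -> dict:
        # at least one of the two toggles must be enabled
        if not attrs.get("check_ip") and not attrs.get("check_username"):
            raise serializers.ValidationError("Either IP or Username must be checked")
        return attrs
```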
@ -13,9 +13,10 @@ from rest_framework.decorators import action
from rest_framework.fields import CharField, FileField, SerializerMethodField
from rest_framework.parsers import MultiPartParser
from rest_framework.permissions import AllowAny
from rest_framework.relations import SlugRelatedField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import PrimaryKeyRelatedField, ValidationError
from rest_framework.serializers import ValidationError
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger

@ -167,8 +168,10 @@ class SAMLProviderImportSerializer(PassiveSerializer):
    """Import saml provider from XML Metadata"""

    name = CharField(required=True)
    authorization_flow = PrimaryKeyRelatedField(
    # Using SlugField because https://github.com/OpenAPITools/openapi-generator/issues/3278
    authorization_flow = SlugRelatedField(
        queryset=Flow.objects.filter(designation=FlowDesignation.AUTHORIZATION),
        slug_field="slug",
    )
    file = FileField()


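Note: the hunk above swaps a `PrimaryKeyRelatedField` for a `SlugRelatedField` so generated API clients receive a plain string instead of a UUID (per the linked openapi-generator issue). A condensed sketch of the field in isolation (serializer name is illustrative; the queryset mirrors the one above):

```python
from rest_framework import serializers
from rest_framework.relations import SlugRelatedField

from authentik.flows.models import Flow, FlowDesignation


class ImportSerializer(serializers.Serializer):
    # Accepts and renders the related Flow by its slug instead of its pk
    authorization_flow = SlugRelatedField(
        queryset=Flow.objects.filter(designation=FlowDesignation.AUTHORIZATION),
        slug_field="slug",
    )
```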
@ -171,8 +171,6 @@ class MetadataProcessor:
            entity_descriptor, f"{{{NS_SAML_METADATA}}}IDPSSODescriptor"
        )
        idp_sso_descriptor.attrib["protocolSupportEnumeration"] = NS_SAML_PROTOCOL
        if self.provider.verification_kp:
            idp_sso_descriptor.attrib["WantAuthnRequestsSigned"] = "true"

        signing_descriptor = self.get_signing_key_descriptor()
        if signing_descriptor is not None:

@ -89,7 +89,7 @@ class TestSAMLProviderAPI(APITestCase):
            {
                "file": metadata,
                "name": generate_id(),
                "authorization_flow": create_test_flow(FlowDesignation.AUTHORIZATION).pk,
                "authorization_flow": create_test_flow(FlowDesignation.AUTHORIZATION).slug,
            },
            format="multipart",
        )
@ -106,7 +106,7 @@ class TestSAMLProviderAPI(APITestCase):
            {
                "file": metadata,
                "name": generate_id(),
                "authorization_flow": create_test_flow().pk,
                "authorization_flow": create_test_flow().slug,
            },
            format="multipart",
        )

@ -12,7 +12,7 @@ from authentik.lib.xml import lxml_from_string
from authentik.providers.saml.models import SAMLBindings, SAMLPropertyMapping, SAMLProvider
from authentik.providers.saml.processors.metadata import MetadataProcessor
from authentik.providers.saml.processors.metadata_parser import ServiceProviderMetadataParser
from authentik.sources.saml.processors.constants import NS_MAP, NS_SAML_METADATA
from authentik.sources.saml.processors.constants import NS_MAP


class TestServiceProviderMetadataParser(TestCase):
@ -55,24 +55,6 @@ class TestServiceProviderMetadataParser(TestCase):
        schema = etree.XMLSchema(etree.parse("schemas/saml-schema-metadata-2.0.xsd"))  # nosec
        self.assertTrue(schema.validate(metadata))

    def test_schema_want_authn_requests_signed(self):
        """Test metadata generation with WantAuthnRequestsSigned"""
        cert = create_test_cert()
        provider = SAMLProvider.objects.create(
            name=generate_id(),
            authorization_flow=self.flow,
            verification_kp=cert,
        )
        Application.objects.create(
            name=generate_id(),
            slug=generate_id(),
            provider=provider,
        )
        request = self.factory.get("/")
        metadata = lxml_from_string(MetadataProcessor(provider, request).build_entity_descriptor())
        idp_sso_descriptor = metadata.findall(f"{{{NS_SAML_METADATA}}}IDPSSODescriptor")[0]
        self.assertEqual(idp_sso_descriptor.attrib["WantAuthnRequestsSigned"], "true")

    def test_simple(self):
        """Test simple metadata without Signing"""
        metadata = ServiceProviderMetadataParser().parse(load_fixture("fixtures/simple.xml"))

@ -23,8 +23,6 @@ def post_save_provider(sender: type[Model], instance, created: bool, **_):
|
||||
@receiver(post_save, sender=Group)
|
||||
def post_save_scim(sender: type[Model], instance: User | Group, created: bool, **_):
|
||||
"""Post save handler"""
|
||||
if not SCIMProvider.objects.filter(backchannel_application__isnull=False).exists():
|
||||
return
|
||||
scim_signal_direct.delay(class_to_path(instance.__class__), instance.pk, PatchOp.add.value)
|
||||
|
||||
|
||||
@ -32,8 +30,6 @@ def post_save_scim(sender: type[Model], instance: User | Group, created: bool, *
|
||||
@receiver(pre_delete, sender=Group)
|
||||
def pre_delete_scim(sender: type[Model], instance: User | Group, **_):
|
||||
"""Pre-delete handler"""
|
||||
if not SCIMProvider.objects.filter(backchannel_application__isnull=False).exists():
|
||||
return
|
||||
scim_signal_direct.delay(class_to_path(instance.__class__), instance.pk, PatchOp.remove.value)
|
||||
|
||||
|
||||
@ -44,8 +40,6 @@ def m2m_changed_scim(
|
||||
"""Sync group membership"""
|
||||
if action not in ["post_add", "post_remove"]:
|
||||
return
|
||||
if not SCIMProvider.objects.filter(backchannel_application__isnull=False).exists():
|
||||
return
|
||||
# reverse: instance is a Group, pk_set is a list of user pks
|
||||
# non-reverse: instance is a User, pk_set is a list of groups
|
||||
if reverse:
|
||||
|
@ -172,7 +172,7 @@ class ChannelsLoggingMiddleware:
        LOGGER.info(
            scope["path"],
            scheme="ws",
            remote=headers.get(b"x-forwarded-for", b"").decode(),
            remote=scope.get("client", [""])[0],
            user_agent=headers.get(b"user-agent", b"").decode(),
            **kwargs,
        )
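For context on the two remote= variants above: an ASGI scope carries headers as a list of (bytes, bytes) pairs and the peer address as a (host, port) tuple, so the two lookups behave as in this standalone sketch (all values are placeholders):

scope = {
    "path": "/ws/client/",
    "client": ("10.0.0.5", 49152),
    "headers": [(b"user-agent", b"Mozilla/5.0"), (b"x-forwarded-for", b"203.0.113.7")],
}
headers = dict(scope["headers"])
remote_from_header = headers.get(b"x-forwarded-for", b"").decode()  # "203.0.113.7", trusts the proxy
remote_from_socket = scope.get("client", [""])[0]  # "10.0.0.5", the direct peer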
@ -1,21 +1,25 @@
"""root settings for authentik"""

import importlib
import logging
import os
from hashlib import sha512
from pathlib import Path
from urllib.parse import quote_plus

import structlog
from celery.schedules import crontab
from sentry_sdk import set_tag

from authentik import ENV_GIT_HASH_KEY, __version__
from authentik.lib.config import CONFIG
from authentik.lib.logging import get_logger_config, structlog_configure
from authentik.lib.logging import add_process_id
from authentik.lib.sentry import sentry_init
from authentik.lib.utils.reflection import get_env
from authentik.stages.password import BACKEND_APP_PASSWORD, BACKEND_INBUILT, BACKEND_LDAP

LOGGER = structlog.get_logger()

BASE_DIR = Path(__file__).absolute().parent.parent.parent
STATICFILES_DIRS = [BASE_DIR / Path("web")]
MEDIA_ROOT = BASE_DIR / Path("media")
@ -37,7 +41,6 @@ CSRF_HEADER_NAME = "HTTP_X_AUTHENTIK_CSRF"
LANGUAGE_COOKIE_NAME = "authentik_language"
SESSION_COOKIE_NAME = "authentik_session"
SESSION_COOKIE_DOMAIN = CONFIG.get("cookie_domain", None)
APPEND_SLASH = False

AUTHENTICATION_BACKENDS = [
    "django.contrib.auth.backends.ModelBackend",
@ -82,7 +85,6 @@ INSTALLED_APPS = [
    "authentik.sources.oauth",
    "authentik.sources.plex",
    "authentik.sources.saml",
    "authentik.stages.authenticator",
    "authentik.stages.authenticator_duo",
    "authentik.stages.authenticator_sms",
    "authentik.stages.authenticator_static",
@ -280,9 +282,6 @@ DATABASES = {
    }
}

if CONFIG.get_bool("postgresql.use_pgpool", False):
    DATABASES["default"]["DISABLE_SERVER_SIDE_CURSORS"] = True

if CONFIG.get_bool("postgresql.use_pgbouncer", False):
    # https://docs.djangoproject.com/en/4.0/ref/databases/#transaction-pooling-server-side-cursors
    DATABASES["default"]["DISABLE_SERVER_SIDE_CURSORS"] = True
@ -333,7 +332,7 @@ LOCALE_PATHS = ["./locale"]
CELERY = {
    "task_soft_time_limit": 600,
    "worker_max_tasks_per_child": 50,
    "worker_concurrency": CONFIG.get_int("worker.concurrency"),
    "worker_concurrency": 2,
    "beat_schedule": {
        "clean_expired_models": {
            "task": "authentik.core.tasks.clean_expired_models",
@ -369,9 +368,91 @@ MEDIA_URL = "/media/"

TEST = False
TEST_RUNNER = "authentik.root.test_runner.PytestTestRunner"
# We can't check TEST here as it's set later by the test runner
LOG_LEVEL = CONFIG.get("log_level").upper() if "TF_BUILD" not in os.environ else "DEBUG"
# We could add a custom level to stdlib logging and structlog, but it's not easy or clean
# https://stackoverflow.com/questions/54505487/custom-log-level-not-working-with-structlog
# Additionally, the entire code uses debug as highest level so that would have to be re-written too
if LOG_LEVEL == "TRACE":
    LOG_LEVEL = "DEBUG"

structlog_configure()
LOGGING = get_logger_config()
structlog.configure_once(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.stdlib.add_logger_name,
        structlog.contextvars.merge_contextvars,
        add_process_id,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso", utc=False),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.dict_tracebacks,
        structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
    ],
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.make_filtering_bound_logger(
        getattr(logging, LOG_LEVEL, logging.WARNING)
    ),
    cache_logger_on_first_use=True,
)
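A quick sketch of what the wrapper_class above resolves to: getattr() maps the level name to the stdlib integer constant, falling back to WARNING for unknown names, and structlog builds a bound logger that drops events below that threshold:

import logging

import structlog

LOG_LEVEL = "INFO"  # stand-in for the CONFIG-derived value above
structlog.configure(
    wrapper_class=structlog.make_filtering_bound_logger(
        getattr(logging, LOG_LEVEL, logging.WARNING)  # "INFO" -> 20
    ),
)
structlog.get_logger().info("kept")      # at or above INFO
structlog.get_logger().debug("dropped")  # below INFO, filtered out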
LOG_PRE_CHAIN = [
    # Add the log level and a timestamp to the event_dict if the log entry
    # is not from structlog.
    structlog.stdlib.add_log_level,
    structlog.stdlib.add_logger_name,
    structlog.processors.TimeStamper(),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
]

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "json": {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.processors.JSONRenderer(sort_keys=True),
            "foreign_pre_chain": LOG_PRE_CHAIN,
        },
        "console": {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.dev.ConsoleRenderer(colors=DEBUG),
            "foreign_pre_chain": LOG_PRE_CHAIN,
        },
    },
    "handlers": {
        "console": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "formatter": "console" if DEBUG else "json",
        },
    },
    "loggers": {},
}

_LOGGING_HANDLER_MAP = {
    "": LOG_LEVEL,
    "authentik": LOG_LEVEL,
    "django": "WARNING",
    "django.request": "ERROR",
    "celery": "WARNING",
    "selenium": "WARNING",
    "docker": "WARNING",
    "urllib3": "WARNING",
    "websockets": "WARNING",
    "daphne": "WARNING",
    "kubernetes": "INFO",
    "asyncio": "WARNING",
    "redis": "WARNING",
    "silk": "INFO",
    "fsevents": "WARNING",
}
for handler_name, level in _LOGGING_HANDLER_MAP.items():
    LOGGING["loggers"][handler_name] = {
        "handlers": ["console"],
        "level": level,
        "propagate": False,
    }


_DISALLOWED_ITEMS = [
@ -20,7 +20,7 @@ class PytestTestRunner: # pragma: no cover
        self.failfast = failfast
        self.keepdb = keepdb

        self.args = []
        self.args = ["-vv", "--full-trace"]
        if self.failfast:
            self.args.append("--exitfirst")
        if self.keepdb:

@ -1,7 +1,10 @@
"""Source API Views"""
from typing import Any

from drf_spectacular.utils import extend_schema, inline_serializer
from django_filters.filters import AllValuesMultipleFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema, extend_schema_field, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import DictField, ListField
@ -11,11 +14,12 @@ from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet

from authentik.admin.api.tasks import TaskSerializer
from authentik.core.api.propertymappings import PropertyMappingSerializer
from authentik.core.api.sources import SourceSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.crypto.models import CertificateKeyPair
from authentik.events.monitored_tasks import TaskInfo
from authentik.sources.ldap.models import LDAPSource
from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
from authentik.sources.ldap.tasks import SYNC_CLASSES


@ -150,3 +154,33 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
                obj.pop("raw_dn", None)
                all_objects[class_name].append(obj)
        return Response(data=all_objects)


class LDAPPropertyMappingSerializer(PropertyMappingSerializer):
    """LDAP PropertyMapping Serializer"""

    class Meta:
        model = LDAPPropertyMapping
        fields = PropertyMappingSerializer.Meta.fields + [
            "object_field",
        ]


class LDAPPropertyMappingFilter(FilterSet):
    """Filter for LDAPPropertyMapping"""

    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))

    class Meta:
        model = LDAPPropertyMapping
        fields = "__all__"


class LDAPPropertyMappingViewSet(UsedByMixin, ModelViewSet):
    """LDAP PropertyMapping Viewset"""

    queryset = LDAPPropertyMapping.objects.all()
    serializer_class = LDAPPropertyMappingSerializer
    filterset_class = LDAPPropertyMappingFilter
    search_fields = ["name"]
    ordering = ["name"]
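The one-line managed filter above packs two steps into a single expression; unrolled for readability (behaviourally equivalent, illustrative only):

from django_filters.filters import AllValuesMultipleFilter
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field

# build the filter, then annotate it so drf-spectacular documents the
# "managed" query parameter as a plain string instead of guessing its type
managed_filter = AllValuesMultipleFilter(field_name="managed")
managed = extend_schema_field(OpenApiTypes.STR)(managed_filter)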
@ -1,40 +0,0 @@
"""Property mapping API Views"""
from django_filters.filters import AllValuesMultipleFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework.viewsets import ModelViewSet

from authentik.core.api.propertymappings import PropertyMappingSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.sources.ldap.models import LDAPPropertyMapping


class LDAPPropertyMappingSerializer(PropertyMappingSerializer):
    """LDAP PropertyMapping Serializer"""

    class Meta:
        model = LDAPPropertyMapping
        fields = PropertyMappingSerializer.Meta.fields + [
            "object_field",
        ]


class LDAPPropertyMappingFilter(FilterSet):
    """Filter for LDAPPropertyMapping"""

    managed = extend_schema_field(OpenApiTypes.STR)(AllValuesMultipleFilter(field_name="managed"))

    class Meta:
        model = LDAPPropertyMapping
        fields = "__all__"


class LDAPPropertyMappingViewSet(UsedByMixin, ModelViewSet):
    """LDAP PropertyMapping Viewset"""

    queryset = LDAPPropertyMapping.objects.all()
    serializer_class = LDAPPropertyMappingSerializer
    filterset_class = LDAPPropertyMappingFilter
    search_fields = ["name"]
    ordering = ["name"]
@ -1,32 +0,0 @@
"""LDAP Source Serializer"""
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.viewsets import ModelViewSet

from authentik.api.authorization import OwnerFilter, OwnerSuperuserPermissions
from authentik.core.api.sources import UserSourceConnectionSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.sources.ldap.models import LDAPUserSourceConnection


class LDAPUserSourceConnectionSerializer(UserSourceConnectionSerializer):
    """LDAP Source Serializer"""

    class Meta:
        model = LDAPUserSourceConnection
        fields = ["pk", "user", "source", "unique_identifier"]
        extra_kwargs = {
            "access_token": {"write_only": True},
        }


class LDAPUserSourceConnectionViewSet(UsedByMixin, ModelViewSet):
    """Source Viewset"""

    queryset = LDAPUserSourceConnection.objects.all()
    serializer_class = LDAPUserSourceConnectionSerializer
    filterset_fields = ["source__slug"]
    search_fields = ["source__slug"]
    permission_classes = [OwnerSuperuserPermissions]
    filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
    ordering = ["source__slug"]
@ -1,58 +0,0 @@
# Generated by Django 4.2.5 on 2023-09-27 10:44

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("authentik_core", "0032_groupsourceconnection"),
        ("authentik_sources_ldap", "0003_ldapsource_client_certificate_ldapsource_sni_and_more"),
    ]

    operations = [
        migrations.CreateModel(
            name="LDAPGroupSourceConnection",
            fields=[
                (
                    "groupsourceconnection_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.groupsourceconnection",
                    ),
                ),
                ("unique_identifier", models.TextField(unique=True)),
            ],
            options={
                "verbose_name": "LDAP Group Source Connection",
                "verbose_name_plural": "LDAP Group Source Connections",
            },
            bases=("authentik_core.groupsourceconnection",),
        ),
        migrations.CreateModel(
            name="LDAPUserSourceConnection",
            fields=[
                (
                    "usersourceconnection_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_core.usersourceconnection",
                    ),
                ),
                ("unique_identifier", models.TextField(unique=True)),
            ],
            options={
                "verbose_name": "LDAP User Source Connection",
                "verbose_name_plural": "LDAP User Source Connections",
            },
            bases=("authentik_core.usersourceconnection",),
        ),
    ]
@ -10,13 +10,7 @@ from ldap3 import ALL, NONE, RANDOM, Connection, Server, ServerPool, Tls
from ldap3.core.exceptions import LDAPInsufficientAccessRightsResult, LDAPSchemaError
from rest_framework.serializers import Serializer

from authentik.core.models import (
    Group,
    GroupSourceConnection,
    PropertyMapping,
    Source,
    UserSourceConnection,
)
from authentik.core.models import Group, PropertyMapping, Source
from authentik.crypto.models import CertificateKeyPair
from authentik.lib.config import CONFIG
from authentik.lib.models import DomainlessURLValidator
@ -119,7 +113,7 @@ class LDAPSource(Source):

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.sources.ldap.api.sources import LDAPSourceSerializer
        from authentik.sources.ldap.api import LDAPSourceSerializer

        return LDAPSourceSerializer

@ -208,7 +202,7 @@ class LDAPPropertyMapping(PropertyMapping):

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.sources.ldap.api.property_mappings import LDAPPropertyMappingSerializer
        from authentik.sources.ldap.api import LDAPPropertyMappingSerializer

        return LDAPPropertyMappingSerializer

@ -218,35 +212,3 @@ class LDAPPropertyMapping(PropertyMapping):
    class Meta:
        verbose_name = _("LDAP Property Mapping")
        verbose_name_plural = _("LDAP Property Mappings")


class LDAPUserSourceConnection(UserSourceConnection):
    """Connection between an authentik user and an LDAP source."""

    unique_identifier = models.TextField(unique=True)

    @property
    def serializer(self) -> Serializer:
        from authentik.sources.ldap.api.source_connections import LDAPUserSourceConnectionSerializer

        return LDAPUserSourceConnectionSerializer

    class Meta:
        verbose_name = _("LDAP User Source Connection")
        verbose_name_plural = _("LDAP User Source Connections")


class LDAPGroupSourceConnection(GroupSourceConnection):
    """Connection between an authentik group and an LDAP source."""

    unique_identifier = models.TextField(unique=True)

    @property
    def serializer(self) -> Serializer:
        from authentik.sources.ldap.api.source_connections import LDAPUserSourceConnectionSerializer

        return LDAPUserSourceConnectionSerializer

    class Meta:
        verbose_name = _("LDAP Group Source Connection")
        verbose_name_plural = _("LDAP Group Source Connections")

@ -133,7 +133,7 @@ class BaseLDAPSynchronizer:
    def build_user_properties(self, user_dn: str, **kwargs) -> dict[str, Any]:
        """Build attributes for User object based on property mappings."""
        props = self._build_object_properties(user_dn, self._source.property_mappings, **kwargs)
        props.setdefault("path", self._source.get_user_path())
        props["path"] = self._source.get_user_path()
        return props

    def build_group_properties(self, group_dn: str, **kwargs) -> dict[str, Any]:
@ -151,14 +151,10 @@ class BaseLDAPSynchronizer:
                continue
            mapping: LDAPPropertyMapping
            try:
                value = mapping.evaluate(
                    user=None, request=None, ldap=kwargs, dn=object_dn, source=self._source
                )
                value = mapping.evaluate(user=None, request=None, ldap=kwargs, dn=object_dn)
                if value is None:
                    self._logger.warning("property mapping returned None", mapping=mapping)
                    continue
                if isinstance(value, (bytes)):
                    self._logger.warning("property mapping returned bytes", mapping=mapping)
                    continue
                object_field = mapping.object_field
                if object_field.startswith("attributes."):
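With the extra source=self._source argument above, a property-mapping expression can reference the source alongside user, request, ldap and dn. A hypothetical expression body, using example attribute names that are not authentik defaults:

# stored in LDAPPropertyMapping.expression, evaluated once per LDAP object
expression = """
return ldap.get("mail", f"{ldap.get('uid')}@{source.name}")
"""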
@ -7,7 +7,6 @@ from ldap3 import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, SUBTREE

from authentik.core.models import Group
from authentik.events.models import Event, EventAction
from authentik.sources.ldap.models import LDAPGroupSourceConnection
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer


@ -19,9 +18,6 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
        return "groups"

    def get_objects(self, **kwargs) -> Generator:
        if not self._source.sync_groups:
            self.message("Group syncing is disabled for this Source")
            return iter(())
        return self.search_paginator(
            search_base=self.base_dn_groups,
            search_filter=self._source.group_object_filter,
@ -64,13 +60,7 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
                    },
                    defaults,
                )
                LDAPGroupSourceConnection.objects.update_or_create(
                    defaults={
                        "unique_identifier": uniq,
                    },
                    source=self._source,
                    group=ak_group,
                )
                self._logger.debug("Created group with attributes", **defaults)
            except (IntegrityError, FieldError, TypeError, AttributeError) as exc:
                Event.new(
                    EventAction.CONFIGURATION_ERROR,

@ -24,9 +24,6 @@ class MembershipLDAPSynchronizer(BaseLDAPSynchronizer):
        return "membership"

    def get_objects(self, **kwargs) -> Generator:
        if not self._source.sync_groups:
            self.message("Group syncing is disabled for this Source")
            return iter(())
        return self.search_paginator(
            search_base=self.base_dn_groups,
            search_filter=self._source.group_object_filter,

@ -7,7 +7,6 @@ from ldap3 import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, SUBTREE

from authentik.core.models import User
from authentik.events.models import Event, EventAction
from authentik.sources.ldap.models import LDAPUserSourceConnection
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer
from authentik.sources.ldap.sync.vendor.freeipa import FreeIPA
from authentik.sources.ldap.sync.vendor.ms_ad import MicrosoftActiveDirectory
@ -21,9 +20,6 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
        return "users"

    def get_objects(self, **kwargs) -> Generator:
        if not self._source.sync_users:
            self.message("User syncing is disabled for this Source")
            return iter(())
        return self.search_paginator(
            search_base=self.base_dn_users,
            search_filter=self._source.user_object_filter,
@ -59,13 +55,6 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
                ak_user, created = self.update_or_create_attributes(
                    User, {f"attributes__{LDAP_UNIQUENESS}": uniq}, defaults
                )
                LDAPUserSourceConnection.objects.update_or_create(
                    defaults={
                        "unique_identifier": uniq,
                    },
                    source=self._source,
                    user=ak_user,
                )
            except (IntegrityError, FieldError, TypeError, AttributeError) as exc:
                Event.new(
                    EventAction.CONFIGURATION_ERROR,
@ -80,7 +69,6 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
            else:
                self._logger.debug("Synced User", user=ak_user.username, created=created)
                user_count += 1
                # TODO: Optimise vendor sync to not create a new connection
                MicrosoftActiveDirectory(self._source).sync(attributes, ak_user, created)
                FreeIPA(self._source).sync(attributes, ak_user, created)
        return user_count

@ -45,13 +45,7 @@ class FreeIPA(BaseLDAPSynchronizer):
        # 389-ds and this will trigger regardless
        if "nsaccountlock" not in attributes:
            return
        # For some reason, nsaccountlock is not defined properly in the schema as bool
        # hence we get it as a list of strings
        _is_locked = str(self._flatten(attributes.get("nsaccountlock", ["FALSE"])))
        # So we have to attempt to convert it to a bool
        is_locked = _is_locked.lower() == "true"
        # And then invert it since freeipa saves locked and we save active
        is_active = not is_locked
        is_active = attributes.get("nsaccountlock", False)
        if is_active != user.is_active:
            user.is_active = is_active
            user.save()
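The nsaccountlock handling above, isolated into a runnable sketch with a hand-rolled stand-in for the synchronizer's _flatten helper:

def flatten(value):
    # unwrap single-element lists the way BaseLDAPSynchronizer's helper does
    return value[0] if isinstance(value, list) and value else value


attributes = {"nsaccountlock": ["TRUE"]}  # how 389-ds/FreeIPA reports a locked account
_is_locked = str(flatten(attributes.get("nsaccountlock", ["FALSE"])))
is_locked = _is_locked.lower() == "true"
is_active = not is_locked  # FreeIPA stores "locked", authentik stores "active"
assert is_active is False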
@ -4,8 +4,6 @@ from uuid import uuid4
from celery import chain, group
from django.core.cache import cache
from ldap3.core.exceptions import LDAPException
from redis.exceptions import LockError
from redis.lock import Lock
from structlog.stdlib import get_logger

from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
@ -35,40 +33,24 @@ def ldap_sync_all():
        ldap_sync_single(source.pk)


@CELERY_APP.task(
    # We take the configured hours timeout time by 2.5 as we run user and
    # group in parallel and then membership, so 2x is to cover the serial tasks,
    # and 0.5x on top of that to give some more leeway
    soft_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5,
    task_time_limit=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 2.5,
)
@CELERY_APP.task()
def ldap_sync_single(source_pk: str):
    """Sync a single source"""
    source: LDAPSource = LDAPSource.objects.filter(pk=source_pk).first()
    if not source:
        return
    lock = Lock(cache.client.get_client(), name=f"goauthentik.io/sources/ldap/sync-{source.slug}")
    if lock.locked():
        LOGGER.debug("LDAP sync locked, skipping task", source=source.slug)
        return
    try:
        with lock:
            task = chain(
                # User and group sync can happen at once, they have no dependencies on each other
                group(
                    ldap_sync_paginator(source, UserLDAPSynchronizer)
                    + ldap_sync_paginator(source, GroupLDAPSynchronizer),
                ),
                # Membership sync needs to run afterwards
                group(
                    ldap_sync_paginator(source, MembershipLDAPSynchronizer),
                ),
            )
            task()
    except LockError:
        # This should never happen, we check if the lock is locked above so this
        # would only happen if there was some other timeout
        LOGGER.debug("Failed to acquire lock for LDAP sync", source=source.slug)
    task = chain(
        # User and group sync can happen at once, they have no dependencies on each other
        group(
            ldap_sync_paginator(source, UserLDAPSynchronizer)
            + ldap_sync_paginator(source, GroupLDAPSynchronizer),
        ),
        # Membership sync needs to run afterwards
        group(
            ldap_sync_paginator(source, MembershipLDAPSynchronizer),
        ),
    )
    task()


def ldap_sync_paginator(source: LDAPSource, sync: type[BaseLDAPSynchronizer]) -> list:
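Two details worth unpacking from the hunk above. First, the time limits: with ldap.task_timeout_hours set to, say, 2, both limits come to (60 * 60 * 2) * 2.5 = 18000 seconds, i.e. 5 hours; 2x covers users and groups in parallel followed by membership, and the remaining 0.5x is slack. Second, the Redis locking pattern, sketched standalone against a local Redis (connection details are placeholders):

from redis import Redis
from redis.exceptions import LockError
from redis.lock import Lock

client = Redis(host="localhost", port=6379)
lock = Lock(client, name="goauthentik.io/sources/ldap/sync-example")
if lock.locked():
    print("another worker is already syncing this source, skipping")
else:
    try:
        with lock:  # acquired on enter, released on exit
            pass  # run the sync chain here
    except LockError:
        # lost the race between the locked() check and acquisition
        pass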
@ -55,7 +55,7 @@ def mock_ad_connection(password: str) -> Connection:
            "revision": 0,
            "objectSid": "user0",
            "objectClass": "person",
            "distinguishedName": "cn=user0,ou=foo,ou=users,dc=goauthentik,dc=io",
            "distinguishedName": "cn=user0,ou=users,dc=goauthentik,dc=io",
            "userAccountControl": (
                UserAccountControl.ACCOUNTDISABLE + UserAccountControl.NORMAL_ACCOUNT
            ),
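For reference on the flag sum above: in Active Directory's userAccountControl bitmask, ACCOUNTDISABLE is 0x2 and NORMAL_ACCOUNT is 0x200, so the mock describes a normal but disabled account:

ACCOUNTDISABLE = 0x0002
NORMAL_ACCOUNT = 0x0200
user_account_control = ACCOUNTDISABLE + NORMAL_ACCOUNT  # 514
assert user_account_control & ACCOUNTDISABLE  # the disabled bit is set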
@ -1,111 +0,0 @@
"""ldap testing utils"""

from ldap3 import MOCK_SYNC, OFFLINE_DS389_1_3_3, Connection, Server


def mock_freeipa_connection(password: str) -> Connection:
    """Create mock FreeIPA-ish connection"""
    server = Server("my_fake_server", get_info=OFFLINE_DS389_1_3_3)
    _pass = "foo"  # noqa # nosec
    connection = Connection(
        server,
        user="cn=my_user,dc=goauthentik,dc=io",
        password=_pass,
        client_strategy=MOCK_SYNC,
    )
    # Entry for password checking
    connection.strategy.add_entry(
        "cn=user,ou=users,dc=goauthentik,dc=io",
        {
            "name": "test-user",
            "uid": "unique-test-group",
            "objectClass": "person",
            "displayName": "Erin M. Hagens",
        },
    )
    connection.strategy.add_entry(
        "cn=group1,ou=groups,dc=goauthentik,dc=io",
        {
            "cn": "group1",
            "uid": "unique-test-group",
            "objectClass": "groupOfNames",
            "member": ["cn=user0,ou=users,dc=goauthentik,dc=io"],
        },
    )
    # Group without SID
    connection.strategy.add_entry(
        "cn=group2,ou=groups,dc=goauthentik,dc=io",
        {
            "cn": "group2",
            "objectClass": "groupOfNames",
        },
    )
    connection.strategy.add_entry(
        "cn=user0,ou=users,dc=goauthentik,dc=io",
        {
            "userPassword": password,
            "name": "user0_sn",
            "uid": "user0_sn",
            "objectClass": "person",
        },
    )
    # User without SID
    connection.strategy.add_entry(
        "cn=user1,ou=users,dc=goauthentik,dc=io",
        {
            "userPassword": "test1111",
            "name": "user1_sn",
            "objectClass": "person",
        },
    )
    # Duplicate users
    connection.strategy.add_entry(
        "cn=user2,ou=users,dc=goauthentik,dc=io",
        {
            "userPassword": "test2222",
            "name": "user2_sn",
            "uid": "unique-test2222",
            "objectClass": "person",
        },
    )
    connection.strategy.add_entry(
        "cn=user3,ou=users,dc=goauthentik,dc=io",
        {
            "userPassword": "test2222",
            "name": "user2_sn",
            "uid": "unique-test2222",
            "objectClass": "person",
        },
    )
    # Group with posixGroup and memberUid
    connection.strategy.add_entry(
        "cn=group-posix,ou=groups,dc=goauthentik,dc=io",
        {
            "cn": "group-posix",
            "objectClass": "posixGroup",
            "memberUid": ["user-posix"],
        },
    )
    # User with posixAccount
    connection.strategy.add_entry(
        "cn=user-posix,ou=users,dc=goauthentik,dc=io",
        {
            "userPassword": password,
            "uid": "user-posix",
            "cn": "user-posix",
            "objectClass": "posixAccount",
        },
    )
    # Locked out user
    connection.strategy.add_entry(
        "cn=user-nsaccountlock,ou=users,dc=goauthentik,dc=io",
        {
            "userPassword": password,
            "uid": "user-nsaccountlock",
            "cn": "user-nsaccountlock",
            "objectClass": "person",
            "nsaccountlock": ["TRUE"],
        },
    )
    connection.bind()
    return connection
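How a test might exercise the mock above (hypothetical usage, not part of this diff): ldap3's MOCK_SYNC strategy answers searches from the seeded entries without any network I/O.

conn = mock_freeipa_connection("some-password")
conn.search(
    search_base="ou=users,dc=goauthentik,dc=io",
    search_filter="(objectClass=person)",
    attributes=["uid", "nsaccountlock"],
)
print([entry.entry_dn for entry in conn.entries])  # the seeded person entries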
@ -4,7 +4,7 @@ from ldap3 import MOCK_SYNC, OFFLINE_SLAPD_2_4, Connection, Server


def mock_slapd_connection(password: str) -> Connection:
    """Create mock SLAPD connection"""
    """Create mock AD connection"""
    server = Server("my_fake_server", get_info=OFFLINE_SLAPD_2_4)
    _pass = "foo"  # noqa # nosec
    connection = Connection(

@ -2,7 +2,7 @@
from rest_framework.test import APITestCase

from authentik.lib.generators import generate_key
from authentik.sources.ldap.api.sources import LDAPSourceSerializer
from authentik.sources.ldap.api import LDAPSourceSerializer
from authentik.sources.ldap.models import LDAPSource

LDAP_PASSWORD = generate_key()

@ -9,7 +9,7 @@ from authentik.core.models import Group, User
from authentik.core.tests.utils import create_test_admin_user
from authentik.events.models import Event, EventAction
from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
from authentik.lib.generators import generate_id, generate_key
from authentik.lib.generators import generate_key
from authentik.lib.utils.reflection import class_to_path
from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
from authentik.sources.ldap.sync.groups import GroupLDAPSynchronizer
@ -17,7 +17,6 @@ from authentik.sources.ldap.sync.membership import MembershipLDAPSynchronizer
from authentik.sources.ldap.sync.users import UserLDAPSynchronizer
from authentik.sources.ldap.tasks import ldap_sync, ldap_sync_all
from authentik.sources.ldap.tests.mock_ad import mock_ad_connection
from authentik.sources.ldap.tests.mock_freeipa import mock_freeipa_connection
from authentik.sources.ldap.tests.mock_slapd import mock_slapd_connection

LDAP_PASSWORD = generate_key()
@ -71,28 +70,6 @@ class LDAPSyncTests(TestCase):
        )
        self.assertTrue(events.exists())

    def test_sync_mapping(self):
        """Test property mappings"""
        none = LDAPPropertyMapping.objects.create(
            name=generate_id(), object_field="none", expression="return None"
        )
        byte_mapping = LDAPPropertyMapping.objects.create(
            name=generate_id(), object_field="bytes", expression="return b''"
        )
        self.source.property_mappings.set(
            LDAPPropertyMapping.objects.filter(
                Q(managed__startswith="goauthentik.io/sources/ldap/default")
                | Q(managed__startswith="goauthentik.io/sources/ldap/ms")
            )
        )
        self.source.property_mappings.add(none, byte_mapping)
        connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))

        # we basically just test that the mappings don't throw errors
        with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
            user_sync = UserLDAPSynchronizer(self.source)
            user_sync.sync_full()

    def test_sync_users_ad(self):
        """Test user sync"""
        self.source.property_mappings.set(
@ -101,6 +78,7 @@ class LDAPSyncTests(TestCase):
                | Q(managed__startswith="goauthentik.io/sources/ldap/ms")
            )
        )
        self.source.save()
        connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))

        # Create the user beforehand so we can set attributes and check they aren't removed
@ -123,7 +101,6 @@ class LDAPSyncTests(TestCase):
        user = User.objects.filter(username="user0_sn").first()
        self.assertEqual(user.attributes["foo"], "bar")
        self.assertFalse(user.is_active)
        self.assertEqual(user.path, "goauthentik.io/sources/ldap/users/foo")
        self.assertFalse(User.objects.filter(username="user1_sn").exists())

    def test_sync_users_openldap(self):
@ -135,6 +112,7 @@ class LDAPSyncTests(TestCase):
                | Q(managed__startswith="goauthentik.io/sources/ldap/openldap")
            )
        )
        self.source.save()
        connection = MagicMock(return_value=mock_slapd_connection(LDAP_PASSWORD))
        with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
            user_sync = UserLDAPSynchronizer(self.source)
@ -142,23 +120,6 @@ class LDAPSyncTests(TestCase):
        self.assertTrue(User.objects.filter(username="user0_sn").exists())
        self.assertFalse(User.objects.filter(username="user1_sn").exists())

    def test_sync_users_freeipa_ish(self):
        """Test user sync (FreeIPA-ish), mainly testing vendor quirks"""
        self.source.object_uniqueness_field = "uid"
        self.source.property_mappings.set(
            LDAPPropertyMapping.objects.filter(
                Q(managed__startswith="goauthentik.io/sources/ldap/default")
                | Q(managed__startswith="goauthentik.io/sources/ldap/openldap")
            )
        )
        connection = MagicMock(return_value=mock_freeipa_connection(LDAP_PASSWORD))
        with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
            user_sync = UserLDAPSynchronizer(self.source)
            user_sync.sync_full()
        self.assertTrue(User.objects.filter(username="user0_sn").exists())
        self.assertFalse(User.objects.filter(username="user1_sn").exists())
        self.assertFalse(User.objects.get(username="user-nsaccountlock").is_active)

    def test_sync_groups_ad(self):
        """Test group sync"""
        self.source.property_mappings.set(
@ -1,10 +1,7 @@
"""API URLs"""
from authentik.sources.ldap.api.property_mappings import LDAPPropertyMappingViewSet
from authentik.sources.ldap.api.source_connections import LDAPUserSourceConnectionViewSet
from authentik.sources.ldap.api.sources import LDAPSourceViewSet
from authentik.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet

api_urlpatterns = [
    ("propertymappings/ldap", LDAPPropertyMappingViewSet),
    ("sources/user_connections/ldap", LDAPUserSourceConnectionViewSet),
    ("sources/ldap", LDAPSourceViewSet),
]

@ -68,7 +68,7 @@ class OAuthSource(Source):
    # we're using Type[] instead of type[] here since type[] interferes with the property above
    @property
    def serializer(self) -> Type[Serializer]:
        from authentik.sources.oauth.api.sources import OAuthSourceSerializer
        from authentik.sources.oauth.api.source import OAuthSourceSerializer

        return OAuthSourceSerializer

@ -234,7 +234,7 @@ class UserOAuthSourceConnection(UserSourceConnection):

    @property
    def serializer(self) -> Serializer:
        from authentik.sources.oauth.api.source_connections import (
        from authentik.sources.oauth.api.source_connection import (
            UserOAuthSourceConnectionSerializer,
        )

@ -3,7 +3,7 @@ from django.test import TestCase
from django.urls import reverse
from requests_mock import Mocker

from authentik.sources.oauth.api.sources import OAuthSourceSerializer
from authentik.sources.oauth.api.source import OAuthSourceSerializer
from authentik.sources.oauth.models import OAuthSource

@ -2,8 +2,8 @@

from django.urls import path

from authentik.sources.oauth.api.source_connections import UserOAuthSourceConnectionViewSet
from authentik.sources.oauth.api.sources import OAuthSourceViewSet
from authentik.sources.oauth.api.source import OAuthSourceViewSet
from authentik.sources.oauth.api.source_connection import UserOAuthSourceConnectionViewSet
from authentik.sources.oauth.types.registry import RequestKind
from authentik.sources.oauth.views.dispatcher import DispatcherView
@ -1,129 +0,0 @@
"""Authenticator devices helpers"""
from django.db import transaction


def verify_token(user, device_id, token):
    """
    Attempts to verify a :term:`token` against a specific device, identified by
    :attr:`~authentik.stages.authenticator.models.Device.persistent_id`.

    This wraps the verification process in a transaction to ensure that things
    like throttling policies are properly enforced.

    :param user: The user supplying the token.
    :type user: :class:`~django.contrib.auth.models.User`

    :param str device_id: A device's persistent_id value.

    :param str token: An OTP token to verify.

    :returns: The device that accepted ``token``, if any.
    :rtype: :class:`~authentik.stages.authenticator.models.Device` or ``None``

    """
    from authentik.stages.authenticator.models import Device

    verified = None
    with transaction.atomic():
        device = Device.from_persistent_id(device_id, for_verify=True)
        if (device is not None) and (device.user_id == user.pk) and device.verify_token(token):
            verified = device

    return verified


def match_token(user, token):
    """
    Attempts to verify a :term:`token` on every device attached to the given
    user until one of them succeeds.

    .. warning::

        This originally existed for more convenient integration with the admin
        site. Its use is no longer recommended and it is not guaranteed to
        interact well with more recent features (such as throttling). Tokens
        should always be verified against specific devices.

    :param user: The user supplying the token.
    :type user: :class:`~django.contrib.auth.models.User`

    :param str token: An OTP token to verify.

    :returns: The device that accepted ``token``, if any.
    :rtype: :class:`~authentik.stages.authenticator.models.Device` or ``None``
    """
    with transaction.atomic():
        for device in devices_for_user(user, for_verify=True):
            if device.verify_token(token):
                break
        else:
            device = None

    return device


def devices_for_user(user, confirmed=True, for_verify=False):
    """
    Return an iterable of all devices registered to the given user.

    Returns an empty iterable for anonymous users.

    :param user: standard or custom user object.
    :type user: :class:`~django.contrib.auth.models.User`

    :param bool confirmed: If ``None``, all matching devices are returned.
        Otherwise, this can be any true or false value to limit the query
        to confirmed or unconfirmed devices, respectively.

    :param bool for_verify: If ``True``, we'll load the devices with
        :meth:`~django.db.models.query.QuerySet.select_for_update` to prevent
        concurrent verifications from succeeding. In which case, this must be
        called inside a transaction.

    :rtype: iterable
    """
    if user.is_anonymous:
        return

    for model in device_classes():
        device_set = model.objects.devices_for_user(user, confirmed=confirmed)
        if for_verify:
            device_set = device_set.select_for_update()

        yield from device_set


def user_has_device(user, confirmed=True):
    """
    Return ``True`` if the user has at least one device.

    Returns ``False`` for anonymous users.

    :param user: standard or custom user object.
    :type user: :class:`~django.contrib.auth.models.User`

    :param confirmed: If ``None``, all matching devices are considered.
        Otherwise, this can be any true or false value to limit the query
        to confirmed or unconfirmed devices, respectively.
    """
    try:
        next(devices_for_user(user, confirmed=confirmed))
    except StopIteration:
        has_device = False
    else:
        has_device = True

    return has_device


def device_classes():
    """
    Returns an iterable of all loaded device models.
    """
    from django.apps import apps  # isort: skip
    from authentik.stages.authenticator.models import Device

    for config in apps.get_app_configs():
        for model in config.get_models():
            if issubclass(model, Device):
                yield model
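A hypothetical caller of the removed verify_token helper, for context; the persistent_id value is illustrative, as the real format depends on the device model's app label and primary key:

def validate_otp(user, token: str):
    device = verify_token(user, device_id="authentik_stages_authenticator_totp.totpdevice/1", token=token)
    if device is None:
        raise PermissionError("invalid or reused OTP token")
    return device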
@ -1,10 +0,0 @@
"""Authenticator"""
from django.apps import AppConfig


class AuthentikStageAuthenticatorConfig(AppConfig):
    """Authenticator App config"""

    name = "authentik.stages.authenticator"
    label = "authentik_stages_authenticator"
    verbose_name = "authentik Stages.Authenticator"

Some files were not shown because too many files have changed in this diff.