Compare commits

..

1 Commit

Author SHA1 Message Date
406d18ead6 Update beta.mdx
Added an explanation of what our next versions are, clarified the step to run the upgrade commands, and linked to the Release Notes page.

Signed-off-by: Tana M Berry <tanamarieberry@yahoo.com>
2023-05-11 18:04:26 -05:00
1072 changed files with 128685 additions and 133955 deletions

View File

@ -1,5 +1,5 @@
[bumpversion] [bumpversion]
current_version = 2023.8.7 current_version = 2023.4.1
tag = True tag = True
commit = True commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+) parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
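The `parse` expression in the .bumpversion.cfg hunk above tells the bumpversion tooling how to split a version string into named parts. A minimal sketch of what that regex matches, in plain Python, using the version number from this same hunk (nothing here beyond the regex and version comes from the repository):

    import re

    # The parse pattern exactly as it appears in .bumpversion.cfg.
    parse = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)")

    match = parse.match("2023.4.1")
    print(match.groupdict())  # {'major': '2023', 'minor': '4', 'patch': '1'}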

View File

@ -7,4 +7,3 @@ build/**
build_docs/** build_docs/**
Dockerfile Dockerfile
authentik/enterprise authentik/enterprise
blueprints/local

View File

@ -1,17 +0,0 @@
---
name: Hackathon Idea
about: Propose an idea for the hackathon
title: ""
labels: hackathon
assignees: ""
---
**Describe the idea**
A clear, concise description of the idea you want to implement.
You're also free to work on existing GitHub issues, whether they are feature requests or bugs; just link the existing GitHub issue here.
<!-- Don't modify below here -->
If you want to help work on this idea or want to contribute in any other way, react to this issue with a :rocket:

View File

@ -14,7 +14,7 @@ runs:
run: | run: |
pipx install poetry || true pipx install poetry || true
sudo apt update sudo apt update
sudo apt install -y libpq-dev openssl libxmlsec1-dev pkg-config gettext sudo apt install -y libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry - name: Setup python and restore poetry
uses: actions/setup-python@v3 uses: actions/setup-python@v3
with: with:
@ -23,7 +23,7 @@ runs:
- name: Setup node - name: Setup node
uses: actions/setup-node@v3 uses: actions/setup-node@v3
with: with:
node-version: "20.5" node-version: "20"
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: web/package-lock.json
- name: Setup dependencies - name: Setup dependencies

View File

@ -1,2 +0,0 @@
enabled: true
preservePullRequestTitle: true

View File

@ -8,8 +8,6 @@ updates:
open-pull-requests-limit: 10 open-pull-requests-limit: 10
commit-message: commit-message:
prefix: "ci:" prefix: "ci:"
labels:
- dependencies
- package-ecosystem: gomod - package-ecosystem: gomod
directory: "/" directory: "/"
schedule: schedule:
@ -18,38 +16,14 @@ updates:
open-pull-requests-limit: 10 open-pull-requests-limit: 10
commit-message: commit-message:
prefix: "core:" prefix: "core:"
labels:
- dependencies
- package-ecosystem: npm - package-ecosystem: npm
directory: "/web" directory: "/web"
schedule: schedule:
interval: daily interval: daily
time: "04:00" time: "04:00"
labels:
- dependencies
open-pull-requests-limit: 10 open-pull-requests-limit: 10
commit-message: commit-message:
prefix: "web:" prefix: "web:"
groups:
sentry:
patterns:
- "@sentry/*"
babel:
patterns:
- "@babel/*"
- "babel-*"
eslint:
patterns:
- "@typescript-eslint/eslint-*"
- "eslint"
- "eslint-*"
storybook:
patterns:
- "@storybook/*"
- "*storybook*"
esbuild:
patterns:
- "@esbuild/*"
- package-ecosystem: npm - package-ecosystem: npm
directory: "/website" directory: "/website"
schedule: schedule:
@ -58,12 +32,6 @@ updates:
open-pull-requests-limit: 10 open-pull-requests-limit: 10
commit-message: commit-message:
prefix: "website:" prefix: "website:"
labels:
- dependencies
groups:
docusaurus:
patterns:
- "@docusaurus/*"
- package-ecosystem: pip - package-ecosystem: pip
directory: "/" directory: "/"
schedule: schedule:
@ -72,8 +40,6 @@ updates:
open-pull-requests-limit: 10 open-pull-requests-limit: 10
commit-message: commit-message:
prefix: "core:" prefix: "core:"
labels:
- dependencies
- package-ecosystem: docker - package-ecosystem: docker
directory: "/" directory: "/"
schedule: schedule:
@ -82,5 +48,3 @@ updates:
open-pull-requests-limit: 10 open-pull-requests-limit: 10
commit-message: commit-message:
prefix: "core:" prefix: "core:"
labels:
- dependencies

View File

@ -1,19 +1,23 @@
<!-- <!--
👋 Hi there! Welcome. 👋 Hello there! Welcome.
Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute Please check the [Contributing guidelines](https://goauthentik.io/developer-docs/#how-can-i-contribute).
--> -->
## Details ## Details
<!-- - **Does this resolve an issue?**
Explain what this PR changes, what the rationale behind the change is, if any new requirements are introduced or any breaking changes caused by this PR. Resolves #
Ideally also link an Issue for context that this PR will close using `closes #` ## Changes
-->
REPLACE ME
--- ### New Features
- Adds feature which does x, y, and z.
### Breaking Changes
- Adds breaking change which causes \<issue\>.
## Checklist ## Checklist

19 .github/stale.yml vendored Normal file
View File

@ -0,0 +1,19 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
- pinned
- security
- pr_wanted
- enhancement
- bug/confirmed
- enhancement/confirmed
- question
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
only: issues

View File

@ -2,11 +2,11 @@ git:
filters: filters:
- filter_type: file - filter_type: file
# all supported i18n types: https://docs.transifex.com/formats # all supported i18n types: https://docs.transifex.com/formats
file_format: XLIFF file_format: PO
source_language: en source_language: en
source_file: web/xliff/en.xlf source_file: web/src/locales/en.po
# path expression to translation files, must contain <lang> placeholder # path expression to translation files, must contain <lang> placeholder
translation_files_expression: "web/xliff/<lang>.xlf" translation_files_expression: "web/src/locales/<lang>.po"
- filter_type: file - filter_type: file
# all supported i18n types: https://docs.transifex.com/formats # all supported i18n types: https://docs.transifex.com/formats
file_format: PO file_format: PO

View File

@ -11,7 +11,6 @@ on:
pull_request: pull_request:
branches: branches:
- main - main
- version-*
env: env:
POSTGRES_DB: authentik POSTGRES_DB: authentik
@ -89,8 +88,8 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
psql: psql:
- 11-alpine
- 12-alpine - 12-alpine
- 15-alpine
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Setup authentik env - name: Setup authentik env
@ -113,7 +112,7 @@ jobs:
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: Create k8s Kind Cluster - name: Create k8s Kind Cluster
uses: helm/kind-action@v1.8.0 uses: helm/kind-action@v1.5.0
- name: run integration - name: run integration
run: | run: |
poetry run coverage run manage.py test tests/integration poetry run coverage run manage.py test tests/integration
@ -185,16 +184,13 @@ jobs:
build: build:
needs: ci-core-mark needs: ci-core-mark
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
timeout-minutes: 120 timeout-minutes: 120
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
with: with:
ref: ${{ github.event.pull_request.head.sha }} ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v2.2.0 uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2 uses: docker/setup-buildx-action@v2
- name: prepare variables - name: prepare variables
@ -222,7 +218,6 @@ jobs:
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }} ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}
build-args: | build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION=${{ steps.ev.outputs.version }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }} VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
- name: Comment on PR - name: Comment on PR
if: github.event_name == 'pull_request' if: github.event_name == 'pull_request'
@ -233,16 +228,13 @@ jobs:
build-arm64: build-arm64:
needs: ci-core-mark needs: ci-core-mark
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
timeout-minutes: 120 timeout-minutes: 120
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
with: with:
ref: ${{ github.event.pull_request.head.sha }} ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v2.2.0 uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2 uses: docker/setup-buildx-action@v2
- name: prepare variables - name: prepare variables
@ -270,6 +262,5 @@ jobs:
ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64 ghcr.io/goauthentik/dev-server:gh-${{ steps.ev.outputs.branchNameContainer }}-${{ steps.ev.outputs.timestamp }}-${{ steps.ev.outputs.shortHash }}-arm64
build-args: | build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION=${{ steps.ev.outputs.version }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }} VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: linux/arm64 platforms: linux/arm64

View File

@ -9,7 +9,6 @@ on:
pull_request: pull_request:
branches: branches:
- main - main
- version-*
jobs: jobs:
lint-golint: lint-golint:
@ -30,8 +29,7 @@ jobs:
- name: golangci-lint - name: golangci-lint
uses: golangci/golangci-lint-action@v3 uses: golangci/golangci-lint-action@v3
with: with:
version: v1.52.2 args: --timeout 5000s
args: --timeout 5000s --verbose
skip-pkg-cache: true skip-pkg-cache: true
test-unittest: test-unittest:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -64,15 +62,12 @@ jobs:
- ldap - ldap
- radius - radius
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
with: with:
ref: ${{ github.event.pull_request.head.sha }} ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v2.2.0 uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2 uses: docker/setup-buildx-action@v2
- name: prepare variables - name: prepare variables
@ -99,7 +94,6 @@ jobs:
file: ${{ matrix.type }}.Dockerfile file: ${{ matrix.type }}.Dockerfile
build-args: | build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }} GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
VERSION=${{ steps.ev.outputs.version }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }} VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
context: . context: .
@ -124,9 +118,9 @@ jobs:
- uses: actions/setup-go@v4 - uses: actions/setup-go@v4
with: with:
go-version-file: "go.mod" go-version-file: "go.mod"
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3.6.0
with: with:
node-version: "20.5" node-version: "20"
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: web/package-lock.json
- name: Generate API - name: Generate API
@ -141,5 +135,4 @@ jobs:
set -x set -x
export GOOS=${{ matrix.goos }} export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }} export GOARCH=${{ matrix.goarch }}
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }} go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}

View File

@ -9,16 +9,15 @@ on:
pull_request: pull_request:
branches: branches:
- main - main
- version-*
jobs: jobs:
lint-eslint: lint-eslint:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3.6.0
with: with:
node-version: "20.5" node-version: "20"
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: web/package-lock.json
- working-directory: web/ - working-directory: web/
@ -31,10 +30,10 @@ jobs:
lint-build: lint-build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3.6.0
with: with:
node-version: "20.5" node-version: "20"
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: web/package-lock.json
- working-directory: web/ - working-directory: web/
@ -47,10 +46,10 @@ jobs:
lint-prettier: lint-prettier:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3.6.0
with: with:
node-version: "20.5" node-version: "20"
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: web/package-lock.json
- working-directory: web/ - working-directory: web/
@ -63,10 +62,10 @@ jobs:
lint-lit-analyse: lint-lit-analyse:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3.6.0
with: with:
node-version: "20.5" node-version: "20"
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: web/package-lock.json
- working-directory: web/ - working-directory: web/
@ -95,10 +94,10 @@ jobs:
- ci-web-mark - ci-web-mark
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3.6.0
with: with:
node-version: "20.5" node-version: "20"
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: web/package-lock.json
- working-directory: web/ - working-directory: web/

View File

@ -9,16 +9,15 @@ on:
pull_request: pull_request:
branches: branches:
- main - main
- version-*
jobs: jobs:
lint-prettier: lint-prettier:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3.6.0
with: with:
node-version: "20.5" node-version: "20"
cache: "npm" cache: "npm"
cache-dependency-path: website/package-lock.json cache-dependency-path: website/package-lock.json
- working-directory: website/ - working-directory: website/
@ -29,10 +28,10 @@ jobs:
test: test:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3.6.0
with: with:
node-version: "20.5" node-version: "20"
cache: "npm" cache: "npm"
cache-dependency-path: website/package-lock.json cache-dependency-path: website/package-lock.json
- working-directory: website/ - working-directory: website/
@ -50,10 +49,10 @@ jobs:
- build - build
- build-docs-only - build-docs-only
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3.6.0
with: with:
node-version: "20.5" node-version: "20"
cache: "npm" cache: "npm"
cache-dependency-path: website/package-lock.json cache-dependency-path: website/package-lock.json
- working-directory: website/ - working-directory: website/

View File

@ -1,34 +0,0 @@
---
# See https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
name: Cleanup cache after PR is closed
on:
pull_request:
types:
- closed
jobs:
cleanup:
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@v3
- name: Cleanup
run: |
gh extension install actions/gh-actions-cache
REPO=${{ github.repository }}
BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
echo "Fetching list of cache key"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
# Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR; do
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
done
echo "Done"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -10,11 +10,6 @@ jobs:
name: Delete old unused container images name: Delete old unused container images
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Delete 'dev' containers older than a week - name: Delete 'dev' containers older than a week
uses: snok/container-retention-policy@v2 uses: snok/container-retention-policy@v2
with: with:
@ -23,5 +18,5 @@ jobs:
account-type: org account-type: org
org-name: goauthentik org-name: goauthentik
untagged-only: false untagged-only: false
token: ${{ steps.generate_token.outputs.token }} token: ${{ secrets.BOT_GITHUB_TOKEN }}
skip-tags: gh-next,gh-main skip-tags: gh-next,gh-main

View File

@ -1,61 +0,0 @@
---
name: authentik-compress-images
on:
push:
branches:
- main
paths:
- "**.jpg"
- "**.jpeg"
- "**.png"
- "**.webp"
pull_request:
paths:
- "**.jpg"
- "**.jpeg"
- "**.png"
- "**.webp"
workflow_dispatch:
jobs:
compress:
name: compress
runs-on: ubuntu-latest
# Don't run on forks. Token will not be available. Will run on main and open a PR anyway
if: |
github.repository == 'goauthentik/authentik' &&
(github.event_name != 'pull_request' ||
github.event.pull_request.head.repo.full_name == github.repository)
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v3
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Compress images
id: compress
uses: calibreapp/image-actions@main
with:
githubToken: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@v5
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
title: "*: Auto compress images"
branch-suffix: timestamp
commit-message: "*: compress images"
body: ${{ steps.compress.outputs.markdown }}
delete-branch: true
signoff: true
- uses: peter-evans/enable-pull-request-automerge@v3
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

View File

@ -1,31 +0,0 @@
name: authentik-publish-source-docs
on:
push:
branches:
- main
env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
jobs:
publish-source-docs:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Setup authentik env
uses: ./.github/actions/setup
- name: generate docs
run: |
poetry run make migrate
poetry run ak build_source_docs
- name: Publish
uses: netlify/actions/cli@master
with:
args: deploy --dir=source_docs --prod
env:
NETLIFY_SITE_ID: eb246b7b-1d83-4f69-89f7-01a936b4ca59
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}

View File

@ -1,21 +0,0 @@
name: authentik-on-release-next-branch
on:
schedule:
- cron: "0 12 * * *" # every day at noon
workflow_dispatch:
permissions:
# Needed to be able to push to the next branch
contents: write
jobs:
update-next:
runs-on: ubuntu-latest
environment: internal-production
steps:
- uses: actions/checkout@v3
with:
ref: main
- run: |
git push origin --force main:next

View File

@ -7,13 +7,10 @@ on:
jobs: jobs:
build-server: build-server:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v2.2.0 uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2 uses: docker/setup-buildx-action@v2
- name: prepare variables - name: prepare variables
@ -46,13 +43,9 @@ jobs:
ghcr.io/goauthentik/server:latest ghcr.io/goauthentik/server:latest
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
build-args: | build-args: |
VERSION=${{ steps.ev.outputs.version }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }} VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost: build-outpost:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@ -66,7 +59,7 @@ jobs:
with: with:
go-version-file: "go.mod" go-version-file: "go.mod"
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v2.2.0 uses: docker/setup-qemu-action@v2.1.0
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2 uses: docker/setup-buildx-action@v2
- name: prepare variables - name: prepare variables
@ -97,14 +90,10 @@ jobs:
file: ${{ matrix.type }}.Dockerfile file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
build-args: | build-args: |
VERSION=${{ steps.ev.outputs.version }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }} VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost-binary: build-outpost-binary:
timeout-minutes: 120 timeout-minutes: 120
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
# Needed to upload binaries to the release
contents: write
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@ -119,9 +108,9 @@ jobs:
- uses: actions/setup-go@v4 - uses: actions/setup-go@v4
with: with:
go-version-file: "go.mod" go-version-file: "go.mod"
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3.6.0
with: with:
node-version: "20.5" node-version: "20"
cache: "npm" cache: "npm"
cache-dependency-path: web/package-lock.json cache-dependency-path: web/package-lock.json
- name: Build web - name: Build web
@ -134,7 +123,6 @@ jobs:
set -x set -x
export GOOS=${{ matrix.goos }} export GOOS=${{ matrix.goos }}
export GOARCH=${{ matrix.goarch }} export GOARCH=${{ matrix.goarch }}
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }} go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- name: Upload binaries to release - name: Upload binaries to release
uses: svenstaro/upload-release-action@v2 uses: svenstaro/upload-release-action@v2

View File

@ -22,23 +22,18 @@ jobs:
docker-compose up --no-start docker-compose up --no-start
docker-compose start postgresql redis docker-compose start postgresql redis
docker-compose run -u root server test-all docker-compose run -u root server test-all
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Extract version number - name: Extract version number
id: get_version id: get_version
uses: actions/github-script@v6 uses: actions/github-script@v6
with: with:
github-token: ${{ steps.generate_token.outputs.token }} github-token: ${{ secrets.BOT_GITHUB_TOKEN }}
script: | script: |
return context.payload.ref.replace(/\/refs\/tags\/version\//, ''); return context.payload.ref.replace(/\/refs\/tags\/version\//, '');
- name: Create Release - name: Create Release
id: create_release id: create_release
uses: actions/create-release@v1.1.4 uses: actions/create-release@v1.1.4
env: env:
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN }}
with: with:
tag_name: ${{ github.ref }} tag_name: ${{ github.ref }}
release_name: Release ${{ steps.get_version.outputs.result }} release_name: Release ${{ steps.get_version.outputs.result }}

View File

@ -1,33 +0,0 @@
name: 'authentik-repo-stale'
on:
schedule:
- cron: '30 1 * * *'
workflow_dispatch:
permissions:
# Needed to update issues and PRs
issues: write
jobs:
stale:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/stale@v8
with:
repo-token: ${{ steps.generate_token.outputs.token }}
days-before-stale: 60
days-before-close: 7
exempt-issue-labels: pinned,security,pr_wanted,enhancement,bug/confirmed,enhancement/confirmed,question
stale-issue-label: wontfix
stale-issue-message: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Don't stale PRs, so only apply to PRs with a non-existent label
only-pr-labels: foo

View File

@ -15,14 +15,9 @@ jobs:
compile: compile:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v3 - uses: actions/checkout@v3
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ secrets.BOT_GITHUB_TOKEN }}
- name: Setup authentik env - name: Setup authentik env
uses: ./.github/actions/setup uses: ./.github/actions/setup
- name: run compile - name: run compile
@ -31,7 +26,7 @@ jobs:
uses: peter-evans/create-pull-request@v5 uses: peter-evans/create-pull-request@v5
id: cpr id: cpr
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ secrets.BOT_GITHUB_TOKEN }}
branch: compile-backend-translation branch: compile-backend-translation
commit-message: "core: compile backend translations" commit-message: "core: compile backend translations"
title: "core: compile backend translations" title: "core: compile backend translations"

View File

@ -1,45 +0,0 @@
# Rename transifex pull requests to have a correct naming
# Also enables auto squash-merge
name: authentik-translation-transifex-rename
on:
pull_request:
types: [opened, reopened]
jobs:
rename_pr:
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Get current title
id: title
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
title=$(curl -q -L \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} | jq -r .title)
echo "title=${title}" >> "$GITHUB_OUTPUT"
- name: Rename
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
run: |
curl -L \
-X PATCH \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
-d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
- uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ github.event.pull_request.number }}
merge-method: squash

View File

@ -9,17 +9,12 @@ jobs:
build: build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- id: generate_token
uses: tibdex/github-app-token@v1
with:
app_id: ${{ secrets.GH_APP_ID }}
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@v3 - uses: actions/checkout@v3
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ secrets.BOT_GITHUB_TOKEN }}
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3.6.0
with: with:
node-version: "20.5" node-version: "20"
registry-url: "https://registry.npmjs.org" registry-url: "https://registry.npmjs.org"
- name: Generate API Client - name: Generate API Client
run: make gen-client-ts run: make gen-client-ts
@ -38,17 +33,17 @@ jobs:
- uses: peter-evans/create-pull-request@v5 - uses: peter-evans/create-pull-request@v5
id: cpr id: cpr
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ secrets.BOT_GITHUB_TOKEN }}
branch: update-web-api-client branch: update-web-api-client
commit-message: "web: bump API Client version" commit-message: "web: bump API Client version"
title: "web: bump API Client version" title: "web: bump API Client version"
body: "web: bump API Client version" body: "web: bump API Client version"
delete-branch: true delete-branch: true
signoff: true signoff: true
# ID from https://api.github.com/users/authentik-automation[bot] team-reviewers: "@goauthentik/core"
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com> author: authentik bot <github-bot@goauthentik.io>
- uses: peter-evans/enable-pull-request-automerge@v3 - uses: peter-evans/enable-pull-request-automerge@v3
with: with:
token: ${{ steps.generate_token.outputs.token }} token: ${{ secrets.BOT_GITHUB_TOKEN }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }} pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash merge-method: squash

3 .gitignore vendored
View File

@ -166,7 +166,6 @@ dmypy.json
# SageMath parsed files # SageMath parsed files
# Environments # Environments
**/.DS_Store
# Spyder project settings # Spyder project settings
@ -204,5 +203,3 @@ data/
# Local Netlify folder # Local Netlify folder
.netlify .netlify
.ruff_cache
source_docs/

View File

@ -1,11 +1,10 @@
{ {
"recommendations": [ "recommendations": [
"EditorConfig.EditorConfig",
"bashmish.es6-string-css", "bashmish.es6-string-css",
"bpruitt-goddard.mermaid-markdown-syntax-highlighting", "bpruitt-goddard.mermaid-markdown-syntax-highlighting",
"dbaeumer.vscode-eslint", "dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"esbenp.prettier-vscode", "esbenp.prettier-vscode",
"github.vscode-github-actions",
"golang.go", "golang.go",
"Gruntfuggly.todo-tree", "Gruntfuggly.todo-tree",
"mechatroner.rainbow-csv", "mechatroner.rainbow-csv",
@ -16,6 +15,6 @@
"ms-python.vscode-pylance", "ms-python.vscode-pylance",
"redhat.vscode-yaml", "redhat.vscode-yaml",
"Tobermory.es6-string-html", "Tobermory.es6-string-html",
"unifiedjs.vscode-mdx", "unifiedjs.vscode-mdx"
] ]
} }

27 .vscode/launch.json vendored
View File

@ -1,27 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Python: PDB attach Server",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 6800
},
"justMyCode": true,
"django": true
},
{
"name": "Python: PDB attach Worker",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 6900
},
"justMyCode": true,
"django": true
},
]
}

10 .vscode/settings.json vendored
View File

@ -31,8 +31,7 @@
"!Format sequence", "!Format sequence",
"!Condition sequence", "!Condition sequence",
"!Env sequence", "!Env sequence",
"!Env scalar", "!Env scalar"
"!If sequence"
], ],
"typescript.preferences.importModuleSpecifier": "non-relative", "typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index", "typescript.preferences.importModuleSpecifierEnding": "index",
@ -49,10 +48,5 @@
"ignoreCase": false "ignoreCase": false
} }
], ],
"go.testFlags": [ "go.testFlags": ["-count=1"]
"-count=1"
],
"github-actions.workflows.pinned.workflows": [
".github/workflows/ci-main.yml"
]
} }

View File

@ -1,5 +1,5 @@
# Stage 1: Build website # Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:20.5 as website-builder FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder
COPY ./website /work/website/ COPY ./website /work/website/
COPY ./blueprints /work/blueprints/ COPY ./blueprints /work/blueprints/
@ -7,20 +7,20 @@ COPY ./SECURITY.md /work/
ENV NODE_ENV=production ENV NODE_ENV=production
WORKDIR /work/website WORKDIR /work/website
RUN npm ci --include=dev && npm run build-docs-only RUN npm ci && npm run build-docs-only
# Stage 2: Build webui # Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:20.5 as web-builder FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder
COPY ./web /work/web/ COPY ./web /work/web/
COPY ./website /work/website/ COPY ./website /work/website/
ENV NODE_ENV=production ENV NODE_ENV=production
WORKDIR /work/web WORKDIR /work/web
RUN npm ci --include=dev && npm run build RUN npm ci && npm run build
# Stage 3: Poetry to requirements.txt export # Stage 3: Poetry to requirements.txt export
FROM docker.io/python:3.11.5-slim-bookworm AS poetry-locker FROM docker.io/python:3.11.3-slim-bullseye AS poetry-locker
WORKDIR /work WORKDIR /work
COPY ./pyproject.toml /work COPY ./pyproject.toml /work
@ -31,7 +31,7 @@ RUN pip install --no-cache-dir poetry && \
poetry export -f requirements.txt --dev --output requirements-dev.txt poetry export -f requirements.txt --dev --output requirements-dev.txt
# Stage 4: Build go proxy # Stage 4: Build go proxy
FROM docker.io/golang:1.21.0-bookworm AS go-builder FROM docker.io/golang:1.20.4-bullseye AS go-builder
WORKDIR /work WORKDIR /work
@ -39,56 +39,54 @@ COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
COPY --from=web-builder /work/web/security.txt /work/web/security.txt COPY --from=web-builder /work/web/security.txt /work/web/security.txt
COPY ./cmd /work/cmd COPY ./cmd /work/cmd
COPY ./authentik/lib /work/authentik/lib
COPY ./web/static.go /work/web/static.go COPY ./web/static.go /work/web/static.go
COPY ./internal /work/internal COPY ./internal /work/internal
COPY ./go.mod /work/go.mod COPY ./go.mod /work/go.mod
COPY ./go.sum /work/go.sum COPY ./go.sum /work/go.sum
RUN go build -o /work/bin/authentik ./cmd/server/ RUN go build -o /work/authentik ./cmd/server/
# Stage 5: MaxMind GeoIP # Stage 5: MaxMind GeoIP
FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip FROM ghcr.io/maxmind/geoipupdate:v5.1 as geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City" ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
ENV GEOIPUPDATE_VERBOSE="true" ENV GEOIPUPDATE_VERBOSE="true"
ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"
USER root USER root
RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
--mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \ --mount=type=secret,id=GEOIPUPDATE_LICENSE_KEY \
mkdir -p /usr/share/GeoIP && \ mkdir -p /usr/share/GeoIP && \
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0" /bin/sh -c "\
export GEOIPUPDATE_ACCOUNT_ID=$(cat /run/secrets/GEOIPUPDATE_ACCOUNT_ID); \
export GEOIPUPDATE_LICENSE_KEY=$(cat /run/secrets/GEOIPUPDATE_LICENSE_KEY); \
/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0 \
"
# Stage 6: Run # Stage 6: Run
FROM docker.io/python:3.11.5-slim-bookworm AS final-image FROM docker.io/python:3.11.3-slim-bullseye AS final-image
ARG GIT_BUILD_HASH
ARG VERSION
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
LABEL org.opencontainers.image.url https://goauthentik.io LABEL org.opencontainers.image.url https://goauthentik.io
LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info. LABEL org.opencontainers.image.description goauthentik.io Main server image, see https://goauthentik.io for more info.
LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik
LABEL org.opencontainers.image.version ${VERSION}
LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH}
WORKDIR / WORKDIR /
ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
COPY --from=poetry-locker /work/requirements.txt / COPY --from=poetry-locker /work/requirements.txt /
COPY --from=poetry-locker /work/requirements-dev.txt / COPY --from=poetry-locker /work/requirements-dev.txt /
COPY --from=geoip /usr/share/GeoIP /geoip COPY --from=geoip /usr/share/GeoIP /geoip
RUN apt-get update && \ RUN apt-get update && \
# Required for installing pip packages # Required for installing pip packages
apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev python3-dev && \ apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev && \
# Required for runtime # Required for runtime
apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 && \ apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
# Required for bootstrap & healthcheck # Required for bootstrap & healthcheck
apt-get install -y --no-install-recommends runit && \ apt-get install -y --no-install-recommends runit && \
pip install --no-cache-dir -r /requirements.txt && \ pip install --no-cache-dir -r /requirements.txt && \
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev libpq-dev python3-dev && \ apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
apt-get autoremove --purge -y && \ apt-get autoremove --purge -y && \
apt-get clean && \ apt-get clean && \
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \ rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
@ -105,7 +103,7 @@ COPY ./tests /tests
COPY ./manage.py / COPY ./manage.py /
COPY ./blueprints /blueprints COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle COPY ./lifecycle/ /lifecycle
COPY --from=go-builder /work/bin/authentik /bin/authentik COPY --from=go-builder /work/authentik /bin/authentik
COPY --from=web-builder /work/web/dist/ /web/dist/ COPY --from=web-builder /work/web/dist/ /web/dist/
COPY --from=web-builder /work/web/authentik/ /web/authentik/ COPY --from=web-builder /work/web/authentik/ /web/authentik/
COPY --from=website-builder /work/website/help/ /website/help/ COPY --from=website-builder /work/website/help/ /website/help/

View File

@ -52,7 +52,7 @@ lint:
migrate: migrate:
python -m lifecycle.migrate python -m lifecycle.migrate
i18n-extract: i18n-extract-core web-i18n-extract i18n-extract: i18n-extract-core web-extract
i18n-extract-core: i18n-extract-core:
ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
@ -140,9 +140,6 @@ web-watch:
touch web/dist/.gitkeep touch web/dist/.gitkeep
cd web && npm run watch cd web && npm run watch
web-storybook-watch:
cd web && npm run storybook
web-lint-fix: web-lint-fix:
cd web && npm run prettier cd web && npm run prettier
@ -153,8 +150,8 @@ web-lint:
web-check-compile: web-check-compile:
cd web && npm run tsc cd web && npm run tsc
web-i18n-extract: web-extract:
cd web && npm run extract-locales cd web && npm run extract
######################### #########################
## Website ## Website

View File

@ -15,7 +15,7 @@
## What is authentik? ## What is authentik?
authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them. Authentik is an open-source Identity Provider that emphasizes flexibility and versatility. It can be seamlessly integrated into existing environments to support new protocols. Authentik is also a great solution for implementing sign-up, recovery, and other similar features in your application, saving you the hassle of dealing with them.
## Installation ## Installation

View File

@ -1,50 +1,44 @@
authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce its existence and release the fixed version. Authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce its existence and release the fixed version.
## What authentik classifies as a CVE
CVE (Common Vulnerabilities and Exposures) is a system designed to aggregate all vulnerabilities. As such, a CVE will be issued when there is either a vulnerability or an exposure. Per NIST, a vulnerability is:
“Weakness in an information system, system security procedures, internal controls, or implementation that could be exploited or triggered by a threat source.”
If it is determined that the issue does qualify as a CVE, a CVE number will be issued to the reporter from GitHub.
Even if the issue is not a CVE, we still greatly appreciate your help in hardening authentik.
## Supported Versions ## Supported Versions
(.x being the latest patch release for each version) (.x being the latest patch release for each version)
| Version | Supported | | Version | Supported |
| --- | --- | | --------- | ------------------ |
| 2023.6.x | | | 2023.2.x | :white_check_mark: |
| 2023.8.x | | | 2023.3.x | :white_check_mark: |
## Reporting a Vulnerability ## Reporting a Vulnerability
To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the issue. To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the bug.
## Severity levels ## Criticality levels
authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories: ### High
| Score | Severity | - Authorization bypass
| --- | --- | - Circumvention of policies
| 0.0 | None |
| 0.1 – 3.9 | Low | ### Moderate
| 4.0 – 6.9 | Medium |
| 7.0 – 8.9 | High | - Denial-of-Service attacks
| 9.0 – 10.0 | Critical |
### Low
- Unvalidated redirects
- Issues requiring uncommon setups
## Disclosure process ## Disclosure process
1. Report from Github or Issue is reported via Email as listed above. 1. Issue is reported via Email as listed above.
2. The authentik Security team will try to reproduce the issue and ask for more information if required. 2. The authentik Security team will try to reproduce the issue and ask for more information if required.
3. A severity level is assigned. 3. A criticality level is assigned.
4. A fix is created, and if possible tested by the issue reporter. 4. A fix is created, and if possible tested by the issue reporter.
5. The fix is backported to other supported versions, and if possible a workaround for other versions is created. 5. The fix is backported to other supported versions, and if possible a workaround for other versions is created.
6. An announcement is sent out with a fixed release date and severity level of the issue. The announcement will be sent at least 24 hours before the release of the security fix. 6. An announcement is sent out with a fixed release date and criticality level of the issue. The announcement will be sent at least 24 hours before the release of the fix
7. The fixed version is released for the supported versions. 7. The fixed version is released for the supported versions.
## Getting security notifications ## Getting security notifications
To get security notifications, subscribe to the mailing list [here](https://groups.google.com/g/authentik-security-announcements) or join the [discord](https://goauthentik.io/discord) server. To get security notifications, subscribe to the mailing list [here](https://groups.google.com/g/authentik-security-announcements) or join the [discord](https://goauthentik.io/discord) server.
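As a side note on the severity table added to SECURITY.md above: a minimal sketch (plain Python; `severity_for_score` is a hypothetical helper, not part of the repository) of how a CVSS 3.x base score maps onto those categories:

    def severity_for_score(score: float) -> str:
        """Map a CVSS 3.x base score to the buckets from the SECURITY.md table."""
        if not 0.0 <= score <= 10.0:
            raise ValueError("CVSS base scores range from 0.0 to 10.0")
        if score == 0.0:
            return "None"
        if score <= 3.9:
            return "Low"
        if score <= 6.9:
            return "Medium"
        if score <= 8.9:
            return "High"
        return "Critical"

    print(severity_for_score(7.5))  # High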

View File

@ -1,8 +1,8 @@
"""authentik root module""" """authentik"""
from os import environ from os import environ
from typing import Optional from typing import Optional
__version__ = "2023.8.7" __version__ = "2023.4.1"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH" ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@ -8,7 +8,6 @@ from rest_framework.viewsets import ViewSet
from authentik.core.api.utils import PassiveSerializer from authentik.core.api.utils import PassiveSerializer
from authentik.lib.utils.reflection import get_apps from authentik.lib.utils.reflection import get_apps
from authentik.policies.event_matcher.models import model_choices
class AppSerializer(PassiveSerializer): class AppSerializer(PassiveSerializer):
@ -30,17 +29,3 @@ class AppsViewSet(ViewSet):
for app in sorted(get_apps(), key=lambda app: app.name): for app in sorted(get_apps(), key=lambda app: app.name):
data.append({"name": app.name, "label": app.verbose_name}) data.append({"name": app.name, "label": app.verbose_name})
return Response(AppSerializer(data, many=True).data) return Response(AppSerializer(data, many=True).data)
class ModelViewSet(ViewSet):
"""Read-only view list all installed models"""
permission_classes = [IsAdminUser]
@extend_schema(responses={200: AppSerializer(many=True)})
def list(self, request: Request) -> Response:
"""Read-only view list all installed models"""
data = []
for name, label in model_choices():
data.append({"name": name, "label": label})
return Response(AppSerializer(data, many=True).data)

View File

@ -1,4 +1,5 @@
"""authentik administration overview""" """authentik administration overview"""
import os
import platform import platform
from datetime import datetime from datetime import datetime
from sys import version as python_version from sys import version as python_version
@ -33,6 +34,7 @@ class RuntimeDict(TypedDict):
class SystemSerializer(PassiveSerializer): class SystemSerializer(PassiveSerializer):
"""Get system information.""" """Get system information."""
env = SerializerMethodField()
http_headers = SerializerMethodField() http_headers = SerializerMethodField()
http_host = SerializerMethodField() http_host = SerializerMethodField()
http_is_secure = SerializerMethodField() http_is_secure = SerializerMethodField()
@ -41,6 +43,10 @@ class SystemSerializer(PassiveSerializer):
server_time = SerializerMethodField() server_time = SerializerMethodField()
embedded_outpost_host = SerializerMethodField() embedded_outpost_host = SerializerMethodField()
def get_env(self, request: Request) -> dict[str, str]:
"""Get Environment"""
return os.environ.copy()
def get_http_headers(self, request: Request) -> dict[str, str]: def get_http_headers(self, request: Request) -> dict[str, str]:
"""Get HTTP Request headers""" """Get HTTP Request headers"""
headers = {} headers = {}

View File

@ -19,7 +19,7 @@ class WorkerView(APIView):
def get(self, request: Request) -> Response: def get(self, request: Request) -> Response:
"""Get currently connected worker count.""" """Get currently connected worker count."""
count = len(CELERY_APP.control.ping(timeout=0.5)) count = len(CELERY_APP.control.ping(timeout=0.5))
# In debug we run with `task_always_eager`, so tasks are ran on the main process # In debug we run with `CELERY_TASK_ALWAYS_EAGER`, so tasks are ran on the main process
if settings.DEBUG: # pragma: no cover if settings.DEBUG: # pragma: no cover
count += 1 count += 1
return Response({"count": count}) return Response({"count": count})

View File

@ -58,7 +58,7 @@ def clear_update_notifications():
@prefill_task @prefill_task
def update_latest_version(self: MonitoredTask): def update_latest_version(self: MonitoredTask):
"""Update latest version info""" """Update latest version info"""
if CONFIG.get_bool("disable_update_check"): if CONFIG.y_bool("disable_update_check"):
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT) cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."])) self.set_status(TaskResult(TaskResultStatus.WARNING, messages=["Version check disabled."]))
return return

View File

@ -94,11 +94,6 @@ class TestAdminAPI(TestCase):
response = self.client.get(reverse("authentik_api:apps-list")) response = self.client.get(reverse("authentik_api:apps-list"))
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
def test_models(self):
"""Test models API"""
response = self.client.get(reverse("authentik_api:models-list"))
self.assertEqual(response.status_code, 200)
@reconcile_app("authentik_outposts") @reconcile_app("authentik_outposts")
def test_system(self): def test_system(self):
"""Test system API""" """Test system API"""

View File

@ -1,7 +1,7 @@
"""API URLs""" """API URLs"""
from django.urls import path from django.urls import path
from authentik.admin.api.meta import AppsViewSet, ModelViewSet from authentik.admin.api.meta import AppsViewSet
from authentik.admin.api.metrics import AdministrationMetricsViewSet from authentik.admin.api.metrics import AdministrationMetricsViewSet
from authentik.admin.api.system import SystemView from authentik.admin.api.system import SystemView
from authentik.admin.api.tasks import TaskViewSet from authentik.admin.api.tasks import TaskViewSet
@ -11,7 +11,6 @@ from authentik.admin.api.workers import WorkerView
api_urlpatterns = [ api_urlpatterns = [
("admin/system_tasks", TaskViewSet, "admin_system_tasks"), ("admin/system_tasks", TaskViewSet, "admin_system_tasks"),
("admin/apps", AppsViewSet, "apps"), ("admin/apps", AppsViewSet, "apps"),
("admin/models", ModelViewSet, "models"),
path( path(
"admin/metrics/", "admin/metrics/",
AdministrationMetricsViewSet.as_view(), AdministrationMetricsViewSet.as_view(),

View File

@ -1,5 +1,4 @@
"""API Authentication""" """API Authentication"""
from hmac import compare_digest
from typing import Any, Optional from typing import Any, Optional
from django.conf import settings from django.conf import settings
@ -79,7 +78,7 @@ def token_secret_key(value: str) -> Optional[User]:
and return the service account for the managed outpost""" and return the service account for the managed outpost"""
from authentik.outposts.apps import MANAGED_OUTPOST from authentik.outposts.apps import MANAGED_OUTPOST
if not compare_digest(value, settings.SECRET_KEY): if value != settings.SECRET_KEY:
return None return None
outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST) outposts = Outpost.objects.filter(managed=MANAGED_OUTPOST)
if not outposts: if not outposts:
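For context on the `compare_digest` change in the hunk above: `hmac.compare_digest` from the Python standard library compares the two values in constant time, so the check does not leak timing information the way a short-circuiting `!=` can. A self-contained sketch of the pattern (the secret value below is a made-up placeholder, not the project's configuration):

    from hmac import compare_digest

    SECRET_KEY = "example-secret"  # placeholder for illustration only

    def token_matches_secret(value: str) -> bool:
        # Runs in time independent of where the first mismatching
        # character occurs, unlike a plain != comparison.
        return compare_digest(value, SECRET_KEY)

    print(token_matches_secret("example-secret"))  # True
    print(token_matches_secret("wrong-guess"))     # False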

View File

@ -2,43 +2,6 @@
from rest_framework import pagination from rest_framework import pagination
from rest_framework.response import Response from rest_framework.response import Response
PAGINATION_COMPONENT_NAME = "Pagination"
PAGINATION_SCHEMA = {
"type": "object",
"properties": {
"next": {
"type": "number",
},
"previous": {
"type": "number",
},
"count": {
"type": "number",
},
"current": {
"type": "number",
},
"total_pages": {
"type": "number",
},
"start_index": {
"type": "number",
},
"end_index": {
"type": "number",
},
},
"required": [
"next",
"previous",
"count",
"current",
"total_pages",
"start_index",
"end_index",
],
}
class Pagination(pagination.PageNumberPagination): class Pagination(pagination.PageNumberPagination):
"""Pagination which includes total pages and current page""" """Pagination which includes total pages and current page"""
@ -72,7 +35,41 @@ class Pagination(pagination.PageNumberPagination):
return { return {
"type": "object", "type": "object",
"properties": { "properties": {
"pagination": {"$ref": f"#/components/schemas/{PAGINATION_COMPONENT_NAME}"}, "pagination": {
"type": "object",
"properties": {
"next": {
"type": "number",
},
"previous": {
"type": "number",
},
"count": {
"type": "number",
},
"current": {
"type": "number",
},
"total_pages": {
"type": "number",
},
"start_index": {
"type": "number",
},
"end_index": {
"type": "number",
},
},
"required": [
"next",
"previous",
"count",
"current",
"total_pages",
"start_index",
"end_index",
],
},
"results": schema, "results": schema,
}, },
"required": ["pagination", "results"], "required": ["pagination", "results"],

View File

@ -1,6 +1,5 @@
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224""" """Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import ( from drf_spectacular.plumbing import (
ResolvedComponent, ResolvedComponent,
build_array_type, build_array_type,
@ -9,9 +8,6 @@ from drf_spectacular.plumbing import (
) )
from drf_spectacular.settings import spectacular_settings from drf_spectacular.settings import spectacular_settings
from drf_spectacular.types import OpenApiTypes from drf_spectacular.types import OpenApiTypes
from rest_framework.settings import api_settings
from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA
def build_standard_type(obj, **kwargs): def build_standard_type(obj, **kwargs):
@ -32,7 +28,7 @@ GENERIC_ERROR = build_object_type(
VALIDATION_ERROR = build_object_type( VALIDATION_ERROR = build_object_type(
description=_("Validation Error"), description=_("Validation Error"),
properties={ properties={
api_settings.NON_FIELD_ERRORS_KEY: build_array_type(build_standard_type(OpenApiTypes.STR)), "non_field_errors": build_array_type(build_standard_type(OpenApiTypes.STR)),
"code": build_standard_type(OpenApiTypes.STR), "code": build_standard_type(OpenApiTypes.STR),
}, },
required=[], required=[],
@ -40,19 +36,7 @@ VALIDATION_ERROR = build_object_type(
) )
def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedComponent.SCHEMA): def postprocess_schema_responses(result, generator, **kwargs): # noqa: W0613
"""Register a component and return a reference to it."""
component = ResolvedComponent(
name=name,
type=type_,
schema=schema,
object=name,
)
generator.registry.register_on_missing(component)
return component
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs): # noqa: W0613
"""Workaround to set a default response for endpoints. """Workaround to set a default response for endpoints.
Workaround suggested at Workaround suggested at
<https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357> <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>
@ -60,10 +44,19 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
<https://github.com/tfranzel/drf-spectacular/issues/101>. <https://github.com/tfranzel/drf-spectacular/issues/101>.
""" """
create_component(generator, PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA) def create_component(name, schema, type_=ResolvedComponent.SCHEMA):
"""Register a component and return a reference to it."""
component = ResolvedComponent(
name=name,
type=type_,
schema=schema,
object=name,
)
generator.registry.register_on_missing(component)
return component
generic_error = create_component(generator, "GenericError", GENERIC_ERROR) generic_error = create_component("GenericError", GENERIC_ERROR)
validation_error = create_component(generator, "ValidationError", VALIDATION_ERROR) validation_error = create_component("ValidationError", VALIDATION_ERROR)
for path in result["paths"].values(): for path in result["paths"].values():
for method in path.values(): for method in path.values():
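Both versions of this hook register reusable OpenAPI components and then walk every path in the generated schema. As a hedged illustration only (the dotted module path below is an assumption, not something stated in this diff), a postprocessing hook like this is normally enabled through drf-spectacular's POSTPROCESSING_HOOKS setting:

    # Hypothetical settings sketch: enabling the postprocessing hook shown above.
    # The dotted path is an assumed location and may differ in the actual project.
    SPECTACULAR_SETTINGS = {
        "POSTPROCESSING_HOOKS": [
            "authentik.api.schema.postprocess_schema_responses",
        ],
    }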

View File

@ -10,6 +10,8 @@ API Browser - {{ tenant.branding_title }}
<script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script> <script src="{% static 'dist/standalone/api-browser/index.js' %}?version={{ version }}" type="module"></script>
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)"> <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)"> <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
<link rel="icon" href="{{ tenant.branding_favicon }}">
<link rel="shortcut icon" href="{{ tenant.branding_favicon }}">
{% endblock %} {% endblock %}
{% block body %} {% block body %}

View File

@ -9,7 +9,7 @@ from rest_framework.exceptions import AuthenticationFailed
from authentik.api.authentication import bearer_auth
from authentik.blueprints.tests import reconcile_app
from authentik.core.models import Token, TokenIntents, User, UserTypes
from authentik.core.models import USER_ATTRIBUTE_SA, Token, TokenIntents
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
@ -57,8 +57,8 @@ class TestAPIAuth(TestCase):
@reconcile_app("authentik_outposts")
def test_managed_outpost_success(self):
"""Test managed outpost"""
user: User = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
user = bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
self.assertEqual(user.type, UserTypes.INTERNAL_SERVICE_ACCOUNT)
self.assertEqual(user.attributes[USER_ATTRIBUTE_SA], True)
def test_jwt_valid(self):
"""Test valid JWT"""

View File

@ -1,9 +1,8 @@
"""core Configs API""" """core Configs API"""
from pathlib import Path from os import path
from django.conf import settings from django.conf import settings
from django.db import models from django.db import models
from django.dispatch import Signal
from drf_spectacular.utils import extend_schema from drf_spectacular.utils import extend_schema
from rest_framework.fields import ( from rest_framework.fields import (
BooleanField, BooleanField,
@ -22,8 +21,6 @@ from authentik.core.api.utils import PassiveSerializer
from authentik.events.geo import GEOIP_READER from authentik.events.geo import GEOIP_READER
from authentik.lib.config import CONFIG from authentik.lib.config import CONFIG
capabilities = Signal()
class Capabilities(models.TextChoices): class Capabilities(models.TextChoices):
"""Define capabilities which influence which APIs can/should be used""" """Define capabilities which influence which APIs can/should be used"""
@ -66,19 +63,16 @@ class ConfigView(APIView):
"""Get all capabilities this server instance supports""" """Get all capabilities this server instance supports"""
caps = [] caps = []
deb_test = settings.DEBUG or settings.TEST deb_test = settings.DEBUG or settings.TEST
if Path(settings.MEDIA_ROOT).is_mount() or deb_test: if path.ismount(settings.MEDIA_ROOT) or deb_test:
caps.append(Capabilities.CAN_SAVE_MEDIA) caps.append(Capabilities.CAN_SAVE_MEDIA)
if GEOIP_READER.enabled: if GEOIP_READER.enabled:
caps.append(Capabilities.CAN_GEO_IP) caps.append(Capabilities.CAN_GEO_IP)
if CONFIG.get_bool("impersonation"): if CONFIG.y_bool("impersonation"):
caps.append(Capabilities.CAN_IMPERSONATE) caps.append(Capabilities.CAN_IMPERSONATE)
if settings.DEBUG: # pragma: no cover if settings.DEBUG: # pragma: no cover
caps.append(Capabilities.CAN_DEBUG) caps.append(Capabilities.CAN_DEBUG)
if "authentik.enterprise" in settings.INSTALLED_APPS: if "authentik.enterprise" in settings.INSTALLED_APPS:
caps.append(Capabilities.IS_ENTERPRISE) caps.append(Capabilities.IS_ENTERPRISE)
for _, result in capabilities.send(sender=self):
if result:
caps.append(result)
return caps return caps
def get_config(self) -> ConfigSerializer: def get_config(self) -> ConfigSerializer:
@ -86,17 +80,17 @@ class ConfigView(APIView):
return ConfigSerializer( return ConfigSerializer(
{ {
"error_reporting": { "error_reporting": {
"enabled": CONFIG.get("error_reporting.enabled"), "enabled": CONFIG.y("error_reporting.enabled"),
"sentry_dsn": CONFIG.get("error_reporting.sentry_dsn"), "sentry_dsn": CONFIG.y("error_reporting.sentry_dsn"),
"environment": CONFIG.get("error_reporting.environment"), "environment": CONFIG.y("error_reporting.environment"),
"send_pii": CONFIG.get("error_reporting.send_pii"), "send_pii": CONFIG.y("error_reporting.send_pii"),
"traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)), "traces_sample_rate": float(CONFIG.y("error_reporting.sample_rate", 0.4)),
}, },
"capabilities": self.get_capabilities(), "capabilities": self.get_capabilities(),
"cache_timeout": CONFIG.get_int("redis.cache_timeout"), "cache_timeout": int(CONFIG.y("redis.cache_timeout")),
"cache_timeout_flows": CONFIG.get_int("redis.cache_timeout_flows"), "cache_timeout_flows": int(CONFIG.y("redis.cache_timeout_flows")),
"cache_timeout_policies": CONFIG.get_int("redis.cache_timeout_policies"), "cache_timeout_policies": int(CONFIG.y("redis.cache_timeout_policies")),
"cache_timeout_reputation": CONFIG.get_int("redis.cache_timeout_reputation"), "cache_timeout_reputation": int(CONFIG.y("redis.cache_timeout_reputation")),
} }
) )
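In the newer variant above, other apps can contribute extra capabilities through the capabilities Django signal: every truthy receiver return value is appended in get_capabilities(). A minimal sketch of such a receiver, assuming a hypothetical capability name and import path that are not part of this diff:

    # Minimal sketch: contributing a capability via the `capabilities` signal.
    # The import path and "my_capability" value are assumptions for illustration.
    from django.dispatch import receiver

    from authentik.api.v3.config import capabilities  # assumed module location


    @receiver(capabilities)
    def add_my_capability(sender, **kwargs):
        # A truthy return value is appended to the capability list.
        return "my_capability"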

View File

@ -21,14 +21,9 @@ _other_urls = []
for _authentik_app in get_apps():
try:
api_urls = import_module(f"{_authentik_app.name}.urls")
except (ModuleNotFoundError, ImportError) as exc:
except (ModuleNotFoundError, ImportError):
LOGGER.warning("Could not import app's URLs", app_name=_authentik_app.name, exc=exc)
continue
if not hasattr(api_urls, "api_urlpatterns"):
LOGGER.debug(
"App does not define API URLs",
app_name=_authentik_app.name,
)
continue
urls: list = getattr(api_urls, "api_urlpatterns")
for url in urls:

View File

@ -11,9 +11,8 @@ from rest_framework.serializers import ListSerializer, ModelSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.api.decorators import permission_required
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
@ -36,12 +35,11 @@ class BlueprintInstanceSerializer(ModelSerializer):
"""Info about a single blueprint instance file"""
def validate_path(self, path: str) -> str:
"""Ensure the path (if set) specified is retrievable"""
"""Ensure the path specified is retrievable"""
if path == "" or path.startswith(OCI_PREFIX):
return path
files: list[dict] = blueprints_find_dict.delay().get()
if path not in [file["path"] for file in files]:
raise ValidationError(_("Blueprint file does not exist"))
try:
BlueprintInstance(path=path).retrieve()
except BlueprintRetrievalFailed as exc:
raise ValidationError(exc) from exc
return path
def validate_content(self, content: str) -> str:

View File

@ -10,7 +10,7 @@ from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger from structlog.stdlib import get_logger
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry from authentik.blueprints.v1.meta.registry import registry
from authentik.lib.models import SerializerModel from authentik.lib.models import SerializerModel
LOGGER = get_logger() LOGGER = get_logger()
@ -74,18 +74,14 @@ class Command(BaseCommand):
def build(self): def build(self):
"""Build all models into the schema""" """Build all models into the schema"""
for model in registry.get_models(): for model in registry.get_models():
if issubclass(model, BaseMetaModel): if model._meta.abstract:
serializer_class = model.serializer() continue
else: if not is_model_allowed(model):
if model._meta.abstract: continue
continue model_instance: Model = model()
if not is_model_allowed(model): if not isinstance(model_instance, SerializerModel):
continue continue
model_instance: Model = model() serializer = model_instance.serializer(
if not isinstance(model_instance, SerializerModel):
continue
serializer_class = model_instance.serializer
serializer = serializer_class(
context={ context={
SERIALIZER_CONTEXT_BLUEPRINT: False, SERIALIZER_CONTEXT_BLUEPRINT: False,
} }

View File

@ -30,7 +30,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
return
blueprint_file.seek(0)
instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first()
rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
rel_path = path.relative_to(Path(CONFIG.y("blueprints_dir")))
meta = None
if metadata:
meta = from_dict(BlueprintMetadata, metadata)
@ -45,7 +45,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
enabled=True,
managed_models=[],
last_applied_hash="",
metadata=metadata or {},
metadata=metadata,
)
instance.save()
@ -55,7 +55,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEdit
Flow = apps.get_model("authentik_flows", "Flow")
db_alias = schema_editor.connection.alias
for file in glob(f"{CONFIG.get('blueprints_dir')}/**/*.yaml", recursive=True):
for file in glob(f"{CONFIG.y('blueprints_dir')}/**/*.yaml", recursive=True):
check_blueprint_v1_file(BlueprintInstance, Path(file))
for blueprint in BlueprintInstance.objects.using(db_alias).all():

View File

@ -8,7 +8,7 @@ from django.utils.translation import gettext_lazy as _
from rest_framework.serializers import Serializer
from structlog import get_logger
from authentik.blueprints.v1.oci import OCI_PREFIX, BlueprintOCIClient, OCIException
from authentik.blueprints.v1.oci import BlueprintOCIClient, OCIException
from authentik.lib.config import CONFIG
from authentik.lib.models import CreatedUpdatedModel, SerializerModel
from authentik.lib.sentry import SentryIgnoredException
@ -72,7 +72,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
def retrieve_oci(self) -> str:
"""Get blueprint from an OCI registry"""
client = BlueprintOCIClient(self.path.replace(OCI_PREFIX, "https://"))
client = BlueprintOCIClient(self.path.replace("oci://", "https://"))
try:
manifests = client.fetch_manifests()
return client.fetch_blobs(manifests)
@ -82,10 +82,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
def retrieve_file(self) -> str:
"""Get blueprint from path"""
try:
base = Path(CONFIG.get("blueprints_dir"))
full_path = base.joinpath(Path(self.path)).resolve()
if not str(full_path).startswith(str(base.resolve())):
raise BlueprintRetrievalFailed("Invalid blueprint path")
full_path = Path(CONFIG.y("blueprints_dir")).joinpath(Path(self.path))
with full_path.open("r", encoding="utf-8") as _file:
return _file.read()
except (IOError, OSError) as exc:
@ -93,7 +90,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
def retrieve(self) -> str:
"""Retrieve blueprint contents"""
if self.path.startswith(OCI_PREFIX):
if self.path.startswith("oci://"):
return self.retrieve_oci()
if self.path != "":
return self.retrieve_file()
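The newer retrieve_file above resolves the joined path and refuses anything that escapes the configured blueprints directory. A standalone sketch of that guard, with placeholder directory and file names that are not taken from this diff:

    # Minimal sketch of the path-containment check used by the newer retrieve_file().
    # "/blueprints" and the example path below are placeholder values.
    from pathlib import Path


    def read_blueprint(base_dir: str, relative_path: str) -> str:
        base = Path(base_dir)
        full_path = base.joinpath(Path(relative_path)).resolve()
        if not str(full_path).startswith(str(base.resolve())):
            raise ValueError("Invalid blueprint path")  # authentik raises BlueprintRetrievalFailed here
        with full_path.open("r", encoding="utf-8") as _file:
            return _file.read()


    # e.g. read_blueprint("/blueprints", "../etc/hosts") raises instead of leaving the base directory.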

View File

@ -11,42 +11,31 @@ metadata:
entries: entries:
- model: authentik_core.token - model: authentik_core.token
identifiers: identifiers:
identifier: "%(uid)s-token" identifier: %(uid)s-token
attrs: attrs:
key: "%(uid)s" key: %(uid)s
user: "%(user)s" user: %(user)s
intent: api intent: api
- model: authentik_core.application - model: authentik_core.application
identifiers: identifiers:
slug: "%(uid)s-app" slug: %(uid)s-app
attrs: attrs:
name: "%(uid)s-app" name: %(uid)s-app
icon: https://goauthentik.io/img/icon.png icon: https://goauthentik.io/img/icon.png
- model: authentik_sources_oauth.oauthsource - model: authentik_sources_oauth.oauthsource
identifiers: identifiers:
slug: "%(uid)s-source" slug: %(uid)s-source
attrs: attrs:
name: "%(uid)s-source" name: %(uid)s-source
provider_type: azuread provider_type: azuread
consumer_key: "%(uid)s" consumer_key: %(uid)s
consumer_secret: "%(uid)s" consumer_secret: %(uid)s
icon: https://goauthentik.io/img/icon.png icon: https://goauthentik.io/img/icon.png
- model: authentik_flows.flow - model: authentik_flows.flow
identifiers: identifiers:
slug: "%(uid)s-flow" slug: %(uid)s-flow
attrs: attrs:
name: "%(uid)s-flow" name: %(uid)s-flow
title: "%(uid)s-flow" title: %(uid)s-flow
designation: authentication designation: authentication
background: https://goauthentik.io/img/icon.png background: https://goauthentik.io/img/icon.png
- model: authentik_core.user
identifiers:
username: "%(uid)s"
attrs:
name: "%(uid)s"
password: "%(uid)s"
- model: authentik_core.user
identifiers:
username: "%(uid)s-no-password"
attrs:
name: "%(uid)s"

View File

@ -7,5 +7,7 @@ entries:
state: absent state: absent
- identifiers: - identifiers:
name: "%(id)s" name: "%(id)s"
expression: |
return True
model: authentik_policies_expression.expressionpolicy model: authentik_policies_expression.expressionpolicy
state: absent state: absent

View File

@ -9,8 +9,6 @@ context:
mapping: mapping:
key1: value key1: value
key2: 2 key2: 2
context1: context-nested-value
context2: !Context context1
entries: entries:
- model: !Format ["%s", authentik_sources_oauth.oauthsource] - model: !Format ["%s", authentik_sources_oauth.oauthsource]
state: !Format ["%s", present] state: !Format ["%s", present]
@ -36,7 +34,6 @@ entries:
model: authentik_policies_expression.expressionpolicy model: authentik_policies_expression.expressionpolicy
- attrs: - attrs:
attributes: attributes:
env_null: !Env [bar-baz, null]
policy_pk1: policy_pk1:
!Format [ !Format [
"%s-%s", "%s-%s",
@ -100,7 +97,6 @@ entries:
[list, with, items, !Format ["foo-%s", !Context foo]], [list, with, items, !Format ["foo-%s", !Context foo]],
] ]
if_true_simple: !If [!Context foo, true, text] if_true_simple: !If [!Context foo, true, text]
if_short: !If [!Context foo]
if_false_simple: !If [null, false, 2] if_false_simple: !If [null, false, 2]
enumerate_mapping_to_mapping: !Enumerate [ enumerate_mapping_to_mapping: !Enumerate [
!Context mapping, !Context mapping,
@ -145,7 +141,6 @@ entries:
] ]
] ]
] ]
nested_context: !Context context2
identifiers: identifiers:
name: test name: test
conditions: conditions:

View File

@ -1,15 +1,34 @@
"""authentik managed models tests""" """authentik managed models tests"""
from typing import Callable, Type
from django.apps import apps
from django.test import TestCase from django.test import TestCase
from authentik.blueprints.models import BlueprintInstance, BlueprintRetrievalFailed from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.generators import generate_id from authentik.lib.models import SerializerModel
class TestModels(TestCase): class TestModels(TestCase):
"""Test Models""" """Test Models"""
def test_retrieve_file(self):
"""Test retrieve_file""" def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
instance = BlueprintInstance.objects.create(name=generate_id(), path="../etc/hosts") """Test serializer"""
with self.assertRaises(BlueprintRetrievalFailed):
instance.retrieve() def tester(self: TestModels):
if test_model._meta.abstract: # pragma: no cover
return
model_class = test_model()
self.assertTrue(isinstance(model_class, SerializerModel))
self.assertIsNotNone(model_class.serializer)
return tester
for app in apps.get_app_configs():
if not app.label.startswith("authentik"):
continue
for model in app.get_models():
if not is_model_allowed(model):
continue
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))

View File

@ -32,29 +32,6 @@ class TestBlueprintOCI(TransactionTestCase):
"foo", "foo",
) )
def test_successful_port(self):
"""Successful retrieval with custom port"""
with Mocker() as mocker:
mocker.get(
"https://ghcr.io:1234/v2/goauthentik/blueprints/test/manifests/latest",
json={
"layers": [
{
"mediaType": OCI_MEDIA_TYPE,
"digest": "foo",
}
]
},
)
mocker.get("https://ghcr.io:1234/v2/goauthentik/blueprints/test/blobs/foo", text="foo")
self.assertEqual(
BlueprintInstance(
path="oci://ghcr.io:1234/goauthentik/blueprints/test:latest"
).retrieve(),
"foo",
)
def test_manifests_error(self): def test_manifests_error(self):
"""Test manifests request erroring""" """Test manifests request erroring"""
with Mocker() as mocker: with Mocker() as mocker:

View File

@ -1,34 +0,0 @@
"""authentik managed models tests"""
from typing import Callable, Type
from django.apps import apps
from django.test import TestCase
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel
class TestModels(TestCase):
"""Test Models"""
def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
"""Test serializer"""
def tester(self: TestModels):
if test_model._meta.abstract: # pragma: no cover
return
model_class = test_model()
self.assertTrue(isinstance(model_class, SerializerModel))
self.assertIsNotNone(model_class.serializer)
return tester
for app in apps.get_app_configs():
if not app.label.startswith("authentik"):
continue
for model in app.get_models():
if not is_model_allowed(model):
continue
setattr(TestModels, f"test_{app.label}_{model.__name__}", serializer_tester_factory(model))

View File

@ -155,7 +155,6 @@ class TestBlueprintsV1(TransactionTestCase):
}, },
"if_false_complex": ["list", "with", "items", "foo-bar"], "if_false_complex": ["list", "with", "items", "foo-bar"],
"if_true_simple": True, "if_true_simple": True,
"if_short": True,
"if_false_simple": 2, "if_false_simple": 2,
"enumerate_mapping_to_mapping": { "enumerate_mapping_to_mapping": {
"prefix-key1": "other-prefix-value", "prefix-key1": "other-prefix-value",
@ -212,10 +211,8 @@ class TestBlueprintsV1(TransactionTestCase):
], ],
}, },
}, },
"nested_context": "context-nested-value",
"env_null": None,
} }
).exists() )
) )
self.assertTrue( self.assertTrue(
OAuthSource.objects.filter( OAuthSource.objects.filter(

View File

@ -44,14 +44,6 @@ class TestBlueprintsV1API(APITestCase):
), ),
) )
def test_api_oci(self):
"""Test validation with OCI path"""
res = self.client.post(
reverse("authentik_api:blueprintinstance-list"),
data={"name": "foo", "path": "oci://foo/bar"},
)
self.assertEqual(res.status_code, 201)
def test_api_blank(self): def test_api_blank(self):
"""Test blank""" """Test blank"""
res = self.client.post( res = self.client.post(

View File

@ -2,7 +2,7 @@
from django.test import TransactionTestCase from django.test import TransactionTestCase
from authentik.blueprints.v1.importer import Importer from authentik.blueprints.v1.importer import Importer
from authentik.core.models import Application, Token, User from authentik.core.models import Application, Token
from authentik.core.tests.utils import create_test_admin_user from authentik.core.tests.utils import create_test_admin_user
from authentik.flows.models import Flow from authentik.flows.models import Flow
from authentik.lib.generators import generate_id from authentik.lib.generators import generate_id
@ -45,15 +45,3 @@ class TestBlueprintsV1ConditionalFields(TransactionTestCase):
flow = Flow.objects.filter(slug=f"{self.uid}-flow").first() flow = Flow.objects.filter(slug=f"{self.uid}-flow").first()
self.assertIsNotNone(flow) self.assertIsNotNone(flow)
self.assertEqual(flow.background, "https://goauthentik.io/img/icon.png") self.assertEqual(flow.background, "https://goauthentik.io/img/icon.png")
def test_user(self):
"""Test user"""
user: User = User.objects.filter(username=self.uid).first()
self.assertIsNotNone(user)
self.assertTrue(user.check_password(self.uid))
def test_user_null(self):
"""Test user"""
user: User = User.objects.filter(username=f"{self.uid}-no-password").first()
self.assertIsNotNone(user)
self.assertFalse(user.has_usable_password())

View File

@ -223,11 +223,11 @@ class Env(YAMLTag):
if isinstance(node, ScalarNode):
self.key = node.value
if isinstance(node, SequenceNode):
self.key = loader.construct_object(node.value[0])
self.key = node.value[0].value
self.default = loader.construct_object(node.value[1])
self.default = node.value[1].value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
return getenv(self.key) or self.default
return getenv(self.key, self.default)
class Context(YAMLTag):
@ -242,15 +242,13 @@ class Context(YAMLTag):
if isinstance(node, ScalarNode):
self.key = node.value
if isinstance(node, SequenceNode):
self.key = loader.construct_object(node.value[0])
self.key = node.value[0].value
self.default = loader.construct_object(node.value[1])
self.default = node.value[1].value
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
value = self.default
if self.key in blueprint.context:
value = blueprint.context[self.key]
if isinstance(value, YAMLTag):
return value.resolve(entry, blueprint)
return value
@ -262,7 +260,7 @@ class Format(YAMLTag):
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.format_string = loader.construct_object(node.value[0])
self.format_string = node.value[0].value
self.args = []
for raw_node in node.value[1:]:
self.args.append(loader.construct_object(raw_node))
@ -341,7 +339,7 @@ class Condition(YAMLTag):
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.mode = loader.construct_object(node.value[0])
self.mode = node.value[0].value
self.args = []
for raw_node in node.value[1:]:
self.args.append(loader.construct_object(raw_node))
@ -374,12 +372,8 @@ class If(YAMLTag):
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.condition = loader.construct_object(node.value[0])
if len(node.value) == 1:
self.when_true = True
self.when_false = False
else:
self.when_true = loader.construct_object(node.value[1])
self.when_false = loader.construct_object(node.value[2])
self.when_true = loader.construct_object(node.value[1])
self.when_false = loader.construct_object(node.value[2])
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.condition, YAMLTag):
@ -416,7 +410,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.iterable = loader.construct_object(node.value[0])
self.output_body = loader.construct_object(node.value[1])
self.output_body = node.value[1].value
self.item_body = loader.construct_object(node.value[2])
self.__current_context: tuple[Any, Any] = tuple()
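One behavioural difference worth noting in the Env tag above: the newer `getenv(self.key) or self.default` falls back to the default when the variable is set but empty, while the older `getenv(self.key, self.default)` only falls back when the variable is missing. A small illustration (the variable name is a placeholder):

    # Illustration of the two fallback styles for the !Env tag shown above.
    # "EXAMPLE_VAR" is a placeholder environment variable name.
    from os import environ, getenv

    environ["EXAMPLE_VAR"] = ""

    default = "fallback"
    print(getenv("EXAMPLE_VAR", default))    # "" - an empty string wins over the default
    print(getenv("EXAMPLE_VAR") or default)  # "fallback" - an empty string is treated as unset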

View File

@ -35,7 +35,6 @@ from authentik.core.models import (
Source, Source,
UserSourceConnection, UserSourceConnection,
) )
from authentik.events.utils import cleanse_dict
from authentik.flows.models import FlowToken, Stage from authentik.flows.models import FlowToken, Stage
from authentik.lib.models import SerializerModel from authentik.lib.models import SerializerModel
from authentik.outposts.models import OutpostServiceConnection from authentik.outposts.models import OutpostServiceConnection
@ -200,6 +199,9 @@ class Importer:
serializer_kwargs = {} serializer_kwargs = {}
model_instance = existing_models.first() model_instance = existing_models.first()
if not isinstance(model(), BaseMetaModel) and model_instance: if not isinstance(model(), BaseMetaModel) and model_instance:
if entry.get_state(self.__import) == BlueprintEntryDesiredState.CREATED:
self.logger.debug("instance exists, skipping")
return None
self.logger.debug( self.logger.debug(
"initialise serializer with instance", "initialise serializer with instance",
model=model, model=model,
@ -210,9 +212,7 @@ class Importer:
serializer_kwargs["partial"] = True serializer_kwargs["partial"] = True
else: else:
self.logger.debug( self.logger.debug(
"initialised new serializer instance", "initialised new serializer instance", model=model, **updated_identifiers
model=model,
**cleanse_dict(updated_identifiers),
) )
model_instance = model() model_instance = model()
# pk needs to be set on the model instance otherwise a new one will be generated # pk needs to be set on the model instance otherwise a new one will be generated
@ -268,34 +268,21 @@ class Importer:
try: try:
serializer = self._validate_single(entry) serializer = self._validate_single(entry)
except EntryInvalidError as exc: except EntryInvalidError as exc:
# For deleting objects we don't need the serializer to be valid
if entry.get_state(self.__import) == BlueprintEntryDesiredState.ABSENT:
continue
self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc) self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
return False return False
if not serializer: if not serializer:
continue continue
state = entry.get_state(self.__import) state = entry.get_state(self.__import)
if state in [BlueprintEntryDesiredState.PRESENT, BlueprintEntryDesiredState.CREATED]: if state in [
instance = serializer.instance BlueprintEntryDesiredState.PRESENT,
if ( BlueprintEntryDesiredState.CREATED,
instance ]:
and not instance._state.adding model = serializer.save()
and state == BlueprintEntryDesiredState.CREATED
):
self.logger.debug(
"instance exists, skipping",
model=model,
instance=instance,
pk=instance.pk,
)
else:
instance = serializer.save()
self.logger.debug("updated model", model=instance)
if "pk" in entry.identifiers: if "pk" in entry.identifiers:
self.__pk_map[entry.identifiers["pk"]] = instance.pk self.__pk_map[entry.identifiers["pk"]] = model.pk
entry._state = BlueprintEntryState(instance) entry._state = BlueprintEntryState(model)
self.logger.debug("updated model", model=model)
elif state == BlueprintEntryDesiredState.ABSENT: elif state == BlueprintEntryDesiredState.ABSENT:
instance: Optional[Model] = serializer.instance instance: Optional[Model] = serializer.instance
if instance.pk: if instance.pk:
@ -322,6 +309,5 @@ class Importer:
self.logger.debug("Blueprint validation failed") self.logger.debug("Blueprint validation failed")
for log in logs: for log in logs:
getattr(self.logger, log.get("log_level"))(**log) getattr(self.logger, log.get("log_level"))(**log)
self.logger.debug("Finished blueprint import validation")
self.__import = orig_import self.__import = orig_import
return successful, logs return successful, logs

View File

@ -31,7 +31,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer):
required = attrs["required"] required = attrs["required"]
instance = BlueprintInstance.objects.filter(**identifiers).first() instance = BlueprintInstance.objects.filter(**identifiers).first()
if not instance and required: if not instance and required:
raise ValidationError({"identifiers": "Required blueprint does not exist"}) raise ValidationError("Required blueprint does not exist")
self.blueprint_instance = instance self.blueprint_instance = instance
return super().validate(attrs) return super().validate(attrs)

View File

@ -19,7 +19,6 @@ from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.http import authentik_user_agent from authentik.lib.utils.http import authentik_user_agent
OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml" OCI_MEDIA_TYPE = "application/vnd.goauthentik.blueprint.v1+yaml"
OCI_PREFIX = "oci://"
class OCIException(SentryIgnoredException): class OCIException(SentryIgnoredException):
@ -40,16 +39,11 @@ class BlueprintOCIClient:
self.logger = get_logger().bind(url=self.sanitized_url)
self.ref = "latest"
# Remove the leading slash of the path to convert it to an image name
path = self.url.path[1:]
if ":" in path:
if ":" in self.url.path:
# if there's a colon in the path, use everything after it as a ref
path, _, self.ref = path.partition(":")
base_url = f"https://{self.url.hostname}"
if self.url.port:
base_url += f":{self.url.port}"
self.client = NewClient(
base_url,
f"https://{self.url.hostname}",
WithUserAgent(authentik_user_agent()),
WithUsernamePassword(self.url.username, self.url.password),
WithDefaultName(path),
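The newer constructor above keeps an explicit port when building the registry base URL and strips the leading slash to form the image name. A rough sketch of that parsing with urllib.parse on a made-up oci:// URL (host, port and image name are placeholders):

    # Rough sketch of splitting an oci:// blueprint path into base URL, image name and ref,
    # mirroring the newer BlueprintOCIClient constructor. The URL below is a placeholder.
    from urllib.parse import urlparse

    url = urlparse("oci://ghcr.io:1234/goauthentik/blueprints/test:latest".replace("oci://", "https://"))

    ref = "latest"
    path = url.path[1:]  # drop the leading slash to get the image name
    if ":" in path:
        path, _, ref = path.partition(":")

    base_url = f"https://{url.hostname}"
    if url.port:
        base_url += f":{url.port}"

    print(base_url, path, ref)  # https://ghcr.io:1234 goauthentik/blueprints/test latest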

View File

@ -28,7 +28,6 @@ from authentik.blueprints.models import (
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, EntryInvalidError from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, EntryInvalidError
from authentik.blueprints.v1.importer import Importer from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.events.monitored_tasks import ( from authentik.events.monitored_tasks import (
MonitoredTask, MonitoredTask,
TaskResult, TaskResult,
@ -62,7 +61,7 @@ def start_blueprint_watcher():
if _file_watcher_started:
return
observer = Observer()
observer.schedule(BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True)
observer.schedule(BlueprintEventHandler(), CONFIG.y("blueprints_dir"), recursive=True)
observer.start()
_file_watcher_started = True
@ -80,7 +79,7 @@ class BlueprintEventHandler(FileSystemEventHandler):
blueprints_discovery.delay()
if isinstance(event, FileModifiedEvent):
path = Path(event.src_path)
root = Path(CONFIG.get("blueprints_dir")).absolute()
root = Path(CONFIG.y("blueprints_dir")).absolute()
rel_path = str(path.relative_to(root))
for instance in BlueprintInstance.objects.filter(path=rel_path):
LOGGER.debug("modified blueprint file, starting apply", instance=instance)
@ -101,7 +100,7 @@ def blueprints_find_dict():
def blueprints_find():
"""Find blueprints and return valid ones"""
blueprints = []
root = Path(CONFIG.get("blueprints_dir"))
root = Path(CONFIG.y("blueprints_dir"))
for path in root.rglob("**/*.yaml"):
# Check if any part in the path starts with a dot and assume a hidden file
if any(part for part in path.parts if part.startswith(".")):
@ -185,9 +184,9 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
instance: Optional[BlueprintInstance] = None
try:
instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first()
self.set_uid(slugify(instance.name))
if not instance or not instance.enabled:
return
self.set_uid(slugify(instance.name))
blueprint_content = instance.retrieve()
file_hash = sha512(blueprint_content.encode()).hexdigest()
importer = Importer(blueprint_content, instance.context)
@ -229,7 +228,7 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
def clear_failed_blueprints():
"""Remove blueprints which couldn't be fetched"""
# Exclude OCI blueprints as those might be temporarily unavailable
for blueprint in BlueprintInstance.objects.exclude(path__startswith=OCI_PREFIX):
for blueprint in BlueprintInstance.objects.exclude(path__startswith="oci://"):
try:
blueprint.retrieve()
except BlueprintRetrievalFailed:

View File

@ -1,6 +1,5 @@
"""Groups API Viewset""" """Groups API Viewset"""
from json import loads from json import loads
from typing import Optional
from django.db.models.query import QuerySet from django.db.models.query import QuerySet
from django.http import Http404 from django.http import Http404
@ -49,18 +48,10 @@ class GroupSerializer(ModelSerializer):
users_obj = ListSerializer( users_obj = ListSerializer(
child=GroupMemberSerializer(), read_only=True, source="users", required=False child=GroupMemberSerializer(), read_only=True, source="users", required=False
) )
parent_name = CharField(source="parent.name", read_only=True, allow_null=True) parent_name = CharField(source="parent.name", read_only=True)
num_pk = IntegerField(read_only=True) num_pk = IntegerField(read_only=True)
def validate_parent(self, parent: Optional[Group]):
"""Validate group parent (if set), ensuring the parent isn't itself"""
if not self.instance or not parent:
return parent
if str(parent.group_uuid) == str(self.instance.group_uuid):
raise ValidationError("Cannot set group as parent of itself.")
return parent
class Meta: class Meta:
model = Group model = Group
fields = [ fields = [

View File

@ -1,6 +1,4 @@
"""Provider API Views""" """Provider API Views"""
from django.db.models import QuerySet
from django.db.models.query import Q
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from django_filters.filters import BooleanFilter from django_filters.filters import BooleanFilter
from django_filters.filterset import FilterSet from django_filters.filterset import FilterSet
@ -58,22 +56,17 @@ class ProviderSerializer(ModelSerializer, MetaNameSerializer):
class ProviderFilter(FilterSet): class ProviderFilter(FilterSet):
"""Filter for providers""" """Filter for groups"""
application__isnull = BooleanFilter(method="filter_application__isnull") application__isnull = BooleanFilter(
field_name="application",
lookup_expr="isnull",
)
backchannel_only = BooleanFilter( backchannel_only = BooleanFilter(
method="filter_backchannel_only", method="filter_backchannel_only",
) )
def filter_application__isnull(self, queryset: QuerySet, name, value): def filter_backchannel_only(self, queryset, name, value):
"""Only return providers that are neither assigned to application,
both as provider or application provider"""
return queryset.filter(
Q(backchannel_application__isnull=value, is_backchannel=True)
| Q(application__isnull=value)
)
def filter_backchannel_only(self, queryset: QuerySet, name, value):
"""Only return backchannel providers""" """Only return backchannel providers"""
return queryset.filter(is_backchannel=value) return queryset.filter(is_backchannel=value)

View File

@ -33,7 +33,7 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
self.fields["key"] = CharField(required=False)
self.fields["key"] = CharField()
def validate(self, attrs: dict[Any, str]) -> dict[Any, str]:
"""Ensure only API or App password tokens are created."""
@ -47,7 +47,7 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
attrs.setdefault("user", request.user)
attrs.setdefault("intent", TokenIntents.INTENT_API)
if attrs.get("intent") not in [TokenIntents.INTENT_API, TokenIntents.INTENT_APP_PASSWORD]:
raise ValidationError({"intent": f"Invalid intent {attrs.get('intent')}"})
raise ValidationError(f"Invalid intent {attrs.get('intent')}")
return attrs
class Meta: class Meta:

View File

@ -15,13 +15,7 @@ from django.utils.http import urlencode
from django.utils.text import slugify from django.utils.text import slugify
from django.utils.timezone import now from django.utils.timezone import now
from django.utils.translation import gettext as _ from django.utils.translation import gettext as _
from django_filters.filters import ( from django_filters.filters import BooleanFilter, CharFilter, ModelMultipleChoiceFilter
BooleanFilter,
CharFilter,
ModelMultipleChoiceFilter,
MultipleChoiceFilter,
UUIDFilter,
)
from django_filters.filterset import FilterSet from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import ( from drf_spectacular.utils import (
@ -57,7 +51,6 @@ from structlog.stdlib import get_logger
from authentik.admin.api.metrics import CoordinateSerializer from authentik.admin.api.metrics import CoordinateSerializer
from authentik.api.decorators import permission_required from authentik.api.decorators import permission_required
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.used_by import UsedByMixin from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict
from authentik.core.middleware import ( from authentik.core.middleware import (
@ -65,6 +58,7 @@ from authentik.core.middleware import (
SESSION_KEY_IMPERSONATE_USER, SESSION_KEY_IMPERSONATE_USER,
) )
from authentik.core.models import ( from authentik.core.models import (
USER_ATTRIBUTE_SA,
USER_ATTRIBUTE_TOKEN_EXPIRING, USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_PATH_SERVICE_ACCOUNT, USER_PATH_SERVICE_ACCOUNT,
AuthenticatedSession, AuthenticatedSession,
@ -72,14 +66,12 @@ from authentik.core.models import (
Token, Token,
TokenIntents, TokenIntents,
User, User,
UserTypes,
) )
from authentik.events.models import Event, EventAction from authentik.events.models import EventAction
from authentik.flows.exceptions import FlowNonApplicableException from authentik.flows.exceptions import FlowNonApplicableException
from authentik.flows.models import FlowToken from authentik.flows.models import FlowToken
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
from authentik.flows.views.executor import QS_KEY_TOKEN from authentik.flows.views.executor import QS_KEY_TOKEN
from authentik.lib.config import CONFIG
from authentik.stages.email.models import EmailStage from authentik.stages.email.models import EmailStage
from authentik.stages.email.tasks import send_mails from authentik.stages.email.tasks import send_mails
from authentik.stages.email.utils import TemplateEmailMessage from authentik.stages.email.utils import TemplateEmailMessage
@ -114,44 +106,12 @@ class UserSerializer(ModelSerializer):
avatar = CharField(read_only=True) avatar = CharField(read_only=True)
attributes = JSONField(validators=[is_dict], required=False) attributes = JSONField(validators=[is_dict], required=False)
groups = PrimaryKeyRelatedField( groups = PrimaryKeyRelatedField(
allow_empty=True, many=True, source="ak_groups", queryset=Group.objects.all(), default=list allow_empty=True, many=True, source="ak_groups", queryset=Group.objects.all()
) )
groups_obj = ListSerializer(child=UserGroupSerializer(), read_only=True, source="ak_groups") groups_obj = ListSerializer(child=UserGroupSerializer(), read_only=True, source="ak_groups")
uid = CharField(read_only=True) uid = CharField(read_only=True)
username = CharField(max_length=150, validators=[UniqueValidator(queryset=User.objects.all())]) username = CharField(max_length=150, validators=[UniqueValidator(queryset=User.objects.all())])
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
self.fields["password"] = CharField(required=False, allow_null=True)
def create(self, validated_data: dict) -> User:
"""If this serializer is used in the blueprint context, we allow for
directly setting a password. However should be done via the `set_password`
method instead of directly setting it like rest_framework."""
password = validated_data.pop("password", None)
instance: User = super().create(validated_data)
self._set_password(instance, password)
return instance
def update(self, instance: User, validated_data: dict) -> User:
"""Same as `create` above, set the password directly if we're in a blueprint
context"""
password = validated_data.pop("password", None)
instance = super().update(instance, validated_data)
self._set_password(instance, password)
return instance
def _set_password(self, instance: User, password: Optional[str]):
"""Set password of user if we're in a blueprint context, and if it's an empty
string then use an unusable password"""
if SERIALIZER_CONTEXT_BLUEPRINT in self.context and password:
instance.set_password(password)
instance.save()
if len(instance.password) == 0:
instance.set_unusable_password()
instance.save()
def validate_path(self, path: str) -> str: def validate_path(self, path: str) -> str:
"""Validate path""" """Validate path"""
if path[:1] == "/" or path[-1] == "/": if path[:1] == "/" or path[-1] == "/":
@ -161,18 +121,6 @@ class UserSerializer(ModelSerializer):
raise ValidationError(_("No empty segments in user path allowed.")) raise ValidationError(_("No empty segments in user path allowed."))
return path return path
def validate_type(self, user_type: str) -> str:
"""Validate user type, internal_service_account is an internal value"""
if (
self.instance
and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT
and user_type != UserTypes.INTERNAL_SERVICE_ACCOUNT.value
):
raise ValidationError("Can't change internal service account to other user type.")
if not self.instance and user_type == UserTypes.INTERNAL_SERVICE_ACCOUNT.value:
raise ValidationError("Setting a user to internal service account is not allowed.")
return user_type
class Meta: class Meta:
model = User model = User
fields = [ fields = [
@ -189,7 +137,6 @@ class UserSerializer(ModelSerializer):
"attributes", "attributes",
"uid", "uid",
"path", "path",
"type",
] ]
extra_kwargs = { extra_kwargs = {
"name": {"allow_blank": True}, "name": {"allow_blank": True},
@ -215,7 +162,7 @@ class UserSelfSerializer(ModelSerializer):
) )
def get_groups(self, _: User): def get_groups(self, _: User):
"""Return only the group names a user is member of""" """Return only the group names a user is member of"""
for group in self.instance.all_groups().order_by("name"): for group in self.instance.ak_groups.all():
yield { yield {
"name": group.name, "name": group.name,
"pk": group.pk, "pk": group.pk,
@ -238,7 +185,6 @@ class UserSelfSerializer(ModelSerializer):
"avatar", "avatar",
"uid", "uid",
"settings", "settings",
"type",
] ]
extra_kwargs = { extra_kwargs = {
"is_active": {"read_only": True}, "is_active": {"read_only": True},
@ -312,13 +258,13 @@ class UsersFilter(FilterSet):
) )
is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser") is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser")
uuid = UUIDFilter(field_name="uuid") uuid = CharFilter(field_name="uuid")
path = CharFilter(field_name="path") path = CharFilter(
field_name="path",
)
path_startswith = CharFilter(field_name="path", lookup_expr="startswith") path_startswith = CharFilter(field_name="path", lookup_expr="startswith")
type = MultipleChoiceFilter(choices=UserTypes.choices, field_name="type")
groups_by_name = ModelMultipleChoiceFilter( groups_by_name = ModelMultipleChoiceFilter(
field_name="ak_groups__name", field_name="ak_groups__name",
to_field_name="name", to_field_name="name",
@ -357,7 +303,6 @@ class UsersFilter(FilterSet):
"attributes", "attributes",
"groups_by_name", "groups_by_name",
"groups_by_pk", "groups_by_pk",
"type",
] ]
@ -450,8 +395,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
user: User = User.objects.create( user: User = User.objects.create(
username=username, username=username,
name=username, name=username,
type=UserTypes.SERVICE_ACCOUNT, attributes={USER_ATTRIBUTE_SA: True, USER_ATTRIBUTE_TOKEN_EXPIRING: expiring},
attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: expiring},
path=USER_PATH_SERVICE_ACCOUNT, path=USER_PATH_SERVICE_ACCOUNT,
) )
user.set_unusable_password() user.set_unusable_password()
@ -599,58 +543,6 @@ class UserViewSet(UsedByMixin, ModelViewSet):
send_mails(email_stage, message) send_mails(email_stage, message)
return Response(status=204) return Response(status=204)
@permission_required("authentik_core.impersonate")
@extend_schema(
request=OpenApiTypes.NONE,
responses={
"204": OpenApiResponse(description="Successfully started impersonation"),
"401": OpenApiResponse(description="Access denied"),
},
)
@action(detail=True, methods=["POST"])
def impersonate(self, request: Request, pk: int) -> Response:
"""Impersonate a user"""
if not CONFIG.get_bool("impersonation"):
LOGGER.debug("User attempted to impersonate", user=request.user)
return Response(status=401)
if not request.user.has_perm("impersonate"):
LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
return Response(status=401)
user_to_be = self.get_object()
request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be
Event.new(EventAction.IMPERSONATION_STARTED).from_http(request, user_to_be)
return Response(status=201)
@extend_schema(
request=OpenApiTypes.NONE,
responses={
"204": OpenApiResponse(description="Successfully started impersonation"),
},
)
@action(detail=False, methods=["GET"])
def impersonate_end(self, request: Request) -> Response:
"""End Impersonation a user"""
if (
SESSION_KEY_IMPERSONATE_USER not in request.session
or SESSION_KEY_IMPERSONATE_ORIGINAL_USER not in request.session
):
LOGGER.debug("Can't end impersonation", user=request.user)
return Response(status=204)
original_user = request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER]
del request.session[SESSION_KEY_IMPERSONATE_USER]
del request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER]
Event.new(EventAction.IMPERSONATION_ENDED).from_http(request, original_user)
return Response(status=204)
def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet: def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
"""Custom filter_queryset method which ignores guardian, but still supports sorting""" """Custom filter_queryset method which ignores guardian, but still supports sorting"""
for backend in list(self.filter_backends): for backend in list(self.filter_backends):

View File

@ -1,21 +0,0 @@
"""Build source docs"""
from pathlib import Path
from django.core.management.base import BaseCommand
from pdoc import pdoc
from pdoc.render import configure
class Command(BaseCommand):
"""Build source docs"""
def handle(self, **options):
configure(
docformat="markdown",
mermaid=True,
logo="https://goauthentik.io/img/icon_top_brand_colour.svg",
)
pdoc(
"authentik",
output_directory=Path("./source_docs"),
)

View File

@ -1,39 +0,0 @@
"""Run worker"""
from sys import exit as sysexit
from tempfile import tempdir
from celery.apps.worker import Worker
from django.core.management.base import BaseCommand
from django.db import close_old_connections
from structlog.stdlib import get_logger
from authentik.lib.config import CONFIG
from authentik.root.celery import CELERY_APP
LOGGER = get_logger()
class Command(BaseCommand):
"""Run worker"""
def handle(self, **options):
close_old_connections()
if CONFIG.get_bool("remote_debug"):
import debugpy
debugpy.listen(("0.0.0.0", 6900)) # nosec
worker: Worker = CELERY_APP.Worker(
no_color=False,
quiet=True,
optimization="fair",
autoscale=(3, 1),
task_events=True,
beat=True,
schedule_filename=f"{tempdir}/celerybeat-schedule",
queues=["authentik", "authentik_scheduled", "authentik_events"],
)
for task in CELERY_APP.tasks:
LOGGER.debug("Registered task", task=task)
worker.start()
sysexit(worker.exitcode)

View File

@ -1,11 +1,55 @@
# Generated by Django 3.2.8 on 2021-10-10 16:16 # Generated by Django 3.2.8 on 2021-10-10 16:16
from os import environ
import django.db.models.deletion import django.db.models.deletion
from django.apps.registry import Apps
from django.conf import settings
from django.db import migrations, models from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
import authentik.core.models import authentik.core.models
def create_default_user(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from django.contrib.auth.hashers import make_password
User = apps.get_model("authentik_core", "User")
db_alias = schema_editor.connection.alias
akadmin, _ = User.objects.using(db_alias).get_or_create(
username="akadmin",
email=environ.get("AUTHENTIK_BOOTSTRAP_EMAIL", "root@localhost"),
name="authentik Default Admin",
)
password = None
if "TF_BUILD" in environ or settings.TEST:
password = "akadmin" # noqa # nosec
if "AUTHENTIK_BOOTSTRAP_PASSWORD" in environ:
password = environ["AUTHENTIK_BOOTSTRAP_PASSWORD"]
if password:
akadmin.password = make_password(password)
else:
akadmin.password = make_password(None)
akadmin.save()
def create_default_admin_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
Group = apps.get_model("authentik_core", "Group")
User = apps.get_model("authentik_core", "User")
# Creates a default admin group
group, _ = Group.objects.using(db_alias).get_or_create(
is_superuser=True,
defaults={
"name": "authentik Admins",
},
)
group.users.set(User.objects.filter(username="akadmin"))
group.save()
class Migration(migrations.Migration): class Migration(migrations.Migration):
replaces = [ replaces = [
("authentik_core", "0002_auto_20200523_1133"), ("authentik_core", "0002_auto_20200523_1133"),
@ -75,6 +119,9 @@ class Migration(migrations.Migration):
model_name="user", model_name="user",
name="is_staff", name="is_staff",
), ),
migrations.RunPython(
code=create_default_user,
),
migrations.AddField( migrations.AddField(
model_name="user", model_name="user",
name="is_superuser", name="is_superuser",
@ -154,6 +201,9 @@ class Migration(migrations.Migration):
default=False, help_text="Users added to this group will be superusers." default=False, help_text="Users added to this group will be superusers."
), ),
), ),
migrations.RunPython(
code=create_default_admin_group,
),
migrations.AlterModelManagers( migrations.AlterModelManagers(
name="user", name="user",
managers=[ managers=[

View File

@ -1,6 +1,7 @@
# Generated by Django 3.2.8 on 2021-10-10 16:12 # Generated by Django 3.2.8 on 2021-10-10 16:12
import uuid import uuid
from os import environ
import django.db.models.deletion import django.db.models.deletion
from django.apps.registry import Apps from django.apps.registry import Apps
@ -34,6 +35,29 @@ def fix_duplicates(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
Token.objects.using(db_alias).filter(identifier=ident["identifier"]).delete() Token.objects.using(db_alias).filter(identifier=ident["identifier"]).delete()
def create_default_user_token(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from authentik.core.models import TokenIntents
User = apps.get_model("authentik_core", "User")
Token = apps.get_model("authentik_core", "Token")
db_alias = schema_editor.connection.alias
akadmin = User.objects.using(db_alias).filter(username="akadmin")
if not akadmin.exists():
return
if "AUTHENTIK_BOOTSTRAP_TOKEN" not in environ:
return
key = environ["AUTHENTIK_BOOTSTRAP_TOKEN"]
Token.objects.using(db_alias).create(
identifier="authentik-bootstrap-token",
user=akadmin.first(),
intent=TokenIntents.INTENT_API,
expiring=False,
key=key,
)
class Migration(migrations.Migration): class Migration(migrations.Migration):
replaces = [ replaces = [
("authentik_core", "0018_auto_20210330_1345"), ("authentik_core", "0018_auto_20210330_1345"),
@ -190,6 +214,9 @@ class Migration(migrations.Migration):
"verbose_name_plural": "Authenticated Sessions", "verbose_name_plural": "Authenticated Sessions",
}, },
), ),
migrations.RunPython(
code=create_default_user_token,
),
migrations.AlterField( migrations.AlterField(
model_name="token", model_name="token",
name="intent", name="intent",

View File

@ -11,7 +11,7 @@ def backport_is_backchannel(apps: Apps, schema_editor: BaseDatabaseSchemaEditor)
for model in BackchannelProvider.__subclasses__(): for model in BackchannelProvider.__subclasses__():
try: try:
for obj in model.objects.only("is_backchannel"): for obj in model.objects.all():
obj.is_backchannel = True obj.is_backchannel = True
obj.save() obj.save()
except (DatabaseError, InternalError, ProgrammingError): except (DatabaseError, InternalError, ProgrammingError):

View File

@ -1,43 +0,0 @@
# Generated by Django 4.1.7 on 2023-05-21 11:44
from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def migrate_user_type(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
User = apps.get_model("authentik_core", "User")
from authentik.core.models import UserTypes
for user in User.objects.using(db_alias).all():
user.type = UserTypes.INTERNAL
if "goauthentik.io/user/service-account" in user.attributes:
user.type = UserTypes.SERVICE_ACCOUNT
if "goauthentik.io/user/override-ips" in user.attributes:
user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
user.save()
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0029_provider_backchannel_applications_and_more"),
]
operations = [
migrations.AddField(
model_name="user",
name="type",
field=models.TextField(
choices=[
("default", "Default"),
("external", "External"),
("service_account", "Service Account"),
("internal_service_account", "Internal Service Account"),
],
default="default",
),
),
migrations.RunPython(migrate_user_type),
]

View File

@ -1,41 +0,0 @@
# Generated by Django 4.1.10 on 2023-07-21 12:54
from django.apps.registry import Apps
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def migrate_user_type_v2(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
db_alias = schema_editor.connection.alias
User = apps.get_model("authentik_core", "User")
from authentik.core.models import UserTypes
for user in User.objects.using(db_alias).all():
if user.type != "default":
continue
user.type = UserTypes.INTERNAL
user.save()
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0030_user_type"),
]
operations = [
migrations.AlterField(
model_name="user",
name="type",
field=models.TextField(
choices=[
("internal", "Internal"),
("external", "External"),
("service_account", "Service Account"),
("internal_service_account", "Internal Service Account"),
],
default="internal",
),
),
migrations.RunPython(migrate_user_type_v2),
]

View File

@ -5,6 +5,7 @@ from typing import Any, Optional
from uuid import uuid4 from uuid import uuid4
from deepmerge import always_merger from deepmerge import always_merger
from django.conf import settings
from django.contrib.auth.hashers import check_password from django.contrib.auth.hashers import check_password
from django.contrib.auth.models import AbstractUser from django.contrib.auth.models import AbstractUser
from django.contrib.auth.models import UserManager as DjangoUserManager from django.contrib.auth.models import UserManager as DjangoUserManager
@ -32,10 +33,10 @@ from authentik.lib.models import (
) )
from authentik.lib.utils.http import get_client_ip from authentik.lib.utils.http import get_client_ip
from authentik.policies.models import PolicyBindingModel from authentik.policies.models import PolicyBindingModel
from authentik.root.install_id import get_install_id
LOGGER = get_logger() LOGGER = get_logger()
USER_ATTRIBUTE_DEBUG = "goauthentik.io/user/debug" USER_ATTRIBUTE_DEBUG = "goauthentik.io/user/debug"
USER_ATTRIBUTE_SA = "goauthentik.io/user/service-account"
USER_ATTRIBUTE_GENERATED = "goauthentik.io/user/generated" USER_ATTRIBUTE_GENERATED = "goauthentik.io/user/generated"
USER_ATTRIBUTE_EXPIRES = "goauthentik.io/user/expires" USER_ATTRIBUTE_EXPIRES = "goauthentik.io/user/expires"
USER_ATTRIBUTE_DELETE_ON_LOGOUT = "goauthentik.io/user/delete-on-logout" USER_ATTRIBUTE_DELETE_ON_LOGOUT = "goauthentik.io/user/delete-on-logout"
@ -44,6 +45,8 @@ USER_ATTRIBUTE_TOKEN_EXPIRING = "goauthentik.io/user/token-expires" # nosec
USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username" USER_ATTRIBUTE_CHANGE_USERNAME = "goauthentik.io/user/can-change-username"
USER_ATTRIBUTE_CHANGE_NAME = "goauthentik.io/user/can-change-name" USER_ATTRIBUTE_CHANGE_NAME = "goauthentik.io/user/can-change-name"
USER_ATTRIBUTE_CHANGE_EMAIL = "goauthentik.io/user/can-change-email" USER_ATTRIBUTE_CHANGE_EMAIL = "goauthentik.io/user/can-change-email"
USER_ATTRIBUTE_CAN_OVERRIDE_IP = "goauthentik.io/user/override-ips"
USER_PATH_SYSTEM_PREFIX = "goauthentik.io" USER_PATH_SYSTEM_PREFIX = "goauthentik.io"
USER_PATH_SERVICE_ACCOUNT = USER_PATH_SYSTEM_PREFIX + "/service-accounts" USER_PATH_SERVICE_ACCOUNT = USER_PATH_SYSTEM_PREFIX + "/service-accounts"
@ -60,26 +63,11 @@ def default_token_key():
"""Default token key""" """Default token key"""
# We use generate_id since the chars in the key should be easy # We use generate_id since the chars in the key should be easy
# to use in Emails (for verification) and URLs (for recovery) # to use in Emails (for verification) and URLs (for recovery)
return generate_id(CONFIG.get_int("default_token_length")) return generate_id(int(CONFIG.y("default_token_length")))
class UserTypes(models.TextChoices):
"""User types, both for grouping, licensing and permissions in the case
of the internal_service_account"""
INTERNAL = "internal"
EXTERNAL = "external"
# User-created service accounts
SERVICE_ACCOUNT = "service_account"
# Special user type for internally managed and created service
# accounts, such as outpost users
INTERNAL_SERVICE_ACCOUNT = "internal_service_account"
class Group(SerializerModel): class Group(SerializerModel):
"""Group model which supports a basic hierarchy and has attributes""" """Custom Group model which supports a basic hierarchy"""
group_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4) group_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
@ -113,7 +101,27 @@ class Group(SerializerModel):
def is_member(self, user: "User") -> bool: def is_member(self, user: "User") -> bool:
"""Recursively check if `user` is member of us, or any parent.""" """Recursively check if `user` is member of us, or any parent."""
return user.all_groups().filter(group_uuid=self.group_uuid).exists() query = """
WITH RECURSIVE parents AS (
SELECT authentik_core_group.*, 0 AS relative_depth
FROM authentik_core_group
WHERE authentik_core_group.group_uuid = %s
UNION ALL
SELECT authentik_core_group.*, parents.relative_depth - 1
FROM authentik_core_group,parents
WHERE (
authentik_core_group.parent_id = parents.group_uuid and
parents.relative_depth > -20
)
)
SELECT group_uuid
FROM parents
GROUP BY group_uuid;
"""
groups = Group.objects.raw(query, [self.group_uuid])
return user.ak_groups.filter(pk__in=[group.pk for group in groups]).exists()
def __str__(self): def __str__(self):
return f"Group {self.name}" return f"Group {self.name}"
@ -128,20 +136,19 @@ class Group(SerializerModel):
class UserManager(DjangoUserManager): class UserManager(DjangoUserManager):
"""User manager that doesn't assign is_superuser and is_staff""" """Custom user manager that doesn't assign is_superuser and is_staff"""
def create_user(self, username, email=None, password=None, **extra_fields): def create_user(self, username, email=None, password=None, **extra_fields):
"""User manager that doesn't assign is_superuser and is_staff""" """Custom user manager that doesn't assign is_superuser and is_staff"""
return self._create_user(username, email, password, **extra_fields) return self._create_user(username, email, password, **extra_fields)
class User(SerializerModel, GuardianUserMixin, AbstractUser): class User(SerializerModel, GuardianUserMixin, AbstractUser):
"""authentik User model, based on django's contrib auth user model.""" """Custom User model to allow easier adding of user-based settings"""
uuid = models.UUIDField(default=uuid4, editable=False, unique=True) uuid = models.UUIDField(default=uuid4, editable=False, unique=True)
name = models.TextField(help_text=_("User's display name.")) name = models.TextField(help_text=_("User's display name."))
path = models.TextField(default="users") path = models.TextField(default="users")
type = models.TextField(choices=UserTypes.choices, default=UserTypes.INTERNAL)
sources = models.ManyToManyField("Source", through="UserSourceConnection") sources = models.ManyToManyField("Source", through="UserSourceConnection")
ak_groups = models.ManyToManyField("Group", related_name="users") ak_groups = models.ManyToManyField("Group", related_name="users")
@ -156,45 +163,13 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
"""Get the default user path""" """Get the default user path"""
return User._meta.get_field("path").default return User._meta.get_field("path").default
def all_groups(self) -> QuerySet[Group]:
"""Recursively get all groups this user is a member of.
At least one query is done to get the direct groups of the user; with nested
groups, at most 3 queries are done in total"""
direct_groups = list(
x for x in self.ak_groups.all().values_list("pk", flat=True).iterator()
)
if len(direct_groups) < 1:
return Group.objects.none()
query = """
WITH RECURSIVE parents AS (
SELECT authentik_core_group.*, 0 AS relative_depth
FROM authentik_core_group
WHERE authentik_core_group.group_uuid = ANY(%s)
UNION ALL
SELECT authentik_core_group.*, parents.relative_depth + 1
FROM authentik_core_group, parents
WHERE (
authentik_core_group.group_uuid = parents.parent_id and
parents.relative_depth < 20
)
)
SELECT group_uuid
FROM parents
GROUP BY group_uuid, name
ORDER BY name;
"""
group_pks = [group.pk for group in Group.objects.raw(query, [direct_groups]).iterator()]
return Group.objects.filter(pk__in=group_pks)
def group_attributes(self, request: Optional[HttpRequest] = None) -> dict[str, Any]: def group_attributes(self, request: Optional[HttpRequest] = None) -> dict[str, Any]:
"""Get a dictionary containing the attributes from all groups the user belongs to, """Get a dictionary containing the attributes from all groups the user belongs to,
including the user's attributes""" including the user's attributes"""
final_attributes = {} final_attributes = {}
if request and hasattr(request, "tenant"): if request and hasattr(request, "tenant"):
always_merger.merge(final_attributes, request.tenant.attributes) always_merger.merge(final_attributes, request.tenant.attributes)
for group in self.all_groups().order_by("name"): for group in self.ak_groups.all().order_by("name"):
always_merger.merge(final_attributes, group.attributes) always_merger.merge(final_attributes, group.attributes)
always_merger.merge(final_attributes, self.attributes) always_merger.merge(final_attributes, self.attributes)
return final_attributes return final_attributes
@ -208,7 +183,7 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
@cached_property @cached_property
def is_superuser(self) -> bool: def is_superuser(self) -> bool:
"""Get supseruser status based on membership in a group with superuser status""" """Get supseruser status based on membership in a group with superuser status"""
return self.all_groups().filter(is_superuser=True).exists() return self.ak_groups.filter(is_superuser=True).exists()
@property @property
def is_staff(self) -> bool: def is_staff(self) -> bool:
@ -242,7 +217,7 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
@property @property
def uid(self) -> str: def uid(self) -> str:
"""Generate a globally unique UID, based on the user ID and the hashed secret key""" """Generate a globally unique UID, based on the user ID and the hashed secret key"""
return sha256(f"{self.id}-{get_install_id()}".encode("ascii")).hexdigest() return sha256(f"{self.id}-{settings.SECRET_KEY}".encode("ascii")).hexdigest()
def locale(self, request: Optional[HttpRequest] = None) -> str: def locale(self, request: Optional[HttpRequest] = None) -> str:
"""Get the locale the user has configured""" """Get the locale the user has configured"""
@ -401,10 +376,10 @@ class Application(SerializerModel, PolicyBindingModel):
def get_launch_url(self, user: Optional["User"] = None) -> Optional[str]: def get_launch_url(self, user: Optional["User"] = None) -> Optional[str]:
"""Get launch URL if set, otherwise attempt to get launch URL based on provider.""" """Get launch URL if set, otherwise attempt to get launch URL based on provider."""
url = None url = None
if provider := self.get_provider():
url = provider.launch_url
if self.meta_launch_url: if self.meta_launch_url:
url = self.meta_launch_url url = self.meta_launch_url
elif provider := self.get_provider():
url = provider.launch_url
if user and url: if user and url:
if isinstance(user, SimpleLazyObject): if isinstance(user, SimpleLazyObject):
user._setup() user._setup()
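A plain-Python sketch of what the two recursive CTEs in this file compute: a user's effective groups are their direct groups plus every ancestor, followed through parent links up to 20 levels, which is why membership in a child group also satisfies is_member on its parents. FakeGroup is a hypothetical stand-in, not the authentik model.

from dataclasses import dataclass
from typing import Optional

@dataclass(frozen=True)
class FakeGroup:  # hypothetical stand-in, not authentik_core.Group
    name: str
    parent: Optional["FakeGroup"] = None

def all_groups(direct_groups: list["FakeGroup"], max_depth: int = 20) -> set["FakeGroup"]:
    # Walk child -> parent from each direct group, bounded by the same depth
    # limit the SQL uses, and collect every group seen along the way.
    found: set[FakeGroup] = set()
    for group in direct_groups:
        current: Optional[FakeGroup] = group
        depth = 0
        while current is not None and depth <= max_depth:
            found.add(current)
            current = current.parent
            depth += 1
    return found

parent = FakeGroup("authentik Admins")
child = FakeGroup("helpdesk", parent=parent)
print(parent in all_groups([child]))  # True: membership propagates to ancestors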

View File

@ -1,4 +1,6 @@
"""authentik core signals""" """authentik core signals"""
from typing import TYPE_CHECKING
from django.contrib.auth.signals import user_logged_in, user_logged_out from django.contrib.auth.signals import user_logged_in, user_logged_out
from django.contrib.sessions.backends.cache import KEY_PREFIX from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache from django.core.cache import cache
@ -8,13 +10,16 @@ from django.db.models.signals import post_save, pre_delete, pre_save
from django.dispatch import receiver from django.dispatch import receiver
from django.http.request import HttpRequest from django.http.request import HttpRequest
from authentik.core.models import Application, AuthenticatedSession, BackchannelProvider, User from authentik.core.models import Application, AuthenticatedSession, BackchannelProvider
# Arguments: user: User, password: str # Arguments: user: User, password: str
password_changed = Signal() password_changed = Signal()
# Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage # Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
login_failed = Signal() login_failed = Signal()
if TYPE_CHECKING:
from authentik.core.models import User
@receiver(post_save, sender=Application) @receiver(post_save, sender=Application)
def post_save_application(sender: type[Model], instance, created: bool, **_): def post_save_application(sender: type[Model], instance, created: bool, **_):
@ -30,7 +35,7 @@ def post_save_application(sender: type[Model], instance, created: bool, **_):
@receiver(user_logged_in) @receiver(user_logged_in)
def user_logged_in_session(sender, request: HttpRequest, user: User, **_): def user_logged_in_session(sender, request: HttpRequest, user: "User", **_):
"""Create an AuthenticatedSession from request""" """Create an AuthenticatedSession from request"""
session = AuthenticatedSession.from_request(request, user) session = AuthenticatedSession.from_request(request, user)
@ -39,7 +44,7 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_):
@receiver(user_logged_out) @receiver(user_logged_out)
def user_logged_out_session(sender, request: HttpRequest, user: User, **_): def user_logged_out_session(sender, request: HttpRequest, user: "User", **_):
"""Delete AuthenticatedSession if it exists""" """Delete AuthenticatedSession if it exists"""
AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete() AuthenticatedSession.objects.filter(session_key=request.session.session_key).delete()
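One side of this hunk imports User only under TYPE_CHECKING and quotes the annotation, so signals.py no longer pulls in the model at import time. A minimal sketch of that pattern, using decimal.Decimal purely as a stand-in for an import that would otherwise be circular or expensive.

from typing import TYPE_CHECKING

if TYPE_CHECKING:  # evaluated by type checkers only, never at runtime
    from decimal import Decimal  # stand-in for a circular/heavy import

def fmt(value: "Decimal") -> str:
    # The quoted annotation stays a plain string at runtime, so no import is needed.
    return f"{value:.2f}"

print(fmt.__annotations__)  # {'value': 'Decimal', 'return': <class 'str'>}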

View File

@ -28,7 +28,7 @@ from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSI
from authentik.lib.utils.urls import redirect_with_qs from authentik.lib.utils.urls import redirect_with_qs
from authentik.lib.views import bad_request_message from authentik.lib.views import bad_request_message
from authentik.policies.denied import AccessDeniedResponse from authentik.policies.denied import AccessDeniedResponse
from authentik.policies.utils import delete_none_values from authentik.policies.utils import delete_none_keys
from authentik.stages.password import BACKEND_INBUILT from authentik.stages.password import BACKEND_INBUILT
from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
@ -329,7 +329,7 @@ class SourceFlowManager:
) )
], ],
**{ **{
PLAN_CONTEXT_PROMPT: delete_none_values(self.enroll_info), PLAN_CONTEXT_PROMPT: delete_none_keys(self.enroll_info),
PLAN_CONTEXT_USER_PATH: self.source.get_user_path(), PLAN_CONTEXT_USER_PATH: self.source.get_user_path(),
}, },
) )
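Both sides pass the enrollment info through a None-stripping helper before it lands in the flow plan context; the rename from delete_none_keys to delete_none_values is the only change here. A hedged sketch of what such a helper is assumed to do (this is an illustration, not the authentik implementation).

from typing import Any

def drop_none_values(data: dict[str, Any]) -> dict[str, Any]:
    # Keep only entries whose value is not None.
    return {key: value for key, value in data.items() if value is not None}

print(drop_none_values({"username": "jdoe", "email": None}))  # {'username': 'jdoe'}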

View File

@ -8,8 +8,7 @@
<meta charset="UTF-8"> <meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1"> <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
<title>{% block title %}{% trans title|default:tenant.branding_title %}{% endblock %}</title> <title>{% block title %}{% trans title|default:tenant.branding_title %}{% endblock %}</title>
<link rel="icon" href="{{ tenant.branding_favicon }}"> <link rel="shortcut icon" type="image/png" href="{% static 'dist/assets/icons/icon.png' %}">
<link rel="shortcut icon" href="{{ tenant.branding_favicon }}">
{% block head_before %} {% block head_before %}
{% endblock %} {% endblock %}
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}"> <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">

View File

@ -6,6 +6,8 @@
<script src="{% static 'dist/admin/AdminInterface.js' %}?version={{ version }}" type="module"></script> <script src="{% static 'dist/admin/AdminInterface.js' %}?version={{ version }}" type="module"></script>
<meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)"> <meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)">
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)"> <meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)">
<link rel="icon" href="{{ tenant.branding_favicon }}">
<link rel="shortcut icon" href="{{ tenant.branding_favicon }}">
{% include "base/header_js.html" %} {% include "base/header_js.html" %}
{% endblock %} {% endblock %}

View File

@ -5,6 +5,8 @@
{% block head_before %} {% block head_before %}
{{ block.super }} {{ block.super }}
<link rel="prefetch" href="{{ flow.background_url }}" /> <link rel="prefetch" href="{{ flow.background_url }}" />
<link rel="icon" href="{{ tenant.branding_favicon }}">
<link rel="shortcut icon" href="{{ tenant.branding_favicon }}">
{% if flow.compatibility_mode and not inspector %} {% if flow.compatibility_mode and not inspector %}
<script>ShadyDOM = { force: !navigator.webdriver };</script> <script>ShadyDOM = { force: !navigator.webdriver };</script>
{% endif %} {% endif %}

View File

@ -4,8 +4,10 @@
{% block head %} {% block head %}
<script src="{% static 'dist/user/UserInterface.js' %}?version={{ version }}" type="module"></script> <script src="{% static 'dist/user/UserInterface.js' %}?version={{ version }}" type="module"></script>
<meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: light)"> <meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: dark)"> <meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
<link rel="icon" href="{{ tenant.branding_favicon }}">
<link rel="shortcut icon" href="{{ tenant.branding_favicon }}">
{% include "base/header_js.html" %} {% include "base/header_js.html" %}
{% endblock %} {% endblock %}

View File

@ -78,6 +78,7 @@
</main> </main>
{% endblock %} {% endblock %}
<footer class="pf-c-login__footer"> <footer class="pf-c-login__footer">
<p></p>
<ul class="pf-c-list pf-m-inline"> <ul class="pf-c-list pf-m-inline">
{% for link in footer_links %} {% for link in footer_links %}
<li> <li>

View File

@ -13,9 +13,7 @@ class TestGroups(TestCase):
user = User.objects.create(username=generate_id()) user = User.objects.create(username=generate_id())
user2 = User.objects.create(username=generate_id()) user2 = User.objects.create(username=generate_id())
group = Group.objects.create(name=generate_id()) group = Group.objects.create(name=generate_id())
other_group = Group.objects.create(name=generate_id())
group.users.add(user) group.users.add(user)
other_group.users.add(user)
self.assertTrue(group.is_member(user)) self.assertTrue(group.is_member(user))
self.assertFalse(group.is_member(user2)) self.assertFalse(group.is_member(user2))
@ -23,26 +21,22 @@ class TestGroups(TestCase):
"""Test parent membership""" """Test parent membership"""
user = User.objects.create(username=generate_id()) user = User.objects.create(username=generate_id())
user2 = User.objects.create(username=generate_id()) user2 = User.objects.create(username=generate_id())
parent = Group.objects.create(name=generate_id()) first = Group.objects.create(name=generate_id())
child = Group.objects.create(name=generate_id(), parent=parent) second = Group.objects.create(name=generate_id(), parent=first)
child.users.add(user) second.users.add(user)
self.assertTrue(child.is_member(user)) self.assertTrue(first.is_member(user))
self.assertTrue(parent.is_member(user)) self.assertFalse(first.is_member(user2))
self.assertFalse(child.is_member(user2))
self.assertFalse(parent.is_member(user2))
def test_group_membership_parent_extra(self): def test_group_membership_parent_extra(self):
"""Test parent membership""" """Test parent membership"""
user = User.objects.create(username=generate_id()) user = User.objects.create(username=generate_id())
user2 = User.objects.create(username=generate_id()) user2 = User.objects.create(username=generate_id())
parent = Group.objects.create(name=generate_id()) first = Group.objects.create(name=generate_id())
second = Group.objects.create(name=generate_id(), parent=parent) second = Group.objects.create(name=generate_id(), parent=first)
third = Group.objects.create(name=generate_id(), parent=second) third = Group.objects.create(name=generate_id(), parent=second)
second.users.add(user) second.users.add(user)
self.assertTrue(parent.is_member(user)) self.assertTrue(first.is_member(user))
self.assertFalse(parent.is_member(user2)) self.assertFalse(first.is_member(user2))
self.assertTrue(second.is_member(user))
self.assertFalse(second.is_member(user2))
self.assertFalse(third.is_member(user)) self.assertFalse(third.is_member(user))
self.assertFalse(third.is_member(user2)) self.assertFalse(third.is_member(user2))

View File

@ -67,16 +67,3 @@ class TestGroupsAPI(APITestCase):
}, },
) )
self.assertEqual(res.status_code, 404) self.assertEqual(res.status_code, 404)
def test_parent_self(self):
"""Test parent"""
group = Group.objects.create(name=generate_id())
self.client.force_login(self.admin)
res = self.client.patch(
reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
data={
"pk": self.user.pk + 3,
"parent": group.pk,
},
)
self.assertEqual(res.status_code, 400)

View File

@ -1,14 +1,14 @@
"""impersonation tests""" """impersonation tests"""
from json import loads from json import loads
from django.test.testcases import TestCase
from django.urls import reverse from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.models import User from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user from authentik.core.tests.utils import create_test_admin_user
class TestImpersonation(APITestCase): class TestImpersonation(TestCase):
"""impersonation tests""" """impersonation tests"""
def setUp(self) -> None: def setUp(self) -> None:
@ -23,10 +23,10 @@ class TestImpersonation(APITestCase):
self.other_user.save() self.other_user.save()
self.client.force_login(self.user) self.client.force_login(self.user)
self.client.post( self.client.get(
reverse( reverse(
"authentik_api:user-impersonate", "authentik_core:impersonate-init",
kwargs={"pk": self.other_user.pk}, kwargs={"user_id": self.other_user.pk},
) )
) )
@ -35,7 +35,7 @@ class TestImpersonation(APITestCase):
self.assertEqual(response_body["user"]["username"], self.other_user.username) self.assertEqual(response_body["user"]["username"], self.other_user.username)
self.assertEqual(response_body["original"]["username"], self.user.username) self.assertEqual(response_body["original"]["username"], self.user.username)
self.client.get(reverse("authentik_api:user-impersonate-end")) self.client.get(reverse("authentik_core:impersonate-end"))
response = self.client.get(reverse("authentik_api:user-me")) response = self.client.get(reverse("authentik_api:user-me"))
response_body = loads(response.content.decode()) response_body = loads(response.content.decode())
@ -46,7 +46,9 @@ class TestImpersonation(APITestCase):
"""test impersonation without permissions""" """test impersonation without permissions"""
self.client.force_login(self.other_user) self.client.force_login(self.other_user)
self.client.get(reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})) self.client.get(
reverse("authentik_core:impersonate-init", kwargs={"user_id": self.user.pk})
)
response = self.client.get(reverse("authentik_api:user-me")) response = self.client.get(reverse("authentik_api:user-me"))
response_body = loads(response.content.decode()) response_body = loads(response.content.decode())
@ -56,5 +58,5 @@ class TestImpersonation(APITestCase):
"""test un-impersonation without impersonating first""" """test un-impersonation without impersonating first"""
self.client.force_login(self.other_user) self.client.force_login(self.other_user)
response = self.client.get(reverse("authentik_api:user-impersonate-end")) response = self.client.get(reverse("authentik_core:impersonate-end"))
self.assertEqual(response.status_code, 204) self.assertRedirects(response, reverse("authentik_core:if-user"))

View File

@ -8,11 +8,11 @@ from django.urls.base import reverse
from rest_framework.test import APITestCase from rest_framework.test import APITestCase
from authentik.core.models import ( from authentik.core.models import (
USER_ATTRIBUTE_SA,
USER_ATTRIBUTE_TOKEN_EXPIRING, USER_ATTRIBUTE_TOKEN_EXPIRING,
AuthenticatedSession, AuthenticatedSession,
Token, Token,
User, User,
UserTypes,
) )
from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_tenant from authentik.core.tests.utils import create_test_admin_user, create_test_flow, create_test_tenant
from authentik.flows.models import FlowDesignation from authentik.flows.models import FlowDesignation
@ -28,19 +28,6 @@ class TestUsersAPI(APITestCase):
self.admin = create_test_admin_user() self.admin = create_test_admin_user()
self.user = User.objects.create(username="test-user") self.user = User.objects.create(username="test-user")
def test_filter_type(self):
"""Test API filtering by type"""
self.client.force_login(self.admin)
user = create_test_admin_user(type=UserTypes.EXTERNAL)
response = self.client.get(
reverse("authentik_api:user-list"),
data={
"type": UserTypes.EXTERNAL,
"username": user.username,
},
)
self.assertEqual(response.status_code, 200)
def test_metrics(self): def test_metrics(self):
"""Test user's metrics""" """Test user's metrics"""
self.client.force_login(self.admin) self.client.force_login(self.admin)
@ -154,8 +141,7 @@ class TestUsersAPI(APITestCase):
user_filter = User.objects.filter( user_filter = User.objects.filter(
username="test-sa", username="test-sa",
type=UserTypes.SERVICE_ACCOUNT, attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True, USER_ATTRIBUTE_SA: True},
attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True},
) )
self.assertTrue(user_filter.exists()) self.assertTrue(user_filter.exists())
user: User = user_filter.first() user: User = user_filter.first()
@ -180,8 +166,7 @@ class TestUsersAPI(APITestCase):
user_filter = User.objects.filter( user_filter = User.objects.filter(
username="test-sa", username="test-sa",
type=UserTypes.SERVICE_ACCOUNT, attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: False, USER_ATTRIBUTE_SA: True},
attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: False},
) )
self.assertTrue(user_filter.exists()) self.assertTrue(user_filter.exists())
user: User = user_filter.first() user: User = user_filter.first()
@ -207,8 +192,7 @@ class TestUsersAPI(APITestCase):
user_filter = User.objects.filter( user_filter = User.objects.filter(
username="test-sa", username="test-sa",
type=UserTypes.SERVICE_ACCOUNT, attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True, USER_ATTRIBUTE_SA: True},
attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True},
) )
self.assertTrue(user_filter.exists()) self.assertTrue(user_filter.exists())
user: User = user_filter.first() user: User = user_filter.first()
@ -234,8 +218,7 @@ class TestUsersAPI(APITestCase):
user_filter = User.objects.filter( user_filter = User.objects.filter(
username="test-sa", username="test-sa",
type=UserTypes.SERVICE_ACCOUNT, attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True, USER_ATTRIBUTE_SA: True},
attributes={USER_ATTRIBUTE_TOKEN_EXPIRING: True},
) )
self.assertTrue(user_filter.exists()) self.assertTrue(user_filter.exists())
user: User = user_filter.first() user: User = user_filter.first()

View File

@ -21,7 +21,7 @@ def create_test_flow(
) )
def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User: def create_test_admin_user(name: Optional[str] = None) -> User:
"""Generate a test-admin user""" """Generate a test-admin user"""
uid = generate_id(20) if not name else name uid = generate_id(20) if not name else name
group = Group.objects.create(name=uid, is_superuser=True) group = Group.objects.create(name=uid, is_superuser=True)
@ -29,7 +29,6 @@ def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
username=uid, username=uid,
name=uid, name=uid,
email=f"{uid}@goauthentik.io", email=f"{uid}@goauthentik.io",
**kwargs,
) )
user.set_password(uid) user.set_password(uid)
user.save() user.save()
@ -37,12 +36,12 @@ def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
return user return user
def create_test_tenant(**kwargs) -> Tenant: def create_test_tenant() -> Tenant:
"""Generate a test tenant, removing all other tenants to make sure this one """Generate a test tenant, removing all other tenants to make sure this one
matches.""" matches."""
uid = generate_id(20) uid = generate_id(20)
Tenant.objects.all().delete() Tenant.objects.all().delete()
return Tenant.objects.create(domain=uid, default=True, **kwargs) return Tenant.objects.create(domain=uid, default=True)
def create_test_cert(use_ec_private_key=False) -> CertificateKeyPair: def create_test_cert(use_ec_private_key=False) -> CertificateKeyPair:

View File

@ -8,7 +8,7 @@ from authentik.core.api.utils import PassiveSerializer
from authentik.flows.challenge import Challenge from authentik.flows.challenge import Challenge
@dataclass(slots=True) @dataclass
class UILoginButton: class UILoginButton:
"""Dataclass for Source's ui_login_button""" """Dataclass for Source's ui_login_button"""

View File

@ -16,7 +16,7 @@ from authentik.core.api.providers import ProviderViewSet
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
from authentik.core.api.tokens import TokenViewSet from authentik.core.api.tokens import TokenViewSet
from authentik.core.api.users import UserViewSet from authentik.core.api.users import UserViewSet
from authentik.core.views import apps from authentik.core.views import apps, impersonate
from authentik.core.views.debug import AccessDeniedView from authentik.core.views.debug import AccessDeniedView
from authentik.core.views.interface import FlowInterfaceView, InterfaceView from authentik.core.views.interface import FlowInterfaceView, InterfaceView
from authentik.core.views.session import EndSessionView from authentik.core.views.session import EndSessionView
@ -38,6 +38,17 @@ urlpatterns = [
apps.RedirectToAppLaunch.as_view(), apps.RedirectToAppLaunch.as_view(),
name="application-launch", name="application-launch",
), ),
# Impersonation
path(
"-/impersonation/<int:user_id>/",
impersonate.ImpersonateInitView.as_view(),
name="impersonate-init",
),
path(
"-/impersonation/end/",
impersonate.ImpersonateEndView.as_view(),
name="impersonate-end",
),
# Interfaces # Interfaces
path( path(
"if/admin/", "if/admin/",

View File

@ -0,0 +1,60 @@
"""authentik impersonation views"""
from django.http import HttpRequest, HttpResponse
from django.shortcuts import get_object_or_404, redirect
from django.views import View
from structlog.stdlib import get_logger
from authentik.core.middleware import (
SESSION_KEY_IMPERSONATE_ORIGINAL_USER,
SESSION_KEY_IMPERSONATE_USER,
)
from authentik.core.models import User
from authentik.events.models import Event, EventAction
from authentik.lib.config import CONFIG
LOGGER = get_logger()
class ImpersonateInitView(View):
"""Initiate Impersonation"""
def get(self, request: HttpRequest, user_id: int) -> HttpResponse:
"""Impersonation handler, checks permissions"""
if not CONFIG.y_bool("impersonation"):
LOGGER.debug("User attempted to impersonate", user=request.user)
return HttpResponse("Unauthorized", status=401)
if not request.user.has_perm("impersonate"):
LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
return HttpResponse("Unauthorized", status=401)
user_to_be = get_object_or_404(User, pk=user_id)
request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be
Event.new(EventAction.IMPERSONATION_STARTED).from_http(request, user_to_be)
return redirect("authentik_core:if-user")
class ImpersonateEndView(View):
"""End User impersonation"""
def get(self, request: HttpRequest) -> HttpResponse:
"""End Impersonation handler"""
if (
SESSION_KEY_IMPERSONATE_USER not in request.session
or SESSION_KEY_IMPERSONATE_ORIGINAL_USER not in request.session
):
LOGGER.debug("Can't end impersonation", user=request.user)
return redirect("authentik_core:if-user")
original_user = request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER]
del request.session[SESSION_KEY_IMPERSONATE_USER]
del request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER]
Event.new(EventAction.IMPERSONATION_ENDED).from_http(request, original_user)
return redirect("authentik_core:root-redirect")

View File

@ -189,8 +189,6 @@ class CertificateKeyPairFilter(FilterSet):
def filter_has_key(self, queryset, name, value): # pragma: no cover def filter_has_key(self, queryset, name, value): # pragma: no cover
"""Only return certificate-key pairs with keys""" """Only return certificate-key pairs with keys"""
if not value:
return queryset
return queryset.exclude(key_data__exact="") return queryset.exclude(key_data__exact="")
class Meta: class Meta:

View File

@ -7,7 +7,7 @@ from cryptography import x509
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec, rsa from cryptography.hazmat.primitives.asymmetric import ec, rsa
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes from cryptography.hazmat.primitives.asymmetric.types import PRIVATE_KEY_TYPES
from cryptography.x509.oid import NameOID from cryptography.x509.oid import NameOID
from authentik import __version__ from authentik import __version__
@ -40,7 +40,7 @@ class CertificateBuilder:
self.cert.save() self.cert.save()
return self.cert return self.cert
def generate_private_key(self) -> PrivateKeyTypes: def generate_private_key(self) -> PRIVATE_KEY_TYPES:
"""Generate private key""" """Generate private key"""
if self._use_ec_private_key: if self._use_ec_private_key:
return ec.generate_private_key(curve=ec.SECP256R1) return ec.generate_private_key(curve=ec.SECP256R1)

View File

@ -6,7 +6,7 @@ from uuid import uuid4
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes, PublicKeyTypes from cryptography.hazmat.primitives.asymmetric.types import PRIVATE_KEY_TYPES, PUBLIC_KEY_TYPES
from cryptography.hazmat.primitives.serialization import load_pem_private_key from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.x509 import Certificate, load_pem_x509_certificate from cryptography.x509 import Certificate, load_pem_x509_certificate
from django.db import models from django.db import models
@ -37,8 +37,8 @@ class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
) )
_cert: Optional[Certificate] = None _cert: Optional[Certificate] = None
_private_key: Optional[PrivateKeyTypes] = None _private_key: Optional[PRIVATE_KEY_TYPES] = None
_public_key: Optional[PublicKeyTypes] = None _public_key: Optional[PUBLIC_KEY_TYPES] = None
@property @property
def serializer(self) -> Serializer: def serializer(self) -> Serializer:
@ -56,7 +56,7 @@ class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
return self._cert return self._cert
@property @property
def public_key(self) -> Optional[PublicKeyTypes]: def public_key(self) -> Optional[PUBLIC_KEY_TYPES]:
"""Get public key of the private key""" """Get public key of the private key"""
if not self._public_key: if not self._public_key:
self._public_key = self.private_key.public_key() self._public_key = self.private_key.public_key()
@ -65,7 +65,7 @@ class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
@property @property
def private_key( def private_key(
self, self,
) -> Optional[PrivateKeyTypes]: ) -> Optional[PRIVATE_KEY_TYPES]:
"""Get python cryptography PrivateKey instance""" """Get python cryptography PrivateKey instance"""
if not self._private_key and self.key_data != "": if not self._private_key and self.key_data != "":
try: try:
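The two crypto files above differ only in the name of the cryptography type aliases (PrivateKeyTypes/PublicKeyTypes versus the upper-case PRIVATE_KEY_TYPES/PUBLIC_KEY_TYPES). A hedged compatibility sketch; the version boundary (around cryptography 40) is from memory and should be treated as an assumption.

try:
    # newer cryptography releases
    from cryptography.hazmat.primitives.asymmetric.types import (
        PrivateKeyTypes,
        PublicKeyTypes,
    )
except ImportError:
    # older releases only ship the upper-case aliases used on the other side
    from cryptography.hazmat.primitives.asymmetric.types import (
        PRIVATE_KEY_TYPES as PrivateKeyTypes,
        PUBLIC_KEY_TYPES as PublicKeyTypes,
    )

print(PrivateKeyTypes, PublicKeyTypes)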

View File

@ -46,7 +46,7 @@ def certificate_discovery(self: MonitoredTask):
certs = {} certs = {}
private_keys = {} private_keys = {}
discovered = 0 discovered = 0
for file in glob(CONFIG.get("cert_discovery_dir") + "/**", recursive=True): for file in glob(CONFIG.y("cert_discovery_dir") + "/**", recursive=True):
path = Path(file) path = Path(file)
if not path.exists(): if not path.exists():
continue continue
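This hunk (like default_token_key and the impersonation view earlier) shows the config accessor changing between CONFIG.get/CONFIG.get_int on one side and CONFIG.y/CONFIG.y_bool on the other. A hedged sketch of bridging the two inside an authentik checkout; the single-argument call shape is assumed from the usages visible in this diff.

from authentik.lib.config import CONFIG

def config_get(key: str):
    # Prefer the newer accessor when present, otherwise fall back to the old one.
    getter = getattr(CONFIG, "get", None) or CONFIG.y
    return getter(key)

print(config_get("cert_discovery_dir"))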

View File

@ -128,26 +128,8 @@ class TestCrypto(APITestCase):
response = self.client.get( response = self.client.get(
reverse( reverse(
"authentik_api:certificatekeypair-list", "authentik_api:certificatekeypair-list",
), )
data={"name": cert.name}, + f"?name={cert.name}"
)
self.assertEqual(200, response.status_code)
body = loads(response.content.decode())
api_cert = [x for x in body["results"] if x["name"] == cert.name][0]
self.assertEqual(api_cert["fingerprint_sha1"], cert.fingerprint_sha1)
self.assertEqual(api_cert["fingerprint_sha256"], cert.fingerprint_sha256)
def test_list_has_key_false(self):
"""Test API List with has_key set to false"""
cert = create_test_cert()
cert.key_data = ""
cert.save()
self.client.force_login(create_test_admin_user())
response = self.client.get(
reverse(
"authentik_api:certificatekeypair-list",
),
data={"name": cert.name, "has_key": False},
) )
self.assertEqual(200, response.status_code) self.assertEqual(200, response.status_code)
body = loads(response.content.decode()) body = loads(response.content.decode())
@ -162,8 +144,8 @@ class TestCrypto(APITestCase):
response = self.client.get( response = self.client.get(
reverse( reverse(
"authentik_api:certificatekeypair-list", "authentik_api:certificatekeypair-list",
), )
data={"name": cert.name, "include_details": False}, + f"?name={cert.name}&include_details=false"
) )
self.assertEqual(200, response.status_code) self.assertEqual(200, response.status_code)
body = loads(response.content.decode()) body = loads(response.content.decode())
@ -186,8 +168,8 @@ class TestCrypto(APITestCase):
reverse( reverse(
"authentik_api:certificatekeypair-view-certificate", "authentik_api:certificatekeypair-view-certificate",
kwargs={"pk": keypair.pk}, kwargs={"pk": keypair.pk},
), )
data={"download": True}, + "?download",
) )
self.assertEqual(200, response.status_code) self.assertEqual(200, response.status_code)
self.assertIn("Content-Disposition", response) self.assertIn("Content-Disposition", response)
@ -207,8 +189,8 @@ class TestCrypto(APITestCase):
reverse( reverse(
"authentik_api:certificatekeypair-view-private-key", "authentik_api:certificatekeypair-view-private-key",
kwargs={"pk": keypair.pk}, kwargs={"pk": keypair.pk},
), )
data={"download": True}, + "?download",
) )
self.assertEqual(200, response.status_code) self.assertEqual(200, response.status_code)
self.assertIn("Content-Disposition", response) self.assertIn("Content-Disposition", response)
@ -218,7 +200,7 @@ class TestCrypto(APITestCase):
self.client.force_login(create_test_admin_user()) self.client.force_login(create_test_admin_user())
keypair = create_test_cert() keypair = create_test_cert()
provider = OAuth2Provider.objects.create( provider = OAuth2Provider.objects.create(
name=generate_id(), name="test",
client_id="test", client_id="test",
client_secret=generate_key(), client_secret=generate_key(),
authorization_flow=create_test_flow(), authorization_flow=create_test_flow(),

View File

@ -1,154 +0,0 @@
"""Enterprise API Views"""
from datetime import datetime, timedelta
from django.utils.timezone import now
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.fields import BooleanField, CharField, DateTimeField, IntegerField
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.api.decorators import permission_required
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
from authentik.core.models import User, UserTypes
from authentik.enterprise.models import License, LicenseKey
from authentik.root.install_id import get_install_id
class LicenseSerializer(ModelSerializer):
"""License Serializer"""
def validate_key(self, key: str) -> str:
"""Validate the license key (install_id and signature)"""
LicenseKey.validate(key)
return key
class Meta:
model = License
fields = [
"license_uuid",
"name",
"key",
"expiry",
"internal_users",
"external_users",
]
extra_kwargs = {
"name": {"read_only": True},
"expiry": {"read_only": True},
"internal_users": {"read_only": True},
"external_users": {"read_only": True},
}
class LicenseSummary(PassiveSerializer):
"""Serializer for license status"""
internal_users = IntegerField(required=True)
external_users = IntegerField(required=True)
valid = BooleanField()
show_admin_warning = BooleanField()
show_user_warning = BooleanField()
read_only = BooleanField()
latest_valid = DateTimeField()
has_license = BooleanField()
class LicenseForecastSerializer(PassiveSerializer):
"""Serializer for license forecast"""
internal_users = IntegerField(required=True)
external_users = IntegerField(required=True)
forecasted_internal_users = IntegerField(required=True)
forecasted_external_users = IntegerField(required=True)
class LicenseViewSet(UsedByMixin, ModelViewSet):
"""License Viewset"""
queryset = License.objects.all()
serializer_class = LicenseSerializer
search_fields = ["name"]
ordering = ["name"]
filterset_fields = ["name"]
@permission_required(None, ["authentik_enterprise.view_license"])
@extend_schema(
request=OpenApiTypes.NONE,
responses={
200: inline_serializer("InstallIDSerializer", {"install_id": CharField(required=True)}),
},
)
@action(detail=False, methods=["GET"], permission_classes=[IsAdminUser])
def get_install_id(self, request: Request) -> Response:
"""Get install_id"""
return Response(
data={
"install_id": get_install_id(),
}
)
@extend_schema(
request=OpenApiTypes.NONE,
responses={
200: LicenseSummary(),
},
)
@action(detail=False, methods=["GET"], permission_classes=[IsAuthenticated])
def summary(self, request: Request) -> Response:
"""Get the total license status"""
total = LicenseKey.get_total()
last_valid = LicenseKey.last_valid_date()
# TODO: move this to a different place?
show_admin_warning = last_valid < now() - timedelta(weeks=2)
show_user_warning = last_valid < now() - timedelta(weeks=4)
read_only = last_valid < now() - timedelta(weeks=6)
latest_valid = datetime.fromtimestamp(total.exp)
response = LicenseSummary(
data={
"internal_users": total.internal_users,
"external_users": total.external_users,
"valid": total.is_valid(),
"show_admin_warning": show_admin_warning,
"show_user_warning": show_user_warning,
"read_only": read_only,
"latest_valid": latest_valid,
"has_license": License.objects.all().count() > 0,
}
)
response.is_valid(raise_exception=True)
return Response(response.data)
@permission_required(None, ["authentik_enterprise.view_license"])
@extend_schema(
request=OpenApiTypes.NONE,
responses={
200: LicenseForecastSerializer(),
},
)
@action(detail=False, methods=["GET"])
def forecast(self, request: Request) -> Response:
"""Forecast how many users will be required in a year"""
last_month = now() - timedelta(days=30)
# Forecast for internal users
internal_in_last_month = User.objects.filter(
type=UserTypes.INTERNAL, date_joined__gte=last_month
).count()
# Forecast for external users
external_in_last_month = LicenseKey.get_external_user_count()
forecast_for_months = 12
response = LicenseForecastSerializer(
data={
"internal_users": LicenseKey.get_default_user_count(),
"external_users": LicenseKey.get_external_user_count(),
"forecasted_internal_users": (internal_in_last_month * forecast_for_months),
"forecasted_external_users": (external_in_last_month * forecast_for_months),
}
)
response.is_valid(raise_exception=True)
return Response(response.data)
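The forecast endpoint above takes one month of observed internal sign-ups and external usage and multiplies it out to a year. A standalone sketch of that arithmetic, with illustrative numbers.

from dataclasses import dataclass

@dataclass
class Forecast:
    internal_users: int
    external_users: int
    forecasted_internal_users: int
    forecasted_external_users: int

def forecast(current_internal: int, current_external: int,
             internal_joined_last_month: int, external_active_last_month: int,
             months: int = 12) -> Forecast:
    # Linear extrapolation: one month of growth/usage multiplied out to a year.
    return Forecast(
        internal_users=current_internal,
        external_users=current_external,
        forecasted_internal_users=internal_joined_last_month * months,
        forecasted_external_users=external_active_last_month * months,
    )

print(forecast(40, 200, 5, 20))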

View File

@ -9,7 +9,3 @@ class AuthentikEnterpriseConfig(ManagedAppConfig):
label = "authentik_enterprise" label = "authentik_enterprise"
verbose_name = "authentik Enterprise" verbose_name = "authentik Enterprise"
default = True default = True
def reconcile_load_enterprise_signals(self):
"""Load enterprise signals"""
self.import_module("authentik.enterprise.signals")

View File

@ -1,52 +0,0 @@
# Generated by Django 4.1.10 on 2023-07-06 12:51
import uuid
from django.db import migrations, models
import authentik.enterprise.models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="License",
fields=[
(
"license_uuid",
models.UUIDField(
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
),
),
("key", models.TextField(unique=True)),
("name", models.TextField()),
("expiry", models.DateTimeField()),
("users", models.BigIntegerField()),
("external_users", models.BigIntegerField()),
],
),
migrations.CreateModel(
name="LicenseUsage",
fields=[
("expiring", models.BooleanField(default=True)),
("expires", models.DateTimeField(default=authentik.enterprise.models.usage_expiry)),
(
"usage_uuid",
models.UUIDField(
default=uuid.uuid4, editable=False, primary_key=True, serialize=False
),
),
("user_count", models.BigIntegerField()),
("external_user_count", models.BigIntegerField()),
("within_limits", models.BooleanField()),
("record_date", models.DateTimeField(auto_now_add=True)),
],
options={
"abstract": False,
},
),
]

View File

@ -1,36 +0,0 @@
# Generated by Django 4.2.4 on 2023-08-23 10:06
import django.contrib.postgres.indexes
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_enterprise", "0001_initial"),
]
operations = [
migrations.RenameField(
model_name="license",
old_name="users",
new_name="internal_users",
),
migrations.AlterField(
model_name="license",
name="key",
field=models.TextField(),
),
migrations.AddIndex(
model_name="license",
index=django.contrib.postgres.indexes.HashIndex(
fields=["key"], name="authentik_e_key_523e13_hash"
),
),
migrations.AlterModelOptions(
name="licenseusage",
options={
"verbose_name": "License Usage",
"verbose_name_plural": "License Usage Records",
},
),
]

Some files were not shown because too many files have changed in this diff.