Compare commits
231 Commits
version/20 ... version/20
| SHA1 | Author | Date | |
|---|---|---|---|
| 2a3b049b01 | |||
| e4a5e86c93 | |||
| 3a51bcd890 | |||
| c28f68400d | |||
| 5d50fc281a | |||
| 9f7d1466e9 | |||
| c815d24806 | |||
| d1200a7e40 | |||
| edd4f9ceae | |||
| 1cfe81887b | |||
| bb5e0ebab1 | |||
| dfda76d896 | |||
| 8fc5114ce4 | |||
| e7b4363d21 | |||
| 53905d1a89 | |||
| 0ad1392632 | |||
| 6db1c914ee | |||
| 00324f922d | |||
| 8a24ddad28 | |||
| 0f85fe3c29 | |||
| 1f05eaa420 | |||
| 84e126a32c | |||
| 9ae69866bd | |||
| 56576a7f44 | |||
| 7f0295ba53 | |||
| 5553b3ff36 | |||
| 6f969525fe | |||
| bac12246fb | |||
| b53ef6e529 | |||
| 39c62afb93 | |||
| c98bdbacc5 | |||
| 1e8d45dc15 | |||
| 202b057ce9 | |||
| d5d8641b37 | |||
| 9dd37689e3 | |||
| cc0832f487 | |||
| b515bf7d2e | |||
| 34fbf3941b | |||
| e73606b54d | |||
| 0a413fe21a | |||
| d1b9f1e6b8 | |||
| e5a6e128e4 | |||
| 9295d1ed0b | |||
| 5d479a6c8f | |||
| 4a773b2b4f | |||
| 8003d67844 | |||
| 58baf97e2d | |||
| 51783c1cbb | |||
| 94290c7e36 | |||
| 123ff7ad1f | |||
| 8f3e863cce | |||
| 3d6c459349 | |||
| 6a583bae49 | |||
| 78e5879d9a | |||
| fdcac2a9ed | |||
| e81715caef | |||
| ab2b13938e | |||
| 5c97a3aef3 | |||
| e6963c543d | |||
| 9ca15983a2 | |||
| 99ef94b7aa | |||
| 133bedafba | |||
| c3faa61ed9 | |||
| da74304221 | |||
| ed6659a46d | |||
| 0abb1f94a4 | |||
| c7e299e0bf | |||
| 8a6590bac8 | |||
| ed717dcfa2 | |||
| b6df42f580 | |||
| 2ea85bd0c4 | |||
| 68fa8105e1 | |||
| 79db0ce4c1 | |||
| 5e23b11764 | |||
| c4e029ffe2 | |||
| 61b5b36192 | |||
| c6cc1b1728 | |||
| 77dd652160 | |||
| 1144944adb | |||
| 7751be284e | |||
| 74382c6287 | |||
| 011babbbd9 | |||
| 3c01a1dd7b | |||
| 6e832be2de | |||
| 46017f2f86 | |||
| da50eb0369 | |||
| b996e3cee7 | |||
| 12735cc14c | |||
| 4d36699b78 | |||
| 8110d2861b | |||
| 1cc60f572d | |||
| 90151a13ae | |||
| f958aa6930 | |||
| 13fbac30a2 | |||
| 4f4cdf16f1 | |||
| 7d75599627 | |||
| 924a13e832 | |||
| ae83c35dfd | |||
| e9102f4e28 | |||
| 9b8c1cbea5 | |||
| 6424bf98da | |||
| 74fb0f9e2a | |||
| 4380f37a77 | |||
| 17fccd44e6 | |||
| 217a8b5610 | |||
| 2cef220a3e | |||
| 5a8c66d325 | |||
| 8de13d3f67 | |||
| 5c22bedbaf | |||
| 8a0f993f0b | |||
| abcf515a69 | |||
| 894f704c27 | |||
| 7798292aa8 | |||
| 3005ca17bd | |||
| 909461e533 | |||
| df838a4023 | |||
| 0f86b62dd3 | |||
| a40c3aeb68 | |||
| 4080738ded | |||
| 4a89be3048 | |||
| e587c53e18 | |||
| 023b97aa69 | |||
| 51365dba74 | |||
| 0d3705685e | |||
| 738e4d5c74 | |||
| b14b9cb0dd | |||
| 2a21ebf7b0 | |||
| 5bc1301043 | |||
| e0e4bf6972 | |||
| 337677ad12 | |||
| 3712d5aee2 | |||
| dd82d55725 | |||
| 8d766efecb | |||
| 9ac3b29418 | |||
| 5000c5b061 | |||
| b362d2af03 | |||
| bcd42fce13 | |||
| 6deddd038f | |||
| 3b47cb64da | |||
| cf5e70c759 | |||
| 20bc38a54b | |||
| 672a4ab1f4 | |||
| 47dd667261 | |||
| d1ac69789b | |||
| 08abf81c6d | |||
| 76bd987e6f | |||
| 5374352411 | |||
| 08eff4cc5d | |||
| c87a9f9489 | |||
| 8f6d700aa8 | |||
| c6843b026c | |||
| 3769c33ef0 | |||
| 8982afaf44 | |||
| 58c221e867 | |||
| 108d3e56e3 | |||
| 145b32c480 | |||
| c788504bb0 | |||
| 34782b31e5 | |||
| 5a3ca13d76 | |||
| 5dc0f3b91b | |||
| f51515f3de | |||
| f978575293 | |||
| cb64eed90d | |||
| db1f7f0400 | |||
| 0d02dbf55c | |||
| 6da78b8c32 | |||
| 3a80bc8bda | |||
| 1aa9c0f9ca | |||
| 2da7a8fede | |||
| 89cb402f42 | |||
| b617fd213f | |||
| 97b0f58f25 | |||
| 49a98bb744 | |||
| f93a00d773 | |||
| 8de40a8a21 | |||
| b9c54e97fa | |||
| f1c55465f7 | |||
| 40c2b2860b | |||
| a92bce322d | |||
| af83308fd4 | |||
| 73d991e75a | |||
| 1eba3f1334 | |||
| b86251255d | |||
| ccab41a6ca | |||
| 0e051031b1 | |||
| aecbe8c585 | |||
| da98022704 | |||
| e13f9c0b38 | |||
| 7941fb9d95 | |||
| d2392b0881 | |||
| b2044d75fb | |||
| 617b64b7db | |||
| 2bf5f2709a | |||
| f03325df28 | |||
| 2b71e5bdfd | |||
| f861737b85 | |||
| 6036d88392 | |||
| bfc8a56a0b | |||
| 8d995011b8 | |||
| 5646141fe2 | |||
| 96b0bc324e | |||
| 335d6edd11 | |||
| 5d9bed130a | |||
| 0a1ab74707 | |||
| ef24b94585 | |||
| 77b0438aa4 | |||
| 2788329880 | |||
| 15ab11be70 | |||
| 8d5460a132 | |||
| 5ba2c80813 | |||
| 06766bdb25 | |||
| fdae13316c | |||
| ae21886e8e | |||
| f5dc81907a | |||
| 40f8ce3c4c | |||
| c934915776 | |||
| d70c8fbcc3 | |||
| 12b26e49ec | |||
| 0ac548d56e | |||
| e771e1857f | |||
| 479e9750c7 | |||
| c5e7801247 | |||
| 48ea15a946 | |||
| e4c06f7356 | |||
| 4d7d866e4b | |||
| 72a93c0959 | |||
| 73733b20b6 | |||
| 3872314931 | |||
| 85c6ede448 | |||
| 49c2bee9d6 | |||
| 6b2c9d7c44 |
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 2021.10.1-rc3
current_version = 2021.10.4
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)
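As a reference for the `parse` pattern in the bumpversion hunk above, here is a minimal sketch of how it splits a release string. This is illustration only, using Python's `re` module directly; the bumpversion tool applies the pattern internally when reading `current_version`.

```python
import re

# Pattern copied verbatim from the bumpversion hunk above.
PARSE = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-?(?P<release>.*)"

for version in ("2021.10.1-rc3", "2021.10.4"):
    print(version, re.match(PARSE, version).groupdict())

# 2021.10.1-rc3 {'major': '2021', 'minor': '10', 'patch': '1', 'release': 'rc3'}
# 2021.10.4 {'major': '2021', 'minor': '10', 'patch': '4', 'release': ''}
```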
133 .github/workflows/ci-main.yml (vendored)
@ -25,14 +25,14 @@ jobs:
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
# - id: cache-pipenv
|
||||
# uses: actions/cache@v2.1.6
|
||||
# with:
|
||||
# path: ~/.local/share/virtualenvs
|
||||
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
# env:
|
||||
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: run pylint
|
||||
run: pipenv run pylint authentik tests lifecycle
|
||||
@ -43,14 +43,14 @@ jobs:
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
# - id: cache-pipenv
|
||||
# uses: actions/cache@v2.1.6
|
||||
# with:
|
||||
# path: ~/.local/share/virtualenvs
|
||||
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
# env:
|
||||
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: run black
|
||||
run: pipenv run black --check authentik tests lifecycle
|
||||
@ -61,14 +61,14 @@ jobs:
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
# - id: cache-pipenv
|
||||
# uses: actions/cache@v2.1.6
|
||||
# with:
|
||||
# path: ~/.local/share/virtualenvs
|
||||
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
# env:
|
||||
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: run isort
|
||||
run: pipenv run isort --check authentik tests lifecycle
|
||||
@ -79,14 +79,14 @@ jobs:
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
# - id: cache-pipenv
|
||||
# uses: actions/cache@v2.1.6
|
||||
# with:
|
||||
# path: ~/.local/share/virtualenvs
|
||||
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
# env:
|
||||
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: run bandit
|
||||
run: pipenv run bandit -r authentik tests lifecycle
|
||||
@ -113,14 +113,14 @@ jobs:
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
# - id: cache-pipenv
|
||||
# uses: actions/cache@v2.1.6
|
||||
# with:
|
||||
# path: ~/.local/share/virtualenvs
|
||||
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
# env:
|
||||
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: run migrations
|
||||
run: pipenv run python -m lifecycle.migrate
|
||||
@ -137,19 +137,21 @@ jobs:
|
||||
id: ev
|
||||
run: |
|
||||
python ./scripts/gh_env.py
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
# - id: cache-pipenv
|
||||
# uses: actions/cache@v2.1.6
|
||||
# with:
|
||||
# path: ~/.local/share/virtualenvs
|
||||
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: checkout stable
|
||||
run: |
|
||||
# Copy current, latest config to local
|
||||
cp authentik/lib/default.yml local.env.yml
|
||||
git checkout $(git describe --abbrev=0 --match 'version/*')
|
||||
git checkout ${{ steps.ev.outputs.branchName }} -- .github
|
||||
git checkout ${{ steps.ev.outputs.branchName }} -- scripts
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
# env:
|
||||
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: |
|
||||
scripts/ci_prepare.sh
|
||||
# Sync anyways since stable will have different dependencies
|
||||
@ -163,8 +165,8 @@ jobs:
|
||||
git checkout ${{ steps.ev.outputs.branchName }}
|
||||
pipenv sync --dev
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
# env:
|
||||
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- name: migrate to latest
|
||||
run: pipenv run python -m lifecycle.migrate
|
||||
@ -175,14 +177,14 @@ jobs:
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
# - id: cache-pipenv
|
||||
# uses: actions/cache@v2.1.6
|
||||
# with:
|
||||
# path: ~/.local/share/virtualenvs
|
||||
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
# env:
|
||||
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- uses: testspace-com/setup-testspace@v1
|
||||
with:
|
||||
@ -204,14 +206,14 @@ jobs:
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
# - id: cache-pipenv
|
||||
# uses: actions/cache@v2.1.6
|
||||
# with:
|
||||
# path: ~/.local/share/virtualenvs
|
||||
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
# env:
|
||||
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: scripts/ci_prepare.sh
|
||||
- uses: testspace-com/setup-testspace@v1
|
||||
with:
|
||||
@ -243,14 +245,14 @@ jobs:
|
||||
- uses: testspace-com/setup-testspace@v1
|
||||
with:
|
||||
domain: ${{github.repository_owner}}
|
||||
- id: cache-pipenv
|
||||
uses: actions/cache@v2.1.6
|
||||
with:
|
||||
path: ~/.local/share/virtualenvs
|
||||
key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
# - id: cache-pipenv
|
||||
# uses: actions/cache@v2.1.6
|
||||
# with:
|
||||
# path: ~/.local/share/virtualenvs
|
||||
# key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
|
||||
- name: prepare
|
||||
env:
|
||||
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
# env:
|
||||
# INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
|
||||
run: |
|
||||
scripts/ci_prepare.sh
|
||||
docker-compose -f tests/e2e/docker-compose.yml up -d
|
||||
@ -288,6 +290,7 @@ jobs:
|
||||
- test-integration
|
||||
- test-e2e
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 120
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Docker Buildx
|
||||
|
||||
1 .github/workflows/ci-outpost.yml (vendored)
@@ -31,6 +31,7 @@ jobs:
golangci/golangci-lint:v1.39.0 \
golangci-lint run -v --timeout 200s
build:
timeout-minutes: 120
needs:
- lint-golint
strategy:
20 .github/workflows/release-publish.yml (vendored)
@@ -30,14 +30,14 @@ jobs:
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik:2021.10.1-rc3,
beryju/authentik:2021.10.4,
beryju/authentik:latest,
ghcr.io/goauthentik/server:2021.10.1-rc3,
ghcr.io/goauthentik/server:2021.10.4,
ghcr.io/goauthentik/server:latest
platforms: linux/amd64,linux/arm64
context: .
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.10.1-rc3', 'rc') }}
if: ${{ github.event_name == 'release' && !contains('2021.10.4', 'rc') }}
run: |
docker pull beryju/authentik:latest
docker tag beryju/authentik:latest beryju/authentik:stable
@@ -72,14 +72,14 @@ jobs:
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-proxy:2021.10.1-rc3,
beryju/authentik-proxy:2021.10.4,
beryju/authentik-proxy:latest,
ghcr.io/goauthentik/proxy:2021.10.1-rc3,
ghcr.io/goauthentik/proxy:2021.10.4,
ghcr.io/goauthentik/proxy:latest
file: proxy.Dockerfile
platforms: linux/amd64,linux/arm64
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.10.1-rc3', 'rc') }}
if: ${{ github.event_name == 'release' && !contains('2021.10.4', 'rc') }}
run: |
docker pull beryju/authentik-proxy:latest
docker tag beryju/authentik-proxy:latest beryju/authentik-proxy:stable
@@ -114,14 +114,14 @@ jobs:
with:
push: ${{ github.event_name == 'release' }}
tags: |
beryju/authentik-ldap:2021.10.1-rc3,
beryju/authentik-ldap:2021.10.4,
beryju/authentik-ldap:latest,
ghcr.io/goauthentik/ldap:2021.10.1-rc3,
ghcr.io/goauthentik/ldap:2021.10.4,
ghcr.io/goauthentik/ldap:latest
file: ldap.Dockerfile
platforms: linux/amd64,linux/arm64
- name: Building Docker Image (stable)
if: ${{ github.event_name == 'release' && !contains('2021.10.1-rc3', 'rc') }}
if: ${{ github.event_name == 'release' && !contains('2021.10.4', 'rc') }}
run: |
docker pull beryju/authentik-ldap:latest
docker tag beryju/authentik-ldap:latest beryju/authentik-ldap:stable
@@ -170,7 +170,7 @@ jobs:
SENTRY_PROJECT: authentik
SENTRY_URL: https://sentry.beryju.org
with:
version: authentik@2021.10.1-rc3
version: authentik@2021.10.4
environment: beryjuorg-prod
sourcemaps: './web/dist'
url_prefix: '~/static/dist'
7 .github/workflows/translation-compile.yml (vendored)
@@ -21,14 +21,7 @@ jobs:
- uses: actions/setup-python@v2
with:
python-version: '3.9'
- id: cache-pipenv
uses: actions/cache@v2.1.6
with:
path: ~/.local/share/virtualenvs
key: ${{ runner.os }}-pipenv-v2-${{ hashFiles('**/Pipfile.lock') }}
- name: prepare
env:
INSTALL: ${{ steps.cache-pipenv.outputs.cache-hit }}
run: |
sudo apt-get update
sudo apt-get install -y gettext
12 Dockerfile
@@ -1,5 +1,5 @@
# Stage 1: Lock python dependencies
FROM docker.io/python:3.9-slim-buster as locker
FROM docker.io/python:3.9-slim-bullseye as locker
COPY ./Pipfile /app/
COPY ./Pipfile.lock /app/
@@ -11,7 +11,7 @@ RUN pip install pipenv && \
pipenv lock -r --dev-only > requirements-dev.txt
# Stage 2: Build website
FROM docker.io/node as website-builder
FROM docker.io/node:16 as website-builder
COPY ./website /static/
@@ -19,7 +19,7 @@ ENV NODE_ENV=production
RUN cd /static && npm i && npm run build-docs-only
# Stage 3: Build webui
FROM docker.io/node as web-builder
FROM docker.io/node:16 as web-builder
COPY ./web /static/
@@ -27,7 +27,7 @@ ENV NODE_ENV=production
RUN cd /static && npm i && npm run build
# Stage 4: Build go proxy
FROM docker.io/golang:1.17.2 AS builder
FROM docker.io/golang:1.17.3-bullseye AS builder
WORKDIR /work
@@ -47,7 +47,7 @@ COPY ./go.sum /work/go.sum
RUN go build -o /work/authentik ./cmd/server/main.go
# Stage 5: Run
FROM docker.io/python:3.9-slim-buster
FROM docker.io/python:3.9-slim-bullseye
WORKDIR /
COPY --from=locker /app/requirements.txt /
@@ -59,7 +59,7 @@ ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
RUN apt-get update && \
apt-get install -y --no-install-recommends curl ca-certificates gnupg git runit && \
curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
echo "deb http://apt.postgresql.org/pub/repos/apt buster-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
echo "deb http://apt.postgresql.org/pub/repos/apt bullseye-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
apt-get update && \
apt-get install -y --no-install-recommends libpq-dev postgresql-client build-essential libxmlsec1-dev pkg-config libmaxminddb0 && \
pip install -r /requirements.txt --no-cache-dir && \
14 Makefile
@@ -30,7 +30,6 @@ lint-fix:
website/developer-docs
lint:
pyright authentik tests lifecycle
bandit -r authentik tests lifecycle -x node_modules
pylint authentik tests lifecycle
@@ -49,7 +48,7 @@ gen-web:
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
openapitools/openapi-generator-cli generate \
ghcr.io/beryju/openapi-generator generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/web-api \
@@ -61,18 +60,19 @@ gen-web:
\cp -rfv web-api/* web/node_modules/@goauthentik/api
gen-outpost:
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O config.yaml
mkdir -p templates
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O templates/README.mustache
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/go.mod.mustache -O templates/go.mod.mustache
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
openapitools/openapi-generator-cli generate \
--git-host goauthentik.io \
--git-repo-id outpost \
--git-user-id api \
-i /local/schema.yml \
-g go \
-o /local/api \
--additional-properties=packageName=api,enumClassPrefix=true,useOneOfDiscriminatorLookup=true,disallowAdditionalPropertiesIfNotPresent=false
rm -f api/go.mod api/go.sum
-c /local/config.yaml
go mod edit -replace goauthentik.io/api=./api
gen: gen-build gen-clean gen-web
961 Pipfile.lock (generated): file diff suppressed because it is too large.
@@ -6,8 +6,8 @@
| Version | Supported |
| ---------- | ------------------ |
| 2021.8.x | :white_check_mark: |
| 2021.9.x | :white_check_mark: |
| 2021.10.x | :white_check_mark: |

## Reporting a Vulnerability
@@ -1,3 +1,3 @@
"""authentik"""
__version__ = "2021.10.1-rc3"
__version__ = "2021.10.4"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -27,6 +27,7 @@ VERSION_CACHE_TIMEOUT = 8 * 60 * 60 # 8 hours
# Chop of the first ^ because we want to search the entire string
URL_FINDER = URLValidator.regex.pattern[1:]
PROM_INFO = Info("authentik_version", "Currently running authentik version")
LOCAL_VERSION = parse(__version__)

def _set_prom_info():
@@ -48,7 +49,7 @@ def clear_update_notifications():
if "new_version" not in notification.event.context:
continue
notification_version = notification.event.context["new_version"]
if notification_version == __version__:
if LOCAL_VERSION >= parse(notification_version):
notification.delete()
@@ -74,8 +75,7 @@ def update_latest_version(self: MonitoredTask):
_set_prom_info()
# Check if upstream version is newer than what we're running,
# and if no event exists yet, create one.
local_version = parse(__version__)
if local_version < parse(upstream_version):
if LOCAL_VERSION < parse(upstream_version):
# Event has already been created, don't create duplicate
if Event.objects.filter(
action=EventAction.UPDATE_AVAILABLE,
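The hunks above replace string comparisons on `__version__` with comparisons on the parsed `LOCAL_VERSION`. Below is a minimal sketch of why that matters, under the assumption that `parse` here is `packaging.version.parse` (the import itself is outside the lines shown):

```python
from packaging.version import parse

# Parsed versions order release candidates before the final release,
# which plain string equality cannot do.
assert parse("2021.10.1-rc3") < parse("2021.10.1")
assert parse("2021.10.4") > parse("2021.10.1-rc3")

# The old check `notification_version == __version__` only matched exact
# strings; the new `LOCAL_VERSION >= parse(notification_version)` also
# clears notifications for versions that have since been superseded.
```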
@@ -1,19 +0,0 @@
"""API tasks"""

from authentik.lib.utils.http import get_http_session
from authentik.root.celery import CELERY_APP

SENTRY_SESSION = get_http_session()

@CELERY_APP.task()
def sentry_proxy(payload: str):
"""Relay data to sentry"""
SENTRY_SESSION.post(
"https://sentry.beryju.org/api/8/envelope/",
data=payload,
headers={
"Content-Type": "application/octet-stream",
},
timeout=10,
)
@@ -4,7 +4,7 @@ from django.urls import include, path
from authentik.api.v3.urls import urlpatterns as v3_urls

urlpatterns = [
# Remove in 2022.1
# TODO: Remove in 2022.1
path("v2beta/", include(v3_urls)),
path("v3/", include(v3_urls)),
]
@ -1,65 +0,0 @@
|
||||
"""Sentry tunnel"""
|
||||
from json import loads
|
||||
|
||||
from django.conf import settings
|
||||
from django.http.request import HttpRequest
|
||||
from django.http.response import HttpResponse
|
||||
from rest_framework.authentication import SessionAuthentication
|
||||
from rest_framework.parsers import BaseParser
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.throttling import AnonRateThrottle
|
||||
from rest_framework.views import APIView
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.api.tasks import sentry_proxy
|
||||
from authentik.lib.config import CONFIG
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class PlainTextParser(BaseParser):
|
||||
"""Plain text parser."""
|
||||
|
||||
media_type = "text/plain"
|
||||
|
||||
def parse(self, stream, media_type=None, parser_context=None) -> str:
|
||||
"""Simply return a string representing the body of the request."""
|
||||
return stream.read()
|
||||
|
||||
|
||||
class CsrfExemptSessionAuthentication(SessionAuthentication):
|
||||
"""CSRF-exempt Session authentication"""
|
||||
|
||||
def enforce_csrf(self, request: Request):
|
||||
return # To not perform the csrf check previously happening
|
||||
|
||||
|
||||
class SentryTunnelView(APIView):
|
||||
"""Sentry tunnel, to prevent ad blockers from blocking sentry"""
|
||||
|
||||
serializer_class = None
|
||||
parser_classes = [PlainTextParser]
|
||||
throttle_classes = [AnonRateThrottle]
|
||||
permission_classes = [AllowAny]
|
||||
authentication_classes = [CsrfExemptSessionAuthentication]
|
||||
|
||||
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||
"""Sentry tunnel, to prevent ad blockers from blocking sentry"""
|
||||
# Only allow usage of this endpoint when error reporting is enabled
|
||||
if not CONFIG.y_bool("error_reporting.enabled", False):
|
||||
LOGGER.debug("error reporting disabled")
|
||||
return HttpResponse(status=400)
|
||||
# Body is 2 json objects separated by \n
|
||||
full_body = request.body
|
||||
lines = full_body.splitlines()
|
||||
if len(lines) < 1:
|
||||
return HttpResponse(status=400)
|
||||
header = loads(lines[0])
|
||||
# Check that the DSN is what we expect
|
||||
dsn = header.get("dsn", "")
|
||||
if dsn != settings.SENTRY_DSN:
|
||||
LOGGER.debug("Invalid dsn", have=dsn, expected=settings.SENTRY_DSN)
|
||||
return HttpResponse(status=400)
|
||||
sentry_proxy.delay(full_body.decode())
|
||||
return HttpResponse(status=204)
|
||||
@ -11,14 +11,14 @@ from authentik.admin.api.tasks import TaskViewSet
|
||||
from authentik.admin.api.version import VersionView
|
||||
from authentik.admin.api.workers import WorkerView
|
||||
from authentik.api.v3.config import ConfigView
|
||||
from authentik.api.v3.sentry import SentryTunnelView
|
||||
from authentik.api.views import APIBrowserView
|
||||
from authentik.core.api.applications import ApplicationViewSet
|
||||
from authentik.core.api.authenticated_sessions import AuthenticatedSessionViewSet
|
||||
from authentik.core.api.devices import DeviceViewSet
|
||||
from authentik.core.api.groups import GroupViewSet
|
||||
from authentik.core.api.propertymappings import PropertyMappingViewSet
|
||||
from authentik.core.api.providers import ProviderViewSet
|
||||
from authentik.core.api.sources import SourceViewSet
|
||||
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
|
||||
from authentik.core.api.tokens import TokenViewSet
|
||||
from authentik.core.api.users import UserViewSet
|
||||
from authentik.crypto.api import CertificateKeyPairViewSet
|
||||
@ -136,6 +136,7 @@ router.register("events/transports", NotificationTransportViewSet)
|
||||
router.register("events/rules", NotificationRuleViewSet)
|
||||
|
||||
router.register("sources/all", SourceViewSet)
|
||||
router.register("sources/user_connections/all", UserSourceConnectionViewSet)
|
||||
router.register("sources/user_connections/oauth", UserOAuthSourceConnectionViewSet)
|
||||
router.register("sources/user_connections/plex", PlexSourceConnectionViewSet)
|
||||
router.register("sources/ldap", LDAPSourceViewSet)
|
||||
@ -169,6 +170,7 @@ router.register("propertymappings/saml", SAMLPropertyMappingViewSet)
|
||||
router.register("propertymappings/scope", ScopeMappingViewSet)
|
||||
router.register("propertymappings/notification", NotificationWebhookMappingViewSet)
|
||||
|
||||
router.register("authenticators/all", DeviceViewSet, basename="device")
|
||||
router.register("authenticators/duo", DuoDeviceViewSet)
|
||||
router.register("authenticators/sms", SMSDeviceViewSet)
|
||||
router.register("authenticators/static", StaticDeviceViewSet)
|
||||
@ -246,7 +248,6 @@ urlpatterns = (
|
||||
FlowInspectorView.as_view(),
|
||||
name="flow-inspector",
|
||||
),
|
||||
path("sentry/", SentryTunnelView.as_view(), name="sentry"),
|
||||
path("schema/", cache_page(86400)(SpectacularAPIView.as_view()), name="schema"),
|
||||
]
|
||||
)
|
||||
|
||||
36 authentik/core/api/devices.py (Normal file)
@ -0,0 +1,36 @@
|
||||
"""Authenticator Devices API Views"""
|
||||
from django_otp import devices_for_user
|
||||
from django_otp.models import Device
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework.fields import CharField, IntegerField, SerializerMethodField
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import ViewSet
|
||||
|
||||
from authentik.core.api.utils import MetaNameSerializer
|
||||
|
||||
|
||||
class DeviceSerializer(MetaNameSerializer):
|
||||
"""Serializer for Duo authenticator devices"""
|
||||
|
||||
pk = IntegerField()
|
||||
name = CharField()
|
||||
type = SerializerMethodField()
|
||||
|
||||
def get_type(self, instance: Device) -> str:
|
||||
"""Get type of device"""
|
||||
return instance._meta.label
|
||||
|
||||
|
||||
class DeviceViewSet(ViewSet):
|
||||
"""Viewset for authenticator devices"""
|
||||
|
||||
serializer_class = DeviceSerializer
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
@extend_schema(responses={200: DeviceSerializer(many=True)})
|
||||
def list(self, request: Request) -> Response:
|
||||
"""Get all devices for current user"""
|
||||
devices = devices_for_user(request.user)
|
||||
return Response(DeviceSerializer(devices, many=True).data)
|
||||
@@ -42,6 +42,7 @@ class GroupSerializer(ModelSerializer):
users_obj = ListSerializer(
child=GroupMemberSerializer(), read_only=True, source="users", required=False
)
parent_name = CharField(source="parent.name", read_only=True)

class Meta:
@@ -51,6 +52,7 @@ class GroupSerializer(ModelSerializer):
"name",
"is_superuser",
"parent",
"parent_name",
"users",
"attributes",
"users_obj",
@ -1,18 +1,21 @@
|
||||
"""Source API Views"""
|
||||
from typing import Iterable
|
||||
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework import mixins
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.filters import OrderingFilter, SearchFilter
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.api.authorization import OwnerFilter, OwnerPermissions
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
|
||||
from authentik.core.models import Source
|
||||
from authentik.core.models import Source, UserSourceConnection
|
||||
from authentik.core.types import UserSettingSerializer
|
||||
from authentik.lib.utils.reflection import all_subclasses
|
||||
from authentik.policies.engine import PolicyEngine
|
||||
@ -113,3 +116,39 @@ class SourceViewSet(
|
||||
LOGGER.warning(source_settings.errors)
|
||||
matching_sources.append(source_settings.validated_data)
|
||||
return Response(matching_sources)
|
||||
|
||||
|
||||
class UserSourceConnectionSerializer(SourceSerializer):
|
||||
"""OAuth Source Serializer"""
|
||||
|
||||
source = SourceSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = UserSourceConnection
|
||||
fields = [
|
||||
"pk",
|
||||
"user",
|
||||
"source",
|
||||
"created",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"user": {"read_only": True},
|
||||
"created": {"read_only": True},
|
||||
}
|
||||
|
||||
|
||||
class UserSourceConnectionViewSet(
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.UpdateModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
UsedByMixin,
|
||||
mixins.ListModelMixin,
|
||||
GenericViewSet,
|
||||
):
|
||||
"""User-source connection Viewset"""
|
||||
|
||||
queryset = UserSourceConnection.objects.all()
|
||||
serializer_class = UserSourceConnectionSerializer
|
||||
permission_classes = [OwnerPermissions]
|
||||
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
|
||||
ordering = ["pk"]
|
||||
|
||||
@@ -55,5 +55,5 @@ class TokenBackend(InbuiltBackend):
if not tokens.exists():
return None
token = tokens.first()
self.set_method("password", request, token=token)
self.set_method("token", request, token=token)
return token.user
0 authentik/core/management/__init__.py (Normal file)
0 authentik/core/management/commands/__init__.py (Normal file)
15 authentik/core/management/commands/dump_config.py (Normal file)
@@ -0,0 +1,15 @@
"""Output full config"""
from json import dumps

from django.core.management.base import BaseCommand, no_translations

from authentik.lib.config import CONFIG

class Command(BaseCommand): # pragma: no cover
"""Output full config"""

@no_translations
def handle(self, *args, **options):
"""Check permissions for all apps"""
print(dumps(CONFIG.raw, indent=4))
@ -81,6 +81,27 @@ class Group(models.Model):
|
||||
)
|
||||
attributes = models.JSONField(default=dict, blank=True)
|
||||
|
||||
def is_member(self, user: "User") -> bool:
|
||||
"""Recursively check if `user` is member of us, or any parent."""
|
||||
query = """
|
||||
WITH RECURSIVE parents AS (
|
||||
SELECT authentik_core_group.*, 0 AS relative_depth
|
||||
FROM authentik_core_group
|
||||
WHERE authentik_core_group.group_uuid = %s
|
||||
|
||||
UNION ALL
|
||||
|
||||
SELECT authentik_core_group.*, parents.relative_depth - 1
|
||||
FROM authentik_core_group,parents
|
||||
WHERE authentik_core_group.parent_id = parents.group_uuid
|
||||
)
|
||||
SELECT group_uuid
|
||||
FROM parents
|
||||
GROUP BY group_uuid;
|
||||
"""
|
||||
groups = Group.objects.raw(query, [self.group_uuid])
|
||||
return user.ak_groups.filter(pk__in=[group.pk for group in groups]).exists()
|
||||
|
||||
def __str__(self):
|
||||
return f"Group {self.name}"
|
||||
|
||||
@ -153,7 +174,7 @@ class User(GuardianUserMixin, AbstractUser):
|
||||
if mode == "none":
|
||||
return DEFAULT_AVATAR
|
||||
# gravatar uses md5 for their URLs, so md5 can't be avoided
|
||||
mail_hash = md5(self.email.encode("utf-8")).hexdigest() # nosec
|
||||
mail_hash = md5(self.email.lower().encode("utf-8")).hexdigest() # nosec
|
||||
if mode == "gravatar":
|
||||
parameters = [
|
||||
("s", "158"),
|
||||
|
||||
@@ -4,7 +4,7 @@
{% load i18n %}

{% block head %}
<script src="{% static 'dist/AdminInterface.js' %}" type="module"></script>
<script src="{% static 'dist/admin/AdminInterface.js' %}" type="module"></script>
{% endblock %}

{% block body %}
@@ -11,7 +11,7 @@
{% endblock %}

{% block head %}
<script src="{% static 'dist/FlowInterface.js' %}" type="module"></script>
<script src="{% static 'dist/flow/FlowInterface.js' %}" type="module"></script>
<style>
.pf-c-background-image::before {
--ak-flow-background: url("{{ flow.background_url }}");
@@ -4,7 +4,7 @@
{% load i18n %}

{% block head %}
<script src="{% static 'dist/UserInterface.js' %}" type="module"></script>
<script src="{% static 'dist/user/UserInterface.js' %}" type="module"></script>
{% endblock %}

{% block body %}
40 authentik/core/tests/test_groups.py (Normal file)
@ -0,0 +1,40 @@
|
||||
"""group tests"""
|
||||
from django.test.testcases import TestCase
|
||||
|
||||
from authentik.core.models import Group, User
|
||||
|
||||
|
||||
class TestGroups(TestCase):
|
||||
"""Test group membership"""
|
||||
|
||||
def test_group_membership_simple(self):
|
||||
"""Test simple membership"""
|
||||
user = User.objects.create(username="user")
|
||||
user2 = User.objects.create(username="user2")
|
||||
group = Group.objects.create(name="group")
|
||||
group.users.add(user)
|
||||
self.assertTrue(group.is_member(user))
|
||||
self.assertFalse(group.is_member(user2))
|
||||
|
||||
def test_group_membership_parent(self):
|
||||
"""Test parent membership"""
|
||||
user = User.objects.create(username="user")
|
||||
user2 = User.objects.create(username="user2")
|
||||
first = Group.objects.create(name="first")
|
||||
second = Group.objects.create(name="second", parent=first)
|
||||
second.users.add(user)
|
||||
self.assertTrue(first.is_member(user))
|
||||
self.assertFalse(first.is_member(user2))
|
||||
|
||||
def test_group_membership_parent_extra(self):
|
||||
"""Test parent membership"""
|
||||
user = User.objects.create(username="user")
|
||||
user2 = User.objects.create(username="user2")
|
||||
first = Group.objects.create(name="first")
|
||||
second = Group.objects.create(name="second", parent=first)
|
||||
third = Group.objects.create(name="third", parent=second)
|
||||
second.users.add(user)
|
||||
self.assertTrue(first.is_member(user))
|
||||
self.assertFalse(first.is_member(user2))
|
||||
self.assertFalse(third.is_member(user))
|
||||
self.assertFalse(third.is_member(user2))
|
||||
@@ -141,7 +141,7 @@ class CertificateKeyPairFilter(FilterSet):
class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
"""CertificateKeyPair Viewset"""

queryset = CertificateKeyPair.objects.all()
queryset = CertificateKeyPair.objects.exclude(managed__isnull=False)
serializer_class = CertificateKeyPairSerializer
filterset_class = CertificateKeyPairFilter
@ -7,16 +7,25 @@ from django.core.exceptions import SuspiciousOperation
|
||||
from django.db.models import Model
|
||||
from django.db.models.signals import post_save, pre_delete
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django_otp.plugins.otp_static.models import StaticToken
|
||||
from guardian.models import UserObjectPermission
|
||||
|
||||
from authentik.core.middleware import LOCAL
|
||||
from authentik.core.models import User
|
||||
from authentik.core.models import AuthenticatedSession, User
|
||||
from authentik.events.models import Event, EventAction, Notification
|
||||
from authentik.events.signals import EventNewThread
|
||||
from authentik.events.utils import model_to_dict
|
||||
from authentik.lib.sentry import before_send
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
|
||||
IGNORED_MODELS = (
|
||||
Event,
|
||||
Notification,
|
||||
UserObjectPermission,
|
||||
AuthenticatedSession,
|
||||
StaticToken,
|
||||
)
|
||||
|
||||
|
||||
class AuditMiddleware:
|
||||
"""Register handlers for duration of request-response that log creation/update/deletion
|
||||
@ -82,7 +91,7 @@ class AuditMiddleware:
|
||||
user: User, request: HttpRequest, sender, instance: Model, created: bool, **_
|
||||
):
|
||||
"""Signal handler for all object's post_save"""
|
||||
if isinstance(instance, (Event, Notification, UserObjectPermission)):
|
||||
if isinstance(instance, IGNORED_MODELS):
|
||||
return
|
||||
|
||||
action = EventAction.MODEL_CREATED if created else EventAction.MODEL_UPDATED
|
||||
@ -92,7 +101,7 @@ class AuditMiddleware:
|
||||
# pylint: disable=unused-argument
|
||||
def pre_delete_handler(user: User, request: HttpRequest, sender, instance: Model, **_):
|
||||
"""Signal handler for all object's pre_delete"""
|
||||
if isinstance(instance, (Event, Notification, UserObjectPermission)): # pragma: no cover
|
||||
if isinstance(instance, IGNORED_MODELS): # pragma: no cover
|
||||
return
|
||||
|
||||
EventNewThread(
|
||||
|
||||
@ -1,6 +1,4 @@
|
||||
"""Flow Stage API Views"""
|
||||
from typing import Iterable
|
||||
|
||||
from django.urls.base import reverse
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework import mixins
|
||||
@ -15,7 +13,7 @@ from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import MetaNameSerializer, TypeCreateSerializer
|
||||
from authentik.core.types import UserSettingSerializer
|
||||
from authentik.flows.api.flows import FlowSerializer
|
||||
from authentik.flows.models import Stage
|
||||
from authentik.flows.models import ConfigurableStage, Stage
|
||||
from authentik.lib.utils.reflection import all_subclasses
|
||||
|
||||
LOGGER = get_logger()
|
||||
@ -86,9 +84,11 @@ class StageViewSet(
|
||||
@action(detail=False, pagination_class=None, filter_backends=[])
|
||||
def user_settings(self, request: Request) -> Response:
|
||||
"""Get all stages the user can configure"""
|
||||
_all_stages: Iterable[Stage] = Stage.objects.all().select_subclasses().order_by("name")
|
||||
stages = []
|
||||
for configurable_stage in all_subclasses(ConfigurableStage):
|
||||
stages += list(configurable_stage.objects.all().order_by("name"))
|
||||
matching_stages: list[dict] = []
|
||||
for stage in _all_stages:
|
||||
for stage in stages:
|
||||
user_settings = stage.ui_user_settings
|
||||
if not user_settings:
|
||||
continue
|
||||
|
||||
@ -545,6 +545,7 @@ class TestFlowExecutor(APITestCase):
|
||||
"password_fields": False,
|
||||
"primary_action": "Log in",
|
||||
"sources": [],
|
||||
"show_source_labels": False,
|
||||
"user_fields": [UserFields.E_MAIL],
|
||||
},
|
||||
)
|
||||
|
||||
@ -60,6 +60,7 @@ class TestFlowInspector(APITestCase):
|
||||
"password_fields": False,
|
||||
"primary_action": "Log in",
|
||||
"sources": [],
|
||||
"show_source_labels": False,
|
||||
"user_fields": ["username"],
|
||||
},
|
||||
)
|
||||
|
||||
@@ -68,6 +68,7 @@ outposts:
cookie_domain: null
disable_update_check: false
disable_startup_analytics: false
avatars: env://AUTHENTIK_AUTHENTIK__AVATARS?gravatar
geoip: "./GeoLite2-City.mmdb"
@ -13,6 +13,7 @@ from django.db import InternalError, OperationalError, ProgrammingError
|
||||
from django.http.response import Http404
|
||||
from django_redis.exceptions import ConnectionInterrupted
|
||||
from docker.errors import DockerException
|
||||
from h11 import LocalProtocolError
|
||||
from ldap3.core.exceptions import LDAPException
|
||||
from redis.exceptions import ConnectionError as RedisConnectionError
|
||||
from redis.exceptions import RedisError, ResponseError
|
||||
@ -72,6 +73,7 @@ def before_send(event: dict, hint: dict) -> Optional[dict]:
|
||||
# websocket errors
|
||||
ChannelFull,
|
||||
WebSocketException,
|
||||
LocalProtocolError,
|
||||
# rest_framework error
|
||||
APIException,
|
||||
# celery errors
|
||||
|
||||
@@ -1,8 +1,13 @@
"""authentik lib reflection utilities"""
import os
from importlib import import_module
from pathlib import Path
from typing import Union

from django.conf import settings
from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME

from authentik.lib.config import CONFIG

def all_subclasses(cls, sort=True):
@@ -42,3 +47,16 @@ def get_apps():
for _app in apps.get_app_configs():
if _app.name.startswith("authentik"):
yield _app

def get_env() -> str:
"""Get environment in which authentik is currently running"""
if SERVICE_HOST_ENV_NAME in os.environ:
return "kubernetes"
if "CI" in os.environ:
return "ci"
if Path("/tmp/authentik-mode").exists(): # nosec
return "compose"
if CONFIG.y_bool("debug"):
return "dev"
return "custom"
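A minimal usage sketch for the `get_env` helper added above, mirroring a later hunk in this compare where `env = get_env()` gates startup analytics; this is illustration only and assumes the module path shown in that later import (`authentik.lib.utils.reflection`):

```python
from authentik.lib.utils.reflection import get_env

env = get_env()
# As in the settings hunk later in this compare: skip startup analytics
# when running in development or CI.
should_send = env not in ["dev", "ci"]
print(env, should_send)
```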
@@ -65,14 +65,14 @@ class PolicyBinding(SerializerModel):
# This is quite an ugly hack to prevent pylint from trying
# to resolve authentik_core.models.Group
# as python import path
"authentik_core." + "Group",
"authentik_core.Group",
on_delete=models.CASCADE,
default=None,
null=True,
blank=True,
)
user = models.ForeignKey(
"authentik_core." + "User",
"authentik_core.User",
on_delete=models.CASCADE,
default=None,
null=True,
@@ -96,7 +96,7 @@ class PolicyBinding(SerializerModel):
self.policy: Policy
return self.policy.passes(request)
if self.group:
return PolicyResult(self.group.users.filter(pk=request.user.pk).exists())
return PolicyResult(self.group.is_member(request.user))
if self.user:
return PolicyResult(request.user == self.user)
return PolicyResult(False)
@ -1,5 +1,5 @@
|
||||
"""LDAPProvider API Views"""
|
||||
from rest_framework.fields import CharField
|
||||
from rest_framework.fields import CharField, ListField
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
|
||||
|
||||
@ -11,6 +11,8 @@ from authentik.providers.ldap.models import LDAPProvider
|
||||
class LDAPProviderSerializer(ProviderSerializer):
|
||||
"""LDAPProvider Serializer"""
|
||||
|
||||
outpost_set = ListField(child=CharField(), read_only=True, source="outpost_set.all")
|
||||
|
||||
class Meta:
|
||||
|
||||
model = LDAPProvider
|
||||
@ -21,6 +23,8 @@ class LDAPProviderSerializer(ProviderSerializer):
|
||||
"tls_server_name",
|
||||
"uid_start_number",
|
||||
"gid_start_number",
|
||||
"outpost_set",
|
||||
"search_mode",
|
||||
]
|
||||
|
||||
|
||||
@ -65,6 +69,7 @@ class LDAPOutpostConfigSerializer(ModelSerializer):
|
||||
"tls_server_name",
|
||||
"uid_start_number",
|
||||
"gid_start_number",
|
||||
"search_mode",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@ -0,0 +1,93 @@
|
||||
# Generated by Django 3.2.8 on 2021-11-05 09:41
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
replaces = [
|
||||
("authentik_providers_ldap", "0001_initial"),
|
||||
("authentik_providers_ldap", "0002_ldapprovider_search_group"),
|
||||
("authentik_providers_ldap", "0003_auto_20210713_1138"),
|
||||
("authentik_providers_ldap", "0004_auto_20210713_2115"),
|
||||
("authentik_providers_ldap", "0005_ldapprovider_search_mode"),
|
||||
]
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("authentik_core", "0019_source_managed"),
|
||||
("authentik_crypto", "0002_create_self_signed_kp"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="LDAPProvider",
|
||||
fields=[
|
||||
(
|
||||
"provider_ptr",
|
||||
models.OneToOneField(
|
||||
auto_created=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
parent_link=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
to="authentik_core.provider",
|
||||
),
|
||||
),
|
||||
(
|
||||
"base_dn",
|
||||
models.TextField(
|
||||
default="DC=ldap,DC=goauthentik,DC=io",
|
||||
help_text="DN under which objects are accessible.",
|
||||
),
|
||||
),
|
||||
(
|
||||
"search_group",
|
||||
models.ForeignKey(
|
||||
default=None,
|
||||
help_text="Users in this group can do search queries. If not set, every user can execute search queries.",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_DEFAULT,
|
||||
to="authentik_core.group",
|
||||
),
|
||||
),
|
||||
(
|
||||
"certificate",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
to="authentik_crypto.certificatekeypair",
|
||||
),
|
||||
),
|
||||
("tls_server_name", models.TextField(blank=True, default="")),
|
||||
(
|
||||
"gid_start_number",
|
||||
models.IntegerField(
|
||||
default=4000,
|
||||
help_text="The start for gidNumbers, this number is added to a number generated from the group.Pk to make sure that the numbers aren't too low for POSIX groups. Default is 4000 to ensure that we don't collide with local groups or users primary groups gidNumber",
|
||||
),
|
||||
),
|
||||
(
|
||||
"uid_start_number",
|
||||
models.IntegerField(
|
||||
default=2000,
|
||||
help_text="The start for uidNumbers, this number is added to the user.Pk to make sure that the numbers aren't too low for POSIX users. Default is 2000 to ensure that we don't collide with local users uidNumber",
|
||||
),
|
||||
),
|
||||
(
|
||||
"search_mode",
|
||||
models.TextField(
|
||||
choices=[("direct", "Direct"), ("cached", "Cached")], default="direct"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "LDAP Provider",
|
||||
"verbose_name_plural": "LDAP Providers",
|
||||
},
|
||||
bases=("authentik_core.provider", models.Model),
|
||||
),
|
||||
]
|
||||
@ -0,0 +1,20 @@
|
||||
# Generated by Django 3.2.8 on 2021-11-05 09:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_providers_ldap", "0004_auto_20210713_2115"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="ldapprovider",
|
||||
name="search_mode",
|
||||
field=models.TextField(
|
||||
choices=[("direct", "Direct"), ("cached", "Cached")], default="direct"
|
||||
),
|
||||
),
|
||||
]
|
||||
@ -10,6 +10,13 @@ from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.outposts.models import OutpostModel
|
||||
|
||||
|
||||
class SearchModes(models.TextChoices):
|
||||
"""Search modes"""
|
||||
|
||||
DIRECT = "direct"
|
||||
CACHED = "cached"
|
||||
|
||||
|
||||
class LDAPProvider(OutpostModel, Provider):
|
||||
"""Allow applications to authenticate against authentik's users using LDAP."""
|
||||
|
||||
@ -59,6 +66,8 @@ class LDAPProvider(OutpostModel, Provider):
|
||||
),
|
||||
)
|
||||
|
||||
search_mode = models.TextField(default=SearchModes.DIRECT, choices=SearchModes.choices)
|
||||
|
||||
@property
|
||||
def launch_url(self) -> Optional[str]:
|
||||
"""LDAP never has a launch URL"""
|
||||
|
||||
@ -11,6 +11,7 @@ from authentik.core.api.providers import ProviderSerializer
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
from authentik.providers.oauth2.models import ScopeMapping
|
||||
from authentik.providers.oauth2.views.provider import ProviderInfoView
|
||||
from authentik.providers.proxy.models import ProxyMode, ProxyProvider
|
||||
|
||||
@ -36,6 +37,7 @@ class ProxyProviderSerializer(ProviderSerializer):
|
||||
"""ProxyProvider Serializer"""
|
||||
|
||||
redirect_uris = CharField(read_only=True)
|
||||
outpost_set = ListField(child=CharField(), read_only=True, source="outpost_set.all")
|
||||
|
||||
def validate(self, attrs) -> dict[Any, str]:
|
||||
"""Check that internal_host is set when mode is Proxy"""
|
||||
@ -74,6 +76,7 @@ class ProxyProviderSerializer(ProviderSerializer):
|
||||
"redirect_uris",
|
||||
"cookie_domain",
|
||||
"token_validity",
|
||||
"outpost_set",
|
||||
]
|
||||
|
||||
|
||||
@ -108,6 +111,7 @@ class ProxyOutpostConfigSerializer(ModelSerializer):
|
||||
|
||||
oidc_configuration = SerializerMethodField()
|
||||
token_validity = SerializerMethodField()
|
||||
scopes_to_request = SerializerMethodField()
|
||||
|
||||
@extend_schema_field(OpenIDConnectConfigurationSerializer)
|
||||
def get_oidc_configuration(self, obj: ProxyProvider):
|
||||
@ -118,6 +122,14 @@ class ProxyOutpostConfigSerializer(ModelSerializer):
|
||||
"""Get token validity as second count"""
|
||||
return timedelta_from_string(obj.token_validity).total_seconds()
|
||||
|
||||
def get_scopes_to_request(self, obj: ProxyProvider) -> list[str]:
|
||||
"""Get all the scope names the outpost should request,
|
||||
including custom-defined ones"""
|
||||
scope_names = set(
|
||||
ScopeMapping.objects.filter(provider__in=[obj]).values_list("scope_name", flat=True)
|
||||
)
|
||||
return list(scope_names)
|
||||
|
||||
class Meta:
|
||||
|
||||
model = ProxyProvider
|
||||
@ -139,6 +151,7 @@ class ProxyOutpostConfigSerializer(ModelSerializer):
|
||||
"mode",
|
||||
"cookie_domain",
|
||||
"token_validity",
|
||||
"scopes_to_request",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@ -138,7 +138,7 @@ class ProxyProvider(OutpostModel, OAuth2Provider):
|
||||
SCOPE_AK_PROXY,
|
||||
]
|
||||
)
|
||||
self.property_mappings.set(scopes)
|
||||
self.property_mappings.add(*list(scopes))
|
||||
self.redirect_uris = _get_callback_url(self.external_host)
|
||||
|
||||
def __str__(self):
|
||||
|
||||
@ -59,11 +59,13 @@ class AuthNRequestParser:
|
||||
) -> AuthNRequest:
|
||||
root = ElementTree.fromstring(decoded_xml)
|
||||
|
||||
# http://docs.oasis-open.org/security/saml/v2.0/saml-core-2.0-os.pdf
|
||||
# `AssertionConsumerServiceURL` can be omitted, and we should fallback to the
|
||||
# default ACS URL
|
||||
if "AssertionConsumerServiceURL" not in root.attrib:
|
||||
msg = "Missing 'AssertionConsumerServiceURL' attribute"
|
||||
LOGGER.warning(msg)
|
||||
raise CannotHandleAssertion(msg)
|
||||
request_acs_url = root.attrib["AssertionConsumerServiceURL"]
|
||||
request_acs_url = self.provider.acs_url.lower()
|
||||
else:
|
||||
request_acs_url = root.attrib["AssertionConsumerServiceURL"]
|
||||
|
||||
if self.provider.acs_url.lower() != request_acs_url.lower():
|
||||
msg = (
|
||||
|
||||
30 authentik/recovery/management/commands/create_admin_group.py (Normal file)
@ -0,0 +1,30 @@
|
||||
"""authentik recovery create_admin_group"""
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from authentik.core.models import Group, User
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Create admin group if the default group gets deleted"""
|
||||
|
||||
help = _("Create admin group if the default group gets deleted.")
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("user", action="store", help="User to add to the admin group.")
|
||||
|
||||
def handle(self, *args, **options):
|
||||
"""Create admin group if the default group gets deleted"""
|
||||
username = options.get("user")
|
||||
user = User.objects.filter(username=username).first()
|
||||
if not user:
|
||||
self.stderr.write(f"User '{username}' not found.")
|
||||
return
|
||||
group, _ = Group.objects.update_or_create(
|
||||
name="authentik Admins",
|
||||
defaults={
|
||||
"is_superuser": True,
|
||||
},
|
||||
)
|
||||
group.users.add(user)
|
||||
self.stdout.write(f"User '{username}' successfully added to the group 'authentik Admins'.")
|
||||
@ -7,12 +7,9 @@ from django.urls import reverse
|
||||
from django.utils.text import slugify
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import gettext as _
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import Token, TokenIntents, User
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Create Token used to recover access"""
|
||||
|
||||
@ -14,13 +14,13 @@ import importlib
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from hashlib import sha512
|
||||
from json import dumps
|
||||
from tempfile import gettempdir
|
||||
from time import time
|
||||
|
||||
import structlog
|
||||
from celery.schedules import crontab
|
||||
from kubernetes.config.incluster_config import SERVICE_HOST_ENV_NAME
|
||||
from sentry_sdk import init as sentry_init
|
||||
from sentry_sdk.api import set_tag
|
||||
from sentry_sdk.integrations.celery import CeleryIntegration
|
||||
@ -32,6 +32,8 @@ from authentik.core.middleware import structlog_add_request_id
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.logging import add_process_id
|
||||
from authentik.lib.sentry import before_send
|
||||
from authentik.lib.utils.http import get_http_session
|
||||
from authentik.lib.utils.reflection import get_env
|
||||
from authentik.stages.password import BACKEND_APP_PASSWORD, BACKEND_INBUILT, BACKEND_LDAP
|
||||
|
||||
|
||||
@ -176,6 +178,7 @@ SPECTACULAR_SETTINGS = {
|
||||
"FlowDesignationEnum": "authentik.flows.models.FlowDesignation",
|
||||
"PolicyEngineMode": "authentik.policies.models.PolicyEngineMode",
|
||||
"ProxyMode": "authentik.providers.proxy.models.ProxyMode",
|
||||
"PromptTypeEnum": "authentik.stages.prompt.models.FieldTypes",
|
||||
},
|
||||
"ENUM_ADD_EXPLICIT_BLANK_NULL_CHOICE": False,
|
||||
"POSTPROCESSING_HOOKS": [
|
||||
@ -307,7 +310,7 @@ EMAIL_HOST = CONFIG.y("email.host")
|
||||
EMAIL_PORT = int(CONFIG.y("email.port"))
|
||||
EMAIL_HOST_USER = CONFIG.y("email.username")
|
||||
EMAIL_HOST_PASSWORD = CONFIG.y("email.password")
|
||||
EMAIL_USE_TLS = CONFIG.y_bool("email.use_tls", True)
|
||||
EMAIL_USE_TLS = CONFIG.y_bool("email.use_tls", False)
|
||||
EMAIL_USE_SSL = CONFIG.y_bool("email.use_ssl", False)
|
||||
EMAIL_TIMEOUT = int(CONFIG.y("email.timeout"))
|
||||
DEFAULT_FROM_EMAIL = CONFIG.y("email.from")
|
||||
@ -380,7 +383,8 @@ DBBACKUP_CONNECTOR_MAPPING = {
|
||||
"django_prometheus.db.backends.postgresql": "dbbackup.db.postgresql.PgDumpConnector",
|
||||
}
|
||||
DBBACKUP_TMP_DIR = gettempdir() if DEBUG else "/tmp" # nosec
|
||||
if CONFIG.y("postgresql.s3_backup"):
|
||||
DBBACKUP_CLEANUP_KEEP = 30
|
||||
if CONFIG.y("postgresql.s3_backup.bucket", "") != "":
|
||||
DBBACKUP_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
|
||||
DBBACKUP_STORAGE_OPTIONS = {
|
||||
"access_key": CONFIG.y("postgresql.s3_backup.access_key"),
|
||||
@ -399,6 +403,12 @@ if CONFIG.y("postgresql.s3_backup"):
|
||||
|
||||
# Sentry integration
|
||||
SENTRY_DSN = "https://a579bb09306d4f8b8d8847c052d3a1d3@sentry.beryju.org/8"
|
||||
# Default to empty string as that is what docker has
|
||||
build_hash = os.environ.get(ENV_GIT_HASH_KEY, "")
|
||||
if build_hash == "":
|
||||
build_hash = "tagged"
|
||||
|
||||
env = get_env()
|
||||
_ERROR_REPORTING = CONFIG.y_bool("error_reporting.enabled", False)
|
||||
if _ERROR_REPORTING:
|
||||
# pylint: disable=abstract-class-instantiated
|
||||
@@ -415,18 +425,34 @@ if _ERROR_REPORTING:
        environment=CONFIG.y("error_reporting.environment", "customer"),
        send_default_pii=CONFIG.y_bool("error_reporting.send_pii", False),
    )
    # Default to empty string as that is what docker has
    build_hash = os.environ.get(ENV_GIT_HASH_KEY, "")
    if build_hash == "":
        build_hash = "tagged"
    set_tag("authentik.build_hash", build_hash)
    set_tag("authentik.env", "kubernetes" if SERVICE_HOST_ENV_NAME in os.environ else "compose")
    set_tag("authentik.env", env)
    set_tag("authentik.component", "backend")
    j_print(
        "Error reporting is enabled",
        env=CONFIG.y("error_reporting.environment", "customer"),
    )

if not CONFIG.y_bool("disable_startup_analytics", False):
    should_send = env not in ["dev", "ci"]
    if should_send:
        try:
            get_http_session().post(
                "https://goauthentik.io/api/event",
                json={
                    "domain": "authentik",
                    "name": "pageview",
                    "referrer": f"{__version__} ({build_hash})",
                    "url": f"http://localhost/{env}?utm_source={__version__}&utm_medium={env}",
                },
                headers={
                    "User-Agent": sha512(SECRET_KEY.encode("ascii")).hexdigest()[:16],
                    "Content-Type": "application/json",
                },
                timeout=5,
            )
        # pylint: disable=bare-except
        except:  # nosec
            pass

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
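What the startup ping sends, reduced to a sketch (not part of the diff): the only identifier is a 16-character prefix of the SHA-512 of SECRET_KEY, and the block is skipped entirely when disable_startup_analytics is set or the environment is dev/ci.

# Illustration of the anonymous install identifier used as the User-Agent above;
# the key below is a placeholder, never a real SECRET_KEY.
from hashlib import sha512

SECRET_KEY = "example-not-a-real-key"
install_id = sha512(SECRET_KEY.encode("ascii")).hexdigest()[:16]
print(install_id)  # 16 hex chars, carries no user data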
@ -7,7 +7,7 @@ from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.auth import InbuiltBackend
|
||||
from authentik.core.models import User
|
||||
from authentik.sources.ldap.models import LDAPSource
|
||||
from authentik.sources.ldap.models import LDAP_TIMEOUT, LDAPSource
|
||||
|
||||
LOGGER = get_logger()
|
||||
LDAP_DISTINGUISHED_NAME = "distinguishedName"
|
||||
@ -62,6 +62,7 @@ class LDAPBackend(InbuiltBackend):
|
||||
user=user.attributes.get(LDAP_DISTINGUISHED_NAME),
|
||||
password=password,
|
||||
raise_exceptions=True,
|
||||
receive_timeout=LDAP_TIMEOUT,
|
||||
)
|
||||
temp_connection.bind()
|
||||
return user
|
||||
|
||||
@@ -9,6 +9,8 @@ from rest_framework.serializers import Serializer
from authentik.core.models import Group, PropertyMapping, Source
from authentik.lib.models import DomainlessURLValidator

LDAP_TIMEOUT = 15


class LDAPSource(Source):
    """Federate LDAP Directory with authentik, or create new accounts in LDAP."""
@@ -86,12 +88,13 @@ class LDAPSource(Source):
    def connection(self) -> Connection:
        """Get a fully connected and bound LDAP Connection"""
        if not self._connection:
            server = Server(self.server_uri, get_info=ALL)
            server = Server(self.server_uri, get_info=ALL, connect_timeout=LDAP_TIMEOUT)
            self._connection = Connection(
                server,
                raise_exceptions=True,
                user=self.bind_cn,
                password=self.bind_password,
                receive_timeout=LDAP_TIMEOUT,
            )

            self._connection.bind()
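The timeout pattern above can be reproduced outside authentik; a minimal ldap3 sketch, with placeholder host and bind credentials: connect_timeout bounds the TCP connect on the Server, receive_timeout bounds each operation on the Connection, so an unreachable directory no longer hangs a sync or login indefinitely.

# Minimal ldap3 sketch of the timeout handling above; host and credentials are placeholders.
from ldap3 import ALL, Connection, Server

LDAP_TIMEOUT = 15

server = Server("ldap://ldap.example.com", get_info=ALL, connect_timeout=LDAP_TIMEOUT)
connection = Connection(
    server,
    user="cn=admin,dc=example,dc=com",
    password="changeme",
    raise_exceptions=True,
    receive_timeout=LDAP_TIMEOUT,
)
connection.bind()  # raises on failure instead of returning False silently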
@@ -1,6 +1,8 @@
"""Sync LDAP Users and groups into authentik"""
from typing import Any

from deepmerge import always_merger
from django.db.models.base import Model
from django.db.models.query import QuerySet
from structlog.stdlib import BoundLogger, get_logger

@@ -105,3 +107,24 @@ class BaseLDAPSynchronizer:
            )
        properties["attributes"][LDAP_DISTINGUISHED_NAME] = object_dn
        return properties

    def update_or_create_attributes(
        self,
        obj: type[Model],
        query: dict[str, Any],
        data: dict[str, Any],
    ) -> tuple[Model, bool]:
        """Same as django's update_or_create but correctly update attributes by merging dicts"""
        instance = obj.objects.filter(**query).first()
        if not instance:
            return (obj.objects.create(**data), True)
        for key, value in data.items():
            if key == "attributes":
                continue
            setattr(instance, key, value)
        final_atttributes = {}
        always_merger.merge(final_atttributes, instance.attributes)
        always_merger.merge(final_atttributes, data.get("attributes", {}))
        instance.attributes = final_atttributes
        instance.save()
        return (instance, False)
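The point of update_or_create_attributes is that a plain update_or_create would overwrite the whole attributes JSON field; deepmerge's always_merger keeps manually-set keys while applying the LDAP-derived ones. A standalone illustration with made-up attribute values:

# Standalone illustration of the merge behaviour; attribute values are made up.
from deepmerge import always_merger

existing = {"foo": "bar", "distinguishedName": "cn=user0,ou=users,dc=example"}  # already on the user
incoming = {"distinguishedName": "cn=user0,ou=users,dc=example", "department": "IT"}  # from the LDAP sync

merged = {}
always_merger.merge(merged, existing)
always_merger.merge(merged, incoming)
print(merged)  # {'foo': 'bar', 'distinguishedName': ..., 'department': 'IT'}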
@@ -43,12 +43,13 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
            # Special check for `users` field, as this is an M2M relation, and cannot be sync'd
            if "users" in defaults:
                del defaults["users"]
            ak_group, created = Group.objects.update_or_create(
                **{
            ak_group, created = self.update_or_create_attributes(
                Group,
                {
                    f"attributes__{LDAP_UNIQUENESS}": uniq,
                    "parent": self._source.sync_parent_group,
                    "defaults": defaults,
                }
                },
                defaults,
            )
        except (IntegrityError, FieldError) as exc:
            Event.new(

@@ -42,11 +42,8 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
            self._logger.debug("Creating user with attributes", **defaults)
            if "username" not in defaults:
                raise IntegrityError("Username was not set by propertymappings")
            ak_user, created = User.objects.update_or_create(
                **{
                    f"attributes__{LDAP_UNIQUENESS}": uniq,
                    "defaults": defaults,
                }
            ak_user, created = self.update_or_create_attributes(
                User, {f"attributes__{LDAP_UNIQUENESS}": uniq}, defaults
            )
        except (IntegrityError, FieldError) as exc:
            Event.new(
@ -1,6 +1,4 @@
|
||||
"""LDAP Sync tasks"""
|
||||
from typing import Optional
|
||||
|
||||
from django.utils.text import slugify
|
||||
from ldap3.core.exceptions import LDAPException
|
||||
from structlog.stdlib import get_logger
|
||||
@ -31,8 +29,7 @@ def ldap_sync_all():
|
||||
@CELERY_APP.task(
|
||||
bind=True, base=MonitoredTask, soft_time_limit=60 * 60 * 2, task_time_limit=60 * 60 * 2
|
||||
)
|
||||
# TODO: remove Optional[str] in 2021.10
|
||||
def ldap_sync(self: MonitoredTask, source_pk: str, sync_class: Optional[str] = None):
|
||||
def ldap_sync(self: MonitoredTask, source_pk: str, sync_class: str):
|
||||
"""Synchronization of an LDAP Source"""
|
||||
self.result_timeout_hours = 2
|
||||
try:
|
||||
@ -41,8 +38,6 @@ def ldap_sync(self: MonitoredTask, source_pk: str, sync_class: Optional[str] = N
|
||||
# Because the source couldn't be found, we don't have a UID
|
||||
# to set the state with
|
||||
return
|
||||
if not sync_class:
|
||||
return
|
||||
sync = path_to_class(sync_class)
|
||||
self.set_uid(f"{slugify(source.name)}-{sync.__name__}")
|
||||
try:
|
||||
|
||||
@ -69,10 +69,26 @@ class LDAPSyncTests(TestCase):
|
||||
)
|
||||
self.source.save()
|
||||
connection = PropertyMock(return_value=mock_ad_connection(LDAP_PASSWORD))
|
||||
|
||||
# Create the user beforehand so we can set attributes and check they aren't removed
|
||||
user = User.objects.create(
|
||||
username="user0_sn",
|
||||
attributes={
|
||||
"ldap_uniq": (
|
||||
"S-117-6648368-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-"
|
||||
"0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-"
|
||||
"0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-"
|
||||
"0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0"
|
||||
),
|
||||
"foo": "bar",
|
||||
},
|
||||
)
|
||||
|
||||
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
|
||||
user_sync = UserLDAPSynchronizer(self.source)
|
||||
user_sync.sync()
|
||||
user = User.objects.filter(username="user0_sn").first()
|
||||
self.assertEqual(user.attributes["foo"], "bar")
|
||||
self.assertFalse(user.is_active)
|
||||
self.assertFalse(User.objects.filter(username="user1_sn").exists())
|
||||
|
||||
|
||||
@ -21,6 +21,9 @@ class UserOAuthSourceConnectionSerializer(SourceSerializer):
|
||||
"source",
|
||||
"identifier",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"user": {"read_only": True},
|
||||
}
|
||||
|
||||
|
||||
class UserOAuthSourceConnectionViewSet(
|
||||
|
||||
@@ -1,10 +1,8 @@
"""Apple OAuth Views"""
from base64 import b64decode
from json import loads
from time import time
from typing import Any, Optional

from jwt import encode
from jwt import decode, encode
from structlog.stdlib import get_logger

from authentik.sources.oauth.clients.oauth2 import OAuth2Client
@@ -40,7 +38,7 @@ class AppleOAuthClient(OAuth2Client):
            "iat": now,
            "exp": now + 86400 * 180,
            "aud": "https://appleid.apple.com",
            "sub": self.source.consumer_key,
            "sub": parts[0],
        }
        # pyright: reportGeneralTypeIssues=false
        jwt = encode(payload, self.source.consumer_secret, "ES256", {"kid": parts[2]})
@@ -49,9 +47,7 @@ class AppleOAuthClient(OAuth2Client):

    def get_profile_info(self, token: dict[str, str]) -> Optional[dict[str, Any]]:
        id_token = token.get("id_token")
        _, raw_payload, _ = id_token.split(".")
        payload = loads(b64decode(raw_payload.encode().decode()))
        return payload
        return decode(id_token, options={"verify_signature": False})


class AppleOAuthRedirect(OAuthRedirect):
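The get_profile_info change swaps hand-rolled base64 splitting (which breaks on URL-safe alphabets and missing padding) for PyJWT's decoder with signature verification disabled, matching what the old code effectively did. A self-contained sketch with a locally generated throwaway token:

# Self-contained PyJWT sketch of the decoding change above; the token is signed
# with a throwaway key and signature verification is skipped on purpose,
# exactly as in the diff.
from jwt import decode, encode

id_token = encode({"sub": "001234.abcdef", "email": "user@example.com"}, "throwaway", algorithm="HS256")
claims = decode(id_token, options={"verify_signature": False})
print(claims["sub"])  # 001234.abcdef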
@ -12,6 +12,7 @@ class DiscordOAuthRedirect(OAuthRedirect):
|
||||
def get_additional_parameters(self, source): # pragma: no cover
|
||||
return {
|
||||
"scope": "email identify",
|
||||
"prompt": "none",
|
||||
}
|
||||
|
||||
|
||||
|
||||
@ -5,7 +5,7 @@ from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_sche
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.fields import CharField
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ValidationError
|
||||
@ -18,7 +18,7 @@ from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.flows.challenge import RedirectChallenge
|
||||
from authentik.flows.views.executor import to_stage_response
|
||||
from authentik.sources.plex.models import PlexSource
|
||||
from authentik.sources.plex.models import PlexSource, PlexSourceConnection
|
||||
from authentik.sources.plex.plex import PlexAuth, PlexSourceFlowManager
|
||||
|
||||
LOGGER = get_logger()
|
||||
@ -98,21 +98,11 @@ class PlexSourceViewSet(UsedByMixin, ModelViewSet):
|
||||
user_info, identifier = auth_api.get_user_info()
|
||||
# Check friendship first, then check server overlay
|
||||
friends_allowed = False
|
||||
owner_id = None
|
||||
if source.allow_friends:
|
||||
owner_api = PlexAuth(source, source.plex_token)
|
||||
owner_id = owner_api.get_user_info
|
||||
owner_friends = owner_api.get_friends()
|
||||
for friend in owner_friends:
|
||||
if int(friend.get("id", "0")) == int(identifier):
|
||||
friends_allowed = True
|
||||
LOGGER.info(
|
||||
"allowing user for plex because of friend",
|
||||
user=user_info["username"],
|
||||
)
|
||||
friends_allowed = owner_api.check_friends_overlap(identifier)
|
||||
servers_allowed = auth_api.check_server_overlap()
|
||||
owner_allowed = owner_id == identifier
|
||||
if any([friends_allowed, servers_allowed, owner_allowed]):
|
||||
if any([friends_allowed, servers_allowed]):
|
||||
sfm = PlexSourceFlowManager(
|
||||
source=source,
|
||||
request=request,
|
||||
@ -125,3 +115,57 @@ class PlexSourceViewSet(UsedByMixin, ModelViewSet):
|
||||
user=user_info["username"],
|
||||
)
|
||||
raise PermissionDenied("Access denied.")
|
||||
|
||||
@extend_schema(
|
||||
request=PlexTokenRedeemSerializer(),
|
||||
responses={
|
||||
204: OpenApiResponse(),
|
||||
400: OpenApiResponse(description="Token not found"),
|
||||
403: OpenApiResponse(description="Access denied"),
|
||||
},
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name="slug",
|
||||
location=OpenApiParameter.QUERY,
|
||||
type=OpenApiTypes.STR,
|
||||
)
|
||||
],
|
||||
)
|
||||
@action(
|
||||
methods=["POST"],
|
||||
detail=False,
|
||||
pagination_class=None,
|
||||
filter_backends=[],
|
||||
permission_classes=[IsAuthenticated],
|
||||
)
|
||||
def redeem_token_authenticated(self, request: Request) -> Response:
|
||||
"""Redeem a plex token for an authenticated user, creating a connection"""
|
||||
source: PlexSource = get_object_or_404(
|
||||
PlexSource, slug=request.query_params.get("slug", "")
|
||||
)
|
||||
plex_token = request.data.get("plex_token", None)
|
||||
if not plex_token:
|
||||
raise ValidationError("No plex token given")
|
||||
auth_api = PlexAuth(source, plex_token)
|
||||
user_info, identifier = auth_api.get_user_info()
|
||||
# Check friendship first, then check server overlay
|
||||
friends_allowed = False
|
||||
if source.allow_friends:
|
||||
owner_api = PlexAuth(source, source.plex_token)
|
||||
friends_allowed = owner_api.check_friends_overlap(identifier)
|
||||
servers_allowed = auth_api.check_server_overlap()
|
||||
if any([friends_allowed, servers_allowed]):
|
||||
PlexSourceConnection.objects.create(
|
||||
plex_token=plex_token,
|
||||
user=request.user,
|
||||
identifier=identifier,
|
||||
source=source,
|
||||
)
|
||||
return Response(status=204)
|
||||
LOGGER.warning(
|
||||
"Denying plex connection because no server overlay and no friends and not owner",
|
||||
user=user_info["username"],
|
||||
friends_allowed=friends_allowed,
|
||||
servers_allowed=servers_allowed,
|
||||
)
|
||||
raise PermissionDenied("Access denied.")
|
||||
|
||||
@ -22,6 +22,9 @@ class PlexSourceConnectionSerializer(SourceSerializer):
|
||||
"identifier",
|
||||
"plex_token",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"user": {"read_only": True},
|
||||
}
|
||||
|
||||
|
||||
class PlexSourceConnectionViewSet(
|
||||
@ -39,3 +42,4 @@ class PlexSourceConnectionViewSet(
|
||||
filterset_fields = ["source__slug"]
|
||||
permission_classes = [OwnerPermissions]
|
||||
filter_backends = [OwnerFilter, DjangoFilterBackend, OrderingFilter, SearchFilter]
|
||||
ordering = ["pk"]
|
||||
|
||||
@ -83,6 +83,7 @@ class PlexSource(Source):
|
||||
data={
|
||||
"title": f"Plex {self.name}",
|
||||
"component": "ak-user-settings-source-plex",
|
||||
"configure_url": self.client_id,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@@ -36,7 +36,7 @@ class PlexAuth:
        return {
            "X-Plex-Product": "authentik",
            "X-Plex-Version": __version__,
            "X-Plex-Device-Vendor": "BeryJu.org",
            "X-Plex-Device-Vendor": "goauthentik.io",
        }

    def get_resources(self) -> list[dict]:
@@ -96,6 +96,21 @@ class PlexAuth:
                return True
        return False

    def check_friends_overlap(self, user_ident: int) -> bool:
        """Check if the user is a friend of the owner, or the owner themselves"""
        friends_allowed = False
        _, owner_id = self.get_user_info()
        owner_friends = self.get_friends()
        for friend in owner_friends:
            if int(friend.get("id", "0")) == user_ident:
                friends_allowed = True
                LOGGER.info(
                    "allowing user for plex because of friend",
                    user=user_ident,
                )
        owner_allowed = owner_id == user_ident
        return any([friends_allowed, owner_allowed])


class PlexSourceFlowManager(SourceFlowManager):
    """Flow manager for plex sources"""
@ -3,6 +3,7 @@ from requests import RequestException
|
||||
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.root.celery import CELERY_APP
|
||||
from authentik.sources.plex.models import PlexSource
|
||||
from authentik.sources.plex.plex import PlexAuth
|
||||
@ -31,7 +32,7 @@ def check_plex_token(self: MonitoredTask, source_slug: int):
|
||||
self.set_status(
|
||||
TaskResult(
|
||||
TaskResultStatus.ERROR,
|
||||
["Plex token is invalid/an error occurred:", str(exc)],
|
||||
["Plex token is invalid/an error occurred:", exception_to_string(exc)],
|
||||
)
|
||||
)
|
||||
Event.new(
|
||||
|
||||
@ -84,7 +84,9 @@ class AuthenticatorDuoStageView(ChallengeStageView):
|
||||
self.request.session.pop(SESSION_KEY_DUO_USER_ID)
|
||||
self.request.session.pop(SESSION_KEY_DUO_ACTIVATION_CODE)
|
||||
if not existing_device:
|
||||
DuoDevice.objects.create(user=self.get_pending_user(), duo_user_id=user_id, stage=stage)
|
||||
DuoDevice.objects.create(
|
||||
name="Duo Device", user=self.get_pending_user(), duo_user_id=user_id, stage=stage
|
||||
)
|
||||
else:
|
||||
return self.executor.stage_invalid("Device with Credential ID already exists.")
|
||||
return self.executor.stage_ok()
|
||||
|
||||
@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.8 on 2021-10-31 16:44
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_stages_authenticator_sms", "0001_squashed_0004_auto_20211014_0936"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="authenticatorsmsstage",
|
||||
name="from_number",
|
||||
field=models.TextField(),
|
||||
),
|
||||
]
|
||||
@ -101,7 +101,7 @@ class AuthenticatorSMSStageView(ChallengeStageView):
|
||||
stage: AuthenticatorSMSStage = self.executor.current_stage
|
||||
|
||||
if SESSION_SMS_DEVICE not in self.request.session:
|
||||
device = SMSDevice(user=user, confirmed=False, stage=stage)
|
||||
device = SMSDevice(user=user, confirmed=False, stage=stage, name="SMS Device")
|
||||
device.generate_token(commit=False)
|
||||
if phone_number := self._has_phone_number():
|
||||
device.phone_number = phone_number
|
||||
|
||||
@ -55,7 +55,7 @@ class AuthenticatorStaticStageView(ChallengeStageView):
|
||||
stage: AuthenticatorStaticStage = self.executor.current_stage
|
||||
|
||||
if SESSION_STATIC_DEVICE not in self.request.session:
|
||||
device = StaticDevice(user=user, confirmed=True)
|
||||
device = StaticDevice(user=user, confirmed=True, name="Static Token")
|
||||
tokens = []
|
||||
for _ in range(0, stage.token_count):
|
||||
tokens.append(StaticToken(device=device, token=StaticToken.random_token()))
|
||||
|
||||
@ -81,7 +81,9 @@ class AuthenticatorTOTPStageView(ChallengeStageView):
|
||||
stage: AuthenticatorTOTPStage = self.executor.current_stage
|
||||
|
||||
if SESSION_TOTP_DEVICE not in self.request.session:
|
||||
device = TOTPDevice(user=user, confirmed=True, digits=stage.digits)
|
||||
device = TOTPDevice(
|
||||
user=user, confirmed=True, digits=stage.digits, name="TOTP Authenticator"
|
||||
)
|
||||
|
||||
self.request.session[SESSION_TOTP_DEVICE] = device
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
@ -27,6 +27,8 @@ def default_device_classes() -> list:
|
||||
DeviceClasses.STATIC,
|
||||
DeviceClasses.TOTP,
|
||||
DeviceClasses.WEBAUTHN,
|
||||
DeviceClasses.DUO,
|
||||
DeviceClasses.SMS,
|
||||
]
|
||||
|
||||
|
||||
|
||||
@ -95,7 +95,8 @@ class AuthenticatorValidateStageView(ChallengeStageView):
|
||||
def get_device_challenges(self) -> list[dict]:
|
||||
"""Get a list of all device challenges applicable for the current stage"""
|
||||
challenges = []
|
||||
user_devices = devices_for_user(self.get_pending_user())
|
||||
# Convert to a list to have usable log output instead of just <generator ...>
|
||||
user_devices = list(devices_for_user(self.get_pending_user()))
|
||||
LOGGER.debug("Got devices for user", devices=user_devices)
|
||||
|
||||
# static and totp are only shown once
|
||||
|
||||
@ -75,6 +75,7 @@ class AuthenticatorValidateStageTests(APITestCase):
|
||||
},
|
||||
"user_fields": ["username"],
|
||||
"sources": [],
|
||||
"show_source_labels": False,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@ -136,6 +136,7 @@ class AuthenticatorWebAuthnStageView(ChallengeStageView):
|
||||
credential_id=bytes_to_base64url(webauthn_credential.credential_id),
|
||||
sign_count=webauthn_credential.sign_count,
|
||||
rp_id=get_rp_id(self.request),
|
||||
name="WebAuthn Device",
|
||||
)
|
||||
else:
|
||||
return self.executor.stage_invalid("Device with Credential ID already exists.")
|
||||
|
||||
@ -20,6 +20,7 @@ class IdentificationStageSerializer(StageSerializer):
|
||||
"enrollment_flow",
|
||||
"recovery_flow",
|
||||
"sources",
|
||||
"show_source_labels",
|
||||
]
|
||||
|
||||
|
||||
@ -35,5 +36,6 @@ class IdentificationStageViewSet(UsedByMixin, ModelViewSet):
|
||||
"show_matched_user",
|
||||
"enrollment_flow",
|
||||
"recovery_flow",
|
||||
"show_source_labels",
|
||||
]
|
||||
ordering = ["name"]
|
||||
|
||||
@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.8 on 2021-10-31 16:44
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_stages_identification", "0011_alter_identificationstage_user_fields"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="identificationstage",
|
||||
name="show_source_labels",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@ -81,6 +81,7 @@ class IdentificationStage(Stage):
|
||||
sources = models.ManyToManyField(
|
||||
Source, default=list, help_text=_("Specify which sources should be shown.")
|
||||
)
|
||||
show_source_labels = models.BooleanField(default=False)
|
||||
|
||||
@property
|
||||
def serializer(self) -> BaseSerializer:
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
"""Identification stage logic"""
|
||||
from dataclasses import asdict
|
||||
from random import SystemRandom
|
||||
from time import sleep
|
||||
from typing import Any, Optional
|
||||
|
||||
@ -15,10 +16,16 @@ from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.core.models import Application, Source, User
|
||||
from authentik.flows.challenge import Challenge, ChallengeResponse, ChallengeTypes
|
||||
from authentik.flows.challenge import (
|
||||
Challenge,
|
||||
ChallengeResponse,
|
||||
ChallengeTypes,
|
||||
RedirectChallenge,
|
||||
)
|
||||
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
|
||||
from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, ChallengeStageView
|
||||
from authentik.flows.views.executor import SESSION_KEY_APPLICATION_PRE, challenge_types
|
||||
from authentik.flows.views.executor import SESSION_KEY_APPLICATION_PRE
|
||||
from authentik.sources.plex.models import PlexAuthenticationChallenge
|
||||
from authentik.stages.identification.models import IdentificationStage
|
||||
from authentik.stages.identification.signals import identification_failed
|
||||
from authentik.stages.password.stage import authenticate
|
||||
@ -28,8 +35,11 @@ LOGGER = get_logger()
|
||||
|
||||
@extend_schema_field(
|
||||
PolymorphicProxySerializer(
|
||||
component_name="ChallengeTypes",
|
||||
serializers=challenge_types(),
|
||||
component_name="LoginChallengeTypes",
|
||||
serializers={
|
||||
RedirectChallenge().fields["component"].default: RedirectChallenge,
|
||||
PlexAuthenticationChallenge().fields["component"].default: PlexAuthenticationChallenge,
|
||||
},
|
||||
resource_type_field_name="component",
|
||||
)
|
||||
)
|
||||
@ -57,6 +67,7 @@ class IdentificationChallenge(Challenge):
|
||||
recovery_url = CharField(required=False)
|
||||
primary_action = CharField()
|
||||
sources = LoginSourceSerializer(many=True, required=False)
|
||||
show_source_labels = BooleanField()
|
||||
|
||||
component = CharField(default="ak-stage-identification")
|
||||
|
||||
@@ -77,7 +88,8 @@ class IdentificationChallengeResponse(ChallengeResponse):

        pre_user = self.stage.get_user(uid_field)
        if not pre_user:
            sleep(0.150)
            # Sleep a random time (between 90 and 210ms) to "prevent" user enumeration attacks
            sleep(0.030 * SystemRandom().randint(3, 7))
            LOGGER.debug("invalid_login", identifier=uid_field)
            identification_failed.send(sender=self, request=self.stage.request, uid_field=uid_field)
            # We set the pending_user even on failure so it's part of the context, even
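A standalone sketch of the randomized delay (the helper name is ours): a failed username lookup now costs a random 90-210 ms rather than a fixed 150 ms, so response timing is a weaker oracle for whether an account exists.

# Standalone sketch of the anti-enumeration delay above; helper name is ours.
from random import SystemRandom
from time import sleep

def delay_failed_lookup() -> None:
    sleep(0.030 * SystemRandom().randint(3, 7))  # 3..7 * 30 ms = 90-210 ms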
@ -152,6 +164,7 @@ class IdentificationStageView(ChallengeStageView):
|
||||
"component": "ak-stage-identification",
|
||||
"user_fields": current_stage.user_fields,
|
||||
"password_fields": bool(current_stage.password_stage),
|
||||
"show_source_labels": current_stage.show_source_labels,
|
||||
}
|
||||
)
|
||||
# If the user has been redirected to us whilst trying to access an
|
||||
|
||||
@ -123,6 +123,7 @@ class TestIdentificationStage(APITestCase):
|
||||
"name": "test",
|
||||
}
|
||||
],
|
||||
"show_source_labels": False,
|
||||
"user_fields": ["email"],
|
||||
},
|
||||
)
|
||||
@ -158,6 +159,7 @@ class TestIdentificationStage(APITestCase):
|
||||
{"code": "invalid", "string": "Failed to " "authenticate."}
|
||||
]
|
||||
},
|
||||
"show_source_labels": False,
|
||||
"flow_info": {
|
||||
"background": self.flow.background_url,
|
||||
"cancel_url": reverse("authentik_flows:cancel"),
|
||||
@ -218,6 +220,7 @@ class TestIdentificationStage(APITestCase):
|
||||
"authentik_core:if-flow",
|
||||
kwargs={"flow_slug": "unique-enrollment-string"},
|
||||
),
|
||||
"show_source_labels": False,
|
||||
"primary_action": "Log in",
|
||||
"flow_info": {
|
||||
"background": flow.background_url,
|
||||
@ -267,6 +270,7 @@ class TestIdentificationStage(APITestCase):
|
||||
"authentik_core:if-flow",
|
||||
kwargs={"flow_slug": "unique-recovery-string"},
|
||||
),
|
||||
"show_source_labels": False,
|
||||
"primary_action": "Log in",
|
||||
"flow_info": {
|
||||
"background": flow.background_url,
|
||||
|
||||
@ -1,4 +1,6 @@
|
||||
"""Invitation Stage API Views"""
|
||||
from django_filters.filters import BooleanFilter
|
||||
from django_filters.filterset import FilterSet
|
||||
from rest_framework.fields import JSONField
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
@ -21,12 +23,23 @@ class InvitationStageSerializer(StageSerializer):
|
||||
]
|
||||
|
||||
|
||||
class InvitationStageFilter(FilterSet):
|
||||
"""invitation filter"""
|
||||
|
||||
no_flows = BooleanFilter("flow", "isnull")
|
||||
|
||||
class Meta:
|
||||
|
||||
model = InvitationStage
|
||||
fields = ["name", "no_flows", "continue_flow_without_invitation", "stage_uuid"]
|
||||
|
||||
|
||||
class InvitationStageViewSet(UsedByMixin, ModelViewSet):
|
||||
"""InvitationStage Viewset"""
|
||||
|
||||
queryset = InvitationStage.objects.all()
|
||||
serializer_class = InvitationStageSerializer
|
||||
filterset_fields = "__all__"
|
||||
filterset_class = InvitationStageFilter
|
||||
ordering = ["name"]
|
||||
|
||||
|
||||
@ -53,7 +66,7 @@ class InvitationViewSet(UsedByMixin, ModelViewSet):
|
||||
|
||||
queryset = Invitation.objects.all()
|
||||
serializer_class = InvitationSerializer
|
||||
order = ["-expires"]
|
||||
ordering = ["-expires"]
|
||||
search_fields = ["created_by__username", "expires"]
|
||||
filterset_fields = ["created_by__username", "expires"]
|
||||
|
||||
|
||||
@ -4,7 +4,6 @@ from typing import Optional
|
||||
from deepmerge import always_merger
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.http.response import HttpResponseBadRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.flows.models import in_memory_stage
|
||||
@ -50,7 +49,12 @@ class InvitationStageView(StageView):
|
||||
return self.executor.stage_ok()
|
||||
return self.executor.stage_invalid()
|
||||
|
||||
invite: Invitation = get_object_or_404(Invitation, pk=token)
|
||||
invite: Invitation = Invitation.objects.filter(pk=token).first()
|
||||
if not invite:
|
||||
LOGGER.debug("invalid invitation", token=token)
|
||||
if stage.continue_flow_without_invitation:
|
||||
return self.executor.stage_ok()
|
||||
return self.executor.stage_invalid()
|
||||
self.executor.plan.context[INVITATION_IN_EFFECT] = True
|
||||
self.executor.plan.context[INVITATION] = invite
|
||||
|
||||
@ -79,7 +83,9 @@ class InvitationFinalStageView(StageView):
|
||||
if not invitation:
|
||||
LOGGER.warning("InvitationFinalStageView stage called without invitation")
|
||||
return HttpResponseBadRequest
|
||||
if not invitation.single_use:
|
||||
return self.executor.stage_ok()
|
||||
invitation.delete()
|
||||
token = invitation.invite_uuid.hex
|
||||
if invitation.single_use:
|
||||
invitation.delete()
|
||||
LOGGER.debug("Deleted invitation", token=token)
|
||||
del self.executor.plan.context[INVITATION]
|
||||
return self.executor.stage_ok()
|
||||
|
||||
@ -69,7 +69,7 @@ class PasswordChallenge(WithUserInfoChallenge):
|
||||
class PasswordChallengeResponse(ChallengeResponse):
|
||||
"""Password challenge response"""
|
||||
|
||||
password = CharField()
|
||||
password = CharField(trim_whitespace=False)
|
||||
|
||||
component = CharField(default="ak-stage-password")
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
"""prompt models"""
from typing import Type
from typing import Any, Optional, Type
from uuid import uuid4

from django.db import models
@@ -13,6 +13,7 @@ from rest_framework.fields import (
    EmailField,
    HiddenField,
    IntegerField,
    ReadOnlyField,
)
from rest_framework.serializers import BaseSerializer

@@ -26,6 +27,10 @@ class FieldTypes(models.TextChoices):

    # Simple text field
    TEXT = "text", _("Text: Simple Text input")
    # Simple text field
    TEXT_READ_ONLY = "text_read_only", _(
        "Text (read-only): Simple Text input, but cannot be edited."
    )
    # Same as text, but has autocomplete for password managers
    USERNAME = (
        "username",
@@ -74,13 +79,16 @@ class Prompt(SerializerModel):

        return PromptSerializer

    @property
    def field(self) -> CharField:
    def field(self, default: Optional[Any]) -> CharField:
        """Get field type for Challenge and response"""
        field_class = CharField
        kwargs = {
            "required": self.required,
        }
        if self.type == FieldTypes.TEXT:
            kwargs["trim_whitespace"] = False
        if self.type == FieldTypes.TEXT_READ_ONLY:
            field_class = ReadOnlyField
        if self.type == FieldTypes.EMAIL:
            field_class = EmailField
        if self.type == FieldTypes.NUMBER:
@@ -97,12 +105,14 @@ class Prompt(SerializerModel):
        if self.type == FieldTypes.DATE_TIME:
            field_class = DateTimeField
        if self.type == FieldTypes.STATIC:
            kwargs["initial"] = self.placeholder
            kwargs["default"] = self.placeholder
            kwargs["required"] = False
            kwargs["label"] = ""
        if self.type == FieldTypes.SEPARATOR:
            kwargs["required"] = False
            kwargs["label"] = ""
        if default:
            kwargs["default"] = default
        return field_class(**kwargs)

    def save(self, *args, **kwargs):
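What the new field(default) signature enables, reduced to a sketch (helper name and values are ours): the prompt stage can hand in a value already stored in the flow plan context, and the generated DRF field falls back to it when the client omits the key. DRF forbids combining required with default, so a pre-filled prompt effectively becomes optional.

# Reduced sketch of the new default handling; helper name and values are ours.
from rest_framework.fields import CharField

def prompt_field(required: bool, default=None) -> CharField:
    kwargs = {"required": required}
    if default is not None:
        # DRF forbids setting both `required` and `default`, so a pre-filled
        # prompt becomes optional and falls back to the context value.
        kwargs["required"] = False
        kwargs["default"] = default
    return CharField(**kwargs)

field = prompt_field(required=True, default="jane.doe")  # e.g. plan.context["prompt_data"]["username"]
print(field.default)  # jane.doe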
@ -8,7 +8,7 @@ from django.http import HttpRequest, HttpResponse
|
||||
from django.http.request import QueryDict
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from guardian.shortcuts import get_anonymous_user
|
||||
from rest_framework.fields import BooleanField, CharField, IntegerField
|
||||
from rest_framework.fields import BooleanField, CharField, ChoiceField, IntegerField
|
||||
from rest_framework.serializers import ValidationError
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
@ -31,7 +31,7 @@ class StagePromptSerializer(PassiveSerializer):
|
||||
|
||||
field_key = CharField()
|
||||
label = CharField(allow_blank=True)
|
||||
type = CharField()
|
||||
type = ChoiceField(choices=FieldTypes.choices)
|
||||
required = BooleanField()
|
||||
placeholder = CharField(allow_blank=True)
|
||||
order = IntegerField()
|
||||
@ -65,7 +65,8 @@ class PromptChallengeResponse(ChallengeResponse):
|
||||
fields = list(self.stage.fields.all())
|
||||
for field in fields:
|
||||
field: Prompt
|
||||
self.fields[field.field_key] = field.field
|
||||
current = plan.context.get(PLAN_CONTEXT_PROMPT, {}).get(field.field_key)
|
||||
self.fields[field.field_key] = field.field(current)
|
||||
# Special handling for fields with username type
|
||||
# these check for existing users with the same username
|
||||
if field.type == FieldTypes.USERNAME:
|
||||
@ -96,10 +97,11 @@ class PromptChallengeResponse(ChallengeResponse):
|
||||
# Check if we have any static or hidden fields, and ensure they
|
||||
# still have the same value
|
||||
static_hidden_fields: QuerySet[Prompt] = self.stage.fields.filter(
|
||||
type__in=[FieldTypes.HIDDEN, FieldTypes.STATIC]
|
||||
type__in=[FieldTypes.HIDDEN, FieldTypes.STATIC, FieldTypes.TEXT_READ_ONLY]
|
||||
)
|
||||
for static_hidden in static_hidden_fields:
|
||||
attrs[static_hidden.field_key] = static_hidden.placeholder
|
||||
field = self.fields[static_hidden.field_key]
|
||||
attrs[static_hidden.field_key] = field.default
|
||||
|
||||
# Check if we have two password fields, and make sure they are the same
|
||||
password_fields: QuerySet[Prompt] = self.stage.fields.filter(type=FieldTypes.PASSWORD)
|
||||
@ -163,10 +165,17 @@ class PromptStageView(ChallengeStageView):
|
||||
|
||||
def get_challenge(self, *args, **kwargs) -> Challenge:
|
||||
fields = list(self.executor.current_stage.fields.all().order_by("order"))
|
||||
serializers = []
|
||||
context_prompt = self.executor.plan.context.get(PLAN_CONTEXT_PROMPT, {})
|
||||
for field in fields:
|
||||
data = StagePromptSerializer(field).data
|
||||
if field.field_key in context_prompt:
|
||||
data["placeholder"] = context_prompt.get(field.field_key)
|
||||
serializers.append(data)
|
||||
challenge = PromptChallenge(
|
||||
data={
|
||||
"type": ChallengeTypes.NATIVE.value,
|
||||
"fields": [StagePromptSerializer(field).data for field in fields],
|
||||
"fields": serializers,
|
||||
},
|
||||
)
|
||||
return challenge
|
||||
|
||||
@ -21,31 +21,32 @@ var running = true
|
||||
func main() {
|
||||
log.SetLevel(log.DebugLevel)
|
||||
log.SetFormatter(&log.JSONFormatter{})
|
||||
l := log.WithField("logger", "authentik.root")
|
||||
config.DefaultConfig()
|
||||
err := config.LoadConfig("./authentik/lib/default.yml")
|
||||
if err != nil {
|
||||
log.WithError(err).Warning("failed to load default config")
|
||||
l.WithError(err).Warning("failed to load default config")
|
||||
}
|
||||
err = config.LoadConfig("./local.env.yml")
|
||||
if err != nil {
|
||||
log.WithError(err).Debug("no local config to load")
|
||||
l.WithError(err).Debug("no local config to load")
|
||||
}
|
||||
err = config.FromEnv()
|
||||
if err != nil {
|
||||
log.WithError(err).Debug("failed to environment variables")
|
||||
l.WithError(err).Debug("failed to environment variables")
|
||||
}
|
||||
config.ConfigureLogger()
|
||||
|
||||
if config.G.ErrorReporting.Enabled {
|
||||
err := sentry.Init(sentry.ClientOptions{
|
||||
Dsn: "https://a579bb09306d4f8b8d8847c052d3a1d3@sentry.beryju.org/8",
|
||||
Dsn: config.G.ErrorReporting.DSN,
|
||||
AttachStacktrace: true,
|
||||
TracesSampleRate: 0.6,
|
||||
Release: fmt.Sprintf("authentik@%s", constants.VERSION),
|
||||
Environment: config.G.ErrorReporting.Environment,
|
||||
})
|
||||
if err != nil {
|
||||
log.WithError(err).Warning("failed to init sentry")
|
||||
l.WithError(err).Warning("failed to init sentry")
|
||||
}
|
||||
}
|
||||
|
||||
@ -56,31 +57,31 @@ func main() {
|
||||
|
||||
g := gounicorn.NewGoUnicorn()
|
||||
ws := web.NewWebServer(g)
|
||||
defer g.Kill()
|
||||
defer ws.Shutdown()
|
||||
g.HealthyCallback = func() {
|
||||
if !config.G.Web.DisableEmbeddedOutpost {
|
||||
go attemptProxyStart(ws, u)
|
||||
}
|
||||
}
|
||||
go web.RunMetricsServer()
|
||||
for {
|
||||
go attemptStartBackend(g)
|
||||
ws.Start()
|
||||
if !config.G.Web.DisableEmbeddedOutpost {
|
||||
go attemptProxyStart(ws, u)
|
||||
}
|
||||
|
||||
<-ex
|
||||
running = false
|
||||
log.WithField("logger", "authentik").Info("shutting down webserver")
|
||||
l.WithField("logger", "authentik").Info("shutting down gunicorn")
|
||||
go g.Kill()
|
||||
l.WithField("logger", "authentik").Info("shutting down webserver")
|
||||
go ws.Shutdown()
|
||||
log.WithField("logger", "authentik").Info("killing gunicorn")
|
||||
g.Kill()
|
||||
}
|
||||
}
|
||||
|
||||
func attemptStartBackend(g *gounicorn.GoUnicorn) {
|
||||
for {
|
||||
err := g.Start()
|
||||
if !running {
|
||||
return
|
||||
}
|
||||
err := g.Start()
|
||||
log.WithField("logger", "authentik.router").WithError(err).Warning("gunicorn process died, restarting")
|
||||
}
|
||||
}
|
||||
@ -88,8 +89,6 @@ func attemptStartBackend(g *gounicorn.GoUnicorn) {
|
||||
func attemptProxyStart(ws *web.WebServer, u *url.URL) {
|
||||
maxTries := 100
|
||||
attempt := 0
|
||||
// Sleep to wait for the app server to start
|
||||
time.Sleep(30 * time.Second)
|
||||
for {
|
||||
log.WithField("logger", "authentik").Debug("attempting to init outpost")
|
||||
ac := ak.NewAPIController(*u, config.G.SecretKey)
|
||||
|
||||
@ -17,7 +17,7 @@ services:
|
||||
image: redis:alpine
|
||||
restart: unless-stopped
|
||||
server:
|
||||
image: ${AUTHENTIK_IMAGE:-goauthentik.io/server}:${AUTHENTIK_TAG:-2021.10.1-rc3}
|
||||
image: ${AUTHENTIK_IMAGE:-goauthentik.io/server}:${AUTHENTIK_TAG:-2021.10.4}
|
||||
restart: unless-stopped
|
||||
command: server
|
||||
environment:
|
||||
@ -38,7 +38,7 @@ services:
|
||||
- "0.0.0.0:9000:9000"
|
||||
- "0.0.0.0:9443:9443"
|
||||
worker:
|
||||
image: ${AUTHENTIK_IMAGE:-goauthentik.io/server}:${AUTHENTIK_TAG:-2021.10.1-rc3}
|
||||
image: ${AUTHENTIK_IMAGE:-goauthentik.io/server}:${AUTHENTIK_TAG:-2021.10.4}
|
||||
restart: unless-stopped
|
||||
command: worker
|
||||
environment:
|
||||
|
||||
13	go.mod
@ -9,12 +9,8 @@ require (
|
||||
github.com/garyburd/redigo v1.6.2 // indirect
|
||||
github.com/getsentry/sentry-go v0.11.0
|
||||
github.com/go-ldap/ldap/v3 v3.4.1
|
||||
github.com/go-openapi/analysis v0.20.1 // indirect
|
||||
github.com/go-openapi/errors v0.20.0 // indirect
|
||||
github.com/go-openapi/runtime v0.20.0
|
||||
github.com/go-openapi/strfmt v0.20.3
|
||||
github.com/go-openapi/swag v0.19.15 // indirect
|
||||
github.com/go-openapi/validate v0.20.2 // indirect
|
||||
github.com/go-openapi/runtime v0.21.0
|
||||
github.com/go-openapi/strfmt v0.21.0
|
||||
github.com/golang-jwt/jwt v3.2.2+incompatible
|
||||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/google/uuid v1.3.0
|
||||
@ -26,15 +22,14 @@ require (
|
||||
github.com/imdario/mergo v0.3.12
|
||||
github.com/mailru/easyjson v0.7.7 // indirect
|
||||
github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484
|
||||
github.com/nmcclain/ldap v0.0.0-20191021200707-3b3b69a7e9e3
|
||||
github.com/nmcclain/ldap v0.0.0-20210720162743-7f8d1e44eeba
|
||||
github.com/pires/go-proxyproto v0.6.1
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/pquerna/cachecontrol v0.0.0-20201205024021-ac21108117ac // indirect
|
||||
github.com/prometheus/client_golang v1.11.0
|
||||
github.com/recws-org/recws v1.3.1
|
||||
github.com/sirupsen/logrus v1.8.1
|
||||
go.mongodb.org/mongo-driver v1.5.2 // indirect
|
||||
goauthentik.io/api v0.2021101.4
|
||||
goauthentik.io/api v0.2021103.2
|
||||
golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2 // indirect
|
||||
golang.org/x/net v0.0.0-20210510120150-4163338589ed // indirect
|
||||
golang.org/x/oauth2 v0.0.0-20210323180902-22b0adad7558
|
||||
|
||||
44	go.sum
@ -150,8 +150,8 @@ github.com/go-openapi/errors v0.19.6/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpX
|
||||
github.com/go-openapi/errors v0.19.7/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
|
||||
github.com/go-openapi/errors v0.19.8/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
|
||||
github.com/go-openapi/errors v0.19.9/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
|
||||
github.com/go-openapi/errors v0.20.0 h1:Sxpo9PjEHDzhs3FbnGNonvDgWcMW2U7wGTcDDSFSceM=
|
||||
github.com/go-openapi/errors v0.20.0/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
|
||||
github.com/go-openapi/errors v0.20.1 h1:j23mMDtRxMwIobkpId7sWh7Ddcx4ivaoqUbfXx5P+a8=
|
||||
github.com/go-openapi/errors v0.20.1/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
|
||||
github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M=
|
||||
github.com/go-openapi/jsonpointer v0.18.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M=
|
||||
github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg=
|
||||
@ -162,8 +162,9 @@ github.com/go-openapi/jsonreference v0.17.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3Hfo
|
||||
github.com/go-openapi/jsonreference v0.18.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I=
|
||||
github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc=
|
||||
github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8=
|
||||
github.com/go-openapi/jsonreference v0.19.5 h1:1WJP/wi4OjB4iV8KVbH73rQaoialJrqv8gitZLxGLtM=
|
||||
github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg=
|
||||
github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs=
|
||||
github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns=
|
||||
github.com/go-openapi/loads v0.17.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU=
|
||||
github.com/go-openapi/loads v0.18.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU=
|
||||
github.com/go-openapi/loads v0.19.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU=
|
||||
@ -173,16 +174,17 @@ github.com/go-openapi/loads v0.19.5/go.mod h1:dswLCAdonkRufe/gSUC3gN8nTSaB9uaS2e
|
||||
github.com/go-openapi/loads v0.19.6/go.mod h1:brCsvE6j8mnbmGBh103PT/QLHfbyDxA4hsKvYBNEGVc=
|
||||
github.com/go-openapi/loads v0.19.7/go.mod h1:brCsvE6j8mnbmGBh103PT/QLHfbyDxA4hsKvYBNEGVc=
|
||||
github.com/go-openapi/loads v0.20.0/go.mod h1:2LhKquiE513rN5xC6Aan6lYOSddlL8Mp20AW9kpviM4=
|
||||
github.com/go-openapi/loads v0.20.2 h1:z5p5Xf5wujMxS1y8aP+vxwW5qYT2zdJBbXKmQUG3lcc=
|
||||
github.com/go-openapi/loads v0.20.2/go.mod h1:hTVUotJ+UonAMMZsvakEgmWKgtulweO9vYP2bQYKA/o=
|
||||
github.com/go-openapi/loads v0.21.0 h1:jYtUO4wwP7psAweisP/MDoOpdzsYEESdoPcsWjHDR68=
|
||||
github.com/go-openapi/loads v0.21.0/go.mod h1:rHYve9nZrQ4CJhyeIIFJINGCg1tQpx2yJrrNo8sf1ws=
|
||||
github.com/go-openapi/runtime v0.0.0-20180920151709-4f900dc2ade9/go.mod h1:6v9a6LTXWQCdL8k1AO3cvqx5OtZY/Y9wKTgaoP6YRfA=
|
||||
github.com/go-openapi/runtime v0.19.0/go.mod h1:OwNfisksmmaZse4+gpV3Ne9AyMOlP1lt4sK4FXt0O64=
|
||||
github.com/go-openapi/runtime v0.19.4/go.mod h1:X277bwSUBxVlCYR3r7xgZZGKVvBd/29gLDlFGtJ8NL4=
|
||||
github.com/go-openapi/runtime v0.19.15/go.mod h1:dhGWCTKRXlAfGnQG0ONViOZpjfg0m2gUt9nTQPQZuoo=
|
||||
github.com/go-openapi/runtime v0.19.16/go.mod h1:5P9104EJgYcizotuXhEuUrzVc+j1RiSjahULvYmlv98=
|
||||
github.com/go-openapi/runtime v0.19.24/go.mod h1:Lm9YGCeecBnUUkFTxPC4s1+lwrkJ0pthx8YvyjCfkgk=
|
||||
github.com/go-openapi/runtime v0.20.0 h1:DEV4oYH28MqakaabtbxH0cjvlzFegi/15kfUVCfiZW0=
|
||||
github.com/go-openapi/runtime v0.20.0/go.mod h1:2WnLRxMiOUWNN0UZskSkxW0+WXdfB1KmqRKCFH+ZWYk=
|
||||
github.com/go-openapi/runtime v0.21.0 h1:giZ8eT26R+/rx6RX2MkYjZPY8vPYVKDhP/mOazrQHzM=
|
||||
github.com/go-openapi/runtime v0.21.0/go.mod h1:aQg+kaIQEn+A2CRSY1TxbM8+sT9g2V3aLc1FbIAnbbs=
|
||||
github.com/go-openapi/spec v0.17.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI=
|
||||
github.com/go-openapi/spec v0.18.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI=
|
||||
github.com/go-openapi/spec v0.19.2/go.mod h1:sCxk3jxKgioEJikev4fgkNmwS+3kuYdJtcsZsD5zxMY=
|
||||
@ -192,8 +194,9 @@ github.com/go-openapi/spec v0.19.8/go.mod h1:Hm2Jr4jv8G1ciIAo+frC/Ft+rR2kQDh8JHK
|
||||
github.com/go-openapi/spec v0.19.15/go.mod h1:+81FIL1JwC5P3/Iuuozq3pPE9dXdIEGxFutcFKaVbmU=
|
||||
github.com/go-openapi/spec v0.20.0/go.mod h1:+81FIL1JwC5P3/Iuuozq3pPE9dXdIEGxFutcFKaVbmU=
|
||||
github.com/go-openapi/spec v0.20.1/go.mod h1:93x7oh+d+FQsmsieroS4cmR3u0p/ywH649a3qwC9OsQ=
|
||||
github.com/go-openapi/spec v0.20.3 h1:uH9RQ6vdyPSs2pSy9fL8QPspDF2AMIMPtmK5coSSjtQ=
|
||||
github.com/go-openapi/spec v0.20.3/go.mod h1:gG4F8wdEDN+YPBMVnzE85Rbhf+Th2DTvA9nFPQ5AYEg=
|
||||
github.com/go-openapi/spec v0.20.4 h1:O8hJrt0UMnhHcluhIdUgCLRWyM2x7QkBXRvOs7m+O1M=
|
||||
github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I=
|
||||
github.com/go-openapi/strfmt v0.17.0/go.mod h1:P82hnJI0CXkErkXi8IKjPbNBM6lV6+5pLP5l494TcyU=
|
||||
github.com/go-openapi/strfmt v0.18.0/go.mod h1:P82hnJI0CXkErkXi8IKjPbNBM6lV6+5pLP5l494TcyU=
|
||||
github.com/go-openapi/strfmt v0.19.0/go.mod h1:+uW+93UVvGGq2qGaZxdDeJqSAqBqBdl+ZPMF/cC8nDY=
|
||||
@ -203,8 +206,9 @@ github.com/go-openapi/strfmt v0.19.4/go.mod h1:eftuHTlB/dI8Uq8JJOyRlieZf+WkkxUuk
|
||||
github.com/go-openapi/strfmt v0.19.5/go.mod h1:eftuHTlB/dI8Uq8JJOyRlieZf+WkkxUuk0dgdHXr2Qk=
|
||||
github.com/go-openapi/strfmt v0.19.11/go.mod h1:UukAYgTaQfqJuAFlNxxMWNvMYiwiXtLsF2VwmoFtbtc=
|
||||
github.com/go-openapi/strfmt v0.20.0/go.mod h1:UukAYgTaQfqJuAFlNxxMWNvMYiwiXtLsF2VwmoFtbtc=
|
||||
github.com/go-openapi/strfmt v0.20.3 h1:YVG4ZgPZ00km/lRHrIf7c6cKL5/4FAUtG2T9RxWAgDY=
|
||||
github.com/go-openapi/strfmt v0.20.3/go.mod h1:43urheQI9dNtE5lTZQfuFJvjYJKPrxicATpEfZwHUNk=
|
||||
github.com/go-openapi/strfmt v0.20.2/go.mod h1:43urheQI9dNtE5lTZQfuFJvjYJKPrxicATpEfZwHUNk=
|
||||
github.com/go-openapi/strfmt v0.21.0 h1:hX2qEZKmYks+t0hKeb4VTJpUm2UYsdL3+DCid5swxIs=
|
||||
github.com/go-openapi/strfmt v0.21.0/go.mod h1:ZRQ409bWMj+SOgXofQAGTIo2Ebu72Gs+WaRADcS5iNg=
|
||||
github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
|
||||
github.com/go-openapi/swag v0.18.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
|
||||
github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
|
||||
@ -223,8 +227,8 @@ github.com/go-openapi/validate v0.19.10/go.mod h1:RKEZTUWDkxKQxN2jDT7ZnZi2bhZlbN
|
||||
github.com/go-openapi/validate v0.19.12/go.mod h1:Rzou8hA/CBw8donlS6WNEUQupNvUZ0waH08tGe6kAQ4=
|
||||
github.com/go-openapi/validate v0.19.15/go.mod h1:tbn/fdOwYHgrhPBzidZfJC2MIVvs9GA7monOmWBbeCI=
|
||||
github.com/go-openapi/validate v0.20.1/go.mod h1:b60iJT+xNNLfaQJUqLI7946tYiFEOuE9E4k54HpKcJ0=
|
||||
github.com/go-openapi/validate v0.20.2 h1:AhqDegYV3J3iQkMPJSXkvzymHKMTw0BST3RK3hTT4ts=
|
||||
github.com/go-openapi/validate v0.20.2/go.mod h1:e7OJoKNgd0twXZwIn0A43tHbvIcr/rZIVCbJBpTUoY0=
|
||||
github.com/go-openapi/validate v0.20.3 h1:GZPPhhKSZrE8HjB4eEkoYAZmoWA4+tCemSgINH1/vKw=
|
||||
github.com/go-openapi/validate v0.20.3/go.mod h1:goDdqVGiigM3jChcrYJxD2joalke3ZXeftD16byIjA4=
|
||||
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||
github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
@ -375,6 +379,7 @@ github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+o
|
||||
github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
|
||||
github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
|
||||
github.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
@ -431,8 +436,8 @@ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWb
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484 h1:D9EvfGQvlkKaDr2CRKN++7HbSXbefUNDrPq60T+g24s=
|
||||
github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484/go.mod h1:O1EljZ+oHprtxDDPHiMWVo/5dBT6PlvWX5PSwj80aBA=
|
||||
github.com/nmcclain/ldap v0.0.0-20191021200707-3b3b69a7e9e3 h1:NNis9uuNpG5h97Dvxxo53Scg02qBg+3Nfabg6zjFGu8=
|
||||
github.com/nmcclain/ldap v0.0.0-20191021200707-3b3b69a7e9e3/go.mod h1:YtrVB1/v9Td9SyjXpjYVmbdKgj9B0nPTBsdGUxy0i8U=
|
||||
github.com/nmcclain/ldap v0.0.0-20210720162743-7f8d1e44eeba h1:DO8NFYdcRv1dnyAINJIBm6Bw2XibtLvQniNFGzf2W8E=
|
||||
github.com/nmcclain/ldap v0.0.0-20210720162743-7f8d1e44eeba/go.mod h1:4S0XndRL8HNOaQBfdViJ2F/GPCgL524xlXRuXFH12/U=
|
||||
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
@ -547,15 +552,15 @@ go.mongodb.org/mongo-driver v1.4.3/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4S
|
||||
go.mongodb.org/mongo-driver v1.4.4/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc=
|
||||
go.mongodb.org/mongo-driver v1.4.6/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc=
|
||||
go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw=
|
||||
go.mongodb.org/mongo-driver v1.5.2 h1:AsxOLoJTgP6YNM0fXWw4OjdluYmWzQYp+lFJL7xu9fU=
|
||||
go.mongodb.org/mongo-driver v1.5.2/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw=
|
||||
go.mongodb.org/mongo-driver v1.7.3 h1:G4l/eYY9VrQAK/AUgkV0koQKzQnyddnWxrd/Etf0jIs=
|
||||
go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg=
|
||||
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
|
||||
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
|
||||
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
goauthentik.io/api v0.2021101.4 h1:8IDZjW7ddBR0i551SS21PRXOG6FTAz9IUY4wFUw3naI=
|
||||
goauthentik.io/api v0.2021101.4/go.mod h1:02nnD4FRd8lu8A1+ZuzqownBgvAhdCKzqkKX8v7JMTE=
|
||||
goauthentik.io/api v0.2021103.2 h1:/Qzya7FJnrtp9vx/d/VJHPD9BcL565jXUlQPxaXw11k=
|
||||
goauthentik.io/api v0.2021103.2/go.mod h1:02nnD4FRd8lu8A1+ZuzqownBgvAhdCKzqkKX8v7JMTE=
|
||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
@ -644,6 +649,7 @@ golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwY
|
||||
golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
|
||||
golang.org/x/net v0.0.0-20210510120150-4163338589ed h1:p9UgmWI9wKpfYmgaV/IZKGdXc5qEK45tDwwwDyjS26I=
|
||||
golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
@ -709,6 +715,7 @@ golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40 h1:JWgyZ1qgdTaF3N3oxC+MdTV7qvEEgHo3otj+HB5CM7Q=
|
||||
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
@ -720,8 +727,9 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
|
||||
@ -26,6 +26,7 @@ func DefaultConfig() {
|
||||
LogLevel: "info",
|
||||
ErrorReporting: ErrorReportingConfig{
|
||||
Enabled: false,
|
||||
DSN: "https://a579bb09306d4f8b8d8847c052d3a1d3@sentry.beryju.org/8",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@ -42,4 +42,5 @@ type ErrorReportingConfig struct {
|
||||
Enabled bool `yaml:"enabled" env:"AUTHENTIK_ERROR_REPORTING__ENABLED"`
|
||||
Environment string `yaml:"environment" env:"AUTHENTIK_ERROR_REPORTING__ENVIRONMENT"`
|
||||
SendPII bool `yaml:"send_pii" env:"AUTHENTIK_ERROR_REPORTING__SEND_PII"`
|
||||
DSN string
|
||||
}
|
||||
|
||||
@@ -17,4 +17,4 @@ func OutpostUserAgent() string {
return fmt.Sprintf("authentik-outpost@%s (build=%s)", VERSION, BUILD())
}

const VERSION = "2021.10.1-rc3"
const VERSION = "2021.10.4"

@@ -4,6 +4,8 @@ import (
"net/http"
"os"
"os/exec"
"runtime"
"syscall"
"time"

log "github.com/sirupsen/logrus"
@@ -11,6 +13,8 @@ import (
)

type GoUnicorn struct {
HealthyCallback func()

log *log.Entry
p *exec.Cmd
started bool
@@ -21,10 +25,11 @@ type GoUnicorn struct {
func NewGoUnicorn() *GoUnicorn {
logger := log.WithField("logger", "authentik.router.unicorn")
g := &GoUnicorn{
log: logger,
started: false,
killed: false,
alive: false,
log: logger,
started: false,
killed: false,
alive: false,
HealthyCallback: func() {},
}
g.initCmd()
return g
@@ -46,7 +51,7 @@ func (g *GoUnicorn) IsRunning() bool {

func (g *GoUnicorn) Start() error {
if g.killed {
g.log.Debug("Not restarting gunicorn since we're killed")
g.log.Debug("Not restarting gunicorn since we're shutdown")
return nil
}
if g.started {
@@ -76,6 +81,7 @@ func (g *GoUnicorn) healthcheck() {
for range time.Tick(time.Second) {
if check() {
g.log.Info("backend is alive, backing off with healthchecks")
g.HealthyCallback()
break
}
g.log.Debug("backend not alive yet")
@@ -87,8 +93,15 @@ func (g *GoUnicorn) healthcheck() {

func (g *GoUnicorn) Kill() {
g.killed = true
err := g.p.Process.Kill()
var err error
if runtime.GOOS == "darwin" {
g.log.WithField("method", "kill").Warning("stopping gunicorn")
err = g.p.Process.Kill()
} else {
g.log.WithField("method", "sigterm").Warning("stopping gunicorn")
err = syscall.Kill(g.p.Process.Pid, syscall.SIGTERM)
}
if err != nil {
g.log.WithError(err).Warning("failed to kill gunicorn")
g.log.WithError(err).Warning("failed to stop gunicorn")
}
}

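Note on the Kill() change above: it swaps the unconditional hard kill for a SIGTERM on non-darwin platforms so gunicorn gets a chance to shut down gracefully, keeping Process.Kill() only on darwin. A minimal, unix-only sketch of that pattern in isolation, not part of this diff (the sleep child process and log wording are placeholders):

package main

import (
	"os/exec"
	"runtime"
	"syscall"

	log "github.com/sirupsen/logrus"
)

// stop prefers SIGTERM so the child can clean up; on darwin it falls back to
// a hard Kill, mirroring the branch in GoUnicorn.Kill above.
func stop(cmd *exec.Cmd) {
	var err error
	if runtime.GOOS == "darwin" {
		log.WithField("method", "kill").Warning("stopping child")
		err = cmd.Process.Kill()
	} else {
		log.WithField("method", "sigterm").Warning("stopping child")
		err = syscall.Kill(cmd.Process.Pid, syscall.SIGTERM)
	}
	if err != nil {
		log.WithError(err).Warning("failed to stop child")
	}
}

func main() {
	cmd := exec.Command("sleep", "30")
	if err := cmd.Start(); err != nil {
		log.WithError(err).Fatal("failed to start child")
	}
	stop(cmd)
	_ = cmd.Wait() // reap the child; it exits because of the signal
}
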
@@ -10,7 +10,6 @@ import (

"github.com/go-openapi/strfmt"
"github.com/google/uuid"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus"
"github.com/recws-org/recws"
"goauthentik.io/api"
@@ -35,7 +34,8 @@ type APIController struct {

logger *log.Entry

reloadOffset time.Duration
reloadOffset time.Duration
lastWsReconnect time.Time

wsConn *recws.RecConn
instanceUUID uuid.UUID
@@ -118,7 +118,7 @@ func (a *APIController) OnRefresh() error {
}
a.Outpost = outposts.Results[0]

log.WithField("name", a.Outpost.Name).Debug("Fetched outpost configuration")
a.logger.WithField("name", a.Outpost.Name).Debug("Fetched outpost configuration")
return a.Server.Refresh()
}

@@ -130,18 +130,10 @@ func (a *APIController) StartBackgorundTasks() error {
"version": constants.VERSION,
"build": constants.BUILD(),
}).Set(1)
err := a.OnRefresh()
if err != nil {
return errors.Wrap(err, "failed to run initial refresh")
} else {
LastUpdate.With(prometheus.Labels{
"uuid": a.instanceUUID.String(),
"outpost_name": a.Outpost.Name,
"outpost_type": a.Server.Type(),
"version": constants.VERSION,
"build": constants.BUILD(),
}).SetToCurrentTime()
}
go func() {
a.logger.Debug("Starting WS re-connector...")
a.startWSReConnector()
}()
go func() {
a.logger.Debug("Starting WS Handler...")
a.startWSHandler()

@@ -56,6 +56,7 @@ func (ac *APIController) initWS(akURL url.URL, outpostUUID strfmt.UUID) {
if err != nil {
ac.logger.WithField("logger", "authentik.outpost.ak-ws").WithError(err).Warning("Failed to hello to authentik")
}
ac.lastWsReconnect = time.Now()
}

// Shutdown Gracefully stops all workers, disconnects from websocket
@@ -69,6 +70,20 @@ func (ac *APIController) Shutdown() {
}
}

func (ac *APIController) startWSReConnector() {
for {
time.Sleep(time.Second * 5)
if ac.wsConn.IsConnected() {
continue
}
if time.Since(ac.lastWsReconnect).Seconds() > 30 {
ac.wsConn.CloseAndReconnect()
ac.logger.Info("Reconnecting websocket")
ac.lastWsReconnect = time.Now()
}
}
}

func (ac *APIController) startWSHandler() {
logger := ac.logger.WithField("loop", "ws-handler")
for {
@@ -80,8 +95,7 @@ func (ac *APIController) startWSHandler() {
"outpost_type": ac.Server.Type(),
"uuid": ac.instanceUUID.String(),
}).Set(0)
logger.WithError(err).Warning("ws write error, reconnecting")
ac.wsConn.CloseAndReconnect()
logger.WithError(err).Warning("ws read error")
time.Sleep(time.Second * 5)
continue
}
@@ -126,8 +140,7 @@ func (ac *APIController) startWSHealth() {
err := ac.wsConn.WriteJSON(aliveMsg)
ac.logger.WithField("loop", "ws-health").Trace("hello'd")
if err != nil {
ac.logger.WithField("loop", "ws-health").WithError(err).Warning("ws write error, reconnecting")
ac.wsConn.CloseAndReconnect()
ac.logger.WithField("loop", "ws-health").WithError(err).Warning("ws write error")
time.Sleep(time.Second * 5)
continue
} else {

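The startWSReConnector added above is a watchdog loop: poll the connection every five seconds, and once it has stayed disconnected for more than 30 seconds since the last reconnect, force a reconnect. A generic sketch of that loop under an assumed minimal connection interface rather than the recws client used here (wsConn, fakeConn, watchReconnect and the timings in main are illustrative only):

package main

import (
	"log"
	"time"
)

// wsConn is an assumed minimal interface; the recws client above exposes
// equivalent IsConnected/CloseAndReconnect methods.
type wsConn interface {
	IsConnected() bool
	CloseAndReconnect()
}

// watchReconnect polls conn every 5 seconds and forces a reconnect when it
// has stayed disconnected for more than 30 seconds since the last reconnect.
func watchReconnect(conn wsConn, lastReconnect time.Time) {
	for {
		time.Sleep(5 * time.Second)
		if conn.IsConnected() {
			continue
		}
		if time.Since(lastReconnect) > 30*time.Second {
			conn.CloseAndReconnect()
			log.Println("reconnecting websocket")
			lastReconnect = time.Now()
		}
	}
}

// fakeConn is a stand-in so the sketch compiles and runs on its own.
type fakeConn struct{ connected bool }

func (f *fakeConn) IsConnected() bool  { return f.connected }
func (f *fakeConn) CloseAndReconnect() { f.connected = true }

func main() {
	go watchReconnect(&fakeConn{}, time.Now().Add(-time.Minute))
	time.Sleep(6 * time.Second) // let the watchdog run one iteration, then exit
}
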
@@ -32,7 +32,7 @@ func doGlobalSetup(config map[string]interface{}) {
default:
log.SetLevel(log.DebugLevel)
}
log.WithField("buildHash", constants.BUILD()).WithField("version", constants.VERSION).Info("Starting authentik outpost")
log.WithField("logger", "authentik.outpost").WithField("hash", constants.BUILD()).WithField("version", constants.VERSION).Info("Starting authentik outpost")

sentryEnv := "customer-outpost"
sentryEnable := true

@@ -4,7 +4,6 @@ import (
"context"
"errors"
"fmt"
"net"
"net/http"
"net/http/cookiejar"
"net/url"
@@ -18,7 +17,6 @@ import (
"goauthentik.io/api"
"goauthentik.io/internal/constants"
"goauthentik.io/internal/outpost/ak"
"goauthentik.io/internal/utils"
)

type StageComponent string
@@ -103,8 +101,8 @@ type ChallengeInt interface {
GetResponseErrors() map[string][]api.ErrorDetail
}

func (fe *FlowExecutor) DelegateClientIP(a net.Addr) {
fe.cip = utils.GetIP(a)
func (fe *FlowExecutor) DelegateClientIP(a string) {
fe.cip = a
fe.api.GetConfig().AddDefaultHeader(HeaderAuthentikRemoteIP, fe.cip)
}

@@ -132,8 +130,8 @@ func (fe *FlowExecutor) getAnswer(stage StageComponent) string {

// WarmUp Ensure authentik's flow cache is warmed up
func (fe *FlowExecutor) WarmUp() error {
defer fe.sp.Finish()
gcsp := sentry.StartSpan(fe.Context, "authentik.outposts.flow_executor.get_challenge")
defer gcsp.Finish()
req := fe.api.FlowsApi.FlowsExecutorGet(gcsp.Context(), fe.flowSlug).Query(fe.Params.Encode())
_, _, err := req.Execute()
return err
@@ -192,6 +190,7 @@ func (fe *FlowExecutor) solveFlowChallenge(depth int) (bool, error) {
}
devId32 := int32(devId)
inner := api.NewAuthenticatorValidationChallengeResponseRequest()
inner.SelectedChallenge = (*api.DeviceChallengeRequest)(deviceChallenge)
inner.Duo = &devId32
responseReq = responseReq.FlowChallengeResponseRequest(api.AuthenticatorValidationChallengeResponseRequestAsFlowChallengeResponseRequest(inner))
case string(StageAccessDenied):

@@ -1,23 +0,0 @@
package ldap

import "crypto/tls"

func (ls *LDAPServer) getCertificates(info *tls.ClientHelloInfo) (*tls.Certificate, error) {
if len(ls.providers) == 1 {
if ls.providers[0].cert != nil {
ls.log.WithField("server-name", info.ServerName).Debug("We only have a single provider, using their cert")
return ls.providers[0].cert, nil
}
}
for _, provider := range ls.providers {
if provider.tlsServerName == &info.ServerName {
if provider.cert == nil {
ls.log.WithField("server-name", info.ServerName).Debug("Handler does not have a certificate")
return ls.defaultCert, nil
}
return provider.cert, nil
}
}
ls.log.WithField("server-name", info.ServerName).Debug("Fallback to default cert")
return ls.defaultCert, nil
}

@@ -1,44 +1,18 @@
package ldap

import (
"context"
"net"
"strings"

"github.com/getsentry/sentry-go"
"github.com/google/uuid"
"github.com/nmcclain/ldap"
"github.com/prometheus/client_golang/prometheus"
log "github.com/sirupsen/logrus"
"goauthentik.io/internal/outpost/ldap/bind"
"goauthentik.io/internal/outpost/ldap/metrics"
"goauthentik.io/internal/utils"
)

type BindRequest struct {
BindDN string
BindPW string
id string
conn net.Conn
log *log.Entry
ctx context.Context
}

func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LDAPResultCode, error) {
span := sentry.StartSpan(context.TODO(), "authentik.providers.ldap.bind",
sentry.TransactionName("authentik.providers.ldap.bind"))
rid := uuid.New().String()
span.SetTag("request_uid", rid)
span.SetTag("user.username", bindDN)
req, span := bind.NewRequest(bindDN, bindPW, conn)

bindDN = strings.ToLower(bindDN)
req := BindRequest{
BindDN: bindDN,
BindPW: bindPW,
conn: conn,
log: ls.log.WithField("bindDN", bindDN).WithField("requestId", rid).WithField("client", utils.GetIP(conn.RemoteAddr())),
id: rid,
ctx: span.Context(),
}
defer func() {
span.Finish()
metrics.Requests.With(prometheus.Labels{
@@ -46,19 +20,19 @@ func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LD
"type": "bind",
"filter": "",
"dn": req.BindDN,
"client": utils.GetIP(req.conn.RemoteAddr()),
"client": req.RemoteAddr(),
}).Observe(float64(span.EndTime.Sub(span.StartTime)))
req.log.WithField("took-ms", span.EndTime.Sub(span.StartTime).Milliseconds()).Info("Bind request")
req.Log().WithField("took-ms", span.EndTime.Sub(span.StartTime).Milliseconds()).Info("Bind request")
}()
for _, instance := range ls.providers {
username, err := instance.getUsername(bindDN)
username, err := instance.binder.GetUsername(bindDN)
if err == nil {
return instance.Bind(username, req)
return instance.binder.Bind(username, req)
} else {
req.log.WithError(err).Debug("Username not for instance")
req.Log().WithError(err).Debug("Username not for instance")
}
}
req.log.WithField("request", "bind").Warning("No provider found for request")
req.Log().WithField("request", "bind").Warning("No provider found for request")
metrics.RequestsRejected.With(prometheus.Labels{
"outpost_name": ls.ac.Outpost.Name,
"type": "bind",
@@ -68,10 +42,3 @@ func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LD
}).Inc()
return ldap.LDAPResultOperationsError, nil
}

func (ls *LDAPServer) TimerFlowCacheExpiry() {
for _, p := range ls.providers {
ls.log.WithField("flow", p.flowSlug).Debug("Pre-heating flow cache")
p.TimerFlowCacheExpiry()
}
}

internal/outpost/ldap/bind/binder.go (new file, 9 lines)
@@ -0,0 +1,9 @@
package bind

import "github.com/nmcclain/ldap"

type Binder interface {
GetUsername(string) (string, error)
Bind(username string, req *Request) (ldap.LDAPResultCode, error)
TimerFlowCacheExpiry()
}

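The Binder interface above is the seam the LDAP server now dispatches through (see instance.binder.GetUsername and instance.binder.Bind in the Bind handler earlier in this diff); the DirectBinder in the next file is its real implementation. Purely to illustrate the contract, a hypothetical reject-everything binder that would compile if dropped into the same bind package; DenyBinder is not part of this change:

package bind

import "github.com/nmcclain/ldap"

// DenyBinder is a hypothetical Binder that rejects every bind request,
// e.g. as a placeholder while a real backend is wired up.
type DenyBinder struct{}

// GetUsername accepts any DN unchanged; a real binder validates the base DN.
func (b *DenyBinder) GetUsername(dn string) (string, error) {
	return dn, nil
}

// Bind always answers with invalid credentials.
func (b *DenyBinder) Bind(username string, req *Request) (ldap.LDAPResultCode, error) {
	req.Log().WithField("username", username).Info("denying bind")
	return ldap.LDAPResultInvalidCredentials, nil
}

// TimerFlowCacheExpiry is a no-op; there is no flow cache to warm up.
func (b *DenyBinder) TimerFlowCacheExpiry() {}

// Compile-time assertion that DenyBinder satisfies Binder.
var _ Binder = &DenyBinder{}
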
@@ -1,4 +1,4 @@
package ldap
package direct

import (
"context"
@@ -12,14 +12,30 @@ import (
log "github.com/sirupsen/logrus"
"goauthentik.io/api"
"goauthentik.io/internal/outpost"
"goauthentik.io/internal/outpost/ldap/bind"
"goauthentik.io/internal/outpost/ldap/flags"
"goauthentik.io/internal/outpost/ldap/metrics"
"goauthentik.io/internal/utils"
"goauthentik.io/internal/outpost/ldap/server"
)

const ContextUserKey = "ak_user"

func (pi *ProviderInstance) getUsername(dn string) (string, error) {
if !strings.HasSuffix(strings.ToLower(dn), strings.ToLower(pi.BaseDN)) {
type DirectBinder struct {
si server.LDAPServerInstance
log *log.Entry
}

func NewDirectBinder(si server.LDAPServerInstance) *DirectBinder {
db := &DirectBinder{
si: si,
log: log.WithField("logger", "authentik.outpost.ldap.binder.direct"),
}
db.log.Info("initialised direct binder")
return db
}

func (db *DirectBinder) GetUsername(dn string) (string, error) {
if !strings.HasSuffix(strings.ToLower(dn), strings.ToLower(db.si.GetBaseDN())) {
return "", errors.New("invalid base DN")
}
dns, err := goldap.ParseDN(dn)
@@ -36,13 +52,13 @@ func (pi *ProviderInstance) getUsername(dn string) (string, error) {
return "", errors.New("failed to find cn")
}

func (pi *ProviderInstance) Bind(username string, req BindRequest) (ldap.LDAPResultCode, error) {
fe := outpost.NewFlowExecutor(req.ctx, pi.flowSlug, pi.s.ac.Client.GetConfig(), log.Fields{
func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResultCode, error) {
fe := outpost.NewFlowExecutor(req.Context(), db.si.GetFlowSlug(), db.si.GetAPIClient().GetConfig(), log.Fields{
"bindDN": req.BindDN,
"client": utils.GetIP(req.conn.RemoteAddr()),
"requestId": req.id,
"client": req.RemoteAddr(),
"requestId": req.ID(),
})
fe.DelegateClientIP(req.conn.RemoteAddr())
fe.DelegateClientIP(req.RemoteAddr())
fe.Params.Add("goauthentik.io/outpost/ldap", "true")

fe.Answers[outpost.StageIdentification] = username
@@ -51,83 +67,82 @@ func (pi *ProviderInstance) Bind(username string, req BindRequest) (ldap.LDAPRes
passed, err := fe.Execute()
if !passed {
metrics.RequestsRejected.With(prometheus.Labels{
"outpost_name": pi.outpostName,
"outpost_name": db.si.GetOutpostName(),
"type": "bind",
"reason": "invalid_credentials",
"dn": req.BindDN,
"client": utils.GetIP(req.conn.RemoteAddr()),
"client": req.RemoteAddr(),
}).Inc()
return ldap.LDAPResultInvalidCredentials, nil
}
if err != nil {
metrics.RequestsRejected.With(prometheus.Labels{
"outpost_name": pi.outpostName,
"outpost_name": db.si.GetOutpostName(),
"type": "bind",
"reason": "flow_error",
"dn": req.BindDN,
"client": utils.GetIP(req.conn.RemoteAddr()),
"client": req.RemoteAddr(),
}).Inc()
req.log.WithError(err).Warning("failed to execute flow")
req.Log().WithError(err).Warning("failed to execute flow")
return ldap.LDAPResultOperationsError, nil
}

access, err := fe.CheckApplicationAccess(pi.appSlug)
access, err := fe.CheckApplicationAccess(db.si.GetAppSlug())
if !access {
req.log.Info("Access denied for user")
req.Log().Info("Access denied for user")
metrics.RequestsRejected.With(prometheus.Labels{
"outpost_name": pi.outpostName,
"outpost_name": db.si.GetOutpostName(),
"type": "bind",
"reason": "access_denied",
"dn": req.BindDN,
"client": utils.GetIP(req.conn.RemoteAddr()),
"client": req.RemoteAddr(),
}).Inc()
return ldap.LDAPResultInsufficientAccessRights, nil
}
if err != nil {
metrics.RequestsRejected.With(prometheus.Labels{
"outpost_name": pi.outpostName,
"outpost_name": db.si.GetOutpostName(),
"type": "bind",
"reason": "access_check_fail",
"dn": req.BindDN,
"client": utils.GetIP(req.conn.RemoteAddr()),
"client": req.RemoteAddr(),
}).Inc()
req.log.WithError(err).Warning("failed to check access")
req.Log().WithError(err).Warning("failed to check access")
return ldap.LDAPResultOperationsError, nil
}
req.log.Info("User has access")
uisp := sentry.StartSpan(req.ctx, "authentik.providers.ldap.bind.user_info")
req.Log().Info("User has access")
uisp := sentry.StartSpan(req.Context(), "authentik.providers.ldap.bind.user_info")
// Get user info to store in context
userInfo, _, err := fe.ApiClient().CoreApi.CoreUsersMeRetrieve(context.Background()).Execute()
if err != nil {
metrics.RequestsRejected.With(prometheus.Labels{
"outpost_name": pi.outpostName,
"outpost_name": db.si.GetOutpostName(),
"type": "bind",
"reason": "user_info_fail",
"dn": req.BindDN,
"client": utils.GetIP(req.conn.RemoteAddr()),
"client": req.RemoteAddr(),
}).Inc()
req.log.WithError(err).Warning("failed to get user info")
req.Log().WithError(err).Warning("failed to get user info")
return ldap.LDAPResultOperationsError, nil
}
pi.boundUsersMutex.Lock()
cs := pi.SearchAccessCheck(userInfo.User)
pi.boundUsers[req.BindDN] = UserFlags{
cs := db.SearchAccessCheck(userInfo.User)
flags := flags.UserFlags{
UserPk: userInfo.User.Pk,
CanSearch: cs != nil,
}
if pi.boundUsers[req.BindDN].CanSearch {
req.log.WithField("group", cs).Info("Allowed access to search")
db.si.SetFlags(req.BindDN, flags)
if flags.CanSearch {
req.Log().WithField("group", cs).Info("Allowed access to search")
}
uisp.Finish()
defer pi.boundUsersMutex.Unlock()
return ldap.LDAPResultSuccess, nil
}

// SearchAccessCheck Check if the current user is allowed to search
func (pi *ProviderInstance) SearchAccessCheck(user api.UserSelf) *string {
func (db *DirectBinder) SearchAccessCheck(user api.UserSelf) *string {
for _, group := range user.Groups {
for _, allowedGroup := range pi.searchAllowedGroups {
pi.log.WithField("userGroup", group.Pk).WithField("allowedGroup", allowedGroup).Trace("Checking search access")
for _, allowedGroup := range db.si.GetSearchAllowedGroups() {
db.log.WithField("userGroup", group.Pk).WithField("allowedGroup", allowedGroup).Trace("Checking search access")
if group.Pk == allowedGroup.String() {
return &group.Name
}
@@ -136,13 +151,13 @@ func (pi *ProviderInstance) SearchAccessCheck(user api.UserSelf) *string {
return nil
}

func (pi *ProviderInstance) TimerFlowCacheExpiry() {
fe := outpost.NewFlowExecutor(context.Background(), pi.flowSlug, pi.s.ac.Client.GetConfig(), log.Fields{})
func (db *DirectBinder) TimerFlowCacheExpiry() {
fe := outpost.NewFlowExecutor(context.Background(), db.si.GetFlowSlug(), db.si.GetAPIClient().GetConfig(), log.Fields{})
fe.Params.Add("goauthentik.io/outpost/ldap", "true")
fe.Params.Add("goauthentik.io/outpost/ldap-warmup", "true")

err := fe.WarmUp()
if err != nil {
pi.log.WithError(err).Warning("failed to warm up flow cache")
db.log.WithError(err).Warning("failed to warm up flow cache")
}
}

internal/outpost/ldap/bind/request.go (new file, 55 lines)
@@ -0,0 +1,55 @@
package bind

import (
"context"
"net"
"strings"

"github.com/getsentry/sentry-go"
"github.com/google/uuid"
log "github.com/sirupsen/logrus"
"goauthentik.io/internal/utils"
)

type Request struct {
BindDN string
BindPW string
id string
conn net.Conn
log *log.Entry
ctx context.Context
}

func NewRequest(bindDN string, bindPW string, conn net.Conn) (*Request, *sentry.Span) {
span := sentry.StartSpan(context.TODO(), "authentik.providers.ldap.bind",
sentry.TransactionName("authentik.providers.ldap.bind"))
rid := uuid.New().String()
span.SetTag("request_uid", rid)
span.SetTag("user.username", bindDN)

bindDN = strings.ToLower(bindDN)
return &Request{
BindDN: bindDN,
BindPW: bindPW,
conn: conn,
log: log.WithField("bindDN", bindDN).WithField("requestId", rid).WithField("client", utils.GetIP(conn.RemoteAddr())),
id: rid,
ctx: span.Context(),
}, span
}

func (r *Request) Context() context.Context {
return r.ctx
}

func (r *Request) Log() *log.Entry {
return r.log
}

func (r *Request) RemoteAddr() string {
return utils.GetIP(r.conn.RemoteAddr())
}

func (r *Request) ID() string {
return r.id
}

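For context, a sketch of how a caller is expected to use the new bind.Request: construct it via NewRequest, finish the returned span in a defer, and go through the accessors instead of the now-unexported fields, as the Bind handler above does. The net.Pipe connection, DN and password are placeholders so the example is self-contained, and it assumes the code lives inside the goauthentik.io module (the bind package is internal):

package main

import (
	"fmt"
	"net"

	"goauthentik.io/internal/outpost/ldap/bind"
)

func main() {
	// net.Pipe stands in for a real LDAP client connection.
	server, client := net.Pipe()
	defer server.Close()
	defer client.Close()

	req, span := bind.NewRequest("cn=akadmin,ou=users,dc=ldap,dc=goauthentik,dc=io", "password", server)
	defer span.Finish()

	// The accessors replace direct field access from other packages.
	req.Log().Info("handling bind")
	fmt.Println(req.ID(), req.RemoteAddr())
}
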
@@ -1,32 +0,0 @@
package ldap

import (
"net"
"time"
)

func (ls *LDAPServer) Close(boundDN string, conn net.Conn) error {
for _, p := range ls.providers {
p.delayDeleteUserInfo(boundDN)
}
return nil
}

func (pi *ProviderInstance) delayDeleteUserInfo(dn string) {
ticker := time.NewTicker(30 * time.Second)
quit := make(chan struct{})
go func() {
for {
select {
case <-ticker.C:
pi.boundUsersMutex.Lock()
delete(pi.boundUsers, dn)
pi.boundUsersMutex.Unlock()
close(quit)
case <-quit:
ticker.Stop()
return
}
}
}()
}

internal/outpost/ldap/constants/constants.go (new file, 21 lines)
@@ -0,0 +1,21 @@
package constants

const (
OCGroup = "group"
OCGroupOfUniqueNames = "groupOfUniqueNames"
OCAKGroup = "goauthentik.io/ldap/group"
OCAKVirtualGroup = "goauthentik.io/ldap/virtual-group"
)

const (
OCUser = "user"
OCOrgPerson = "organizationalPerson"
OCInetOrgPerson = "inetOrgPerson"
OCAKUser = "goauthentik.io/ldap/user"
)

const (
OUUsers = "users"
OUGroups = "groups"
OUVirtualGroups = "virtual-groups"
)

internal/outpost/ldap/entries.go (new file, 33 lines)
@@ -0,0 +1,33 @@
package ldap

import (
"github.com/nmcclain/ldap"
"goauthentik.io/api"
"goauthentik.io/internal/outpost/ldap/constants"
"goauthentik.io/internal/outpost/ldap/utils"
)

func (pi *ProviderInstance) UserEntry(u api.User) *ldap.Entry {
dn := pi.GetUserDN(u.Username)
attrs := utils.AKAttrsToLDAP(u.Attributes)

attrs = utils.EnsureAttributes(attrs, map[string][]string{
"memberOf": pi.GroupsForUser(u),
// Old fields for backwards compatibility
"accountStatus": {utils.BoolToString(*u.IsActive)},
"superuser": {utils.BoolToString(u.IsSuperuser)},
// End old fields
"goauthentik.io/ldap/active": {utils.BoolToString(*u.IsActive)},
"goauthentik.io/ldap/superuser": {utils.BoolToString(u.IsSuperuser)},
"cn": {u.Username},
"sAMAccountName": {u.Username},
"uid": {u.Uid},
"name": {u.Name},
"displayName": {u.Name},
"mail": {*u.Email},
"objectClass": {constants.OCUser, constants.OCOrgPerson, constants.OCInetOrgPerson, constants.OCAKUser},
"uidNumber": {pi.GetUidNumber(u)},
"gidNumber": {pi.GetUidNumber(u)},
})
return &ldap.Entry{DN: dn, Attributes: attrs}
}

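UserEntry above maps an authentik user onto an LDAP entry by merging computed attributes (memberOf, objectClass, the goauthentik.io/ldap/* flags and the legacy fields) into whatever AKAttrsToLDAP produced. Purely for orientation, a sketch of what such an entry looks like when assembled by hand with the nmcclain/ldap types; the DN and attribute values are placeholders, not output of this code:

package main

import (
	"fmt"

	"github.com/nmcclain/ldap"
)

func main() {
	// A hand-built entry with a subset of the attributes UserEntry fills in.
	entry := &ldap.Entry{
		DN: "cn=akadmin,ou=users,dc=ldap,dc=goauthentik,dc=io",
		Attributes: []*ldap.EntryAttribute{
			{Name: "cn", Values: []string{"akadmin"}},
			{Name: "objectClass", Values: []string{"user", "organizationalPerson", "inetOrgPerson", "goauthentik.io/ldap/user"}},
			{Name: "goauthentik.io/ldap/active", Values: []string{"true"}},
			{Name: "goauthentik.io/ldap/superuser", Values: []string{"true"}},
		},
	}
	fmt.Println(entry.DN, len(entry.Attributes))
}
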
Some files were not shown because too many files have changed in this diff.