Compare commits
77 Commits
version/0.
...
version/0.
| Author | SHA1 | Date | |
|---|---|---|---|
| 8a105cf5a0 | |||
| 9e384df79e | |||
| c0bfd32d39 | |||
| 7be680cbe5 | |||
| 93bf8eaa82 | |||
| 1248585dca | |||
| 1319c480c4 | |||
| 1911e8e3a9 | |||
| 4198c5363f | |||
| 207aae15a8 | |||
| 50531b8a36 | |||
| e5e4824920 | |||
| 085247e2dc | |||
| f766594ab0 | |||
| d1e469e282 | |||
| 79e4500827 | |||
| 42702fa96a | |||
| 9deb3ad80f | |||
| 9877ef99c4 | |||
| c304b40e1b | |||
| f0e6d6f417 | |||
| 54de5c981e | |||
| a446775fe2 | |||
| 7393d8720b | |||
| 287cb72d6f | |||
| c5eff4bdd6 | |||
| e9a33ed8ab | |||
| 875173a86e | |||
| df7642b365 | |||
| 3bc1c0aa8b | |||
| 8951f5695e | |||
| 7401278707 | |||
| e99f6e289b | |||
| 07da6ffa69 | |||
| dc18730094 | |||
| a202679bfb | |||
| 1edcda58ba | |||
| 5cb7f0794e | |||
| 7e8e3893eb | |||
| e91e286ebc | |||
| ef4a115b61 | |||
| b79b73f5c6 | |||
| 056e3ed15b | |||
| fb5e210af8 | |||
| e5e2615f15 | |||
| 6c72a9e2e8 | |||
| c04d0a373a | |||
| bd74e518a7 | |||
| 3b76af4eaa | |||
| 706448dc14 | |||
| 34793f7cef | |||
| ba96c9526e | |||
| 617432deaa | |||
| 36bf2be16d | |||
| 912ed343e6 | |||
| 2e15df295a | |||
| eaab3f62cb | |||
| aa615b0fd6 | |||
| b775f2788c | |||
| 9c28db3d89 | |||
| 67360bd6e9 | |||
| 4f6f8c7cae | |||
| 3b82ad798b | |||
| 8827f06ac1 | |||
| 251672a67d | |||
| 4ffc0e2a08 | |||
| 4e1808632d | |||
| 791627d3ce | |||
| f3df3a0157 | |||
| 6aaae53a19 | |||
| 4d84f6d598 | |||
| 4e2349b6d9 | |||
| cd57b8f7f3 | |||
| 40b1fc06b0 | |||
| 02fa217e28 | |||
| 6652514358 | |||
| dcd3dc9744 |
@ -1,10 +1,10 @@
|
|||||||
[bumpversion]
|
[bumpversion]
|
||||||
current_version = 0.12.7-stable
|
current_version = 0.12.11-stable
|
||||||
tag = True
|
tag = True
|
||||||
commit = True
|
commit = True
|
||||||
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)
|
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)
|
||||||
serialize = {major}.{minor}.{patch}-{release}
|
serialize = {major}.{minor}.{patch}-{release}
|
||||||
message = new release: {new_version}
|
message = release: {new_version}
|
||||||
tag_name = version/{new_version}
|
tag_name = version/{new_version}
|
||||||
|
|
||||||
[bumpversion:part:release]
|
[bumpversion:part:release]
|
||||||
@ -15,9 +15,9 @@ values =
|
|||||||
beta
|
beta
|
||||||
stable
|
stable
|
||||||
|
|
||||||
[bumpversion:file:docs/installation/docker-compose.md]
|
[bumpversion:file:website/docs/installation/docker-compose.md]
|
||||||
|
|
||||||
[bumpversion:file:docs/installation/kubernetes.md]
|
[bumpversion:file:website/docs/installation/kubernetes.md]
|
||||||
|
|
||||||
[bumpversion:file:docker-compose.yml]
|
[bumpversion:file:docker-compose.yml]
|
||||||
|
|
||||||
|
|||||||
@ -6,7 +6,7 @@ omit =
|
|||||||
manage.py
|
manage.py
|
||||||
*/migrations/*
|
*/migrations/*
|
||||||
*/apps.py
|
*/apps.py
|
||||||
docs/
|
website/
|
||||||
|
|
||||||
[report]
|
[report]
|
||||||
sort = Cover
|
sort = Cover
|
||||||
|
|||||||
@ -3,4 +3,4 @@ helm
|
|||||||
passbook-ui
|
passbook-ui
|
||||||
static
|
static
|
||||||
*.env.yml
|
*.env.yml
|
||||||
node_modules/
|
**/node_modules
|
||||||
|
|||||||
14
.github/workflows/release.yml
vendored
14
.github/workflows/release.yml
vendored
@ -18,11 +18,11 @@ jobs:
|
|||||||
- name: Building Docker Image
|
- name: Building Docker Image
|
||||||
run: docker build
|
run: docker build
|
||||||
--no-cache
|
--no-cache
|
||||||
-t beryju/passbook:0.12.7-stable
|
-t beryju/passbook:0.12.11-stable
|
||||||
-t beryju/passbook:latest
|
-t beryju/passbook:latest
|
||||||
-f Dockerfile .
|
-f Dockerfile .
|
||||||
- name: Push Docker Container to Registry (versioned)
|
- name: Push Docker Container to Registry (versioned)
|
||||||
run: docker push beryju/passbook:0.12.7-stable
|
run: docker push beryju/passbook:0.12.11-stable
|
||||||
- name: Push Docker Container to Registry (latest)
|
- name: Push Docker Container to Registry (latest)
|
||||||
run: docker push beryju/passbook:latest
|
run: docker push beryju/passbook:latest
|
||||||
build-proxy:
|
build-proxy:
|
||||||
@ -48,11 +48,11 @@ jobs:
|
|||||||
cd proxy
|
cd proxy
|
||||||
docker build \
|
docker build \
|
||||||
--no-cache \
|
--no-cache \
|
||||||
-t beryju/passbook-proxy:0.12.7-stable \
|
-t beryju/passbook-proxy:0.12.11-stable \
|
||||||
-t beryju/passbook-proxy:latest \
|
-t beryju/passbook-proxy:latest \
|
||||||
-f Dockerfile .
|
-f Dockerfile .
|
||||||
- name: Push Docker Container to Registry (versioned)
|
- name: Push Docker Container to Registry (versioned)
|
||||||
run: docker push beryju/passbook-proxy:0.12.7-stable
|
run: docker push beryju/passbook-proxy:0.12.11-stable
|
||||||
- name: Push Docker Container to Registry (latest)
|
- name: Push Docker Container to Registry (latest)
|
||||||
run: docker push beryju/passbook-proxy:latest
|
run: docker push beryju/passbook-proxy:latest
|
||||||
build-static:
|
build-static:
|
||||||
@ -77,11 +77,11 @@ jobs:
|
|||||||
run: docker build
|
run: docker build
|
||||||
--no-cache
|
--no-cache
|
||||||
--network=$(docker network ls | grep github | awk '{print $1}')
|
--network=$(docker network ls | grep github | awk '{print $1}')
|
||||||
-t beryju/passbook-static:0.12.7-stable
|
-t beryju/passbook-static:0.12.11-stable
|
||||||
-t beryju/passbook-static:latest
|
-t beryju/passbook-static:latest
|
||||||
-f static.Dockerfile .
|
-f static.Dockerfile .
|
||||||
- name: Push Docker Container to Registry (versioned)
|
- name: Push Docker Container to Registry (versioned)
|
||||||
run: docker push beryju/passbook-static:0.12.7-stable
|
run: docker push beryju/passbook-static:0.12.11-stable
|
||||||
- name: Push Docker Container to Registry (latest)
|
- name: Push Docker Container to Registry (latest)
|
||||||
run: docker push beryju/passbook-static:latest
|
run: docker push beryju/passbook-static:latest
|
||||||
test-release:
|
test-release:
|
||||||
@ -114,5 +114,5 @@ jobs:
|
|||||||
SENTRY_PROJECT: passbook
|
SENTRY_PROJECT: passbook
|
||||||
SENTRY_URL: https://sentry.beryju.org
|
SENTRY_URL: https://sentry.beryju.org
|
||||||
with:
|
with:
|
||||||
tagName: 0.12.7-stable
|
tagName: 0.12.11-stable
|
||||||
environment: beryjuorg-prod
|
environment: beryjuorg-prod
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@ -199,3 +199,4 @@ local.env.yml
|
|||||||
|
|
||||||
# Selenium Screenshots
|
# Selenium Screenshots
|
||||||
selenium_screenshots/**
|
selenium_screenshots/**
|
||||||
|
backups/
|
||||||
|
|||||||
@ -9,3 +9,4 @@ ignore-paths:
|
|||||||
|
|
||||||
uses:
|
uses:
|
||||||
- django
|
- django
|
||||||
|
- celery
|
||||||
|
|||||||
21
.pylintrc
21
.pylintrc
@ -1,16 +1,29 @@
|
|||||||
[MASTER]
|
[MASTER]
|
||||||
|
|
||||||
disable=arguments-differ,no-self-use,fixme,locally-disabled,too-many-ancestors,too-few-public-methods,import-outside-toplevel,bad-continuation,signature-differs,similarities,cyclic-import
|
disable =
|
||||||
|
arguments-differ,
|
||||||
|
no-self-use,
|
||||||
|
fixme,
|
||||||
|
locally-disabled,
|
||||||
|
too-many-ancestors,
|
||||||
|
too-few-public-methods,
|
||||||
|
import-outside-toplevel,
|
||||||
|
bad-continuation,
|
||||||
|
signature-differs,
|
||||||
|
similarities,
|
||||||
|
cyclic-import,
|
||||||
|
protected-access,
|
||||||
|
unsubscriptable-object # remove when pylint is upgraded to 2.6
|
||||||
|
|
||||||
load-plugins=pylint_django,pylint.extensions.bad_builtin
|
load-plugins=pylint_django,pylint.extensions.bad_builtin
|
||||||
|
|
||||||
extension-pkg-whitelist=lxml
|
extension-pkg-whitelist=lxml,xmlsec
|
||||||
|
|
||||||
# Allow constants to be shorter than normal (and lowercase, for settings.py)
|
# Allow constants to be shorter than normal (and lowercase, for settings.py)
|
||||||
const-rgx=[a-zA-Z0-9_]{1,40}$
|
const-rgx=[a-zA-Z0-9_]{1,40}$
|
||||||
|
|
||||||
ignored-modules=django-otp
|
ignored-modules=django-otp
|
||||||
|
generated-members=xmlsec.constants.*,xmlsec.tree.*,xmlsec.template.*
|
||||||
ignore=migrations
|
ignore=migrations
|
||||||
max-attributes=12
|
max-attributes=12
|
||||||
|
max-branches=20
|
||||||
jobs=12
|
|
||||||
|
|||||||
@ -1,4 +1,4 @@
|
|||||||
FROM python:3.8-slim-buster as locker
|
FROM python:3.9-slim-buster as locker
|
||||||
|
|
||||||
COPY ./Pipfile /app/
|
COPY ./Pipfile /app/
|
||||||
COPY ./Pipfile.lock /app/
|
COPY ./Pipfile.lock /app/
|
||||||
@ -9,7 +9,7 @@ RUN pip install pipenv && \
|
|||||||
pipenv lock -r > requirements.txt && \
|
pipenv lock -r > requirements.txt && \
|
||||||
pipenv lock -rd > requirements-dev.txt
|
pipenv lock -rd > requirements-dev.txt
|
||||||
|
|
||||||
FROM python:3.8-slim-buster
|
FROM python:3.9-slim-buster
|
||||||
|
|
||||||
WORKDIR /
|
WORKDIR /
|
||||||
COPY --from=locker /app/requirements.txt /
|
COPY --from=locker /app/requirements.txt /
|
||||||
@ -20,7 +20,7 @@ RUN apt-get update && \
|
|||||||
curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
|
curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
|
||||||
echo "deb http://apt.postgresql.org/pub/repos/apt buster-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
|
echo "deb http://apt.postgresql.org/pub/repos/apt buster-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
|
||||||
apt-get update && \
|
apt-get update && \
|
||||||
apt-get install -y --no-install-recommends postgresql-client-12 postgresql-client-11 build-essential && \
|
apt-get install -y --no-install-recommends postgresql-client-12 postgresql-client-11 build-essential libxmlsec1-dev pkg-config && \
|
||||||
apt-get clean && \
|
apt-get clean && \
|
||||||
pip install -r /requirements.txt --no-cache-dir && \
|
pip install -r /requirements.txt --no-cache-dir && \
|
||||||
apt-get remove --purge -y build-essential && \
|
apt-get remove --purge -y build-essential && \
|
||||||
@ -41,5 +41,6 @@ COPY ./manage.py /
|
|||||||
COPY ./lifecycle/ /lifecycle
|
COPY ./lifecycle/ /lifecycle
|
||||||
|
|
||||||
USER passbook
|
USER passbook
|
||||||
|
STOPSIGNAL SIGINT
|
||||||
|
|
||||||
ENTRYPOINT [ "/lifecycle/bootstrap.sh" ]
|
ENTRYPOINT [ "/lifecycle/bootstrap.sh" ]
|
||||||
|
|||||||
4
Pipfile
4
Pipfile
@ -35,7 +35,6 @@ qrcode = "*"
|
|||||||
requests-oauthlib = "*"
|
requests-oauthlib = "*"
|
||||||
sentry-sdk = "*"
|
sentry-sdk = "*"
|
||||||
service_identity = "*"
|
service_identity = "*"
|
||||||
signxml = "*"
|
|
||||||
structlog = "*"
|
structlog = "*"
|
||||||
swagger-spec-validator = "*"
|
swagger-spec-validator = "*"
|
||||||
urllib3 = {extras = ["secure"],version = "*"}
|
urllib3 = {extras = ["secure"],version = "*"}
|
||||||
@ -44,9 +43,10 @@ channels = "*"
|
|||||||
channels-redis = "*"
|
channels-redis = "*"
|
||||||
kubernetes = "*"
|
kubernetes = "*"
|
||||||
docker = "*"
|
docker = "*"
|
||||||
|
xmlsec = "*"
|
||||||
|
|
||||||
[requires]
|
[requires]
|
||||||
python_version = "3.8"
|
python_version = "3.9"
|
||||||
|
|
||||||
[dev-packages]
|
[dev-packages]
|
||||||
autopep8 = "*"
|
autopep8 = "*"
|
||||||
|
|||||||
754
Pipfile.lock
generated
754
Pipfile.lock
generated
File diff suppressed because it is too large
Load Diff
12
README.md
12
README.md
@ -1,4 +1,4 @@
|
|||||||
<img src="docs/images/logo.svg" height="50" alt="passbook logo"><img src="docs/images/brand_inverted.svg" height="50" alt="passbook">
|
<img src="website/static/img/logo.svg" height="50" alt="passbook logo"><img src="website/static/img/brand_inverted.svg" height="50" alt="passbook">
|
||||||
|
|
||||||
[](https://dev.azure.com/beryjuorg/passbook/_build?definitionId=1)
|
[](https://dev.azure.com/beryjuorg/passbook/_build?definitionId=1)
|
||||||

|

|
||||||
@ -13,18 +13,18 @@ passbook is an open-source Identity Provider focused on flexibility and versatil
|
|||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
For small/test setups it is recommended to use docker-compose, see the [documentation](https://passbook.beryju.org/installation/docker-compose/)
|
For small/test setups it is recommended to use docker-compose, see the [documentation](https://passbook.beryju.org/website/docs/installation/docker-compose/)
|
||||||
|
|
||||||
For bigger setups, there is a Helm Chart in the `helm/` directory. This is documented [here](https://passbook.beryju.org//installation/kubernetes/)
|
For bigger setups, there is a Helm Chart in the `helm/` directory. This is documented [here](https://passbook.beryju.org/website/docs/installation/kubernetes/)
|
||||||
|
|
||||||
## Screenshots
|
## Screenshots
|
||||||
|
|
||||||

|

|
||||||

|

|
||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
||||||
To develop on passbook, you need a system with Python 3.7+ (3.8 is recommended). passbook uses [pipenv](https://pipenv.pypa.io/en/latest/) for managing dependencies.
|
To develop on passbook, you need a system with Python 3.8+ (3.9 is recommended). passbook uses [pipenv](https://pipenv.pypa.io/en/latest/) for managing dependencies.
|
||||||
|
|
||||||
To get started, run
|
To get started, run
|
||||||
|
|
||||||
|
|||||||
@ -22,10 +22,11 @@ stages:
|
|||||||
steps:
|
steps:
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: '3.8'
|
versionSpec: '3.9'
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
inputs:
|
inputs:
|
||||||
script: |
|
script: |
|
||||||
|
sudo apt install -y libxmlsec1-dev pkg-config
|
||||||
sudo pip install -U wheel pipenv
|
sudo pip install -U wheel pipenv
|
||||||
pipenv install --dev
|
pipenv install --dev
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
@ -37,10 +38,11 @@ stages:
|
|||||||
steps:
|
steps:
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: '3.8'
|
versionSpec: '3.9'
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
inputs:
|
inputs:
|
||||||
script: |
|
script: |
|
||||||
|
sudo apt install -y libxmlsec1-dev pkg-config
|
||||||
sudo pip install -U wheel pipenv
|
sudo pip install -U wheel pipenv
|
||||||
pipenv install --dev
|
pipenv install --dev
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
@ -52,10 +54,11 @@ stages:
|
|||||||
steps:
|
steps:
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: '3.8'
|
versionSpec: '3.9'
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
inputs:
|
inputs:
|
||||||
script: |
|
script: |
|
||||||
|
sudo apt install -y libxmlsec1-dev pkg-config
|
||||||
sudo pip install -U wheel pipenv
|
sudo pip install -U wheel pipenv
|
||||||
pipenv install --dev
|
pipenv install --dev
|
||||||
pipenv install --dev prospector --skip-lock
|
pipenv install --dev prospector --skip-lock
|
||||||
@ -68,10 +71,11 @@ stages:
|
|||||||
steps:
|
steps:
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: '3.8'
|
versionSpec: '3.9'
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
inputs:
|
inputs:
|
||||||
script: |
|
script: |
|
||||||
|
sudo apt install -y libxmlsec1-dev pkg-config
|
||||||
sudo pip install -U wheel pipenv
|
sudo pip install -U wheel pipenv
|
||||||
pipenv install --dev
|
pipenv install --dev
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
@ -86,13 +90,14 @@ stages:
|
|||||||
version: '12.x'
|
version: '12.x'
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: '3.8'
|
versionSpec: '3.9'
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
inputs:
|
inputs:
|
||||||
script: npm install -g pyright@1.1.79
|
script: npm install -g pyright@1.1.79
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
inputs:
|
inputs:
|
||||||
script: |
|
script: |
|
||||||
|
sudo apt install -y libxmlsec1-dev pkg-config
|
||||||
sudo pip install -U wheel pipenv
|
sudo pip install -U wheel pipenv
|
||||||
pipenv install --dev
|
pipenv install --dev
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
@ -106,7 +111,7 @@ stages:
|
|||||||
steps:
|
steps:
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: '3.8'
|
versionSpec: '3.9'
|
||||||
- task: DockerCompose@0
|
- task: DockerCompose@0
|
||||||
displayName: Run services
|
displayName: Run services
|
||||||
inputs:
|
inputs:
|
||||||
@ -116,6 +121,7 @@ stages:
|
|||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
inputs:
|
inputs:
|
||||||
script: |
|
script: |
|
||||||
|
sudo apt install -y libxmlsec1-dev pkg-config
|
||||||
sudo pip install -U wheel pipenv
|
sudo pip install -U wheel pipenv
|
||||||
pipenv install --dev
|
pipenv install --dev
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
@ -128,6 +134,9 @@ stages:
|
|||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: '3.8'
|
versionSpec: '3.8'
|
||||||
|
- task: UsePythonVersion@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: '3.9'
|
||||||
- task: DockerCompose@0
|
- task: DockerCompose@0
|
||||||
displayName: Run services
|
displayName: Run services
|
||||||
inputs:
|
inputs:
|
||||||
@ -139,6 +148,7 @@ stages:
|
|||||||
inputs:
|
inputs:
|
||||||
script: |
|
script: |
|
||||||
git checkout $(git describe --abbrev=0 --match 'version/*')
|
git checkout $(git describe --abbrev=0 --match 'version/*')
|
||||||
|
sudo apt install -y libxmlsec1-dev pkg-config
|
||||||
sudo pip install -U wheel pipenv
|
sudo pip install -U wheel pipenv
|
||||||
pipenv install --dev
|
pipenv install --dev
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
@ -162,7 +172,7 @@ stages:
|
|||||||
steps:
|
steps:
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: '3.8'
|
versionSpec: '3.9'
|
||||||
- task: DockerCompose@0
|
- task: DockerCompose@0
|
||||||
displayName: Run services
|
displayName: Run services
|
||||||
inputs:
|
inputs:
|
||||||
@ -179,6 +189,7 @@ stages:
|
|||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
inputs:
|
inputs:
|
||||||
script: |
|
script: |
|
||||||
|
sudo apt install -y libxmlsec1-dev pkg-config
|
||||||
sudo pip install -U wheel pipenv
|
sudo pip install -U wheel pipenv
|
||||||
pipenv install --dev
|
pipenv install --dev
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
@ -204,7 +215,7 @@ stages:
|
|||||||
steps:
|
steps:
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: '3.8'
|
versionSpec: '3.9'
|
||||||
- task: DockerCompose@0
|
- task: DockerCompose@0
|
||||||
displayName: Run services
|
displayName: Run services
|
||||||
inputs:
|
inputs:
|
||||||
@ -221,6 +232,7 @@ stages:
|
|||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
inputs:
|
inputs:
|
||||||
script: |
|
script: |
|
||||||
|
sudo apt install -y libxmlsec1-dev pkg-config
|
||||||
sudo pip install -U wheel pipenv
|
sudo pip install -U wheel pipenv
|
||||||
pipenv install --dev
|
pipenv install --dev
|
||||||
- task: DockerCompose@0
|
- task: DockerCompose@0
|
||||||
@ -286,10 +298,11 @@ stages:
|
|||||||
path: "coverage-unittest/"
|
path: "coverage-unittest/"
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: '3.8'
|
versionSpec: '3.9'
|
||||||
- task: CmdLine@2
|
- task: CmdLine@2
|
||||||
inputs:
|
inputs:
|
||||||
script: |
|
script: |
|
||||||
|
sudo apt install -y libxmlsec1-dev pkg-config
|
||||||
sudo pip install -U wheel pipenv
|
sudo pip install -U wheel pipenv
|
||||||
pipenv install --dev
|
pipenv install --dev
|
||||||
pipenv run coverage combine coverage-e2e/coverage coverage-unittest/coverage
|
pipenv run coverage combine coverage-e2e/coverage coverage-unittest/coverage
|
||||||
|
|||||||
@ -19,7 +19,7 @@ services:
|
|||||||
networks:
|
networks:
|
||||||
- internal
|
- internal
|
||||||
server:
|
server:
|
||||||
image: beryju/passbook:${PASSBOOK_TAG:-0.12.7-stable}
|
image: beryju/passbook:${PASSBOOK_TAG:-0.12.11-stable}
|
||||||
command: server
|
command: server
|
||||||
environment:
|
environment:
|
||||||
PASSBOOK_REDIS__HOST: redis
|
PASSBOOK_REDIS__HOST: redis
|
||||||
@ -40,7 +40,7 @@ services:
|
|||||||
env_file:
|
env_file:
|
||||||
- .env
|
- .env
|
||||||
worker:
|
worker:
|
||||||
image: beryju/passbook:${PASSBOOK_TAG:-0.12.7-stable}
|
image: beryju/passbook:${PASSBOOK_TAG:-0.12.11-stable}
|
||||||
command: worker
|
command: worker
|
||||||
networks:
|
networks:
|
||||||
- internal
|
- internal
|
||||||
@ -54,7 +54,7 @@ services:
|
|||||||
env_file:
|
env_file:
|
||||||
- .env
|
- .env
|
||||||
static:
|
static:
|
||||||
image: beryju/passbook-static:${PASSBOOK_TAG:-0.12.7-stable}
|
image: beryju/passbook-static:${PASSBOOK_TAG:-0.12.11-stable}
|
||||||
networks:
|
networks:
|
||||||
- internal
|
- internal
|
||||||
labels:
|
labels:
|
||||||
|
|||||||
@ -1,26 +0,0 @@
|
|||||||
# Passbook User Object
|
|
||||||
|
|
||||||
The User object has the following attributes:
|
|
||||||
|
|
||||||
- `username`: User's username.
|
|
||||||
- `email` User's email.
|
|
||||||
- `name` User's display name.
|
|
||||||
- `is_staff` Boolean field if user is staff.
|
|
||||||
- `is_active` Boolean field if user is active.
|
|
||||||
- `date_joined` Date user joined/was created.
|
|
||||||
- `password_change_date` Date password was last changed.
|
|
||||||
- `attributes` Dynamic attributes.
|
|
||||||
- `pb_groups` This is a queryset of all the user's groups.
|
|
||||||
|
|
||||||
You can do additional filtering like `user.pb_groups.filter(name__startswith='test')`, see [here](https://docs.djangoproject.com/en/3.1/ref/models/querysets/#id4)
|
|
||||||
|
|
||||||
To get the name of all groups, you can do `[group.name for group in user.pb_groups.all()]`
|
|
||||||
|
|
||||||
## Examples
|
|
||||||
|
|
||||||
List all the User's group names:
|
|
||||||
|
|
||||||
```python
|
|
||||||
for group in user.pb_groups.all():
|
|
||||||
yield group.name
|
|
||||||
```
|
|
||||||
@ -1,7 +0,0 @@
|
|||||||
# OTP Stage
|
|
||||||
|
|
||||||
This stage offers a generic Time-based One-time Password authentication step.
|
|
||||||
|
|
||||||
You can optionally enforce this step, which will force every user without OTP setup to configure it.
|
|
||||||
|
|
||||||
This stage uses a 6-digit Code with a 30 second time-drift. This is currently not changeable.
|
|
||||||
@ -1,8 +0,0 @@
|
|||||||
# User Delete Stage
|
|
||||||
|
|
||||||
!!! danger
|
|
||||||
This stage deletes the `pending_user` without any confirmation. You have to make sure the user is aware of this.
|
|
||||||
|
|
||||||
This stage is intended for an unenrollment flow. It deletes the currently pending user.
|
|
||||||
|
|
||||||
The pending user is also removed from the current session.
|
|
||||||
@ -1,73 +0,0 @@
|
|||||||
# Kubernetes
|
|
||||||
|
|
||||||
For a mid to high-load installation, Kubernetes is recommended. passbook is installed using a helm-chart.
|
|
||||||
|
|
||||||
This installation automatically applies database migrations on startup. After the installation is done, you can use `pbadmin` as username and password.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
###################################
|
|
||||||
# Values directly affecting passbook
|
|
||||||
###################################
|
|
||||||
image:
|
|
||||||
name: beryju/passbook
|
|
||||||
name_static: beryju/passbook-static
|
|
||||||
tag: 0.12.7-stable
|
|
||||||
|
|
||||||
serverReplicas: 1
|
|
||||||
workerReplicas: 1
|
|
||||||
|
|
||||||
# Enable the Kubernetes integration which lets passbook deploy outposts into kubernetes
|
|
||||||
kubernetesIntegration: true
|
|
||||||
|
|
||||||
config:
|
|
||||||
# Optionally specify fixed secret_key, otherwise generated automatically
|
|
||||||
# secretKey: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
|
|
||||||
# Enable error reporting
|
|
||||||
errorReporting:
|
|
||||||
enabled: false
|
|
||||||
environment: customer
|
|
||||||
sendPii: false
|
|
||||||
# Log level used by web and worker
|
|
||||||
# Can be either debug, info, warning, error
|
|
||||||
logLevel: warning
|
|
||||||
|
|
||||||
# Enable Database Backups to S3
|
|
||||||
# backup:
|
|
||||||
# accessKey: access-key
|
|
||||||
# secretKey: secret-key
|
|
||||||
# bucket: s3-bucket
|
|
||||||
# region: eu-central-1
|
|
||||||
# host: s3-host
|
|
||||||
|
|
||||||
ingress:
|
|
||||||
annotations: {}
|
|
||||||
# kubernetes.io/ingress.class: nginx
|
|
||||||
# kubernetes.io/tls-acme: "true"
|
|
||||||
hosts:
|
|
||||||
- passbook.k8s.local
|
|
||||||
tls: []
|
|
||||||
# - secretName: chart-example-tls
|
|
||||||
# hosts:
|
|
||||||
# - passbook.k8s.local
|
|
||||||
|
|
||||||
###################################
|
|
||||||
# Values controlling dependencies
|
|
||||||
###################################
|
|
||||||
|
|
||||||
install:
|
|
||||||
postgresql: true
|
|
||||||
redis: true
|
|
||||||
|
|
||||||
# These values influence the bundled postgresql and redis charts, but are also used by passbook to connect
|
|
||||||
postgresql:
|
|
||||||
postgresqlDatabase: passbook
|
|
||||||
|
|
||||||
redis:
|
|
||||||
cluster:
|
|
||||||
enabled: false
|
|
||||||
master:
|
|
||||||
persistence:
|
|
||||||
enabled: false
|
|
||||||
# https://stackoverflow.com/a/59189742
|
|
||||||
disableCommands: []
|
|
||||||
```
|
|
||||||
@ -1,75 +0,0 @@
|
|||||||
# Ansible Tower / AWX Integration
|
|
||||||
|
|
||||||
## What is Tower
|
|
||||||
|
|
||||||
From https://docs.ansible.com/ansible/2.5/reference_appendices/tower.html
|
|
||||||
|
|
||||||
!!! note ""
|
|
||||||
Ansible Tower (formerly ‘AWX’) is a web-based solution that makes Ansible even more easy to use for IT teams of all kinds. It’s designed to be the hub for all of your automation tasks.
|
|
||||||
|
|
||||||
Tower allows you to control access to who can access what, even allowing sharing of SSH credentials without someone being able to transfer those credentials. Inventory can be graphically managed or synced with a wide variety of cloud sources. It logs all of your jobs, integrates well with LDAP, and has an amazing browsable REST API. Command line tools are available for easy integration with Jenkins as well. Provisioning callbacks provide great support for autoscaling topologies.
|
|
||||||
|
|
||||||
!!! note
|
|
||||||
AWX is the open-source version of Tower. The term "AWX" will be used interchangeably throughout this document.
|
|
||||||
|
|
||||||
## Preparation
|
|
||||||
|
|
||||||
The following placeholders will be used:
|
|
||||||
|
|
||||||
- `awx.company` is the FQDN of the AWX/Tower install.
|
|
||||||
- `passbook.company` is the FQDN of the passbook install.
|
|
||||||
|
|
||||||
Create an application in passbook and note the slug, as this will be used later. Create a SAML provider with the following parameters:
|
|
||||||
|
|
||||||
- ACS URL: `https://awx.company/sso/complete/saml/`
|
|
||||||
- Audience: `awx`
|
|
||||||
- Service Provider Binding: Post
|
|
||||||
- Issuer: `https://awx.company/sso/metadata/saml/`
|
|
||||||
|
|
||||||
You can of course use a custom signing certificate, and adjust durations.
|
|
||||||
|
|
||||||
## AWX Configuration
|
|
||||||
|
|
||||||
Navigate to `https://awx.company/#/settings/auth` to configure SAML. Set the Field `SAML SERVICE PROVIDER ENTITY ID` to `awx`.
|
|
||||||
|
|
||||||
For the fields `SAML SERVICE PROVIDER PUBLIC CERTIFICATE` and `SAML SERVICE PROVIDER PRIVATE KEY`, you can either use custom certificates, or use the self-signed pair generated by passbook.
|
|
||||||
|
|
||||||
Provide metadata in the `SAML Service Provider Organization Info` field:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"en-US": {
|
|
||||||
"name": "passbook",
|
|
||||||
"url": "https://passbook.company",
|
|
||||||
"displayname": "passbook"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Provide metadata in the `SAML Service Provider Technical Contact` and `SAML Service Provider Technical Contact` fields:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"givenName": "Admin Name",
|
|
||||||
"emailAddress": "admin@company"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
In the `SAML Enabled Identity Providers` paste the following configuration:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"passbook": {
|
|
||||||
"attr_username": "urn:oid:2.16.840.1.113730.3.1.241",
|
|
||||||
"attr_user_permanent_id": "urn:oid:0.9.2342.19200300.100.1.1",
|
|
||||||
"x509cert": "MIIDEjCCAfqgAwIBAgIRAJZ9pOZ1g0xjiHtQAAejsMEwDQYJKoZIhvcNAQELBQAwMDEuMCwGA1UEAwwlcGFzc2Jvb2sgU2VsZi1zaWduZWQgU0FNTCBDZXJ0aWZpY2F0ZTAeFw0xOTEyMjYyMDEwNDFaFw0yMDEyMjYyMDEwNDFaMFkxLjAsBgNVBAMMJXBhc3Nib29rIFNlbGYtc2lnbmVkIFNBTUwgQ2VydGlmaWNhdGUxETAPBgNVBAoMCHBhc3Nib29rMRQwEgYDVQQLDAtTZWxmLXNpZ25lZDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAO/ktBYZkY9xAijF4acvzX6Q1K8KoIZeyde8fVgcWBz4L5FgDQ4/dni4k2YAcPdwteGL4nKVzetUzjbRCBUNuO6lqU4J4WNNX4Xg4Ir7XLRoAQeo+omTPBdpJ1p02HjtN5jT01umN3bK2yto1e37CJhK6WJiaXqRewPxh4lI4aqdj3BhFkJ3I3r2qxaWOAXQ6X7fg3w/ny7QP53//ouZo7hSLY3GIcRKgvdjjVM3OW5C3WLpOq5Dez5GWVJ17aeFCfGQ8bwFKde6qfYqyGcU9xHB36TtVHB9hSFP/tUFhkiSOxtsrYwCgCyXm4UTSpP+wiNyjKfFw7qGLBvA2hGTNw8CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAh9PeAqPRQk1/SSygIFADZBi08O/DPCshFwEHvJATIcTzcDD8UGAjXh+H5OlkDyX7KyrcaNvYaafCUo63A+WprdtdY5Ty6SBEwTYyiQyQfwM9BfK+imCoif1Ai7xAelD7p9lNazWq7JU+H/Ep7U7Q7LvpxAbK0JArt+IWTb2NcMb3OWE1r0gFbs44O1l6W9UbJTbyLMzbGbe5i+NHlgnwPwuhtRMh0NUYabGHKcHbhwyFhfGAQv2dAp5KF1E5gu6ZzCiFePzc0FrqXQyb2zpFYcJHXquiqaOeG7cZxRHYcjrl10Vxzki64XVA9BpdELgKSnupDGUEJsRUt3WVOmvZuA==",
|
|
||||||
"url": "https://passbook.company/application/saml/awx/login/",
|
|
||||||
"attr_last_name": "User.LastName",
|
|
||||||
"entity_id": "https://awx.company/sso/metadata/saml/",
|
|
||||||
"attr_email": "urn:oid:0.9.2342.19200300.100.1.3",
|
|
||||||
"attr_first_name": "urn:oid:2.5.4.3"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
`x509cert` is the certificate configured in passbook. Remove the `--BEGIN CERTIFICATE--` and `--END CERTIFICATE--` headers, then enter the cert as one non-breaking string.
|
|
||||||
@ -1,83 +0,0 @@
|
|||||||
# VMware vCenter Integration
|
|
||||||
|
|
||||||
## What is vCenter
|
|
||||||
|
|
||||||
From https://en.wikipedia.org/wiki/VCenter
|
|
||||||
|
|
||||||
!!! note ""
|
|
||||||
|
|
||||||
vCenter Server is the centralized management utility for VMware, and is used to manage virtual machines, multiple ESXi hosts, and all dependent components from a single centralized location. VMware vMotion and svMotion require the use of vCenter and ESXi hosts.
|
|
||||||
|
|
||||||
!!! warning
|
|
||||||
|
|
||||||
This requires passbook 0.10.3 or newer.
|
|
||||||
|
|
||||||
!!! warning
|
|
||||||
|
|
||||||
This requires VMware vCenter 7.0.0 or newer.
|
|
||||||
|
|
||||||
!!! note
|
|
||||||
|
|
||||||
It seems that the vCenter still needs to be joined to the Active Directory Domain, otherwise group membership does not work correctly. We're working on a fix for this, for the meantime your vCenter should be part of your Domain.
|
|
||||||
|
|
||||||
## Preparation
|
|
||||||
|
|
||||||
The following placeholders will be used:
|
|
||||||
|
|
||||||
- `vcenter.company` is the FQDN of the vCenter server.
|
|
||||||
- `passbook.company` is the FQDN of the passbook install.
|
|
||||||
|
|
||||||
Since vCenter only allows OpenID-Connect in combination with Active Directory, it is recommended to have passbook sync with the same Active Directory.
|
|
||||||
|
|
||||||
### Step 1
|
|
||||||
|
|
||||||
Under *Property Mappings*, create a *Scope Mapping*. Give it a name like "OIDC-Scope-VMware-vCenter". Set the scope name to `openid` and the expression to the following
|
|
||||||
|
|
||||||
```python
|
|
||||||
return {
|
|
||||||
"domain": "<your active directory domain>",
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Step 2
|
|
||||||
|
|
||||||
!!! note
|
|
||||||
If your Active Directory Schema is the same as your Email address schema, skip to Step 3.
|
|
||||||
|
|
||||||
Under *Sources*, click *Edit* and ensure that "Autogenerated Active Directory Mapping: userPrincipalName -> attributes.upn" has been added to your source.
|
|
||||||
|
|
||||||
### Step 3
|
|
||||||
|
|
||||||
Under *Providers*, create an OAuth2/OpenID Provider with these settings:
|
|
||||||
|
|
||||||
- Client Type: Confidential
|
|
||||||
- Response Type: code (ADFS Compatibility Mode, sends id_token as access_token)
|
|
||||||
- JWT Algorithm: RS256
|
|
||||||
- Redirect URI: `https://vcenter.company/ui/login/oauth2/authcode`
|
|
||||||
- Post Logout Redirect URIs: `https://vcenter.company/ui/login`
|
|
||||||
- Sub Mode: If your Email address Schema matches your UPN, select "Based on the User's Email...", otherwise select "Based on the User's UPN...".
|
|
||||||
- Scopes: Select the Scope Mapping you've created in Step 1
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
### Step 4
|
|
||||||
|
|
||||||
Create an application which uses this provider. Optionally apply access restrictions to the application.
|
|
||||||
|
|
||||||
Set the Launch URL to `https://vcenter.company/ui/login/oauth2`. This will skip vCenter's User Prompt and directly log you in.
|
|
||||||
|
|
||||||
## vCenter Setup
|
|
||||||
|
|
||||||
Login as local Administrator account (most likely ends with vsphere.local). Using the Menu in the Navigation bar, navigate to *Administration -> Single Sing-on -> Configuration*.
|
|
||||||
|
|
||||||
Click on *Change Identity Provider* in the top-right corner.
|
|
||||||
|
|
||||||
In the wizard, select "Microsoft ADFS" and click Next.
|
|
||||||
|
|
||||||
Fill in the Client Identifier and Shared Secret from the Provider in passbook. For the OpenID Address, click on *View Setup URLs* in passbook, and copy the OpenID Configuration URL.
|
|
||||||
|
|
||||||
On the next page, fill in your Active Directory Connection Details. These should be similar to what you have set in passbook.
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
If your vCenter was already setup with LDAP beforehand, your Role assignments will continue to work.
|
|
||||||
@ -1,20 +0,0 @@
|
|||||||
# Outpost deployment in docker-compose
|
|
||||||
|
|
||||||
To deploy an outpost with docker-compose, use this snippet in your docker-compose file.
|
|
||||||
|
|
||||||
You can also run the outpost in a separate docker-compose project, you just have to ensure that the outpost container can reach your application container.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
version: '3.5'
|
|
||||||
|
|
||||||
services:
|
|
||||||
passbook_proxy:
|
|
||||||
image: beryju/passbook-proxy:0.10.0-stable
|
|
||||||
ports:
|
|
||||||
- 4180:4180
|
|
||||||
- 4443:4443
|
|
||||||
environment:
|
|
||||||
PASSBOOK_HOST: https://your-passbook.tld
|
|
||||||
PASSBOOK_INSECURE: 'false'
|
|
||||||
PASSBOOK_TOKEN: token-generated-by-passbook
|
|
||||||
```
|
|
||||||
@ -1,99 +0,0 @@
|
|||||||
# Outpost deployment on Kubernetes
|
|
||||||
|
|
||||||
Use the following manifest, replacing all values surrounded with `__`.
|
|
||||||
|
|
||||||
Afterwards, configure the proxy provider to connect to `<service name>.<namespace>.svc.cluster.local`, and update your Ingress to connect to the `passbook-outpost` service.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
apiVersion: v1
|
|
||||||
kind: Secret
|
|
||||||
metadata:
|
|
||||||
labels:
|
|
||||||
app.kubernetes.io/instance: test
|
|
||||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
|
||||||
app.kubernetes.io/name: passbook-proxy
|
|
||||||
app.kubernetes.io/version: 0.10.0
|
|
||||||
name: passbook-outpost-api
|
|
||||||
stringData:
|
|
||||||
passbook_host: '__PASSBOOK_URL__'
|
|
||||||
passbook_host_insecure: 'true'
|
|
||||||
token: '__PASSBOOK_TOKEN__'
|
|
||||||
type: Opaque
|
|
||||||
---
|
|
||||||
apiVersion: v1
|
|
||||||
kind: Service
|
|
||||||
metadata:
|
|
||||||
labels:
|
|
||||||
app.kubernetes.io/instance: test
|
|
||||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
|
||||||
app.kubernetes.io/name: passbook-proxy
|
|
||||||
app.kubernetes.io/version: 0.10.0
|
|
||||||
name: passbook-outpost
|
|
||||||
spec:
|
|
||||||
ports:
|
|
||||||
- name: http
|
|
||||||
port: 4180
|
|
||||||
protocol: TCP
|
|
||||||
targetPort: http
|
|
||||||
- name: https
|
|
||||||
port: 4443
|
|
||||||
protocol: TCP
|
|
||||||
targetPort: https
|
|
||||||
selector:
|
|
||||||
app.kubernetes.io/instance: test
|
|
||||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
|
||||||
app.kubernetes.io/name: passbook-proxy
|
|
||||||
app.kubernetes.io/version: 0.10.0
|
|
||||||
type: ClusterIP
|
|
||||||
---
|
|
||||||
apiVersion: apps/v1
|
|
||||||
kind: Deployment
|
|
||||||
metadata:
|
|
||||||
labels:
|
|
||||||
app.kubernetes.io/instance: test
|
|
||||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
|
||||||
app.kubernetes.io/name: passbook-proxy
|
|
||||||
app.kubernetes.io/version: 0.10.0
|
|
||||||
name: passbook-outpost
|
|
||||||
spec:
|
|
||||||
selector:
|
|
||||||
matchLabels:
|
|
||||||
app.kubernetes.io/instance: test
|
|
||||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
|
||||||
app.kubernetes.io/name: passbook-proxy
|
|
||||||
app.kubernetes.io/version: 0.10.0
|
|
||||||
template:
|
|
||||||
metadata:
|
|
||||||
labels:
|
|
||||||
app.kubernetes.io/instance: test
|
|
||||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
|
||||||
app.kubernetes.io/name: passbook-proxy
|
|
||||||
app.kubernetes.io/version: 0.10.0
|
|
||||||
spec:
|
|
||||||
containers:
|
|
||||||
- env:
|
|
||||||
- name: PASSBOOK_HOST
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
key: passbook_host
|
|
||||||
name: passbook-outpost-api
|
|
||||||
- name: PASSBOOK_TOKEN
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
key: token
|
|
||||||
name: passbook-outpost-api
|
|
||||||
- name: PASSBOOK_INSECURE
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
key: passbook_host_insecure
|
|
||||||
name: passbook-outpost-api
|
|
||||||
image: beryju/passbook-proxy:0.10.0-stable
|
|
||||||
name: proxy
|
|
||||||
ports:
|
|
||||||
- containerPort: 4180
|
|
||||||
name: http
|
|
||||||
protocol: TCP
|
|
||||||
- containerPort: 4443
|
|
||||||
name: https
|
|
||||||
protocol: TCP
|
|
||||||
```
|
|
||||||
@ -1,14 +0,0 @@
|
|||||||
# Outposts
|
|
||||||
|
|
||||||
An outpost is a single deployment of a passbook component, which can be deployed in a completely separate environment. Currently, only the Proxy Provider is supported as outpost.
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
Upon creation, a service account and a token is generated. The service account only has permissions to read the outpost and provider configuration. This token is used by the Outpost to connect to passbook.
|
|
||||||
|
|
||||||
To deploy an outpost, see: <a name="deploy"></a>
|
|
||||||
|
|
||||||
- [Kubernetes](deploy-kubernetes.md)
|
|
||||||
- [docker-compose](deploy-docker-compose.md)
|
|
||||||
|
|
||||||
In future versions, this snippet will be automatically generated. You will also be able to deploy an outpost directly into a kubernetes cluster.
|
|
||||||
@ -1,41 +0,0 @@
|
|||||||
# Expression Policies
|
|
||||||
|
|
||||||
!!! notice
|
|
||||||
These variables are available in addition to the common variables/functions defined in [**Expressions**](../expressions/index.md)
|
|
||||||
|
|
||||||
The passing of the policy is determined by the return value of the code. Use `return True` to pass a policy and `return False` to fail it.
|
|
||||||
|
|
||||||
### Available Functions
|
|
||||||
|
|
||||||
#### `pb_message(message: str)`
|
|
||||||
|
|
||||||
Add a message, visible by the end user. This can be used to show the reason why they were denied.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```python
|
|
||||||
pb_message("Access denied")
|
|
||||||
return False
|
|
||||||
```
|
|
||||||
|
|
||||||
### Context variables
|
|
||||||
|
|
||||||
- `request`: A PolicyRequest object, which has the following properties:
|
|
||||||
- `request.user`: The current user, against which the policy is applied. ([ref](../expressions/reference/user-object.md))
|
|
||||||
- `request.http_request`: The Django HTTP Request. ([ref](https://docs.djangoproject.com/en/3.0/ref/request-response/#httprequest-objects))
|
|
||||||
- `request.obj`: A Django Model instance. This is only set if the policy is ran against an object.
|
|
||||||
- `request.context`: A dictionary with dynamic data. This depends on the origin of the execution.
|
|
||||||
- `pb_is_sso_flow`: Boolean which is true if request was initiated by authenticating through an external provider.
|
|
||||||
- `pb_client_ip`: Client's IP Address or 255.255.255.255 if no IP Address could be extracted. Can be [compared](../expressions/index.md#comparing-ip-addresses), for example
|
|
||||||
|
|
||||||
```python
|
|
||||||
return pb_client_ip in ip_network('10.0.0.0/24')
|
|
||||||
```
|
|
||||||
|
|
||||||
Additionally, when the policy is executed from a flow, every variable from the flow's current context is accessible under the `context` object.
|
|
||||||
|
|
||||||
This includes the following:
|
|
||||||
|
|
||||||
- `prompt_data`: Data which has been saved from a prompt stage or an external source.
|
|
||||||
- `application`: The application the user is in the process of authorizing.
|
|
||||||
- `pending_user`: The currently pending user
|
|
||||||
@ -1,12 +0,0 @@
|
|||||||
# Property Mapping Expressions
|
|
||||||
|
|
||||||
The property mapping should return a value that is expected by the Provider/Source. Supported types are documented in the individual Provider/Source. Returning `None` is always accepted and would simply skip the mapping for which `None` was returned.
|
|
||||||
|
|
||||||
!!! notice
|
|
||||||
These variables are available in addition to the common variables/functions defined in [**Expressions**](../expressions/index.md)
|
|
||||||
|
|
||||||
### Context Variables
|
|
||||||
|
|
||||||
- `user`: The current user. This may be `None` if there is no contextual user. ([ref](../expressions/reference/user-object.md))
|
|
||||||
- `request`: The current request. This may be `None` if there is no contextual request. ([ref](https://docs.djangoproject.com/en/3.0/ref/request-response/#httprequest-objects))
|
|
||||||
- Other arbitrary arguments given by the provider, this is documented on the Provider/Source.
|
|
||||||
@ -1,31 +0,0 @@
|
|||||||
# OAuth2 Provider
|
|
||||||
|
|
||||||
This provider supports both generic OAuth2 as well as OpenID Connect
|
|
||||||
|
|
||||||
Scopes can be configured using Scope Mappings, a type of [Property Mappings](../property-mappings/index.md#scope-mapping).
|
|
||||||
|
|
||||||
Endpoint | URL
|
|
||||||
---------|---
|
|
||||||
Authorization | `/application/o/authorize/`
|
|
||||||
Token | `/application/o/token/`
|
|
||||||
User Info | `/application/o/userinfo/`
|
|
||||||
End Session | `/application/o/end-session/`
|
|
||||||
Introspect | `/application/o/introspect/`
|
|
||||||
JWKS | `/application/o/<application slug>/jwks/`
|
|
||||||
OpenID Configuration | `/application/o/<application slug>/.well-known/openid-configuration`
|
|
||||||
|
|
||||||
## GitHub Compatibility
|
|
||||||
|
|
||||||
This provider also exposes a GitHub-compatible endpoint. This endpoint can be used by applications, which support authenticating against GitHub Enterprise, but not generic OpenID Connect.
|
|
||||||
|
|
||||||
To use any of the GitHub Compatibility scopes, you have to use the GitHub Compatibility Endpoints.
|
|
||||||
|
|
||||||
|
|
||||||
Endpoint | URL
|
|
||||||
---------|---
|
|
||||||
Authorization | `/login/oauth/authorize`
|
|
||||||
Token | `/login/oauth/access_token`
|
|
||||||
User Info | `/user`
|
|
||||||
User Teams Info | `/user/teams`
|
|
||||||
|
|
||||||
To access the user's email address, a scope of `user:email` is required. To access their groups, `read:org` is required. Because these scopes are handled by a different endpoint, they are not customisable as a Scope Mapping.
|
|
||||||
@ -1,16 +0,0 @@
|
|||||||
# Proxy Provider
|
|
||||||
|
|
||||||
!!! info
|
|
||||||
This provider is to be used in conjunction with [Outposts](../outposts/outposts.md)
|
|
||||||
|
|
||||||
This provider protects applications, which have no built-in support for OAuth2 or SAML. This is done by running a lightweight Reverse Proxy in front of the application, which authenticates the requests.
|
|
||||||
|
|
||||||
passbook Proxy is based on [oauth2_proxy](https://github.com/oauth2-proxy/oauth2-proxy), but has been integrated more tightly with passbook.
|
|
||||||
|
|
||||||
The Proxy sets these extra headers on requests forwarded to the application:
|
|
||||||
|
|
||||||
Header Name | Value
|
|
||||||
-------------|-------
|
|
||||||
X-Auth-Request-User | The user's unique identifier
|
|
||||||
X-Auth-Request-Email | The user's email address
|
|
||||||
X-Auth-Request-Preferred-Username | The user's username
|
|
||||||
@ -1,12 +0,0 @@
|
|||||||
# SAML Provider
|
|
||||||
|
|
||||||
This provider allows you to integrate enterprise software using the SAML2 Protocol. It supports signed requests and uses [Property Mappings](../property-mappings/index.md#saml-property-mapping) to determine which fields are exposed and what values they return. This makes it possible to expose vendor-specific fields.
|
|
||||||
Default fields are exposed through auto-generated Property Mappings, which are prefixed with "Autogenerated".
|
|
||||||
|
|
||||||
|
|
||||||
Endpoint | URL
|
|
||||||
---------|---
|
|
||||||
SSO (Redirect binding) | `/application/saml/<application slug>/sso/binding/redirect/`
|
|
||||||
SSO (POST binding) | `/application/saml/<application slug>/sso/binding/post/`
|
|
||||||
IdP-initiated login | `/application/saml/<application slug>/sso/binding/init/`
|
|
||||||
Metadata Download | `/application/saml/<application slug>/metadata/`
|
|
||||||
@ -1,2 +0,0 @@
|
|||||||
mkdocs
|
|
||||||
mkdocs-material
|
|
||||||
@ -1 +0,0 @@
|
|||||||
3.7
|
|
||||||
@ -1,73 +0,0 @@
|
|||||||
# Upgrading to 0.10
|
|
||||||
|
|
||||||
This update brings a lot of big features, such as:
|
|
||||||
|
|
||||||
- New OAuth2/OpenID Provider
|
|
||||||
|
|
||||||
This new provider merges both OAuth2 and OpenID. It is based on the codebase of the old provider, which has been simplified and cleaned from the ground up. Support for Property Mappings has also been added. Because of this change, OpenID and OAuth2 Providers will have to be re-created.
|
|
||||||
|
|
||||||
- Proxy Provider
|
|
||||||
|
|
||||||
Due to this new OAuth2 Provider, the Application Gateway Provider, now simply called "Proxy Provider" has been revamped as well. The new passbook Proxy integrates more tightly with passbook via the new Outposts system. The new proxy also supports multiple applications per proxy instance, can configure TLS based on passbook Keypairs, and more.
|
|
||||||
|
|
||||||
See [Proxy](../providers/proxy.md)
|
|
||||||
|
|
||||||
- Outpost System
|
|
||||||
|
|
||||||
This is a new Object type, currently used only by the Proxy Provider. It manages the creation and permissions of service accounts, which are used by the outposts to communicate with passbook.
|
|
||||||
|
|
||||||
See [Outposts](../outposts/outposts.md)
|
|
||||||
|
|
||||||
- Flow Import/Export
|
|
||||||
|
|
||||||
Flows can now be imported and exported. This feature can be used as a backup system, or to share complex flows with other people. Example flows have also been added to the documentation to help you get going with passbook.
|
|
||||||
|
|
||||||
## Under the hood
|
|
||||||
|
|
||||||
- passbook now runs on Django 3.1 and Channels with complete ASGI enabled
|
|
||||||
- uwsgi has been replaced with Gunicorn and uvicorn
|
|
||||||
- Elastic APM has been replaced with Sentry Performance metrics
|
|
||||||
- Flow title is now configurable separately from the name
|
|
||||||
- All logging output is now json
|
|
||||||
|
|
||||||
## Upgrading
|
|
||||||
|
|
||||||
### docker-compose
|
|
||||||
|
|
||||||
The docker-compose file has been updated, please download the latest from `https://raw.githubusercontent.com/BeryJu/passbook/master/docker-compose.yml`.
|
|
||||||
By default, the new compose file uses a fixed version to prevent unintended updates.
|
|
||||||
|
|
||||||
Before updating the file, stop all containers. Then download the file, pull the new containers and start the database.
|
|
||||||
|
|
||||||
```
|
|
||||||
docker-compose down
|
|
||||||
docker-compose pull
|
|
||||||
docker-compose up --no-start
|
|
||||||
docker-compose start redis postgresql
|
|
||||||
docker-compose run --rm server migrate
|
|
||||||
docker-compose up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
### Helm
|
|
||||||
|
|
||||||
A few options have changed:
|
|
||||||
|
|
||||||
- `error_reporting` was changed from a simple boolean to a dictionary:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
error_reporting:
|
|
||||||
enabled: false
|
|
||||||
environment: customer
|
|
||||||
send_pii: false
|
|
||||||
```
|
|
||||||
|
|
||||||
- The `apm` and `monitoring` blocks have been removed.
|
|
||||||
- `serverReplicas` and `workerReplicas` have been added
|
|
||||||
|
|
||||||
### Upgrading
|
|
||||||
|
|
||||||
This upgrade only applies if you are upgrading from a running 0.9 instance. passbook detects this on startup, and automatically executes this upgrade.
|
|
||||||
|
|
||||||
Because this upgrade brings the new OAuth2 Provider, the old providers will be lost in the process. Make sure to take note of the providers you want to bring over.
|
|
||||||
|
|
||||||
Another side-effect of this upgrade is the change of OAuth2 URLs, see [here](../providers/oauth2.md).
|
|
||||||
@ -1,20 +0,0 @@
|
|||||||
# Upgrading to 0.11
|
|
||||||
|
|
||||||
This update brings these headline features:
|
|
||||||
|
|
||||||
- Add Backup and Restore, currently only externally schedulable, documented [here](https://passbook.beryju.org/maintenance/backups/)
|
|
||||||
- New Admin Dashboard with more metrics and Charts
|
|
||||||
|
|
||||||
Shows successful and failed logins from the last 24 hours, as well as the most used applications
|
|
||||||
- Add search to all table views
|
|
||||||
- Outpost now supports a Docker Controller, which installs the Outpost on the same host as passbook, updates and manages it
|
|
||||||
- Add Token Identifier
|
|
||||||
|
|
||||||
Tokens now have an identifier which is used to reference to them, so the Primary key is not shown in URLs
|
|
||||||
- `core/applications/list` API now shows applications the user has access to via policies
|
|
||||||
|
|
||||||
## Upgrading
|
|
||||||
|
|
||||||
This upgrade can be done in the same way as a minor upgrade; the only external change is the new docker-compose file, which enables the Docker Integration for Outposts. To use this feature, please download the latest docker-compose file from [here](https://raw.githubusercontent.com/BeryJu/passbook/master/docker-compose.yml).
|
|
||||||
|
|
||||||
Afterwards, you can simply run `docker-compose up -d` and then the normal upgrade command of `docker-compose run --rm server migrate`.
|
|
||||||
@ -1,63 +0,0 @@
|
|||||||
# Upgrading to 0.12
|
|
||||||
|
|
||||||
This update brings these headline features:
|
|
||||||
|
|
||||||
- Rewrite Outpost state Logic, which now supports multiple concurrent Outpost instances.
|
|
||||||
- Add Kubernetes Integration for Outposts, which deploys and maintains Outposts with High Availability in a Kubernetes Cluster
|
|
||||||
- Add System Task Overview to see all background tasks, their status, the log output, and retry them
|
|
||||||
- Alerts now disappear automatically
|
|
||||||
- Audit Logs are now searchable
|
|
||||||
- Users can now create their own Tokens to access the API
|
|
||||||
- docker-compose deployment now uses traefik 2.3
|
|
||||||
|
|
||||||
Fixes:
|
|
||||||
|
|
||||||
- Fix high CPU Usage of the proxy when Websocket connections fail
|
|
||||||
|
|
||||||
## Upgrading
|
|
||||||
|
|
||||||
### docker-compose
|
|
||||||
|
|
||||||
Docker-compose users should download the latest docker-compose file from [here](https://raw.githubusercontent.com/BeryJu/passbook/master/docker-compose.yml). This includes the new traefik 2.3.
|
|
||||||
|
|
||||||
Afterwards, you can simply run `docker-compose up -d` and then the normal upgrade command of `docker-compose run --rm server migrate`.
|
|
||||||
|
|
||||||
### Kubernetes
|
|
||||||
|
|
||||||
For Kubernetes users, there are some changes to the helm values.
|
|
||||||
|
|
||||||
The values change from
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
config:
|
|
||||||
# Optionally specify fixed secret_key, otherwise generated automatically
|
|
||||||
# secret_key: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
|
|
||||||
# Enable error reporting
|
|
||||||
error_reporting:
|
|
||||||
enabled: false
|
|
||||||
environment: customer
|
|
||||||
send_pii: false
|
|
||||||
# Log level used by web and worker
|
|
||||||
# Can be either debug, info, warning, error
|
|
||||||
log_level: warning
|
|
||||||
```
|
|
||||||
|
|
||||||
to
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
config:
|
|
||||||
# Optionally specify fixed secret_key, otherwise generated automatically
|
|
||||||
# secretKey: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
|
|
||||||
# Enable error reporting
|
|
||||||
errorReporting:
|
|
||||||
enabled: false
|
|
||||||
environment: customer
|
|
||||||
sendPii: false
|
|
||||||
# Log level used by web and worker
|
|
||||||
# Can be either debug, info, warning, error
|
|
||||||
logLevel: warning
|
|
||||||
```
|
|
||||||
|
|
||||||
in order to be consistent with the rest of the settings.
|
|
||||||
|
|
||||||
There is also a new setting called `kubernetesIntegration`, which controls the Kubernetes integration for passbook. When enabled (the default), a Service Account is created, which allows passbook to deploy and update Outposts.
|
|
||||||
@ -9,7 +9,7 @@ curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash -
|
|||||||
sudo apt-get install -y nodejs
|
sudo apt-get install -y nodejs
|
||||||
sudo npm install -g yarn
|
sudo npm install -g yarn
|
||||||
# Setup python
|
# Setup python
|
||||||
sudo apt install -y python3.8 python3-pip
|
sudo apt install -y python3.9 python3-pip libxmlsec1-dev pkg-config
|
||||||
# Setup docker
|
# Setup docker
|
||||||
sudo pip3 install pipenv
|
sudo pip3 install pipenv
|
||||||
|
|
||||||
|
|||||||
@ -16,9 +16,9 @@ from passbook import __version__
|
|||||||
from passbook.core.models import Application
|
from passbook.core.models import Application
|
||||||
from passbook.flows.models import Flow
|
from passbook.flows.models import Flow
|
||||||
from passbook.outposts.models import (
|
from passbook.outposts.models import (
|
||||||
|
DockerServiceConnection,
|
||||||
Outpost,
|
Outpost,
|
||||||
OutpostConfig,
|
OutpostConfig,
|
||||||
OutpostDeploymentType,
|
|
||||||
OutpostType,
|
OutpostType,
|
||||||
)
|
)
|
||||||
from passbook.providers.proxy.models import ProxyProvider
|
from passbook.providers.proxy.models import ProxyProvider
|
||||||
@ -76,7 +76,6 @@ class TestProviderProxy(SeleniumTestCase):
|
|||||||
outpost: Outpost = Outpost.objects.create(
|
outpost: Outpost = Outpost.objects.create(
|
||||||
name="proxy_outpost",
|
name="proxy_outpost",
|
||||||
type=OutpostType.PROXY,
|
type=OutpostType.PROXY,
|
||||||
deployment_type=OutpostDeploymentType.CUSTOM,
|
|
||||||
)
|
)
|
||||||
outpost.providers.add(proxy)
|
outpost.providers.add(proxy)
|
||||||
outpost.save()
|
outpost.save()
|
||||||
@ -128,10 +127,11 @@ class TestProviderProxyConnect(ChannelsLiveServerTestCase):
|
|||||||
proxy.save()
|
proxy.save()
|
||||||
# we need to create an application to actually access the proxy
|
# we need to create an application to actually access the proxy
|
||||||
Application.objects.create(name="proxy", slug="proxy", provider=proxy)
|
Application.objects.create(name="proxy", slug="proxy", provider=proxy)
|
||||||
|
service_connection = DockerServiceConnection.objects.get(local=True)
|
||||||
outpost: Outpost = Outpost.objects.create(
|
outpost: Outpost = Outpost.objects.create(
|
||||||
name="proxy_outpost",
|
name="proxy_outpost",
|
||||||
type=OutpostType.PROXY,
|
type=OutpostType.PROXY,
|
||||||
deployment_type=OutpostDeploymentType.DOCKER,
|
service_connection=service_connection,
|
||||||
_config=asdict(
|
_config=asdict(
|
||||||
OutpostConfig(passbook_host=self.live_server_url, log_level="debug")
|
OutpostConfig(passbook_host=self.live_server_url, log_level="debug")
|
||||||
),
|
),
|
||||||
|
|||||||
@ -156,8 +156,7 @@ def retry(max_retires=3, exceptions=None):
|
|||||||
raise exc
|
raise exc
|
||||||
logger.debug("Retrying on error", exc=exc, test=self)
|
logger.debug("Retrying on error", exc=exc, test=self)
|
||||||
self.tearDown()
|
self.tearDown()
|
||||||
# pylint: disable=protected-access
|
self._post_teardown() # noqa
|
||||||
self._post_teardown()
|
|
||||||
self.setUp()
|
self.setUp()
|
||||||
return wrapper(self, *args, **kwargs)
|
return wrapper(self, *args, **kwargs)
|
||||||
|
|
||||||
|
|||||||
@ -4,8 +4,8 @@ name: passbook
|
|||||||
home: https://passbook.beryju.org
|
home: https://passbook.beryju.org
|
||||||
sources:
|
sources:
|
||||||
- https://github.com/BeryJu/passbook
|
- https://github.com/BeryJu/passbook
|
||||||
version: "0.12.7-stable"
|
version: "0.12.11-stable"
|
||||||
icon: https://raw.githubusercontent.com/BeryJu/passbook/master/docs/images/logo.svg
|
icon: https://raw.githubusercontent.com/BeryJu/passbook/master/website/static/img/logo.svg
|
||||||
dependencies:
|
dependencies:
|
||||||
- name: postgresql
|
- name: postgresql
|
||||||
version: 9.4.1
|
version: 9.4.1
|
||||||
|
|||||||
@ -25,4 +25,4 @@
|
|||||||
| install.redis | true | Enables/disables the packaged Redis Chart
|
| install.redis | true | Enables/disables the packaged Redis Chart
|
||||||
| postgresql.postgresqlPassword | | Password used for PostgreSQL, generated automatically.
|
| postgresql.postgresqlPassword | | Password used for PostgreSQL, generated automatically.
|
||||||
|
|
||||||
For more info, see https://passbook.beryju.org/ and https://passbook.beryju.org/installation/kubernetes/
|
For more info, see https://passbook.beryju.org/ and https://passbook.beryju.org/docs/installation/kubernetes/
|
||||||
|
|||||||
@ -4,7 +4,7 @@
|
|||||||
image:
|
image:
|
||||||
name: beryju/passbook
|
name: beryju/passbook
|
||||||
name_static: beryju/passbook-static
|
name_static: beryju/passbook-static
|
||||||
tag: 0.12.7-stable
|
tag: 0.12.11-stable
|
||||||
|
|
||||||
serverReplicas: 1
|
serverReplicas: 1
|
||||||
workerReplicas: 1
|
workerReplicas: 1
|
||||||
|
|||||||
@ -1,4 +1,6 @@
|
|||||||
"""Gunicorn config"""
|
"""Gunicorn config"""
|
||||||
|
import os
|
||||||
|
import warnings
|
||||||
from multiprocessing import cpu_count
|
from multiprocessing import cpu_count
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
@ -13,6 +15,8 @@ worker_class = "uvicorn.workers.UvicornWorker"
|
|||||||
# Docker containers don't have /tmp as tmpfs
|
# Docker containers don't have /tmp as tmpfs
|
||||||
worker_tmp_dir = "/dev/shm"
|
worker_tmp_dir = "/dev/shm"
|
||||||
|
|
||||||
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.root.settings")
|
||||||
|
|
||||||
logconfig_dict = {
|
logconfig_dict = {
|
||||||
"version": 1,
|
"version": 1,
|
||||||
"disable_existing_loggers": False,
|
"disable_existing_loggers": False,
|
||||||
@ -49,3 +53,5 @@ if Path("/var/run/secrets/kubernetes.io").exists():
|
|||||||
else:
|
else:
|
||||||
worker = cpu_count() * 2 + 1
|
worker = cpu_count() * 2 + 1
|
||||||
threads = 4
|
threads = 4
|
||||||
|
|
||||||
|
warnings.simplefilter("once")
|
||||||
|
|||||||
95
mkdocs.yml
95
mkdocs.yml
@ -1,95 +0,0 @@
|
|||||||
site_name: passbook Docs
|
|
||||||
site_url: https://passbook.beryju.org/
|
|
||||||
copyright: "Copyright © 2019 - 2020 BeryJu.org"
|
|
||||||
|
|
||||||
nav:
|
|
||||||
- Home: index.md
|
|
||||||
- Terminology: terminology.md
|
|
||||||
- Installation:
|
|
||||||
- docker-compose: installation/docker-compose.md
|
|
||||||
- Kubernetes: installation/kubernetes.md
|
|
||||||
- Reverse Proxy: installation/reverse-proxy.md
|
|
||||||
- Flows:
|
|
||||||
Overview: flow/flows.md
|
|
||||||
Examples: flow/examples/examples.md
|
|
||||||
- Stages:
|
|
||||||
- Captcha Stage: flow/stages/captcha/index.md
|
|
||||||
- Dummy Stage: flow/stages/dummy/index.md
|
|
||||||
- Email Stage: flow/stages/email/index.md
|
|
||||||
- Identification Stage: flow/stages/identification/index.md
|
|
||||||
- Invitation Stage: flow/stages/invitation/index.md
|
|
||||||
- OTP Stage: flow/stages/otp/index.md
|
|
||||||
- Password Stage: flow/stages/password/index.md
|
|
||||||
- Prompt Stage: flow/stages/prompt/index.md
|
|
||||||
- Prompt Stage Validation: flow/stages/prompt/validation.md
|
|
||||||
- User Delete Stage: flow/stages/user_delete.md
|
|
||||||
- User Login Stage: flow/stages/user_login.md
|
|
||||||
- User Logout Stage: flow/stages/user_logout.md
|
|
||||||
- User Write Stage: flow/stages/user_write.md
|
|
||||||
- Sources: sources.md
|
|
||||||
- Providers:
|
|
||||||
- OAuth2: providers/oauth2.md
|
|
||||||
- SAML: providers/saml.md
|
|
||||||
- Proxy: providers/proxy.md
|
|
||||||
- Outposts:
|
|
||||||
- Overview: outposts/outposts.md
|
|
||||||
- Upgrading: outposts/upgrading.md
|
|
||||||
- Deploy on docker-compose: outposts/deploy-docker-compose.md
|
|
||||||
- Deploy on Kubernetes: outposts/deploy-kubernetes.md
|
|
||||||
- Expressions:
|
|
||||||
- Overview: expressions/index.md
|
|
||||||
- Reference:
|
|
||||||
- User Object: expressions/reference/user-object.md
|
|
||||||
- Property Mappings:
|
|
||||||
- Overview: property-mappings/index.md
|
|
||||||
- Expressions: property-mappings/expression.md
|
|
||||||
- Policies:
|
|
||||||
- Overview: policies/index.md
|
|
||||||
- Expression: policies/expression.md
|
|
||||||
- Integrations:
|
|
||||||
- as Source:
|
|
||||||
- Active Directory: integrations/sources/active-directory/index.md
|
|
||||||
- as Provider:
|
|
||||||
- Amazon Web Services: integrations/services/aws/index.md
|
|
||||||
- GitLab: integrations/services/gitlab/index.md
|
|
||||||
- Rancher: integrations/services/rancher/index.md
|
|
||||||
- Harbor: integrations/services/harbor/index.md
|
|
||||||
- Sentry: integrations/services/sentry/index.md
|
|
||||||
- Ansible Tower/AWX: integrations/services/tower-awx/index.md
|
|
||||||
- VMware vCenter: integrations/services/vmware-vcenter/index.md
|
|
||||||
- Ubuntu Landscape: integrations/services/ubuntu-landscape/index.md
|
|
||||||
- Sonarr: integrations/services/sonarr/index.md
|
|
||||||
- Tautulli: integrations/services/tautulli/index.md
|
|
||||||
- Maintenance:
|
|
||||||
- Backups: maintenance/backups/index.md
|
|
||||||
- Upgrading:
|
|
||||||
- to 0.9: upgrading/to-0.9.md
|
|
||||||
- to 0.10: upgrading/to-0.10.md
|
|
||||||
- to 0.11: upgrading/to-0.11.md
|
|
||||||
- to 0.12: upgrading/to-0.12.md
|
|
||||||
- Troubleshooting:
|
|
||||||
- Access problems: troubleshooting/access.md
|
|
||||||
|
|
||||||
repo_name: "BeryJu/passbook"
|
|
||||||
repo_url: https://github.com/BeryJu/passbook
|
|
||||||
theme:
|
|
||||||
name: material
|
|
||||||
logo: images/logo.svg
|
|
||||||
favicon: images/logo.svg
|
|
||||||
palette:
|
|
||||||
scheme: slate
|
|
||||||
primary: white
|
|
||||||
|
|
||||||
markdown_extensions:
|
|
||||||
- toc:
|
|
||||||
permalink: "¶"
|
|
||||||
- admonition
|
|
||||||
- codehilite
|
|
||||||
- pymdownx.betterem:
|
|
||||||
smart_enable: all
|
|
||||||
- pymdownx.inlinehilite
|
|
||||||
- pymdownx.magiclink
|
|
||||||
- attr_list
|
|
||||||
|
|
||||||
plugins:
|
|
||||||
- search
|
|
||||||
@ -1,2 +1,2 @@
|
|||||||
"""passbook"""
|
"""passbook"""
|
||||||
__version__ = "0.12.7-stable"
|
__version__ = "0.12.11-stable"
|
||||||
|
|||||||
@ -46,12 +46,29 @@
|
|||||||
{% trans 'Providers' %}
|
{% trans 'Providers' %}
|
||||||
</a>
|
</a>
|
||||||
</li>
|
</li>
|
||||||
|
<li class="pf-c-nav__item pf-m-expanded">
|
||||||
|
<a href="#" class="pf-c-nav__link" aria-expanded="true">{% trans 'Outposts' %}
|
||||||
|
<span class="pf-c-nav__toggle">
|
||||||
|
<i class="fas fa-angle-right" aria-hidden="true"></i>
|
||||||
|
</span>
|
||||||
|
</a>
|
||||||
|
<section class="pf-c-nav__subnav">
|
||||||
|
<ul class="pf-c-nav__simple-list">
|
||||||
<li class="pf-c-nav__item">
|
<li class="pf-c-nav__item">
|
||||||
<a href="{% url 'passbook_admin:outposts' %}"
|
<a href="{% url 'passbook_admin:outposts' %}"
|
||||||
class="pf-c-nav__link {% is_active 'passbook_admin:outposts' 'passbook_admin:outpost-create' 'passbook_admin:outpost-update' 'passbook_admin:outpost-delete' %}">
|
class="pf-c-nav__link {% is_active 'passbook_admin:outposts' 'passbook_admin:outpost-create' 'passbook_admin:outpost-update' 'passbook_admin:outpost-delete' %}">
|
||||||
{% trans 'Outposts' %}
|
{% trans 'Outposts' %}
|
||||||
</a>
|
</a>
|
||||||
</li>
|
</li>
|
||||||
|
<li class="pf-c-nav__item">
|
||||||
|
<a href="{% url 'passbook_admin:outpost-service-connections' %}"
|
||||||
|
class="pf-c-nav__link {% is_active 'passbook_admin:outpost-service-connections' 'passbook_admin:outpost-service-connections-create' 'passbook_admin:outpost-service-connections-update' 'passbook_admin:outpost-service-connections-delete' %}">
|
||||||
|
{% trans 'Service Connections' %}
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
</section>
|
||||||
|
</li>
|
||||||
<li class="pf-c-nav__item">
|
<li class="pf-c-nav__item">
|
||||||
<a href="{% url 'passbook_admin:property-mappings' %}"
|
<a href="{% url 'passbook_admin:property-mappings' %}"
|
||||||
class="pf-c-nav__link {% is_active 'passbook_admin:property-mappings' 'passbook_admin:property-mapping-create' 'passbook_admin:property-mapping-update' 'passbook_admin:property-mapping-delete' %}">
|
class="pf-c-nav__link {% is_active 'passbook_admin:property-mappings' 'passbook_admin:property-mapping-create' 'passbook_admin:property-mapping-update' 'passbook_admin:property-mapping-delete' %}">
|
||||||
|
|||||||
@ -0,0 +1,135 @@
|
|||||||
|
{% extends "administration/base.html" %}
|
||||||
|
|
||||||
|
{% load i18n %}
|
||||||
|
{% load humanize %}
|
||||||
|
{% load passbook_utils %}
|
||||||
|
{% load admin_reflection %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<section class="pf-c-page__main-section pf-m-light">
|
||||||
|
<div class="pf-c-content">
|
||||||
|
<h1>
|
||||||
|
<i class="pf-icon-integration"></i>
|
||||||
|
{% trans 'Outpost Service-Connections' %}
|
||||||
|
</h1>
|
||||||
|
<p>{% trans "Outpost Service-Connections define how passbook connects to external platforms to manage and deploy Outposts." %}</p>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
<section class="pf-c-page__main-section pf-m-no-padding-mobile">
|
||||||
|
<div class="pf-c-card">
|
||||||
|
{% if object_list %}
|
||||||
|
<div class="pf-c-toolbar">
|
||||||
|
<div class="pf-c-toolbar__content">
|
||||||
|
{% include 'partials/toolbar_search.html' %}
|
||||||
|
<div class="pf-c-toolbar__bulk-select">
|
||||||
|
<div class="pf-c-dropdown">
|
||||||
|
<button class="pf-m-primary pf-c-dropdown__toggle" type="button">
|
||||||
|
<span class="pf-c-dropdown__toggle-text">{% trans 'Create' %}</span>
|
||||||
|
<i class="fas fa-caret-down pf-c-dropdown__toggle-icon" aria-hidden="true"></i>
|
||||||
|
</button>
|
||||||
|
<ul class="pf-c-dropdown__menu" hidden>
|
||||||
|
{% for type, name in types.items %}
|
||||||
|
<li>
|
||||||
|
<a class="pf-c-dropdown__menu-item" href="{% url 'passbook_admin:outpost-service-connection-create' %}?type={{ type }}&back={{ request.get_full_path }}">
|
||||||
|
{{ name|verbose_name }}<br>
|
||||||
|
<small>
|
||||||
|
{{ name|doc }}
|
||||||
|
</small>
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% include 'partials/pagination.html' %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<table class="pf-c-table pf-m-compact pf-m-grid-xl" role="grid">
|
||||||
|
<thead>
|
||||||
|
<tr role="row">
|
||||||
|
<th role="columnheader" scope="col">{% trans 'Name' %}</th>
|
||||||
|
<th role="columnheader" scope="col">{% trans 'Type' %}</th>
|
||||||
|
<th role="columnheader" scope="col">{% trans 'Local?' %}</th>
|
||||||
|
<th role="columnheader" scope="col">{% trans 'Status' %}</th>
|
||||||
|
<th role="cell"></th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody role="rowgroup">
|
||||||
|
{% for sc in object_list %}
|
||||||
|
<tr role="row">
|
||||||
|
<th role="columnheader">
|
||||||
|
<span>{{ sc.name }}</span>
|
||||||
|
</th>
|
||||||
|
<td role="cell">
|
||||||
|
<span>
|
||||||
|
{{ sc|verbose_name }}
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td role="cell">
|
||||||
|
<span>
|
||||||
|
{{ sc.local|yesno:"Yes,No" }}
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td role="cell">
|
||||||
|
<span>
|
||||||
|
{% if sc.state.healthy %}
|
||||||
|
<i class="fas fa-check pf-m-success"></i> {{ sc.state.version }}
|
||||||
|
{% else %}
|
||||||
|
<i class="fas fa-times pf-m-danger"></i> {% trans 'Unhealthy' %}
|
||||||
|
{% endif %}
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td>
|
||||||
|
<a class="pf-c-button pf-m-secondary" href="{% url 'passbook_admin:outpost-service-connection-update' pk=sc.pk %}?back={{ request.get_full_path }}">{% trans 'Edit' %}</a>
|
||||||
|
<a class="pf-c-button pf-m-danger" href="{% url 'passbook_admin:outpost-service-connection-delete' pk=sc.pk %}?back={{ request.get_full_path }}">{% trans 'Delete' %}</a>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
<div class="pf-c-pagination pf-m-bottom">
|
||||||
|
{% include 'partials/pagination.html' %}
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="pf-c-toolbar">
|
||||||
|
<div class="pf-c-toolbar__content">
|
||||||
|
{% include 'partials/toolbar_search.html' %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="pf-c-empty-state">
|
||||||
|
<div class="pf-c-empty-state__content">
|
||||||
|
<i class="fas fa-map-marker pf-c-empty-state__icon" aria-hidden="true"></i>
|
||||||
|
<h1 class="pf-c-title pf-m-lg">
|
||||||
|
{% trans 'No Outpost Service Connections.' %}
|
||||||
|
</h1>
|
||||||
|
<div class="pf-c-empty-state__body">
|
||||||
|
{% if request.GET.search != "" %}
|
||||||
|
{% trans "Your search query doesn't match any outposts." %}
|
||||||
|
{% else %}
|
||||||
|
{% trans 'Currently no service connections exist. Click the button below to create one.' %}
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<div class="pf-c-dropdown">
|
||||||
|
<button class="pf-m-primary pf-c-dropdown__toggle" type="button">
|
||||||
|
<span class="pf-c-dropdown__toggle-text">{% trans 'Create' %}</span>
|
||||||
|
<i class="fas fa-caret-down pf-c-dropdown__toggle-icon" aria-hidden="true"></i>
|
||||||
|
</button>
|
||||||
|
<ul class="pf-c-dropdown__menu" hidden>
|
||||||
|
{% for type, name in types.items %}
|
||||||
|
<li>
|
||||||
|
<a class="pf-c-dropdown__menu-item" href="{% url 'passbook_admin:outpost-service-connection-create' %}?type={{ type }}&back={{ request.get_full_path }}">
|
||||||
|
{{ name|verbose_name }}<br>
|
||||||
|
<small>
|
||||||
|
{{ name|doc }}
|
||||||
|
</small>
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
{% endblock %}
|
||||||
@ -7,10 +7,11 @@ from passbook.admin.views import (
|
|||||||
flows,
|
flows,
|
||||||
groups,
|
groups,
|
||||||
outposts,
|
outposts,
|
||||||
|
outposts_service_connections,
|
||||||
overview,
|
overview,
|
||||||
policies,
|
policies,
|
||||||
policies_bindings,
|
policies_bindings,
|
||||||
property_mapping,
|
property_mappings,
|
||||||
providers,
|
providers,
|
||||||
sources,
|
sources,
|
||||||
stages,
|
stages,
|
||||||
@ -225,22 +226,22 @@ urlpatterns = [
|
|||||||
# Property Mappings
|
# Property Mappings
|
||||||
path(
|
path(
|
||||||
"property-mappings/",
|
"property-mappings/",
|
||||||
property_mapping.PropertyMappingListView.as_view(),
|
property_mappings.PropertyMappingListView.as_view(),
|
||||||
name="property-mappings",
|
name="property-mappings",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"property-mappings/create/",
|
"property-mappings/create/",
|
||||||
property_mapping.PropertyMappingCreateView.as_view(),
|
property_mappings.PropertyMappingCreateView.as_view(),
|
||||||
name="property-mapping-create",
|
name="property-mapping-create",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"property-mappings/<uuid:pk>/update/",
|
"property-mappings/<uuid:pk>/update/",
|
||||||
property_mapping.PropertyMappingUpdateView.as_view(),
|
property_mappings.PropertyMappingUpdateView.as_view(),
|
||||||
name="property-mapping-update",
|
name="property-mapping-update",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"property-mappings/<uuid:pk>/delete/",
|
"property-mappings/<uuid:pk>/delete/",
|
||||||
property_mapping.PropertyMappingDeleteView.as_view(),
|
property_mappings.PropertyMappingDeleteView.as_view(),
|
||||||
name="property-mapping-delete",
|
name="property-mapping-delete",
|
||||||
),
|
),
|
||||||
# Users
|
# Users
|
||||||
@ -312,6 +313,27 @@ urlpatterns = [
|
|||||||
outposts.OutpostDeleteView.as_view(),
|
outposts.OutpostDeleteView.as_view(),
|
||||||
name="outpost-delete",
|
name="outpost-delete",
|
||||||
),
|
),
|
||||||
|
# Outpost Service Connections
|
||||||
|
path(
|
||||||
|
"outposts/service_connections/",
|
||||||
|
outposts_service_connections.OutpostServiceConnectionListView.as_view(),
|
||||||
|
name="outpost-service-connections",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"outposts/service_connections/create/",
|
||||||
|
outposts_service_connections.OutpostServiceConnectionCreateView.as_view(),
|
||||||
|
name="outpost-service-connection-create",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"outposts/service_connections/<uuid:pk>/update/",
|
||||||
|
outposts_service_connections.OutpostServiceConnectionUpdateView.as_view(),
|
||||||
|
name="outpost-service-connection-update",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"outposts/service_connections/<uuid:pk>/delete/",
|
||||||
|
outposts_service_connections.OutpostServiceConnectionDeleteView.as_view(),
|
||||||
|
name="outpost-service-connection-delete",
|
||||||
|
),
|
||||||
# Tasks
|
# Tasks
|
||||||
path(
|
path(
|
||||||
"tasks/",
|
"tasks/",
|
||||||
|
|||||||
@ -147,5 +147,5 @@ class FlowExportView(LoginRequiredMixin, PermissionRequiredMixin, DetailView):
|
|||||||
flow: Flow = self.get_object()
|
flow: Flow = self.get_object()
|
||||||
exporter = FlowExporter(flow)
|
exporter = FlowExporter(flow)
|
||||||
response = JsonResponse(exporter.export(), encoder=DataclassEncoder, safe=False)
|
response = JsonResponse(exporter.export(), encoder=DataclassEncoder, safe=False)
|
||||||
response["Content-Disposition"] = f'attachment; filename="{flow.slug}.json"'
|
response["Content-Disposition"] = f'attachment; filename="{flow.slug}.pbflow"'
|
||||||
return response
|
return response
|
||||||
|
|||||||
83
passbook/admin/views/outposts_service_connections.py
Normal file
83
passbook/admin/views/outposts_service_connections.py
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
"""passbook OutpostServiceConnection administration"""
|
||||||
|
from django.contrib.auth.mixins import LoginRequiredMixin
|
||||||
|
from django.contrib.auth.mixins import (
|
||||||
|
PermissionRequiredMixin as DjangoPermissionRequiredMixin,
|
||||||
|
)
|
||||||
|
from django.contrib.messages.views import SuccessMessageMixin
|
||||||
|
from django.urls import reverse_lazy
|
||||||
|
from django.utils.translation import gettext as _
|
||||||
|
from guardian.mixins import PermissionListMixin, PermissionRequiredMixin
|
||||||
|
|
||||||
|
from passbook.admin.views.utils import (
|
||||||
|
BackSuccessUrlMixin,
|
||||||
|
DeleteMessageView,
|
||||||
|
InheritanceCreateView,
|
||||||
|
InheritanceListView,
|
||||||
|
InheritanceUpdateView,
|
||||||
|
SearchListMixin,
|
||||||
|
UserPaginateListMixin,
|
||||||
|
)
|
||||||
|
from passbook.outposts.models import OutpostServiceConnection
|
||||||
|
|
||||||
|
|
||||||
|
class OutpostServiceConnectionListView(
|
||||||
|
LoginRequiredMixin,
|
||||||
|
PermissionListMixin,
|
||||||
|
UserPaginateListMixin,
|
||||||
|
SearchListMixin,
|
||||||
|
InheritanceListView,
|
||||||
|
):
|
||||||
|
"""Show list of all outpost-service-connections"""
|
||||||
|
|
||||||
|
model = OutpostServiceConnection
|
||||||
|
permission_required = "passbook_outposts.add_outpostserviceconnection"
|
||||||
|
template_name = "administration/outpost_service_connection/list.html"
|
||||||
|
ordering = "pk"
|
||||||
|
search_fields = ["pk", "name"]
|
||||||
|
|
||||||
|
|
||||||
|
class OutpostServiceConnectionCreateView(
|
||||||
|
SuccessMessageMixin,
|
||||||
|
BackSuccessUrlMixin,
|
||||||
|
LoginRequiredMixin,
|
||||||
|
DjangoPermissionRequiredMixin,
|
||||||
|
InheritanceCreateView,
|
||||||
|
):
|
||||||
|
"""Create new OutpostServiceConnection"""
|
||||||
|
|
||||||
|
model = OutpostServiceConnection
|
||||||
|
permission_required = "passbook_outposts.add_outpostserviceconnection"
|
||||||
|
|
||||||
|
template_name = "generic/create.html"
|
||||||
|
success_url = reverse_lazy("passbook_admin:outpost-service-connections")
|
||||||
|
success_message = _("Successfully created OutpostServiceConnection")
|
||||||
|
|
||||||
|
|
||||||
|
class OutpostServiceConnectionUpdateView(
|
||||||
|
SuccessMessageMixin,
|
||||||
|
BackSuccessUrlMixin,
|
||||||
|
LoginRequiredMixin,
|
||||||
|
PermissionRequiredMixin,
|
||||||
|
InheritanceUpdateView,
|
||||||
|
):
|
||||||
|
"""Update outpostserviceconnection"""
|
||||||
|
|
||||||
|
model = OutpostServiceConnection
|
||||||
|
permission_required = "passbook_outposts.change_outpostserviceconnection"
|
||||||
|
|
||||||
|
template_name = "generic/update.html"
|
||||||
|
success_url = reverse_lazy("passbook_admin:outpost-service-connections")
|
||||||
|
success_message = _("Successfully updated OutpostServiceConnection")
|
||||||
|
|
||||||
|
|
||||||
|
class OutpostServiceConnectionDeleteView(
|
||||||
|
LoginRequiredMixin, PermissionRequiredMixin, DeleteMessageView
|
||||||
|
):
|
||||||
|
"""Delete outpostserviceconnection"""
|
||||||
|
|
||||||
|
model = OutpostServiceConnection
|
||||||
|
permission_required = "passbook_outposts.delete_outpostserviceconnection"
|
||||||
|
|
||||||
|
template_name = "generic/delete.html"
|
||||||
|
success_url = reverse_lazy("passbook_admin:outpost-service-connections")
|
||||||
|
success_message = _("Successfully deleted OutpostServiceConnection")
|
||||||
@ -32,8 +32,8 @@ class ProviderListView(
|
|||||||
model = Provider
|
model = Provider
|
||||||
permission_required = "passbook_core.add_provider"
|
permission_required = "passbook_core.add_provider"
|
||||||
template_name = "administration/provider/list.html"
|
template_name = "administration/provider/list.html"
|
||||||
ordering = "id"
|
ordering = "pk"
|
||||||
search_fields = ["id", "name"]
|
search_fields = ["pk", "name"]
|
||||||
|
|
||||||
|
|
||||||
class ProviderCreateView(
|
class ProviderCreateView(
|
||||||
|
|||||||
@ -19,7 +19,11 @@ from passbook.core.api.tokens import TokenViewSet
|
|||||||
from passbook.core.api.users import UserViewSet
|
from passbook.core.api.users import UserViewSet
|
||||||
from passbook.crypto.api import CertificateKeyPairViewSet
|
from passbook.crypto.api import CertificateKeyPairViewSet
|
||||||
from passbook.flows.api import FlowStageBindingViewSet, FlowViewSet, StageViewSet
|
from passbook.flows.api import FlowStageBindingViewSet, FlowViewSet, StageViewSet
|
||||||
from passbook.outposts.api import OutpostViewSet
|
from passbook.outposts.api import (
|
||||||
|
DockerServiceConnectionViewSet,
|
||||||
|
KubernetesServiceConnectionViewSet,
|
||||||
|
OutpostViewSet,
|
||||||
|
)
|
||||||
from passbook.policies.api import PolicyBindingViewSet, PolicyViewSet
|
from passbook.policies.api import PolicyBindingViewSet, PolicyViewSet
|
||||||
from passbook.policies.dummy.api import DummyPolicyViewSet
|
from passbook.policies.dummy.api import DummyPolicyViewSet
|
||||||
from passbook.policies.expiry.api import PasswordExpiryPolicyViewSet
|
from passbook.policies.expiry.api import PasswordExpiryPolicyViewSet
|
||||||
@ -29,7 +33,7 @@ from passbook.policies.hibp.api import HaveIBeenPwendPolicyViewSet
|
|||||||
from passbook.policies.password.api import PasswordPolicyViewSet
|
from passbook.policies.password.api import PasswordPolicyViewSet
|
||||||
from passbook.policies.reputation.api import ReputationPolicyViewSet
|
from passbook.policies.reputation.api import ReputationPolicyViewSet
|
||||||
from passbook.providers.oauth2.api import OAuth2ProviderViewSet, ScopeMappingViewSet
|
from passbook.providers.oauth2.api import OAuth2ProviderViewSet, ScopeMappingViewSet
|
||||||
from passbook.providers.proxy.api import OutpostConfigViewSet, ProxyProviderViewSet
|
from passbook.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
|
||||||
from passbook.providers.saml.api import SAMLPropertyMappingViewSet, SAMLProviderViewSet
|
from passbook.providers.saml.api import SAMLPropertyMappingViewSet, SAMLProviderViewSet
|
||||||
from passbook.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
|
from passbook.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
|
||||||
from passbook.sources.oauth.api import OAuthSourceViewSet
|
from passbook.sources.oauth.api import OAuthSourceViewSet
|
||||||
@ -66,7 +70,11 @@ router.register("core/users", UserViewSet)
|
|||||||
router.register("core/tokens", TokenViewSet)
|
router.register("core/tokens", TokenViewSet)
|
||||||
|
|
||||||
router.register("outposts/outposts", OutpostViewSet)
|
router.register("outposts/outposts", OutpostViewSet)
|
||||||
router.register("outposts/proxy", OutpostConfigViewSet)
|
router.register("outposts/service_connections/docker", DockerServiceConnectionViewSet)
|
||||||
|
router.register(
|
||||||
|
"outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet
|
||||||
|
)
|
||||||
|
router.register("outposts/proxy", ProxyOutpostConfigViewSet)
|
||||||
|
|
||||||
router.register("flows/instances", FlowViewSet)
|
router.register("flows/instances", FlowViewSet)
|
||||||
router.register("flows/bindings", FlowStageBindingViewSet)
|
router.register("flows/bindings", FlowStageBindingViewSet)
|
||||||
|
|||||||
@ -37,6 +37,7 @@ def clean_expired_models(self: MonitoredTask):
|
|||||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||||
def backup_database(self: MonitoredTask): # pragma: no cover
|
def backup_database(self: MonitoredTask): # pragma: no cover
|
||||||
"""Database backup"""
|
"""Database backup"""
|
||||||
|
self.result_timeout_hours = 25
|
||||||
try:
|
try:
|
||||||
start = datetime.now()
|
start = datetime.now()
|
||||||
out = StringIO()
|
out = StringIO()
|
||||||
|
|||||||
@ -54,7 +54,7 @@ class CertificateKeyPair(CreatedUpdatedModel):
|
|||||||
@property
|
@property
|
||||||
def private_key(self) -> Optional[RSAPrivateKey]:
|
def private_key(self) -> Optional[RSAPrivateKey]:
|
||||||
"""Get python cryptography PrivateKey instance"""
|
"""Get python cryptography PrivateKey instance"""
|
||||||
if not self._private_key:
|
if not self._private_key and self._private_key != "":
|
||||||
self._private_key = load_pem_private_key(
|
self._private_key = load_pem_private_key(
|
||||||
str.encode("\n".join([x.strip() for x in self.key_data.split("\n")])),
|
str.encode("\n".join([x.strip() for x in self.key_data.split("\n")])),
|
||||||
password=None,
|
password=None,
|
||||||
|
|||||||
@ -1,6 +1,7 @@
|
|||||||
"""Flow and Stage forms"""
|
"""Flow and Stage forms"""
|
||||||
|
|
||||||
from django import forms
|
from django import forms
|
||||||
|
from django.core.validators import FileExtensionValidator
|
||||||
from django.forms import ValidationError
|
from django.forms import ValidationError
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
@ -62,7 +63,9 @@ class FlowStageBindingForm(forms.ModelForm):
|
|||||||
class FlowImportForm(forms.Form):
|
class FlowImportForm(forms.Form):
|
||||||
"""Form used for flow importing"""
|
"""Form used for flow importing"""
|
||||||
|
|
||||||
flow = forms.FileField()
|
flow = forms.FileField(
|
||||||
|
validators=[FileExtensionValidator(allowed_extensions=["pbflow"])]
|
||||||
|
)
|
||||||
|
|
||||||
def clean_flow(self):
|
def clean_flow(self):
|
||||||
"""Check if the flow is valid and rewind the file to the start"""
|
"""Check if the flow is valid and rewind the file to the start"""
|
||||||
|
|||||||
@ -12,7 +12,7 @@ class TestTransferDocs(TransactionTestCase):
|
|||||||
"""Empty class, test methods are added dynamically"""
|
"""Empty class, test methods are added dynamically"""
|
||||||
|
|
||||||
|
|
||||||
def generic_view_tester(file_name: str) -> Callable:
|
def pbflow_tester(file_name: str) -> Callable:
|
||||||
"""This is used instead of subTest for better visibility"""
|
"""This is used instead of subTest for better visibility"""
|
||||||
|
|
||||||
def tester(self: TestTransferDocs):
|
def tester(self: TestTransferDocs):
|
||||||
@ -24,8 +24,6 @@ def generic_view_tester(file_name: str) -> Callable:
|
|||||||
return tester
|
return tester
|
||||||
|
|
||||||
|
|
||||||
for flow_file in glob("docs/flow/examples/*.json"):
|
for flow_file in glob("website/static/flows/*.pbflow"):
|
||||||
method_name = Path(flow_file).stem.replace("-", "_").replace(".", "_")
|
method_name = Path(flow_file).stem.replace("-", "_").replace(".", "_")
|
||||||
setattr(
|
setattr(TestTransferDocs, f"test_flow_{method_name}", pbflow_tester(flow_file))
|
||||||
TestTransferDocs, f"test_flow_{method_name}", generic_view_tester(flow_file)
|
|
||||||
)
|
|
||||||
|
|||||||
@ -30,6 +30,6 @@ passbook:
|
|||||||
# Optionally add links to the footer on the login page
|
# Optionally add links to the footer on the login page
|
||||||
footer_links:
|
footer_links:
|
||||||
- name: Documentation
|
- name: Documentation
|
||||||
|
href: https://passbook.beryju.org/docs/
|
||||||
|
- name: passbook Website
|
||||||
href: https://passbook.beryju.org/
|
href: https://passbook.beryju.org/
|
||||||
# - name: test
|
|
||||||
# href: https://test
|
|
||||||
|
|||||||
@ -27,7 +27,7 @@ class BaseEvaluator:
|
|||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
# update passbook/policies/expression/templates/policy/expression/form.html
|
# update passbook/policies/expression/templates/policy/expression/form.html
|
||||||
# update docs/policies/expression/index.md
|
# update website/docs/policies/expression.md
|
||||||
self._globals = {
|
self._globals = {
|
||||||
"regex_match": BaseEvaluator.expr_filter_regex_match,
|
"regex_match": BaseEvaluator.expr_filter_regex_match,
|
||||||
"regex_replace": BaseEvaluator.expr_filter_regex_replace,
|
"regex_replace": BaseEvaluator.expr_filter_regex_replace,
|
||||||
|
|||||||
@ -1,4 +1,5 @@
|
|||||||
"""passbook sentry integration"""
|
"""passbook sentry integration"""
|
||||||
|
from aioredis.errors import ConnectionClosedError, ReplyError
|
||||||
from billiard.exceptions import WorkerLostError
|
from billiard.exceptions import WorkerLostError
|
||||||
from botocore.client import ClientError
|
from botocore.client import ClientError
|
||||||
from celery.exceptions import CeleryError
|
from celery.exceptions import CeleryError
|
||||||
@ -8,7 +9,7 @@ from django.db import InternalError, OperationalError, ProgrammingError
|
|||||||
from django_redis.exceptions import ConnectionInterrupted
|
from django_redis.exceptions import ConnectionInterrupted
|
||||||
from ldap3.core.exceptions import LDAPException
|
from ldap3.core.exceptions import LDAPException
|
||||||
from redis.exceptions import ConnectionError as RedisConnectionError
|
from redis.exceptions import ConnectionError as RedisConnectionError
|
||||||
from redis.exceptions import RedisError
|
from redis.exceptions import RedisError, ResponseError
|
||||||
from rest_framework.exceptions import APIException
|
from rest_framework.exceptions import APIException
|
||||||
from structlog import get_logger
|
from structlog import get_logger
|
||||||
from websockets.exceptions import WebSocketException
|
from websockets.exceptions import WebSocketException
|
||||||
@ -23,26 +24,37 @@ class SentryIgnoredException(Exception):
|
|||||||
def before_send(event, hint):
|
def before_send(event, hint):
|
||||||
"""Check if error is database error, and ignore if so"""
|
"""Check if error is database error, and ignore if so"""
|
||||||
ignored_classes = (
|
ignored_classes = (
|
||||||
|
# Inbuilt types
|
||||||
|
KeyboardInterrupt,
|
||||||
|
ConnectionResetError,
|
||||||
|
OSError,
|
||||||
|
# Django DB Errors
|
||||||
OperationalError,
|
OperationalError,
|
||||||
InternalError,
|
InternalError,
|
||||||
ProgrammingError,
|
ProgrammingError,
|
||||||
ConnectionInterrupted,
|
|
||||||
APIException,
|
|
||||||
ConnectionResetError,
|
|
||||||
RedisConnectionError,
|
|
||||||
WorkerLostError,
|
|
||||||
DisallowedHost,
|
DisallowedHost,
|
||||||
ConnectionResetError,
|
|
||||||
KeyboardInterrupt,
|
|
||||||
ClientError,
|
|
||||||
ValidationError,
|
ValidationError,
|
||||||
OSError,
|
# Redis errors
|
||||||
|
RedisConnectionError,
|
||||||
|
ConnectionInterrupted,
|
||||||
RedisError,
|
RedisError,
|
||||||
SentryIgnoredException,
|
ResponseError,
|
||||||
CeleryError,
|
ReplyError,
|
||||||
LDAPException,
|
ConnectionClosedError,
|
||||||
|
# websocket errors
|
||||||
ChannelFull,
|
ChannelFull,
|
||||||
WebSocketException,
|
WebSocketException,
|
||||||
|
# rest_framework error
|
||||||
|
APIException,
|
||||||
|
# celery errors
|
||||||
|
WorkerLostError,
|
||||||
|
CeleryError,
|
||||||
|
# S3 errors
|
||||||
|
ClientError,
|
||||||
|
# custom baseclass
|
||||||
|
SentryIgnoredException,
|
||||||
|
# ldap errors
|
||||||
|
LDAPException,
|
||||||
)
|
)
|
||||||
if "exc_info" in hint:
|
if "exc_info" in hint:
|
||||||
_, exc_value, _ = hint["exc_info"]
|
_, exc_value, _ = hint["exc_info"]
|
||||||
|
|||||||
@ -66,13 +66,13 @@ class TaskInfo:
|
|||||||
"""Delete task info from cache"""
|
"""Delete task info from cache"""
|
||||||
return cache.delete(f"task_{self.task_name}")
|
return cache.delete(f"task_{self.task_name}")
|
||||||
|
|
||||||
def save(self):
|
def save(self, timeout_hours=6):
|
||||||
"""Save task into cache"""
|
"""Save task into cache"""
|
||||||
key = f"task_{self.task_name}"
|
key = f"task_{self.task_name}"
|
||||||
if self.result.uid:
|
if self.result.uid:
|
||||||
key += f"_{self.result.uid}"
|
key += f"_{self.result.uid}"
|
||||||
self.task_name += f"_{self.result.uid}"
|
self.task_name += f"_{self.result.uid}"
|
||||||
cache.set(key, self, timeout=6 * 60 * 60)
|
cache.set(key, self, timeout=timeout_hours * 60 * 60)
|
||||||
|
|
||||||
|
|
||||||
class MonitoredTask(Task):
|
class MonitoredTask(Task):
|
||||||
@ -90,6 +90,7 @@ class MonitoredTask(Task):
|
|||||||
self.save_on_success = True
|
self.save_on_success = True
|
||||||
self._uid = None
|
self._uid = None
|
||||||
self._result = TaskResult(status=TaskResultStatus.ERROR, messages=[])
|
self._result = TaskResult(status=TaskResultStatus.ERROR, messages=[])
|
||||||
|
self.result_timeout_hours = 6
|
||||||
|
|
||||||
def set_uid(self, uid: str):
|
def set_uid(self, uid: str):
|
||||||
"""Set UID, so in the case of an unexpected error its saved correctly"""
|
"""Set UID, so in the case of an unexpected error its saved correctly"""
|
||||||
@ -115,7 +116,7 @@ class MonitoredTask(Task):
|
|||||||
task_call_func=self.__name__,
|
task_call_func=self.__name__,
|
||||||
task_call_args=args,
|
task_call_args=args,
|
||||||
task_call_kwargs=kwargs,
|
task_call_kwargs=kwargs,
|
||||||
).save()
|
).save(self.result_timeout_hours)
|
||||||
return super().after_return(status, retval, task_id, args, kwargs, einfo=einfo)
|
return super().after_return(status, retval, task_id, args, kwargs, einfo=einfo)
|
||||||
|
|
||||||
# pylint: disable=too-many-arguments
|
# pylint: disable=too-many-arguments
|
||||||
@ -131,7 +132,7 @@ class MonitoredTask(Task):
|
|||||||
task_call_func=self.__name__,
|
task_call_func=self.__name__,
|
||||||
task_call_args=args,
|
task_call_args=args,
|
||||||
task_call_kwargs=kwargs,
|
task_call_kwargs=kwargs,
|
||||||
).save()
|
).save(self.result_timeout_hours)
|
||||||
return super().on_failure(exc, task_id, args, kwargs, einfo=einfo)
|
return super().on_failure(exc, task_id, args, kwargs, einfo=einfo)
|
||||||
|
|
||||||
def run(self, *args, **kwargs):
|
def run(self, *args, **kwargs):
|
||||||
|
|||||||
@ -2,7 +2,11 @@
|
|||||||
from rest_framework.serializers import JSONField, ModelSerializer
|
from rest_framework.serializers import JSONField, ModelSerializer
|
||||||
from rest_framework.viewsets import ModelViewSet
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
|
||||||
from passbook.outposts.models import Outpost
|
from passbook.outposts.models import (
|
||||||
|
DockerServiceConnection,
|
||||||
|
KubernetesServiceConnection,
|
||||||
|
Outpost,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class OutpostSerializer(ModelSerializer):
|
class OutpostSerializer(ModelSerializer):
|
||||||
@ -13,7 +17,7 @@ class OutpostSerializer(ModelSerializer):
|
|||||||
class Meta:
|
class Meta:
|
||||||
|
|
||||||
model = Outpost
|
model = Outpost
|
||||||
fields = ["pk", "name", "providers", "_config"]
|
fields = ["pk", "name", "providers", "service_connection", "_config"]
|
||||||
|
|
||||||
|
|
||||||
class OutpostViewSet(ModelViewSet):
|
class OutpostViewSet(ModelViewSet):
|
||||||
@ -21,3 +25,35 @@ class OutpostViewSet(ModelViewSet):
|
|||||||
|
|
||||||
queryset = Outpost.objects.all()
|
queryset = Outpost.objects.all()
|
||||||
serializer_class = OutpostSerializer
|
serializer_class = OutpostSerializer
|
||||||
|
|
||||||
|
|
||||||
|
class DockerServiceConnectionSerializer(ModelSerializer):
|
||||||
|
"""DockerServiceConnection Serializer"""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
|
||||||
|
model = DockerServiceConnection
|
||||||
|
fields = ["pk", "name", "local", "url", "tls"]
|
||||||
|
|
||||||
|
|
||||||
|
class DockerServiceConnectionViewSet(ModelViewSet):
|
||||||
|
"""DockerServiceConnection Viewset"""
|
||||||
|
|
||||||
|
queryset = DockerServiceConnection.objects.all()
|
||||||
|
serializer_class = DockerServiceConnectionSerializer
|
||||||
|
|
||||||
|
|
||||||
|
class KubernetesServiceConnectionSerializer(ModelSerializer):
|
||||||
|
"""KubernetesServiceConnection Serializer"""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
|
||||||
|
model = KubernetesServiceConnection
|
||||||
|
fields = ["pk", "name", "local", "kubeconfig"]
|
||||||
|
|
||||||
|
|
||||||
|
class KubernetesServiceConnectionViewSet(ModelViewSet):
|
||||||
|
"""KubernetesServiceConnection Viewset"""
|
||||||
|
|
||||||
|
queryset = KubernetesServiceConnection.objects.all()
|
||||||
|
serializer_class = KubernetesServiceConnectionSerializer
|
||||||
|
|||||||
@ -1,7 +1,20 @@
|
|||||||
"""passbook outposts app config"""
|
"""passbook outposts app config"""
|
||||||
from importlib import import_module
|
from importlib import import_module
|
||||||
|
from os import R_OK, access
|
||||||
|
from os.path import expanduser
|
||||||
|
from pathlib import Path
|
||||||
|
from socket import gethostname
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
import yaml
|
||||||
from django.apps import AppConfig
|
from django.apps import AppConfig
|
||||||
|
from django.db import ProgrammingError
|
||||||
|
from docker.constants import DEFAULT_UNIX_SOCKET
|
||||||
|
from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME
|
||||||
|
from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
|
||||||
|
from structlog import get_logger
|
||||||
|
|
||||||
|
LOGGER = get_logger()
|
||||||
|
|
||||||
|
|
||||||
class PassbookOutpostConfig(AppConfig):
|
class PassbookOutpostConfig(AppConfig):
|
||||||
@ -14,3 +27,48 @@ class PassbookOutpostConfig(AppConfig):
|
|||||||
|
|
||||||
def ready(self):
|
def ready(self):
|
||||||
import_module("passbook.outposts.signals")
|
import_module("passbook.outposts.signals")
|
||||||
|
try:
|
||||||
|
self.init_local_connection()
|
||||||
|
except ProgrammingError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def init_local_connection(self):
|
||||||
|
"""Check if local kubernetes or docker connections should be created"""
|
||||||
|
from passbook.outposts.models import (
|
||||||
|
KubernetesServiceConnection,
|
||||||
|
DockerServiceConnection,
|
||||||
|
)
|
||||||
|
|
||||||
|
if Path(SERVICE_TOKEN_FILENAME).exists():
|
||||||
|
LOGGER.debug("Detected in-cluster Kubernetes Config")
|
||||||
|
if not KubernetesServiceConnection.objects.filter(local=True).exists():
|
||||||
|
LOGGER.debug("Created Service Connection for in-cluster")
|
||||||
|
KubernetesServiceConnection.objects.create(
|
||||||
|
name="Local Kubernetes Cluster", local=True, kubeconfig={}
|
||||||
|
)
|
||||||
|
# For development, check for the existence of a kubeconfig file
|
||||||
|
kubeconfig_path = expanduser(KUBE_CONFIG_DEFAULT_LOCATION)
|
||||||
|
if Path(kubeconfig_path).exists():
|
||||||
|
LOGGER.debug("Detected kubeconfig")
|
||||||
|
kubeconfig_local_name = f"k8s-{gethostname()}"
|
||||||
|
if not KubernetesServiceConnection.objects.filter(
|
||||||
|
name=kubeconfig_local_name
|
||||||
|
).exists():
|
||||||
|
LOGGER.debug("Creating kubeconfig Service Connection")
|
||||||
|
with open(kubeconfig_path, "r") as _kubeconfig:
|
||||||
|
KubernetesServiceConnection.objects.create(
|
||||||
|
name=kubeconfig_local_name,
|
||||||
|
kubeconfig=yaml.safe_load(_kubeconfig),
|
||||||
|
)
|
||||||
|
unix_socket_path = urlparse(DEFAULT_UNIX_SOCKET).path
|
||||||
|
socket = Path(unix_socket_path)
|
||||||
|
if socket.exists() and access(socket, R_OK):
|
||||||
|
LOGGER.debug("Detected local docker socket")
|
||||||
|
if not DockerServiceConnection.objects.filter(local=True).exists():
|
||||||
|
LOGGER.debug("Created Service Connection for docker")
|
||||||
|
DockerServiceConnection.objects.create(
|
||||||
|
name="Local Docker connection",
|
||||||
|
local=True,
|
||||||
|
url=unix_socket_path,
|
||||||
|
tls=True,
|
||||||
|
)
|
||||||
|
|||||||
@ -5,11 +5,11 @@ from structlog import get_logger
|
|||||||
from structlog.testing import capture_logs
|
from structlog.testing import capture_logs
|
||||||
|
|
||||||
from passbook.lib.sentry import SentryIgnoredException
|
from passbook.lib.sentry import SentryIgnoredException
|
||||||
from passbook.outposts.models import Outpost
|
from passbook.outposts.models import Outpost, OutpostServiceConnection
|
||||||
|
|
||||||
|
|
||||||
class ControllerException(SentryIgnoredException):
|
class ControllerException(SentryIgnoredException):
|
||||||
"""Exception raise when anything fails during controller run"""
|
"""Exception raised when anything fails during controller run"""
|
||||||
|
|
||||||
|
|
||||||
class BaseController:
|
class BaseController:
|
||||||
@ -18,9 +18,11 @@ class BaseController:
|
|||||||
deployment_ports: Dict[str, int]
|
deployment_ports: Dict[str, int]
|
||||||
|
|
||||||
outpost: Outpost
|
outpost: Outpost
|
||||||
|
connection: OutpostServiceConnection
|
||||||
|
|
||||||
def __init__(self, outpost: Outpost):
|
def __init__(self, outpost: Outpost, connection: OutpostServiceConnection):
|
||||||
self.outpost = outpost
|
self.outpost = outpost
|
||||||
|
self.connection = connection
|
||||||
self.logger = get_logger()
|
self.logger = get_logger()
|
||||||
self.deployment_ports = {}
|
self.deployment_ports = {}
|
||||||
|
|
||||||
|
|||||||
@ -3,14 +3,18 @@ from time import sleep
|
|||||||
from typing import Dict, Tuple
|
from typing import Dict, Tuple
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from docker import DockerClient, from_env
|
from docker import DockerClient
|
||||||
from docker.errors import DockerException, NotFound
|
from docker.errors import DockerException, NotFound
|
||||||
from docker.models.containers import Container
|
from docker.models.containers import Container
|
||||||
from yaml import safe_dump
|
from yaml import safe_dump
|
||||||
|
|
||||||
from passbook import __version__
|
from passbook import __version__
|
||||||
from passbook.outposts.controllers.base import BaseController, ControllerException
|
from passbook.outposts.controllers.base import BaseController, ControllerException
|
||||||
from passbook.outposts.models import Outpost
|
from passbook.outposts.models import (
|
||||||
|
DockerServiceConnection,
|
||||||
|
Outpost,
|
||||||
|
ServiceConnectionInvalid,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class DockerController(BaseController):
|
class DockerController(BaseController):
|
||||||
@ -19,14 +23,15 @@ class DockerController(BaseController):
|
|||||||
client: DockerClient
|
client: DockerClient
|
||||||
|
|
||||||
container: Container
|
container: Container
|
||||||
|
connection: DockerServiceConnection
|
||||||
|
|
||||||
image_base = "beryju/passbook"
|
image_base = "beryju/passbook"
|
||||||
|
|
||||||
def __init__(self, outpost: Outpost) -> None:
|
def __init__(self, outpost: Outpost, connection: DockerServiceConnection) -> None:
|
||||||
super().__init__(outpost)
|
super().__init__(outpost, connection)
|
||||||
try:
|
try:
|
||||||
self.client = from_env()
|
self.client = connection.client()
|
||||||
except DockerException as exc:
|
except ServiceConnectionInvalid as exc:
|
||||||
raise ControllerException from exc
|
raise ControllerException from exc
|
||||||
|
|
||||||
def _get_labels(self) -> Dict[str, str]:
|
def _get_labels(self) -> Dict[str, str]:
|
||||||
|
|||||||
@ -36,7 +36,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
|
|||||||
|
|
||||||
def __init__(self, controller: "KubernetesController") -> None:
|
def __init__(self, controller: "KubernetesController") -> None:
|
||||||
super().__init__(controller)
|
super().__init__(controller)
|
||||||
self.api = AppsV1Api()
|
self.api = AppsV1Api(controller.client)
|
||||||
self.outpost = self.controller.outpost
|
self.outpost = self.controller.outpost
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
|||||||
@ -23,7 +23,7 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
|
|||||||
|
|
||||||
def __init__(self, controller: "KubernetesController") -> None:
|
def __init__(self, controller: "KubernetesController") -> None:
|
||||||
super().__init__(controller)
|
super().__init__(controller)
|
||||||
self.api = CoreV1Api()
|
self.api = CoreV1Api(controller.client)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def name(self) -> str:
|
def name(self) -> str:
|
||||||
|
|||||||
@ -18,7 +18,7 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
|
|||||||
|
|
||||||
def __init__(self, controller: "KubernetesController") -> None:
|
def __init__(self, controller: "KubernetesController") -> None:
|
||||||
super().__init__(controller)
|
super().__init__(controller)
|
||||||
self.api = CoreV1Api()
|
self.api = CoreV1Api(controller.client)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def name(self) -> str:
|
def name(self) -> str:
|
||||||
|
|||||||
@ -3,8 +3,7 @@ from io import StringIO
|
|||||||
from typing import Dict, List, Type
|
from typing import Dict, List, Type
|
||||||
|
|
||||||
from kubernetes.client import OpenApiException
|
from kubernetes.client import OpenApiException
|
||||||
from kubernetes.config import load_incluster_config, load_kube_config
|
from kubernetes.client.api_client import ApiClient
|
||||||
from kubernetes.config.config_exception import ConfigException
|
|
||||||
from structlog.testing import capture_logs
|
from structlog.testing import capture_logs
|
||||||
from yaml import dump_all
|
from yaml import dump_all
|
||||||
|
|
||||||
@ -13,7 +12,7 @@ from passbook.outposts.controllers.k8s.base import KubernetesObjectReconciler
|
|||||||
from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
|
from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
|
||||||
from passbook.outposts.controllers.k8s.secret import SecretReconciler
|
from passbook.outposts.controllers.k8s.secret import SecretReconciler
|
||||||
from passbook.outposts.controllers.k8s.service import ServiceReconciler
|
from passbook.outposts.controllers.k8s.service import ServiceReconciler
|
||||||
from passbook.outposts.models import Outpost
|
from passbook.outposts.models import KubernetesServiceConnection, Outpost
|
||||||
|
|
||||||
|
|
||||||
class KubernetesController(BaseController):
|
class KubernetesController(BaseController):
|
||||||
@ -22,12 +21,14 @@ class KubernetesController(BaseController):
|
|||||||
reconcilers: Dict[str, Type[KubernetesObjectReconciler]]
|
reconcilers: Dict[str, Type[KubernetesObjectReconciler]]
|
||||||
reconcile_order: List[str]
|
reconcile_order: List[str]
|
||||||
|
|
||||||
def __init__(self, outpost: Outpost) -> None:
|
client: ApiClient
|
||||||
super().__init__(outpost)
|
connection: KubernetesServiceConnection
|
||||||
try:
|
|
||||||
load_incluster_config()
|
def __init__(
|
||||||
except ConfigException:
|
self, outpost: Outpost, connection: KubernetesServiceConnection
|
||||||
load_kube_config()
|
) -> None:
|
||||||
|
super().__init__(outpost, connection)
|
||||||
|
self.client = connection.client()
|
||||||
self.reconcilers = {
|
self.reconcilers = {
|
||||||
"secret": SecretReconciler,
|
"secret": SecretReconciler,
|
||||||
"deployment": DeploymentReconciler,
|
"deployment": DeploymentReconciler,
|
||||||
|
|||||||
@ -4,7 +4,12 @@ from django import forms
|
|||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
from passbook.admin.fields import CodeMirrorWidget, YAMLField
|
from passbook.admin.fields import CodeMirrorWidget, YAMLField
|
||||||
from passbook.outposts.models import Outpost
|
from passbook.outposts.models import (
|
||||||
|
DockerServiceConnection,
|
||||||
|
KubernetesServiceConnection,
|
||||||
|
Outpost,
|
||||||
|
OutpostServiceConnection,
|
||||||
|
)
|
||||||
from passbook.providers.proxy.models import ProxyProvider
|
from passbook.providers.proxy.models import ProxyProvider
|
||||||
|
|
||||||
|
|
||||||
@ -14,6 +19,9 @@ class OutpostForm(forms.ModelForm):
|
|||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
self.fields["providers"].queryset = ProxyProvider.objects.all()
|
self.fields["providers"].queryset = ProxyProvider.objects.all()
|
||||||
|
self.fields[
|
||||||
|
"service_connection"
|
||||||
|
].queryset = OutpostServiceConnection.objects.select_subclasses()
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
|
|
||||||
@ -21,7 +29,7 @@ class OutpostForm(forms.ModelForm):
|
|||||||
fields = [
|
fields = [
|
||||||
"name",
|
"name",
|
||||||
"type",
|
"type",
|
||||||
"deployment_type",
|
"service_connection",
|
||||||
"providers",
|
"providers",
|
||||||
"_config",
|
"_config",
|
||||||
]
|
]
|
||||||
@ -33,3 +41,40 @@ class OutpostForm(forms.ModelForm):
|
|||||||
"_config": YAMLField,
|
"_config": YAMLField,
|
||||||
}
|
}
|
||||||
labels = {"_config": _("Configuration")}
|
labels = {"_config": _("Configuration")}
|
||||||
|
|
||||||
|
|
||||||
|
class DockerServiceConnectionForm(forms.ModelForm):
|
||||||
|
"""Docker service-connection form"""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
|
||||||
|
model = DockerServiceConnection
|
||||||
|
fields = ["name", "local", "url", "tls"]
|
||||||
|
widgets = {
|
||||||
|
"name": forms.TextInput,
|
||||||
|
"url": forms.TextInput,
|
||||||
|
}
|
||||||
|
labels = {
|
||||||
|
"url": _("URL"),
|
||||||
|
"tls": _("TLS"),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class KubernetesServiceConnectionForm(forms.ModelForm):
|
||||||
|
"""Kubernetes service-connection form"""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
|
||||||
|
model = KubernetesServiceConnection
|
||||||
|
fields = [
|
||||||
|
"name",
|
||||||
|
"local",
|
||||||
|
"kubeconfig",
|
||||||
|
]
|
||||||
|
widgets = {
|
||||||
|
"name": forms.TextInput,
|
||||||
|
"kubeconfig": CodeMirrorWidget,
|
||||||
|
}
|
||||||
|
field_classes = {
|
||||||
|
"kubeconfig": YAMLField,
|
||||||
|
}
|
||||||
|
|||||||
@ -6,10 +6,17 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
|||||||
|
|
||||||
|
|
||||||
def fix_missing_token_identifier(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
def fix_missing_token_identifier(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||||
|
User = apps.get_model("passbook_core", "User")
|
||||||
|
Token = apps.get_model("passbook_core", "Token")
|
||||||
from passbook.outposts.models import Outpost
|
from passbook.outposts.models import Outpost
|
||||||
|
|
||||||
for outpost in Outpost.objects.using(schema_editor.connection.alias).all():
|
for outpost in (
|
||||||
token = outpost.token
|
Outpost.objects.using(schema_editor.connection.alias).all().only("pk")
|
||||||
|
):
|
||||||
|
user_identifier = outpost.user_identifier
|
||||||
|
user = User.objects.get(username=user_identifier)
|
||||||
|
tokens = Token.objects.filter(user=user)
|
||||||
|
for token in tokens:
|
||||||
if token.identifier != outpost.token_identifier:
|
if token.identifier != outpost.token_identifier:
|
||||||
token.identifier = outpost.token_identifier
|
token.identifier = outpost.token_identifier
|
||||||
token.save()
|
token.save()
|
||||||
|
|||||||
172
passbook/outposts/migrations/0010_service_connection.py
Normal file
172
passbook/outposts/migrations/0010_service_connection.py
Normal file
@ -0,0 +1,172 @@
|
|||||||
|
# Generated by Django 3.1.3 on 2020-11-04 09:11
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.apps.registry import Apps
|
||||||
|
from django.core.exceptions import FieldError
|
||||||
|
from django.db import migrations, models
|
||||||
|
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||||
|
|
||||||
|
import passbook.lib.models
|
||||||
|
|
||||||
|
|
||||||
|
def migrate_to_service_connection(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||||
|
db_alias = schema_editor.connection.alias
|
||||||
|
Outpost = apps.get_model("passbook_outposts", "Outpost")
|
||||||
|
DockerServiceConnection = apps.get_model(
|
||||||
|
"passbook_outposts", "DockerServiceConnection"
|
||||||
|
)
|
||||||
|
KubernetesServiceConnection = apps.get_model(
|
||||||
|
"passbook_outposts", "KubernetesServiceConnection"
|
||||||
|
)
|
||||||
|
from passbook.outposts.apps import PassbookOutpostConfig
|
||||||
|
|
||||||
|
# Ensure that local connection have been created
|
||||||
|
PassbookOutpostConfig.init_local_connection(None)
|
||||||
|
|
||||||
|
docker = DockerServiceConnection.objects.filter(local=True).first()
|
||||||
|
k8s = KubernetesServiceConnection.objects.filter(local=True).first()
|
||||||
|
|
||||||
|
try:
|
||||||
|
for outpost in (
|
||||||
|
Outpost.objects.using(db_alias).all().exclude(deployment_type="custom")
|
||||||
|
):
|
||||||
|
if outpost.deployment_type == "kubernetes":
|
||||||
|
outpost.service_connection = k8s
|
||||||
|
elif outpost.deployment_type == "docker":
|
||||||
|
outpost.service_connection = docker
|
||||||
|
outpost.save()
|
||||||
|
except FieldError:
|
||||||
|
# This is triggered during e2e tests when this function is called on an already-upgraded
|
||||||
|
# schema
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("passbook_outposts", "0009_fix_missing_token_identifier"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="OutpostServiceConnection",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"uuid",
|
||||||
|
models.UUIDField(
|
||||||
|
default=uuid.uuid4,
|
||||||
|
editable=False,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("name", models.TextField()),
|
||||||
|
(
|
||||||
|
"local",
|
||||||
|
models.BooleanField(
|
||||||
|
default=False,
|
||||||
|
help_text="If enabled, use the local connection. Required Docker socket/Kubernetes Integration",
|
||||||
|
unique=True,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="DockerServiceConnection",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"outpostserviceconnection_ptr",
|
||||||
|
models.OneToOneField(
|
||||||
|
auto_created=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
parent_link=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
to="passbook_outposts.outpostserviceconnection",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("url", models.TextField()),
|
||||||
|
("tls", models.BooleanField()),
|
||||||
|
],
|
||||||
|
bases=("passbook_outposts.outpostserviceconnection",),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="KubernetesServiceConnection",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"outpostserviceconnection_ptr",
|
||||||
|
models.OneToOneField(
|
||||||
|
auto_created=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
parent_link=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
to="passbook_outposts.outpostserviceconnection",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("kubeconfig", models.JSONField()),
|
||||||
|
],
|
||||||
|
bases=("passbook_outposts.outpostserviceconnection",),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="outpost",
|
||||||
|
name="service_connection",
|
||||||
|
field=models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
default=None,
|
||||||
|
help_text="Select Service-Connection passbook should use to manage this outpost. Leave empty if passbook should not handle the deployment.",
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_DEFAULT,
|
||||||
|
to="passbook_outposts.outpostserviceconnection",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.RunPython(migrate_to_service_connection),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="outpost",
|
||||||
|
name="deployment_type",
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name="dockerserviceconnection",
|
||||||
|
options={
|
||||||
|
"verbose_name": "Docker Service-Connection",
|
||||||
|
"verbose_name_plural": "Docker Service-Connections",
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name="kubernetesserviceconnection",
|
||||||
|
options={
|
||||||
|
"verbose_name": "Kubernetes Service-Connection",
|
||||||
|
"verbose_name_plural": "Kubernetes Service-Connections",
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="outpost",
|
||||||
|
name="service_connection",
|
||||||
|
field=passbook.lib.models.InheritanceForeignKey(
|
||||||
|
blank=True,
|
||||||
|
default=None,
|
||||||
|
help_text="Select Service-Connection passbook should use to manage this outpost. Leave empty if passbook should not handle the deployment.",
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_DEFAULT,
|
||||||
|
to="passbook_outposts.outpostserviceconnection",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name="outpostserviceconnection",
|
||||||
|
options={
|
||||||
|
"verbose_name": "Outpost Service-Connection",
|
||||||
|
"verbose_name_plural": "Outpost Service-Connections",
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="kubernetesserviceconnection",
|
||||||
|
name="kubeconfig",
|
||||||
|
field=models.JSONField(
|
||||||
|
default=None,
|
||||||
|
help_text="Paste your kubeconfig here. passbook will automatically use the currently selected context.",
|
||||||
|
),
|
||||||
|
preserve_default=False,
|
||||||
|
),
|
||||||
|
]
|
||||||
@ -1,28 +1,46 @@
|
|||||||
"""Outpost models"""
|
"""Outpost models"""
|
||||||
from dataclasses import asdict, dataclass, field
|
from dataclasses import asdict, dataclass, field
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Dict, Iterable, List, Optional, Union
|
from typing import Dict, Iterable, List, Optional, Type, Union
|
||||||
from uuid import uuid4
|
from uuid import uuid4
|
||||||
|
|
||||||
from dacite import from_dict
|
from dacite import from_dict
|
||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
from django.db import models, transaction
|
from django.db import models, transaction
|
||||||
from django.db.models.base import Model
|
from django.db.models.base import Model
|
||||||
|
from django.forms.models import ModelForm
|
||||||
from django.http import HttpRequest
|
from django.http import HttpRequest
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from docker.client import DockerClient
|
||||||
|
from docker.errors import DockerException
|
||||||
from guardian.models import UserObjectPermission
|
from guardian.models import UserObjectPermission
|
||||||
from guardian.shortcuts import assign_perm
|
from guardian.shortcuts import assign_perm
|
||||||
|
from kubernetes.client import VersionApi, VersionInfo
|
||||||
|
from kubernetes.client.api_client import ApiClient
|
||||||
|
from kubernetes.client.configuration import Configuration
|
||||||
|
from kubernetes.client.exceptions import OpenApiException
|
||||||
|
from kubernetes.config.config_exception import ConfigException
|
||||||
|
from kubernetes.config.incluster_config import load_incluster_config
|
||||||
|
from kubernetes.config.kube_config import load_kube_config_from_dict
|
||||||
|
from model_utils.managers import InheritanceManager
|
||||||
from packaging.version import LegacyVersion, Version, parse
|
from packaging.version import LegacyVersion, Version, parse
|
||||||
|
from urllib3.exceptions import HTTPError
|
||||||
|
|
||||||
from passbook import __version__
|
from passbook import __version__
|
||||||
from passbook.core.models import Provider, Token, TokenIntents, User
|
from passbook.core.models import Provider, Token, TokenIntents, User
|
||||||
from passbook.lib.config import CONFIG
|
from passbook.lib.config import CONFIG
|
||||||
|
from passbook.lib.models import InheritanceForeignKey
|
||||||
|
from passbook.lib.sentry import SentryIgnoredException
|
||||||
from passbook.lib.utils.template import render_to_string
|
from passbook.lib.utils.template import render_to_string
|
||||||
|
|
||||||
OUR_VERSION = parse(__version__)
|
OUR_VERSION = parse(__version__)
|
||||||
OUTPOST_HELLO_INTERVAL = 10
|
OUTPOST_HELLO_INTERVAL = 10
|
||||||
|
|
||||||
|
|
||||||
|
class ServiceConnectionInvalid(SentryIgnoredException):
|
||||||
|
""""Exception raised when a Service Connection has invalid parameters"""
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class OutpostConfig:
|
class OutpostConfig:
|
||||||
"""Configuration an outpost uses to configure it self"""
|
"""Configuration an outpost uses to configure it self"""
|
||||||
@ -60,19 +78,158 @@ class OutpostType(models.TextChoices):
|
|||||||
PROXY = "proxy"
|
PROXY = "proxy"
|
||||||
|
|
||||||
|
|
||||||
class OutpostDeploymentType(models.TextChoices):
|
|
||||||
"""Deployment types that are managed through passbook"""
|
|
||||||
|
|
||||||
KUBERNETES = "kubernetes"
|
|
||||||
DOCKER = "docker"
|
|
||||||
CUSTOM = "custom"
|
|
||||||
|
|
||||||
|
|
||||||
def default_outpost_config():
|
def default_outpost_config():
|
||||||
"""Get default outpost config"""
|
"""Get default outpost config"""
|
||||||
return asdict(OutpostConfig(passbook_host=""))
|
return asdict(OutpostConfig(passbook_host=""))
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class OutpostServiceConnectionState:
|
||||||
|
"""State of an Outpost Service Connection"""
|
||||||
|
|
||||||
|
version: str
|
||||||
|
healthy: bool
|
||||||
|
|
||||||
|
|
||||||
|
class OutpostServiceConnection(models.Model):
|
||||||
|
"""Connection details for an Outpost Controller, like Docker or Kubernetes"""
|
||||||
|
|
||||||
|
uuid = models.UUIDField(default=uuid4, editable=False, primary_key=True)
|
||||||
|
name = models.TextField()
|
||||||
|
|
||||||
|
local = models.BooleanField(
|
||||||
|
default=False,
|
||||||
|
unique=True,
|
||||||
|
help_text=_(
|
||||||
|
(
|
||||||
|
"If enabled, use the local connection. Required Docker "
|
||||||
|
"socket/Kubernetes Integration"
|
||||||
|
)
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
objects = InheritanceManager()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def state(self) -> OutpostServiceConnectionState:
|
||||||
|
"""Get state of service connection"""
|
||||||
|
state_key = f"outpost_service_connection_{self.pk.hex}"
|
||||||
|
state = cache.get(state_key, None)
|
||||||
|
if not state:
|
||||||
|
state = self._get_state()
|
||||||
|
cache.set(state_key, state, timeout=0)
|
||||||
|
return state
|
||||||
|
|
||||||
|
def _get_state(self) -> OutpostServiceConnectionState:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@property
|
||||||
|
def form(self) -> Type[ModelForm]:
|
||||||
|
"""Return Form class used to edit this object"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
|
||||||
|
verbose_name = _("Outpost Service-Connection")
|
||||||
|
verbose_name_plural = _("Outpost Service-Connections")
|
||||||
|
|
||||||
|
|
||||||
|
class DockerServiceConnection(OutpostServiceConnection):
|
||||||
|
"""Service Connection to a Docker endpoint"""
|
||||||
|
|
||||||
|
url = models.TextField()
|
||||||
|
tls = models.BooleanField()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def form(self) -> Type[ModelForm]:
|
||||||
|
from passbook.outposts.forms import DockerServiceConnectionForm
|
||||||
|
|
||||||
|
return DockerServiceConnectionForm
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return f"Docker Service-Connection {self.name}"
|
||||||
|
|
||||||
|
def client(self) -> DockerClient:
|
||||||
|
"""Get DockerClient"""
|
||||||
|
try:
|
||||||
|
client = None
|
||||||
|
if self.local:
|
||||||
|
client = DockerClient.from_env()
|
||||||
|
else:
|
||||||
|
client = DockerClient(
|
||||||
|
base_url=self.url,
|
||||||
|
tls=self.tls,
|
||||||
|
)
|
||||||
|
client.containers.list()
|
||||||
|
except DockerException as exc:
|
||||||
|
raise ServiceConnectionInvalid from exc
|
||||||
|
return client
|
||||||
|
|
||||||
|
def _get_state(self) -> OutpostServiceConnectionState:
|
||||||
|
try:
|
||||||
|
client = self.client()
|
||||||
|
return OutpostServiceConnectionState(
|
||||||
|
version=client.info()["ServerVersion"], healthy=True
|
||||||
|
)
|
||||||
|
except ServiceConnectionInvalid:
|
||||||
|
return OutpostServiceConnectionState(version="", healthy=False)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
|
||||||
|
verbose_name = _("Docker Service-Connection")
|
||||||
|
verbose_name_plural = _("Docker Service-Connections")
|
||||||
|
|
||||||
|
|
||||||
|
class KubernetesServiceConnection(OutpostServiceConnection):
|
||||||
|
"""Service Connection to a Kubernetes cluster"""
|
||||||
|
|
||||||
|
kubeconfig = models.JSONField(
|
||||||
|
help_text=_(
|
||||||
|
(
|
||||||
|
"Paste your kubeconfig here. passbook will automatically use "
|
||||||
|
"the currently selected context."
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def form(self) -> Type[ModelForm]:
|
||||||
|
from passbook.outposts.forms import KubernetesServiceConnectionForm
|
||||||
|
|
||||||
|
return KubernetesServiceConnectionForm
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return f"Kubernetes Service-Connection {self.name}"
|
||||||
|
|
||||||
|
def _get_state(self) -> OutpostServiceConnectionState:
|
||||||
|
try:
|
||||||
|
client = self.client()
|
||||||
|
api_instance = VersionApi(client)
|
||||||
|
version: VersionInfo = api_instance.get_code()
|
||||||
|
return OutpostServiceConnectionState(
|
||||||
|
version=version.git_version, healthy=True
|
||||||
|
)
|
||||||
|
except (OpenApiException, HTTPError):
|
||||||
|
return OutpostServiceConnectionState(version="", healthy=False)
|
||||||
|
|
||||||
|
def client(self) -> ApiClient:
|
||||||
|
"""Get Kubernetes client configured from kubeconfig"""
|
||||||
|
config = Configuration()
|
||||||
|
try:
|
||||||
|
if self.local:
|
||||||
|
load_incluster_config(client_configuration=config)
|
||||||
|
else:
|
||||||
|
load_kube_config_from_dict(self.kubeconfig, client_configuration=config)
|
||||||
|
return ApiClient(config)
|
||||||
|
except ConfigException as exc:
|
||||||
|
raise ServiceConnectionInvalid from exc
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
|
||||||
|
verbose_name = _("Kubernetes Service-Connection")
|
||||||
|
verbose_name_plural = _("Kubernetes Service-Connections")
|
||||||
|
|
||||||
|
|
||||||
class Outpost(models.Model):
|
class Outpost(models.Model):
|
||||||
"""Outpost instance which manages a service user and token"""
|
"""Outpost instance which manages a service user and token"""
|
||||||
|
|
||||||
@ -80,13 +237,20 @@ class Outpost(models.Model):
|
|||||||
name = models.TextField()
|
name = models.TextField()
|
||||||
|
|
||||||
type = models.TextField(choices=OutpostType.choices, default=OutpostType.PROXY)
|
type = models.TextField(choices=OutpostType.choices, default=OutpostType.PROXY)
|
||||||
deployment_type = models.TextField(
|
service_connection = InheritanceForeignKey(
|
||||||
choices=OutpostDeploymentType.choices,
|
OutpostServiceConnection,
|
||||||
default=OutpostDeploymentType.CUSTOM,
|
default=None,
|
||||||
|
null=True,
|
||||||
|
blank=True,
|
||||||
help_text=_(
|
help_text=_(
|
||||||
"Select between passbook-managed deployment types or a custom deployment."
|
(
|
||||||
),
|
"Select Service-Connection passbook should use to manage this outpost. "
|
||||||
|
"Leave empty if passbook should not handle the deployment."
|
||||||
)
|
)
|
||||||
|
),
|
||||||
|
on_delete=models.SET_DEFAULT,
|
||||||
|
)
|
||||||
|
|
||||||
_config = models.JSONField(default=default_outpost_config)
|
_config = models.JSONField(default=default_outpost_config)
|
||||||
|
|
||||||
providers = models.ManyToManyField(Provider)
|
providers = models.ManyToManyField(Provider)
|
||||||
@ -111,12 +275,17 @@ class Outpost(models.Model):
|
|||||||
"""Get outpost's health status"""
|
"""Get outpost's health status"""
|
||||||
return OutpostState.for_outpost(self)
|
return OutpostState.for_outpost(self)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def user_identifier(self):
|
||||||
|
"""Username for service user"""
|
||||||
|
return f"pb-outpost-{self.uuid.hex}"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def user(self) -> User:
|
def user(self) -> User:
|
||||||
"""Get/create user with access to all required objects"""
|
"""Get/create user with access to all required objects"""
|
||||||
users = User.objects.filter(username=f"pb-outpost-{self.uuid.hex}")
|
users = User.objects.filter(username=self.user_identifier)
|
||||||
if not users.exists():
|
if not users.exists():
|
||||||
user: User = User.objects.create(username=f"pb-outpost-{self.uuid.hex}")
|
user: User = User.objects.create(username=self.user_identifier)
|
||||||
user.set_unusable_password()
|
user.set_unusable_password()
|
||||||
user.save()
|
user.save()
|
||||||
else:
|
else:
|
||||||
|
|||||||
@ -7,4 +7,9 @@ CELERY_BEAT_SCHEDULE = {
|
|||||||
"schedule": crontab(minute="*/5"),
|
"schedule": crontab(minute="*/5"),
|
||||||
"options": {"queue": "passbook_scheduled"},
|
"options": {"queue": "passbook_scheduled"},
|
||||||
},
|
},
|
||||||
|
"outposts_service_connection_check": {
|
||||||
|
"task": "passbook.outposts.tasks.outpost_service_connection_monitor",
|
||||||
|
"schedule": crontab(minute=0, hour="*"),
|
||||||
|
"options": {"queue": "passbook_scheduled"},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@ -3,6 +3,7 @@ from typing import Any
|
|||||||
|
|
||||||
from asgiref.sync import async_to_sync
|
from asgiref.sync import async_to_sync
|
||||||
from channels.layers import get_channel_layer
|
from channels.layers import get_channel_layer
|
||||||
|
from django.core.cache import cache
|
||||||
from django.db.models.base import Model
|
from django.db.models.base import Model
|
||||||
from django.utils.text import slugify
|
from django.utils.text import slugify
|
||||||
from structlog import get_logger
|
from structlog import get_logger
|
||||||
@ -11,9 +12,11 @@ from passbook.lib.tasks import MonitoredTask, TaskResult, TaskResultStatus
|
|||||||
from passbook.lib.utils.reflection import path_to_class
|
from passbook.lib.utils.reflection import path_to_class
|
||||||
from passbook.outposts.controllers.base import ControllerException
|
from passbook.outposts.controllers.base import ControllerException
|
||||||
from passbook.outposts.models import (
|
from passbook.outposts.models import (
|
||||||
|
DockerServiceConnection,
|
||||||
|
KubernetesServiceConnection,
|
||||||
Outpost,
|
Outpost,
|
||||||
OutpostDeploymentType,
|
|
||||||
OutpostModel,
|
OutpostModel,
|
||||||
|
OutpostServiceConnection,
|
||||||
OutpostState,
|
OutpostState,
|
||||||
OutpostType,
|
OutpostType,
|
||||||
)
|
)
|
||||||
@ -27,12 +30,29 @@ LOGGER = get_logger()
|
|||||||
@CELERY_APP.task()
|
@CELERY_APP.task()
|
||||||
def outpost_controller_all():
|
def outpost_controller_all():
|
||||||
"""Launch Controller for all Outposts which support it"""
|
"""Launch Controller for all Outposts which support it"""
|
||||||
for outpost in Outpost.objects.exclude(
|
for outpost in Outpost.objects.exclude(service_connection=None):
|
||||||
deployment_type=OutpostDeploymentType.CUSTOM
|
|
||||||
):
|
|
||||||
outpost_controller.delay(outpost.pk.hex)
|
outpost_controller.delay(outpost.pk.hex)
|
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task()
|
||||||
|
def outpost_service_connection_state(state_pk: Any):
|
||||||
|
"""Update cached state of a service connection"""
|
||||||
|
connection: OutpostServiceConnection = (
|
||||||
|
OutpostServiceConnection.objects.filter(pk=state_pk).select_subclasses().first()
|
||||||
|
)
|
||||||
|
cache.delete(f"outpost_service_connection_{connection.pk.hex}")
|
||||||
|
_ = connection.state
|
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||||
|
def outpost_service_connection_monitor(self: MonitoredTask):
|
||||||
|
"""Regularly check the state of Outpost Service Connections"""
|
||||||
|
for connection in OutpostServiceConnection.objects.select_subclasses():
|
||||||
|
cache.delete(f"outpost_service_connection_{connection.pk.hex}")
|
||||||
|
_ = connection.state
|
||||||
|
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||||
def outpost_controller(self: MonitoredTask, outpost_pk: str):
|
def outpost_controller(self: MonitoredTask, outpost_pk: str):
|
||||||
"""Create/update/monitor the deployment of an Outpost"""
|
"""Create/update/monitor the deployment of an Outpost"""
|
||||||
@ -41,10 +61,13 @@ def outpost_controller(self: MonitoredTask, outpost_pk: str):
|
|||||||
self.set_uid(slugify(outpost.name))
|
self.set_uid(slugify(outpost.name))
|
||||||
try:
|
try:
|
||||||
if outpost.type == OutpostType.PROXY:
|
if outpost.type == OutpostType.PROXY:
|
||||||
if outpost.deployment_type == OutpostDeploymentType.KUBERNETES:
|
service_connection = outpost.service_connection
|
||||||
logs = ProxyKubernetesController(outpost).up_with_logs()
|
if isinstance(service_connection, DockerServiceConnection):
|
||||||
if outpost.deployment_type == OutpostDeploymentType.DOCKER:
|
logs = ProxyDockerController(outpost, service_connection).up_with_logs()
|
||||||
logs = ProxyDockerController(outpost).up_with_logs()
|
if isinstance(service_connection, KubernetesServiceConnection):
|
||||||
|
logs = ProxyKubernetesController(
|
||||||
|
outpost, service_connection
|
||||||
|
).up_with_logs()
|
||||||
except ControllerException as exc:
|
except ControllerException as exc:
|
||||||
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
|
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
|
||||||
else:
|
else:
|
||||||
@ -56,10 +79,11 @@ def outpost_pre_delete(outpost_pk: str):
|
|||||||
"""Delete outpost objects before deleting the DB Object"""
|
"""Delete outpost objects before deleting the DB Object"""
|
||||||
outpost = Outpost.objects.get(pk=outpost_pk)
|
outpost = Outpost.objects.get(pk=outpost_pk)
|
||||||
if outpost.type == OutpostType.PROXY:
|
if outpost.type == OutpostType.PROXY:
|
||||||
if outpost.deployment_type == OutpostDeploymentType.KUBERNETES:
|
service_connection = outpost.service_connection
|
||||||
ProxyKubernetesController(outpost).down()
|
if isinstance(service_connection, DockerServiceConnection):
|
||||||
if outpost.deployment_type == OutpostDeploymentType.DOCKER:
|
ProxyDockerController(outpost, service_connection).down()
|
||||||
ProxyDockerController(outpost).down()
|
if isinstance(service_connection, KubernetesServiceConnection):
|
||||||
|
ProxyKubernetesController(outpost, service_connection).down()
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task()
|
@CELERY_APP.task()
|
||||||
@ -89,6 +113,10 @@ def outpost_post_save(model_class: str, model_pk: Any):
|
|||||||
outpost_send_update(instance)
|
outpost_send_update(instance)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
if isinstance(instance, OutpostServiceConnection):
|
||||||
|
LOGGER.debug("triggering ServiceConnection state update", instance=instance)
|
||||||
|
outpost_service_connection_state.delay(instance.pk)
|
||||||
|
|
||||||
for field in instance._meta.get_fields():
|
for field in instance._meta.get_fields():
|
||||||
# Each field is checked if it has a `related_model` attribute (when ForeginKeys or M2Ms)
|
# Each field is checked if it has a `related_model` attribute (when ForeginKeys or M2Ms)
|
||||||
# are used, and if it has a value
|
# are used, and if it has a value
|
||||||
@ -123,6 +151,9 @@ def outpost_send_update(model_instace: Model):
|
|||||||
|
|
||||||
def _outpost_single_update(outpost: Outpost, layer=None):
|
def _outpost_single_update(outpost: Outpost, layer=None):
|
||||||
"""Update outpost instances connected to a single outpost"""
|
"""Update outpost instances connected to a single outpost"""
|
||||||
|
# Ensure token again, because this function is called when anything related to an
|
||||||
|
# OutpostModel is saved, so we can be sure permissions are right
|
||||||
|
_ = outpost.token
|
||||||
if not layer: # pragma: no cover
|
if not layer: # pragma: no cover
|
||||||
layer = get_channel_layer()
|
layer = get_channel_layer()
|
||||||
for state in OutpostState.for_outpost(outpost):
|
for state in OutpostState.for_outpost(outpost):
|
||||||
|
|||||||
@ -12,7 +12,7 @@
|
|||||||
<h1 class="pf-c-title pf-m-2xl" id="modal-title">{% trans 'Outpost Deployment Info' %}</h1>
|
<h1 class="pf-c-title pf-m-2xl" id="modal-title">{% trans 'Outpost Deployment Info' %}</h1>
|
||||||
</div>
|
</div>
|
||||||
<div class="pf-c-modal-box__body" id="modal-description">
|
<div class="pf-c-modal-box__body" id="modal-description">
|
||||||
<p><a href="https://passbook.beryju.org/outposts/outposts/#deploy">{% trans 'View deployment documentation' %}</a></p>
|
<p><a href="https://passbook.beryju.org/docs/outposts/outposts/#deploy">{% trans 'View deployment documentation' %}</a></p>
|
||||||
<form class="pf-c-form">
|
<form class="pf-c-form">
|
||||||
<div class="pf-c-form__group">
|
<div class="pf-c-form__group">
|
||||||
<label class="pf-c-form__label" for="help-text-simple-form-name">
|
<label class="pf-c-form__label" for="help-text-simple-form-name">
|
||||||
|
|||||||
@ -11,7 +11,7 @@ from passbook.flows.models import Flow
|
|||||||
from passbook.outposts.controllers.k8s.base import NeedsUpdate
|
from passbook.outposts.controllers.k8s.base import NeedsUpdate
|
||||||
from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
|
from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
|
||||||
from passbook.outposts.controllers.kubernetes import KubernetesController
|
from passbook.outposts.controllers.kubernetes import KubernetesController
|
||||||
from passbook.outposts.models import Outpost, OutpostDeploymentType, OutpostType
|
from passbook.outposts.models import KubernetesServiceConnection, Outpost, OutpostType
|
||||||
from passbook.providers.proxy.models import ProxyProvider
|
from passbook.providers.proxy.models import ProxyProvider
|
||||||
|
|
||||||
|
|
||||||
@ -29,7 +29,6 @@ class OutpostTests(TestCase):
|
|||||||
outpost: Outpost = Outpost.objects.create(
|
outpost: Outpost = Outpost.objects.create(
|
||||||
name="test",
|
name="test",
|
||||||
type=OutpostType.PROXY,
|
type=OutpostType.PROXY,
|
||||||
deployment_type=OutpostDeploymentType.CUSTOM,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Before we add a provider, the user should only have access to the outpost
|
# Before we add a provider, the user should only have access to the outpost
|
||||||
@ -79,17 +78,18 @@ class OutpostKubernetesTests(TestCase):
|
|||||||
external_host="http://localhost",
|
external_host="http://localhost",
|
||||||
authorization_flow=Flow.objects.first(),
|
authorization_flow=Flow.objects.first(),
|
||||||
)
|
)
|
||||||
|
self.service_connection = KubernetesServiceConnection.objects.first()
|
||||||
self.outpost: Outpost = Outpost.objects.create(
|
self.outpost: Outpost = Outpost.objects.create(
|
||||||
name="test",
|
name="test",
|
||||||
type=OutpostType.PROXY,
|
type=OutpostType.PROXY,
|
||||||
deployment_type=OutpostDeploymentType.KUBERNETES,
|
service_connection=self.service_connection,
|
||||||
)
|
)
|
||||||
self.outpost.providers.add(self.provider)
|
self.outpost.providers.add(self.provider)
|
||||||
self.outpost.save()
|
self.outpost.save()
|
||||||
|
|
||||||
def test_deployment_reconciler(self):
|
def test_deployment_reconciler(self):
|
||||||
"""test that deployment requires update"""
|
"""test that deployment requires update"""
|
||||||
controller = KubernetesController(self.outpost)
|
controller = KubernetesController(self.outpost, self.service_connection)
|
||||||
deployment_reconciler = DeploymentReconciler(controller)
|
deployment_reconciler = DeploymentReconciler(controller)
|
||||||
|
|
||||||
self.assertIsNotNone(deployment_reconciler.retrieve())
|
self.assertIsNotNone(deployment_reconciler.retrieve())
|
||||||
|
|||||||
@ -12,7 +12,12 @@ from structlog import get_logger
|
|||||||
|
|
||||||
from passbook.core.models import User
|
from passbook.core.models import User
|
||||||
from passbook.outposts.controllers.docker import DockerController
|
from passbook.outposts.controllers.docker import DockerController
|
||||||
from passbook.outposts.models import Outpost, OutpostType
|
from passbook.outposts.models import (
|
||||||
|
DockerServiceConnection,
|
||||||
|
KubernetesServiceConnection,
|
||||||
|
Outpost,
|
||||||
|
OutpostType,
|
||||||
|
)
|
||||||
from passbook.providers.proxy.controllers.kubernetes import ProxyKubernetesController
|
from passbook.providers.proxy.controllers.kubernetes import ProxyKubernetesController
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
@ -35,7 +40,7 @@ class DockerComposeView(LoginRequiredMixin, View):
|
|||||||
)
|
)
|
||||||
manifest = ""
|
manifest = ""
|
||||||
if outpost.type == OutpostType.PROXY:
|
if outpost.type == OutpostType.PROXY:
|
||||||
controller = DockerController(outpost)
|
controller = DockerController(outpost, DockerServiceConnection())
|
||||||
manifest = controller.get_static_deployment()
|
manifest = controller.get_static_deployment()
|
||||||
|
|
||||||
return HttpResponse(manifest, content_type="text/vnd.yaml")
|
return HttpResponse(manifest, content_type="text/vnd.yaml")
|
||||||
@ -53,7 +58,9 @@ class KubernetesManifestView(LoginRequiredMixin, View):
|
|||||||
)
|
)
|
||||||
manifest = ""
|
manifest = ""
|
||||||
if outpost.type == OutpostType.PROXY:
|
if outpost.type == OutpostType.PROXY:
|
||||||
controller = ProxyKubernetesController(outpost)
|
controller = ProxyKubernetesController(
|
||||||
|
outpost, KubernetesServiceConnection()
|
||||||
|
)
|
||||||
manifest = controller.get_static_deployment()
|
manifest = controller.get_static_deployment()
|
||||||
|
|
||||||
return HttpResponse(manifest, content_type="text/vnd.yaml")
|
return HttpResponse(manifest, content_type="text/vnd.yaml")
|
||||||
|
|||||||
@ -12,5 +12,4 @@ class PassbookPoliciesConfig(AppConfig):
|
|||||||
verbose_name = "passbook Policies"
|
verbose_name = "passbook Policies"
|
||||||
|
|
||||||
def ready(self):
|
def ready(self):
|
||||||
"""Load policy cache clearing signals"""
|
|
||||||
import_module("passbook.policies.signals")
|
import_module("passbook.policies.signals")
|
||||||
|
|||||||
@ -32,7 +32,7 @@ class PolicyEvaluator(BaseEvaluator):
|
|||||||
|
|
||||||
def set_policy_request(self, request: PolicyRequest):
|
def set_policy_request(self, request: PolicyRequest):
|
||||||
"""Update context based on policy request (if http request is given, update that too)"""
|
"""Update context based on policy request (if http request is given, update that too)"""
|
||||||
# update docs/policies/expression/index.md
|
# update website/docs/policies/expression.md
|
||||||
self._context["pb_is_sso_flow"] = request.context.get(PLAN_CONTEXT_SSO, False)
|
self._context["pb_is_sso_flow"] = request.context.get(PLAN_CONTEXT_SSO, False)
|
||||||
if request.http_request:
|
if request.http_request:
|
||||||
self.set_http_request(request.http_request)
|
self.set_http_request(request.http_request)
|
||||||
@ -41,7 +41,7 @@ class PolicyEvaluator(BaseEvaluator):
|
|||||||
|
|
||||||
def set_http_request(self, request: HttpRequest):
|
def set_http_request(self, request: HttpRequest):
|
||||||
"""Update context based on http request"""
|
"""Update context based on http request"""
|
||||||
# update docs/policies/expression/index.md
|
# update website/docs/policies/expression.md
|
||||||
self._context["pb_client_ip"] = ip_address(
|
self._context["pb_client_ip"] = ip_address(
|
||||||
get_client_ip(request) or "255.255.255.255"
|
get_client_ip(request) or "255.255.255.255"
|
||||||
)
|
)
|
||||||
|
|||||||
@ -108,11 +108,10 @@ class ProxyOutpostConfigSerializer(ModelSerializer):
|
|||||||
@swagger_serializer_method(serializer_or_field=OpenIDConnectConfigurationSerializer)
|
@swagger_serializer_method(serializer_or_field=OpenIDConnectConfigurationSerializer)
|
||||||
def get_oidc_configuration(self, obj: ProxyProvider):
|
def get_oidc_configuration(self, obj: ProxyProvider):
|
||||||
"""Embed OpenID Connect provider information"""
|
"""Embed OpenID Connect provider information"""
|
||||||
# pylint: disable=protected-access
|
|
||||||
return ProviderInfoView(request=self.context["request"]._request).get_info(obj)
|
return ProviderInfoView(request=self.context["request"]._request).get_info(obj)
|
||||||
|
|
||||||
|
|
||||||
class OutpostConfigViewSet(ModelViewSet):
|
class ProxyOutpostConfigViewSet(ModelViewSet):
|
||||||
"""ProxyProvider Viewset"""
|
"""ProxyProvider Viewset"""
|
||||||
|
|
||||||
queryset = ProxyProvider.objects.filter(application__isnull=False)
|
queryset = ProxyProvider.objects.filter(application__isnull=False)
|
||||||
|
|||||||
@ -3,15 +3,15 @@ from typing import Dict
|
|||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
from passbook.outposts.controllers.docker import DockerController
|
from passbook.outposts.controllers.docker import DockerController
|
||||||
from passbook.outposts.models import Outpost
|
from passbook.outposts.models import DockerServiceConnection, Outpost
|
||||||
from passbook.providers.proxy.models import ProxyProvider
|
from passbook.providers.proxy.models import ProxyProvider
|
||||||
|
|
||||||
|
|
||||||
class ProxyDockerController(DockerController):
|
class ProxyDockerController(DockerController):
|
||||||
"""Proxy Provider Docker Contoller"""
|
"""Proxy Provider Docker Contoller"""
|
||||||
|
|
||||||
def __init__(self, outpost: Outpost):
|
def __init__(self, outpost: Outpost, connection: DockerServiceConnection):
|
||||||
super().__init__(outpost)
|
super().__init__(outpost, connection)
|
||||||
self.deployment_ports = {
|
self.deployment_ports = {
|
||||||
"http": 4180,
|
"http": 4180,
|
||||||
"https": 4443,
|
"https": 4443,
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
"""Kubernetes Ingress Reconciler"""
|
"""Kubernetes Ingress Reconciler"""
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING, Dict
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
from kubernetes.client import (
|
from kubernetes.client import (
|
||||||
@ -30,7 +30,7 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
|
|||||||
|
|
||||||
def __init__(self, controller: "KubernetesController") -> None:
|
def __init__(self, controller: "KubernetesController") -> None:
|
||||||
super().__init__(controller)
|
super().__init__(controller)
|
||||||
self.api = NetworkingV1beta1Api()
|
self.api = NetworkingV1beta1Api(controller.client)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def name(self) -> str:
|
def name(self) -> str:
|
||||||
@ -67,11 +67,24 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
|
|||||||
if have_hosts_tls != expected_hosts_tls:
|
if have_hosts_tls != expected_hosts_tls:
|
||||||
raise NeedsUpdate()
|
raise NeedsUpdate()
|
||||||
|
|
||||||
|
def get_ingress_annotations(self) -> Dict[str, str]:
|
||||||
|
"""Get ingress annotations"""
|
||||||
|
annotations = {
|
||||||
|
# Ensure that with multiple proxy replicas deployed, the same CSRF request
|
||||||
|
# goes to the same pod
|
||||||
|
"nginx.ingress.kubernetes.io/affinity": "cookie",
|
||||||
|
"traefik.ingress.kubernetes.io/affinity": "true",
|
||||||
|
}
|
||||||
|
annotations.update(
|
||||||
|
self.controller.outpost.config.kubernetes_ingress_annotations
|
||||||
|
)
|
||||||
|
return dict()
|
||||||
|
|
||||||
def get_reference_object(self) -> NetworkingV1beta1Ingress:
|
def get_reference_object(self) -> NetworkingV1beta1Ingress:
|
||||||
"""Get deployment object for outpost"""
|
"""Get deployment object for outpost"""
|
||||||
meta = self.get_object_meta(
|
meta = self.get_object_meta(
|
||||||
name=self.name,
|
name=self.name,
|
||||||
annotations=self.controller.outpost.config.kubernetes_ingress_annotations,
|
annotations=self.get_ingress_annotations(),
|
||||||
)
|
)
|
||||||
rules = []
|
rules = []
|
||||||
tls_hosts = []
|
tls_hosts = []
|
||||||
|
|||||||
@ -1,14 +1,14 @@
|
|||||||
"""Proxy Provider Kubernetes Contoller"""
|
"""Proxy Provider Kubernetes Contoller"""
|
||||||
from passbook.outposts.controllers.kubernetes import KubernetesController
|
from passbook.outposts.controllers.kubernetes import KubernetesController
|
||||||
from passbook.outposts.models import Outpost
|
from passbook.outposts.models import KubernetesServiceConnection, Outpost
|
||||||
from passbook.providers.proxy.controllers.k8s.ingress import IngressReconciler
|
from passbook.providers.proxy.controllers.k8s.ingress import IngressReconciler
|
||||||
|
|
||||||
|
|
||||||
class ProxyKubernetesController(KubernetesController):
|
class ProxyKubernetesController(KubernetesController):
|
||||||
"""Proxy Provider Kubernetes Contoller"""
|
"""Proxy Provider Kubernetes Contoller"""
|
||||||
|
|
||||||
def __init__(self, outpost: Outpost):
|
def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection):
|
||||||
super().__init__(outpost)
|
super().__init__(outpost, connection)
|
||||||
self.deployment_ports = {
|
self.deployment_ports = {
|
||||||
"http": 4180,
|
"http": 4180,
|
||||||
"https": 4443,
|
"https": 4443,
|
||||||
|
|||||||
@ -6,7 +6,7 @@ import yaml
|
|||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
|
|
||||||
from passbook.flows.models import Flow
|
from passbook.flows.models import Flow
|
||||||
from passbook.outposts.models import Outpost, OutpostDeploymentType, OutpostType
|
from passbook.outposts.models import KubernetesServiceConnection, Outpost, OutpostType
|
||||||
from passbook.providers.proxy.controllers.kubernetes import ProxyKubernetesController
|
from passbook.providers.proxy.controllers.kubernetes import ProxyKubernetesController
|
||||||
from passbook.providers.proxy.models import ProxyProvider
|
from passbook.providers.proxy.models import ProxyProvider
|
||||||
|
|
||||||
@ -23,15 +23,16 @@ class TestControllers(TestCase):
|
|||||||
external_host="http://localhost",
|
external_host="http://localhost",
|
||||||
authorization_flow=Flow.objects.first(),
|
authorization_flow=Flow.objects.first(),
|
||||||
)
|
)
|
||||||
|
service_connection = KubernetesServiceConnection.objects.first()
|
||||||
outpost: Outpost = Outpost.objects.create(
|
outpost: Outpost = Outpost.objects.create(
|
||||||
name="test",
|
name="test",
|
||||||
type=OutpostType.PROXY,
|
type=OutpostType.PROXY,
|
||||||
deployment_type=OutpostDeploymentType.KUBERNETES,
|
service_connection=service_connection,
|
||||||
)
|
)
|
||||||
outpost.providers.add(provider)
|
outpost.providers.add(provider)
|
||||||
outpost.save()
|
outpost.save()
|
||||||
|
|
||||||
controller = ProxyKubernetesController(outpost)
|
controller = ProxyKubernetesController(outpost, service_connection)
|
||||||
manifest = controller.get_static_deployment()
|
manifest = controller.get_static_deployment()
|
||||||
self.assertEqual(len(list(yaml.load_all(manifest, Loader=yaml.SafeLoader))), 4)
|
self.assertEqual(len(list(yaml.load_all(manifest, Loader=yaml.SafeLoader))), 4)
|
||||||
|
|
||||||
@ -43,14 +44,15 @@ class TestControllers(TestCase):
|
|||||||
external_host="http://localhost",
|
external_host="http://localhost",
|
||||||
authorization_flow=Flow.objects.first(),
|
authorization_flow=Flow.objects.first(),
|
||||||
)
|
)
|
||||||
|
service_connection = KubernetesServiceConnection.objects.first()
|
||||||
outpost: Outpost = Outpost.objects.create(
|
outpost: Outpost = Outpost.objects.create(
|
||||||
name="test",
|
name="test",
|
||||||
type=OutpostType.PROXY,
|
type=OutpostType.PROXY,
|
||||||
deployment_type=OutpostDeploymentType.KUBERNETES,
|
service_connection=service_connection,
|
||||||
)
|
)
|
||||||
outpost.providers.add(provider)
|
outpost.providers.add(provider)
|
||||||
outpost.save()
|
outpost.save()
|
||||||
|
|
||||||
controller = ProxyKubernetesController(outpost)
|
controller = ProxyKubernetesController(outpost, service_connection)
|
||||||
controller.up()
|
controller.up()
|
||||||
controller.down()
|
controller.down()
|
||||||
|
|||||||
@ -24,7 +24,7 @@ class SAMLProviderSerializer(ModelSerializer):
|
|||||||
"digest_algorithm",
|
"digest_algorithm",
|
||||||
"signature_algorithm",
|
"signature_algorithm",
|
||||||
"signing_kp",
|
"signing_kp",
|
||||||
"require_signing",
|
"verification_kp",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -7,6 +7,7 @@ from django.utils.translation import gettext as _
|
|||||||
|
|
||||||
from passbook.admin.fields import CodeMirrorWidget
|
from passbook.admin.fields import CodeMirrorWidget
|
||||||
from passbook.core.expression import PropertyMappingEvaluator
|
from passbook.core.expression import PropertyMappingEvaluator
|
||||||
|
from passbook.crypto.models import CertificateKeyPair
|
||||||
from passbook.flows.models import Flow, FlowDesignation
|
from passbook.flows.models import Flow, FlowDesignation
|
||||||
from passbook.providers.saml.models import SAMLPropertyMapping, SAMLProvider
|
from passbook.providers.saml.models import SAMLPropertyMapping, SAMLProvider
|
||||||
|
|
||||||
@ -20,6 +21,9 @@ class SAMLProviderForm(forms.ModelForm):
|
|||||||
designation=FlowDesignation.AUTHORIZATION
|
designation=FlowDesignation.AUTHORIZATION
|
||||||
)
|
)
|
||||||
self.fields["property_mappings"].queryset = SAMLPropertyMapping.objects.all()
|
self.fields["property_mappings"].queryset = SAMLPropertyMapping.objects.all()
|
||||||
|
self.fields["signing_kp"].queryset = CertificateKeyPair.objects.exclude(
|
||||||
|
key_data__iexact=""
|
||||||
|
)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
|
|
||||||
@ -34,11 +38,11 @@ class SAMLProviderForm(forms.ModelForm):
|
|||||||
"assertion_valid_not_before",
|
"assertion_valid_not_before",
|
||||||
"assertion_valid_not_on_or_after",
|
"assertion_valid_not_on_or_after",
|
||||||
"session_valid_not_on_or_after",
|
"session_valid_not_on_or_after",
|
||||||
"property_mappings",
|
|
||||||
"digest_algorithm",
|
"digest_algorithm",
|
||||||
"require_signing",
|
|
||||||
"signature_algorithm",
|
"signature_algorithm",
|
||||||
"signing_kp",
|
"signing_kp",
|
||||||
|
"verification_kp",
|
||||||
|
"property_mappings",
|
||||||
]
|
]
|
||||||
widgets = {
|
widgets = {
|
||||||
"name": forms.TextInput(),
|
"name": forms.TextInput(),
|
||||||
|
|||||||
@ -0,0 +1,28 @@
|
|||||||
|
# Generated by Django 3.1.3 on 2020-11-08 21:22
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("passbook_crypto", "0002_create_self_signed_kp"),
|
||||||
|
("passbook_providers_saml", "0006_remove_samlprovider_name"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="samlprovider",
|
||||||
|
name="verification_kp",
|
||||||
|
field=models.ForeignKey(
|
||||||
|
default=None,
|
||||||
|
help_text="If selected, incoming assertion's Signatures will be validated.",
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
|
related_name="+",
|
||||||
|
to="passbook_crypto.certificatekeypair",
|
||||||
|
verbose_name="Verification Keypair",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
@ -0,0 +1,71 @@
|
|||||||
|
# Generated by Django 3.1.3 on 2020-11-12 10:36
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("passbook_crypto", "0002_create_self_signed_kp"),
|
||||||
|
("passbook_providers_saml", "0007_samlprovider_verification_kp"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="samlprovider",
|
||||||
|
name="require_signing",
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="samlprovider",
|
||||||
|
name="audience",
|
||||||
|
field=models.TextField(
|
||||||
|
default="",
|
||||||
|
help_text="Value of the audience restriction field of the asseration.",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="samlprovider",
|
||||||
|
name="issuer",
|
||||||
|
field=models.TextField(
|
||||||
|
default="passbook", help_text="Also known as EntityID"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="samlprovider",
|
||||||
|
name="signing_kp",
|
||||||
|
field=models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
default=None,
|
||||||
|
help_text="Keypair used to sign outgoing Responses going to the Service Provider.",
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
|
to="passbook_crypto.certificatekeypair",
|
||||||
|
verbose_name="Signing Keypair",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="samlprovider",
|
||||||
|
name="sp_binding",
|
||||||
|
field=models.TextField(
|
||||||
|
choices=[("redirect", "Redirect"), ("post", "Post")],
|
||||||
|
default="redirect",
|
||||||
|
help_text="This determines how passbook sends the response back to the Service Provider.",
|
||||||
|
verbose_name="Service Provider Binding",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="samlprovider",
|
||||||
|
name="verification_kp",
|
||||||
|
field=models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
default=None,
|
||||||
|
help_text="When selected, incoming assertion's Signatures will be validated against this certificate. To allow unsigned Requests, leave on default.",
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
|
related_name="+",
|
||||||
|
to="passbook_crypto.certificatekeypair",
|
||||||
|
verbose_name="Verification Certificate",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
@ -0,0 +1,69 @@
|
|||||||
|
# Generated by Django 3.1.3 on 2020-11-12 20:16
|
||||||
|
|
||||||
|
from django.apps.registry import Apps
|
||||||
|
from django.db import migrations, models
|
||||||
|
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||||
|
|
||||||
|
from passbook.sources.saml.processors import constants
|
||||||
|
|
||||||
|
|
||||||
|
def update_algorithms(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||||
|
SAMLProvider = apps.get_model("passbook_providers_saml", "SAMLProvider")
|
||||||
|
signature_translation_map = {
|
||||||
|
"rsa-sha1": constants.RSA_SHA1,
|
||||||
|
"rsa-sha256": constants.RSA_SHA256,
|
||||||
|
"ecdsa-sha256": constants.RSA_SHA256,
|
||||||
|
"dsa-sha1": constants.DSA_SHA1,
|
||||||
|
}
|
||||||
|
digest_translation_map = {
|
||||||
|
"sha1": constants.SHA1,
|
||||||
|
"sha256": constants.SHA256,
|
||||||
|
}
|
||||||
|
|
||||||
|
for source in SAMLProvider.objects.all():
|
||||||
|
source.signature_algorithm = signature_translation_map.get(
|
||||||
|
source.signature_algorithm, constants.RSA_SHA256
|
||||||
|
)
|
||||||
|
source.digest_algorithm = digest_translation_map.get(
|
||||||
|
source.digest_algorithm, constants.SHA256
|
||||||
|
)
|
||||||
|
source.save()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("passbook_providers_saml", "0008_auto_20201112_1036"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="samlprovider",
|
||||||
|
name="digest_algorithm",
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
(constants.SHA1, "SHA1"),
|
||||||
|
(constants.SHA256, "SHA256"),
|
||||||
|
(constants.SHA384, "SHA384"),
|
||||||
|
(constants.SHA512, "SHA512"),
|
||||||
|
],
|
||||||
|
default=constants.SHA256,
|
||||||
|
max_length=50,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="samlprovider",
|
||||||
|
name="signature_algorithm",
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
(constants.RSA_SHA1, "RSA-SHA1"),
|
||||||
|
(constants.RSA_SHA256, "RSA-SHA256"),
|
||||||
|
(constants.RSA_SHA384, "RSA-SHA384"),
|
||||||
|
(constants.RSA_SHA512, "RSA-SHA512"),
|
||||||
|
(constants.DSA_SHA1, "DSA-SHA1"),
|
||||||
|
],
|
||||||
|
default=constants.RSA_SHA256,
|
||||||
|
max_length=50,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
@ -13,6 +13,17 @@ from passbook.core.models import PropertyMapping, Provider
|
|||||||
from passbook.crypto.models import CertificateKeyPair
|
from passbook.crypto.models import CertificateKeyPair
|
||||||
from passbook.lib.utils.template import render_to_string
|
from passbook.lib.utils.template import render_to_string
|
||||||
from passbook.lib.utils.time import timedelta_string_validator
|
from passbook.lib.utils.time import timedelta_string_validator
|
||||||
|
from passbook.sources.saml.processors.constants import (
|
||||||
|
DSA_SHA1,
|
||||||
|
RSA_SHA1,
|
||||||
|
RSA_SHA256,
|
||||||
|
RSA_SHA384,
|
||||||
|
RSA_SHA512,
|
||||||
|
SHA1,
|
||||||
|
SHA256,
|
||||||
|
SHA384,
|
||||||
|
SHA512,
|
||||||
|
)
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
|
|
||||||
@ -28,12 +39,21 @@ class SAMLProvider(Provider):
|
|||||||
"""SAML 2.0 Endpoint for applications which support SAML."""
|
"""SAML 2.0 Endpoint for applications which support SAML."""
|
||||||
|
|
||||||
acs_url = models.URLField(verbose_name=_("ACS URL"))
|
acs_url = models.URLField(verbose_name=_("ACS URL"))
|
||||||
audience = models.TextField(default="")
|
audience = models.TextField(
|
||||||
issuer = models.TextField(help_text=_("Also known as EntityID"))
|
default="",
|
||||||
|
help_text=_("Value of the audience restriction field of the asseration."),
|
||||||
|
)
|
||||||
|
issuer = models.TextField(help_text=_("Also known as EntityID"), default="passbook")
|
||||||
sp_binding = models.TextField(
|
sp_binding = models.TextField(
|
||||||
choices=SAMLBindings.choices,
|
choices=SAMLBindings.choices,
|
||||||
default=SAMLBindings.REDIRECT,
|
default=SAMLBindings.REDIRECT,
|
||||||
verbose_name=_("Service Prodier Binding"),
|
verbose_name=_("Service Provider Binding"),
|
||||||
|
help_text=_(
|
||||||
|
(
|
||||||
|
"This determines how passbook sends the "
|
||||||
|
"response back to the Service Provider."
|
||||||
|
)
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
assertion_valid_not_before = models.TextField(
|
assertion_valid_not_before = models.TextField(
|
||||||
@ -71,39 +91,52 @@ class SAMLProvider(Provider):
|
|||||||
digest_algorithm = models.CharField(
|
digest_algorithm = models.CharField(
|
||||||
max_length=50,
|
max_length=50,
|
||||||
choices=(
|
choices=(
|
||||||
("sha1", _("SHA1")),
|
(SHA1, _("SHA1")),
|
||||||
("sha256", _("SHA256")),
|
(SHA256, _("SHA256")),
|
||||||
|
(SHA384, _("SHA384")),
|
||||||
|
(SHA512, _("SHA512")),
|
||||||
),
|
),
|
||||||
default="sha256",
|
default=SHA256,
|
||||||
)
|
)
|
||||||
signature_algorithm = models.CharField(
|
signature_algorithm = models.CharField(
|
||||||
max_length=50,
|
max_length=50,
|
||||||
choices=(
|
choices=(
|
||||||
("rsa-sha1", _("RSA-SHA1")),
|
(RSA_SHA1, _("RSA-SHA1")),
|
||||||
("rsa-sha256", _("RSA-SHA256")),
|
(RSA_SHA256, _("RSA-SHA256")),
|
||||||
("ecdsa-sha256", _("ECDSA-SHA256")),
|
(RSA_SHA384, _("RSA-SHA384")),
|
||||||
("dsa-sha1", _("DSA-SHA1")),
|
(RSA_SHA512, _("RSA-SHA512")),
|
||||||
|
(DSA_SHA1, _("DSA-SHA1")),
|
||||||
),
|
),
|
||||||
default="rsa-sha256",
|
default=RSA_SHA256,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
verification_kp = models.ForeignKey(
|
||||||
|
CertificateKeyPair,
|
||||||
|
default=None,
|
||||||
|
null=True,
|
||||||
|
blank=True,
|
||||||
|
help_text=_(
|
||||||
|
(
|
||||||
|
"When selected, incoming assertion's Signatures will be validated against this "
|
||||||
|
"certificate. To allow unsigned Requests, leave on default."
|
||||||
|
)
|
||||||
|
),
|
||||||
|
on_delete=models.SET_NULL,
|
||||||
|
verbose_name=_("Verification Certificate"),
|
||||||
|
related_name="+",
|
||||||
|
)
|
||||||
signing_kp = models.ForeignKey(
|
signing_kp = models.ForeignKey(
|
||||||
CertificateKeyPair,
|
CertificateKeyPair,
|
||||||
default=None,
|
default=None,
|
||||||
null=True,
|
null=True,
|
||||||
help_text=_("Singing is enabled upon selection of a Key Pair."),
|
blank=True,
|
||||||
|
help_text=_(
|
||||||
|
"Keypair used to sign outgoing Responses going to the Service Provider."
|
||||||
|
),
|
||||||
on_delete=models.SET_NULL,
|
on_delete=models.SET_NULL,
|
||||||
verbose_name=_("Signing Keypair"),
|
verbose_name=_("Signing Keypair"),
|
||||||
)
|
)
|
||||||
|
|
||||||
require_signing = models.BooleanField(
|
|
||||||
default=False,
|
|
||||||
help_text=_(
|
|
||||||
"Require Requests to be signed by an X509 Certificate. "
|
|
||||||
"Must match the Certificate selected in `Singing Keypair`."
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def launch_url(self) -> Optional[str]:
|
def launch_url(self) -> Optional[str]:
|
||||||
"""Guess launch_url based on acs URL"""
|
"""Guess launch_url based on acs URL"""
|
||||||
|
|||||||
@ -2,10 +2,10 @@
|
|||||||
from hashlib import sha256
|
from hashlib import sha256
|
||||||
from types import GeneratorType
|
from types import GeneratorType
|
||||||
|
|
||||||
|
import xmlsec
|
||||||
from django.http import HttpRequest
|
from django.http import HttpRequest
|
||||||
from lxml import etree # nosec
|
from lxml import etree # nosec
|
||||||
from lxml.etree import Element, SubElement # nosec
|
from lxml.etree import Element, SubElement # nosec
|
||||||
from signxml import XMLSigner, XMLVerifier, strip_pem_header
|
|
||||||
from structlog import get_logger
|
from structlog import get_logger
|
||||||
|
|
||||||
from passbook.core.exceptions import PropertyMappingExpressionException
|
from passbook.core.exceptions import PropertyMappingExpressionException
|
||||||
@ -16,14 +16,15 @@ from passbook.providers.saml.utils import get_random_id
|
|||||||
from passbook.providers.saml.utils.time import get_time_string
|
from passbook.providers.saml.utils.time import get_time_string
|
||||||
from passbook.sources.saml.exceptions import UnsupportedNameIDFormat
|
from passbook.sources.saml.exceptions import UnsupportedNameIDFormat
|
||||||
from passbook.sources.saml.processors.constants import (
|
from passbook.sources.saml.processors.constants import (
|
||||||
|
DIGEST_ALGORITHM_TRANSLATION_MAP,
|
||||||
NS_MAP,
|
NS_MAP,
|
||||||
NS_SAML_ASSERTION,
|
NS_SAML_ASSERTION,
|
||||||
NS_SAML_PROTOCOL,
|
NS_SAML_PROTOCOL,
|
||||||
NS_SIGNATURE,
|
|
||||||
SAML_NAME_ID_FORMAT_EMAIL,
|
SAML_NAME_ID_FORMAT_EMAIL,
|
||||||
SAML_NAME_ID_FORMAT_PERSISTENT,
|
SAML_NAME_ID_FORMAT_PERSISTENT,
|
||||||
SAML_NAME_ID_FORMAT_TRANSIENT,
|
SAML_NAME_ID_FORMAT_TRANSIENT,
|
||||||
SAML_NAME_ID_FORMAT_X509,
|
SAML_NAME_ID_FORMAT_X509,
|
||||||
|
SIGN_ALGORITHM_TRANSFORM_MAP,
|
||||||
)
|
)
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
@ -186,12 +187,16 @@ class AssertionProcessor:
|
|||||||
assertion.append(self.get_issuer())
|
assertion.append(self.get_issuer())
|
||||||
|
|
||||||
if self.provider.signing_kp:
|
if self.provider.signing_kp:
|
||||||
# We need a placeholder signature as SAML requires the signature to be between
|
sign_algorithm_transform = SIGN_ALGORITHM_TRANSFORM_MAP.get(
|
||||||
# Issuer and subject
|
self.provider.signature_algorithm, xmlsec.constants.TransformRsaSha1
|
||||||
signature_placeholder = SubElement(
|
|
||||||
assertion, f"{{{NS_SIGNATURE}}}Signature", nsmap=NS_MAP
|
|
||||||
)
|
)
|
||||||
signature_placeholder.attrib["Id"] = "placeholder"
|
signature = xmlsec.template.create(
|
||||||
|
assertion,
|
||||||
|
xmlsec.constants.TransformExclC14N,
|
||||||
|
sign_algorithm_transform,
|
||||||
|
ns="ds", # type: ignore
|
||||||
|
)
|
||||||
|
assertion.append(signature)
|
||||||
|
|
||||||
assertion.append(self.get_assertion_subject())
|
assertion.append(self.get_assertion_subject())
|
||||||
assertion.append(self.get_assertion_conditions())
|
assertion.append(self.get_assertion_conditions())
|
||||||
@ -223,20 +228,36 @@ class AssertionProcessor:
|
|||||||
"""Build string XML Response and sign if signing is enabled."""
|
"""Build string XML Response and sign if signing is enabled."""
|
||||||
root_response = self.get_response()
|
root_response = self.get_response()
|
||||||
if self.provider.signing_kp:
|
if self.provider.signing_kp:
|
||||||
signer = XMLSigner(
|
digest_algorithm_transform = DIGEST_ALGORITHM_TRANSLATION_MAP.get(
|
||||||
c14n_algorithm="http://www.w3.org/2001/10/xml-exc-c14n#",
|
self.provider.digest_algorithm, xmlsec.constants.TransformSha1
|
||||||
signature_algorithm=self.provider.signature_algorithm,
|
|
||||||
digest_algorithm=self.provider.digest_algorithm,
|
|
||||||
)
|
)
|
||||||
x509_data = strip_pem_header(
|
assertion = root_response.xpath("//saml:Assertion", namespaces=NS_MAP)[0]
|
||||||
self.provider.signing_kp.certificate_data
|
xmlsec.tree.add_ids(assertion, ["ID"])
|
||||||
).replace("\n", "")
|
signature_node = xmlsec.tree.find_node(
|
||||||
signed = signer.sign(
|
assertion, xmlsec.constants.NodeSignature
|
||||||
root_response,
|
|
||||||
key=self.provider.signing_kp.private_key,
|
|
||||||
cert=[x509_data],
|
|
||||||
reference_uri=self._assertion_id,
|
|
||||||
)
|
)
|
||||||
XMLVerifier().verify(signed, x509_cert=x509_data)
|
ref = xmlsec.template.add_reference(
|
||||||
return etree.tostring(signed).decode("utf-8") # nosec
|
signature_node,
|
||||||
|
digest_algorithm_transform,
|
||||||
|
uri="#" + self._assertion_id,
|
||||||
|
)
|
||||||
|
xmlsec.template.add_transform(ref, xmlsec.constants.TransformEnveloped)
|
||||||
|
xmlsec.template.add_transform(ref, xmlsec.constants.TransformExclC14N)
|
||||||
|
key_info = xmlsec.template.ensure_key_info(signature_node)
|
||||||
|
xmlsec.template.add_x509_data(key_info)
|
||||||
|
|
||||||
|
ctx = xmlsec.SignatureContext()
|
||||||
|
|
||||||
|
key = xmlsec.Key.from_memory(
|
||||||
|
self.provider.signing_kp.key_data,
|
||||||
|
xmlsec.constants.KeyDataFormatPem,
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
key.load_cert_from_memory(
|
||||||
|
self.provider.signing_kp.certificate_data,
|
||||||
|
xmlsec.constants.KeyDataFormatCertPem,
|
||||||
|
)
|
||||||
|
ctx.key = key
|
||||||
|
ctx.sign(signature_node)
|
||||||
|
|
||||||
return etree.tostring(root_response).decode("utf-8") # nosec
|
return etree.tostring(root_response).decode("utf-8") # nosec
|
||||||
|
|||||||
@ -4,9 +4,9 @@ from typing import Iterator, Optional
|
|||||||
from django.http import HttpRequest
|
from django.http import HttpRequest
|
||||||
from django.shortcuts import reverse
|
from django.shortcuts import reverse
|
||||||
from lxml.etree import Element, SubElement, tostring # nosec
|
from lxml.etree import Element, SubElement, tostring # nosec
|
||||||
from signxml.util import strip_pem_header
|
|
||||||
|
|
||||||
from passbook.providers.saml.models import SAMLProvider
|
from passbook.providers.saml.models import SAMLProvider
|
||||||
|
from passbook.providers.saml.utils.encoding import strip_pem_header
|
||||||
from passbook.sources.saml.processors.constants import (
|
from passbook.sources.saml.processors.constants import (
|
||||||
NS_MAP,
|
NS_MAP,
|
||||||
NS_SAML_METADATA,
|
NS_SAML_METADATA,
|
||||||
@ -42,7 +42,7 @@ class MetadataProcessor:
|
|||||||
)
|
)
|
||||||
x509_certificate.text = strip_pem_header(
|
x509_certificate.text = strip_pem_header(
|
||||||
self.provider.signing_kp.certificate_data.replace("\r", "")
|
self.provider.signing_kp.certificate_data.replace("\r", "")
|
||||||
).replace("\n", "")
|
)
|
||||||
return key_descriptor
|
return key_descriptor
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|||||||
@ -4,22 +4,33 @@ from dataclasses import dataclass
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
from urllib.parse import quote_plus
|
from urllib.parse import quote_plus
|
||||||
|
|
||||||
from cryptography.exceptions import InvalidSignature
|
import xmlsec
|
||||||
from cryptography.hazmat.primitives import hashes
|
|
||||||
from cryptography.hazmat.primitives.asymmetric import padding
|
|
||||||
from defusedxml import ElementTree
|
from defusedxml import ElementTree
|
||||||
from signxml import XMLVerifier
|
from lxml import etree # nosec
|
||||||
from structlog import get_logger
|
from structlog import get_logger
|
||||||
|
|
||||||
from passbook.providers.saml.exceptions import CannotHandleAssertion
|
from passbook.providers.saml.exceptions import CannotHandleAssertion
|
||||||
from passbook.providers.saml.models import SAMLProvider
|
from passbook.providers.saml.models import SAMLProvider
|
||||||
from passbook.providers.saml.utils.encoding import decode_base64_and_inflate
|
from passbook.providers.saml.utils.encoding import decode_base64_and_inflate
|
||||||
from passbook.sources.saml.processors.constants import (
|
from passbook.sources.saml.processors.constants import (
|
||||||
|
DSA_SHA1,
|
||||||
|
NS_MAP,
|
||||||
NS_SAML_PROTOCOL,
|
NS_SAML_PROTOCOL,
|
||||||
|
RSA_SHA1,
|
||||||
|
RSA_SHA256,
|
||||||
|
RSA_SHA384,
|
||||||
|
RSA_SHA512,
|
||||||
SAML_NAME_ID_FORMAT_EMAIL,
|
SAML_NAME_ID_FORMAT_EMAIL,
|
||||||
)
|
)
|
||||||
|
|
||||||
LOGGER = get_logger()
|
LOGGER = get_logger()
|
||||||
|
ERROR_SIGNATURE_REQUIRED_BUT_ABSENT = (
|
||||||
|
"Verification Certificate configured, but request is not signed."
|
||||||
|
)
|
||||||
|
ERROR_SIGNATURE_EXISTS_BUT_NO_VERIFIER = (
|
||||||
|
"Provider does not have a Validation Certificate configured."
|
||||||
|
)
|
||||||
|
ERROR_FAILED_TO_VERIFY = "Failed to verify signature"
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@ -67,15 +78,38 @@ class AuthNRequestParser:
|
|||||||
|
|
||||||
def parse(self, saml_request: str, relay_state: Optional[str]) -> AuthNRequest:
|
def parse(self, saml_request: str, relay_state: Optional[str]) -> AuthNRequest:
|
||||||
"""Validate and parse raw request with enveloped signautre."""
|
"""Validate and parse raw request with enveloped signautre."""
|
||||||
decoded_xml = decode_base64_and_inflate(saml_request)
|
decoded_xml = b64decode(saml_request.encode()).decode()
|
||||||
|
|
||||||
if self.provider.signing_kp:
|
verifier = self.provider.verification_kp
|
||||||
try:
|
|
||||||
XMLVerifier().verify(
|
root = etree.fromstring(decoded_xml) # nosec
|
||||||
decoded_xml, x509_cert=self.provider.signing_kp.certificate_data
|
xmlsec.tree.add_ids(root, ["ID"])
|
||||||
|
signature_nodes = root.xpath(
|
||||||
|
"/samlp:AuthnRequest/ds:Signature", namespaces=NS_MAP
|
||||||
)
|
)
|
||||||
except InvalidSignature as exc:
|
if len(signature_nodes) != 1:
|
||||||
raise CannotHandleAssertion("Failed to verify signature") from exc
|
raise CannotHandleAssertion(ERROR_SIGNATURE_REQUIRED_BUT_ABSENT)
|
||||||
|
|
||||||
|
signature_node = signature_nodes[0]
|
||||||
|
|
||||||
|
if verifier and signature_node is None:
|
||||||
|
raise CannotHandleAssertion(ERROR_SIGNATURE_REQUIRED_BUT_ABSENT)
|
||||||
|
|
||||||
|
if signature_node is not None:
|
||||||
|
if not verifier:
|
||||||
|
raise CannotHandleAssertion(ERROR_SIGNATURE_EXISTS_BUT_NO_VERIFIER)
|
||||||
|
|
||||||
|
try:
|
||||||
|
ctx = xmlsec.SignatureContext()
|
||||||
|
key = xmlsec.Key.from_memory(
|
||||||
|
verifier.certificate_data,
|
||||||
|
xmlsec.constants.KeyDataFormatCertPem,
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
ctx.key = key
|
||||||
|
ctx.verify(signature_node)
|
||||||
|
except xmlsec.VerificationError as exc:
|
||||||
|
raise CannotHandleAssertion(ERROR_FAILED_TO_VERIFY) from exc
|
||||||
|
|
||||||
return self._parse_xml(decoded_xml, relay_state)
|
return self._parse_xml(decoded_xml, relay_state)
|
||||||
|
|
||||||
@ -89,27 +123,45 @@ class AuthNRequestParser:
|
|||||||
"""Validate and parse raw request with detached signature"""
|
"""Validate and parse raw request with detached signature"""
|
||||||
decoded_xml = decode_base64_and_inflate(saml_request)
|
decoded_xml = decode_base64_and_inflate(saml_request)
|
||||||
|
|
||||||
|
verifier = self.provider.verification_kp
|
||||||
|
|
||||||
|
if verifier and not (signature and sig_alg):
|
||||||
|
raise CannotHandleAssertion(ERROR_SIGNATURE_REQUIRED_BUT_ABSENT)
|
||||||
|
|
||||||
if signature and sig_alg:
|
if signature and sig_alg:
|
||||||
# if sig_alg == "http://www.w3.org/2000/09/xmldsig#rsa-sha1":
|
if not verifier:
|
||||||
sig_hash = hashes.SHA1() # nosec
|
raise CannotHandleAssertion(ERROR_SIGNATURE_EXISTS_BUT_NO_VERIFIER)
|
||||||
|
|
||||||
querystring = f"SAMLRequest={quote_plus(saml_request)}&"
|
querystring = f"SAMLRequest={quote_plus(saml_request)}&"
|
||||||
if relay_state is not None:
|
if relay_state is not None:
|
||||||
querystring += f"RelayState={quote_plus(relay_state)}&"
|
querystring += f"RelayState={quote_plus(relay_state)}&"
|
||||||
querystring += f"SigAlg={sig_alg}"
|
querystring += f"SigAlg={quote_plus(sig_alg)}"
|
||||||
|
|
||||||
public_key = self.provider.signing_kp.private_key.public_key()
|
dsig_ctx = xmlsec.SignatureContext()
|
||||||
try:
|
key = xmlsec.Key.from_memory(
|
||||||
public_key.verify(
|
verifier.certificate_data, xmlsec.constants.KeyDataFormatCertPem, None
|
||||||
b64decode(signature),
|
|
||||||
querystring.encode(),
|
|
||||||
padding.PSS(
|
|
||||||
mgf=padding.MGF1(sig_hash), salt_length=padding.PSS.MAX_LENGTH
|
|
||||||
),
|
|
||||||
sig_hash,
|
|
||||||
)
|
)
|
||||||
except InvalidSignature as exc:
|
dsig_ctx.key = key
|
||||||
raise CannotHandleAssertion("Failed to verify signature") from exc
|
|
||||||
|
sign_algorithm_transform_map = {
|
||||||
|
DSA_SHA1: xmlsec.constants.TransformDsaSha1,
|
||||||
|
RSA_SHA1: xmlsec.constants.TransformRsaSha1,
|
||||||
|
RSA_SHA256: xmlsec.constants.TransformRsaSha256,
|
||||||
|
RSA_SHA384: xmlsec.constants.TransformRsaSha384,
|
||||||
|
RSA_SHA512: xmlsec.constants.TransformRsaSha512,
|
||||||
|
}
|
||||||
|
sign_algorithm_transform = sign_algorithm_transform_map.get(
|
||||||
|
sig_alg, xmlsec.constants.TransformRsaSha1
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
dsig_ctx.verify_binary(
|
||||||
|
querystring.encode("utf-8"),
|
||||||
|
sign_algorithm_transform,
|
||||||
|
b64decode(signature),
|
||||||
|
)
|
||||||
|
except xmlsec.VerificationError as exc:
|
||||||
|
raise CannotHandleAssertion(ERROR_FAILED_TO_VERIFY) from exc
|
||||||
return self._parse_xml(decoded_xml, relay_state)
|
return self._parse_xml(decoded_xml, relay_state)
|
||||||
|
|
||||||
def idp_initiated(self) -> AuthNRequest:
|
def idp_initiated(self) -> AuthNRequest:
|
||||||
|
|||||||
@ -1,4 +1,6 @@
|
|||||||
"""Test AuthN Request generator and parser"""
|
"""Test AuthN Request generator and parser"""
|
||||||
|
from base64 import b64encode
|
||||||
|
|
||||||
from django.contrib.sessions.middleware import SessionMiddleware
|
from django.contrib.sessions.middleware import SessionMiddleware
|
||||||
from django.http.request import HttpRequest, QueryDict
|
from django.http.request import HttpRequest, QueryDict
|
||||||
from django.test import RequestFactory, TestCase
|
from django.test import RequestFactory, TestCase
|
||||||
@ -6,18 +8,55 @@ from guardian.utils import get_anonymous_user
|
|||||||
|
|
||||||
from passbook.crypto.models import CertificateKeyPair
|
from passbook.crypto.models import CertificateKeyPair
|
||||||
from passbook.flows.models import Flow
|
from passbook.flows.models import Flow
|
||||||
from passbook.providers.saml.models import SAMLProvider
|
from passbook.providers.saml.models import SAMLPropertyMapping, SAMLProvider
|
||||||
from passbook.providers.saml.processors.assertion import AssertionProcessor
|
from passbook.providers.saml.processors.assertion import AssertionProcessor
|
||||||
from passbook.providers.saml.processors.request_parser import AuthNRequestParser
|
from passbook.providers.saml.processors.request_parser import AuthNRequestParser
|
||||||
from passbook.providers.saml.utils.encoding import deflate_and_base64_encode
|
|
||||||
from passbook.sources.saml.exceptions import MismatchedRequestID
|
from passbook.sources.saml.exceptions import MismatchedRequestID
|
||||||
from passbook.sources.saml.models import SAMLSource
|
from passbook.sources.saml.models import SAMLSource
|
||||||
|
from passbook.sources.saml.processors.constants import SAML_NAME_ID_FORMAT_EMAIL
|
||||||
from passbook.sources.saml.processors.request import (
|
from passbook.sources.saml.processors.request import (
|
||||||
SESSION_REQUEST_ID,
|
SESSION_REQUEST_ID,
|
||||||
RequestProcessor,
|
RequestProcessor,
|
||||||
)
|
)
|
||||||
from passbook.sources.saml.processors.response import ResponseProcessor
|
from passbook.sources.saml.processors.response import ResponseProcessor
|
||||||
|
|
||||||
|
REDIRECT_REQUEST = (
|
||||||
|
"fZLNbsIwEIRfJfIdbKeFgEUipXAoEm0jSHvopTLJplhK7NTr9Oft6yRUKhekPdk73+yOdoWyqVuRdu6k9/DRAbrgu6k1iu"
|
||||||
|
"EjJp3VwkhUKLRsAIUrxCF92IlwykRrjTOFqUmQIoJ1yui10dg1YA9gP1UBz/tdTE7OtSgo5WzKQzYditGeP8GW9rSQZk+H"
|
||||||
|
"nAQbb6+07EGj7EI1j8SCeaVs21oVQ9dAoRqcf6OIhh6VLpV+pxZKZaFwlATbTUzeyqKazaqiDCO5WEQwZzKCagkwr8obWc"
|
||||||
|
"qjb0PsYKvRSe1iErKQTTj3lYdc3HLBl68kyM4L340u19M5j4LiPs+zybjgC1gclvMNJFn104vB2P5L/TpW/kVNkqvBrug/"
|
||||||
|
"+mjVikeP224y4/P7CdK6Nl9rC9JBTDihySi5vIbkFw=="
|
||||||
|
)
|
||||||
|
REDIRECT_SIGNATURE = (
|
||||||
|
"UlOe1BItHVHM+io6rUZAenIqfibm7hM6wr9I1rcP5kPJ4N8cbkyqmAMh5LD2lUq3PDERJfjdO/oOKnvJmbD2y9MOObyR2d"
|
||||||
|
"7Udv62KERrA0qM917Q+w8wrLX7w2nHY96EDvkXD4iAomR5EE9dHRuubDy7uRv2syEevc0gfoLi7W/5vp96vJgsaSqxnTp+"
|
||||||
|
"QiYq49KyWyMtxRULF2yd+vYDnHCDME73mNSULEHfwCU71dvbKpnFaej78q7wS20gUk6ysOOXXtvDHbiVcpUb/9oyDgNAxU"
|
||||||
|
"jVvPdh96AhBFj2HCuGZhP0CGotafTciu6YlsiwUpuBkIYgZmNWYa3FR9LS4Q=="
|
||||||
|
)
|
||||||
|
REDIRECT_SIG_ALG = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"
|
||||||
|
REDIRECT_RELAY_STATE = (
|
||||||
|
"ss:mem:7a054b4af44f34f89dd2d973f383c250b6b076e7f06cfa8276008a6504eaf3c7"
|
||||||
|
)
|
||||||
|
REDIRECT_CERT = """-----BEGIN CERTIFICATE-----
|
||||||
|
MIIDCDCCAfCgAwIBAgIRAM5s+bhOHk4ChSpPkGSh0NswDQYJKoZIhvcNAQELBQAw
|
||||||
|
KzEpMCcGA1UEAwwgcGFzc2Jvb2sgU2VsZi1zaWduZWQgQ2VydGlmaWNhdGUwHhcN
|
||||||
|
MjAxMTA3MjAzNDIxWhcNMjExMTA4MjAzNDIxWjBUMSkwJwYDVQQDDCBwYXNzYm9v
|
||||||
|
ayBTZWxmLXNpZ25lZCBDZXJ0aWZpY2F0ZTERMA8GA1UECgwIcGFzc2Jvb2sxFDAS
|
||||||
|
BgNVBAsMC1NlbGYtc2lnbmVkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
|
||||||
|
AQEAuh+Bv6a/ogpic72X/sq86YiLzVjixnGqjc4wpsPPP00GX8jUAZJL4Tjo+sYK
|
||||||
|
IU2DF2/azlVqjkbLho4rGuuc8YkbFXBEXPYc5h3bseO2vk6sbbbWKV0mro1VFhBh
|
||||||
|
T59hBORuMMefmQdhFzsRNOGklIptQdg0quD8ET3+/uNfIT98S2ruZdYteFls46Sa
|
||||||
|
MokZFYVD6pWEYV4P2MKVAFqJX9bqBW0LfCCfFqHAOJjUZj9dtleg86d2WfedUOG2
|
||||||
|
LK0iLrydjhThbI0GUDhv0jWYkRlv04fdJ1WSRANYA3gBOnyw+Iigh2xNnYbVZMXT
|
||||||
|
I0BupIJ4UoODMc4QpD2GYJ6oGwIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCCEF3e
|
||||||
|
Y99KxEBSR4H4/TvKbnh4QtHswOf7MaGdjtrld7l4u4Hc4NEklNdDn1XLKhZwnq3Z
|
||||||
|
LRsRlJutDzZ18SRmAJPXPbka7z7D+LA1mbNQElOgiKyQHD9rIJSBr6X5SM9As3CR
|
||||||
|
7QUsb8dg7kc+Jn7WuLZIEVxxMtekt0buWEdMJiklF0tCS3LNsP083FaQk/H1K0z6
|
||||||
|
3PWP26EFdwir3RyTKLY5CBLjKrUAo9O1l/WBVFYbdetnipbGGu5f6nk6nnxbwLLI
|
||||||
|
Dm52Vkq+xFDDUq9IqIoYvLaE86MDvtpMQEx65tIGU19vUf3fL/+sSfdRZ1HDzP4d
|
||||||
|
qNAZMq1DqpibfCBg
|
||||||
|
-----END CERTIFICATE-----"""
|
||||||
|
|
||||||
|
|
||||||
def dummy_get_response(request: HttpRequest): # pragma: no cover
|
def dummy_get_response(request: HttpRequest): # pragma: no cover
|
||||||
"""Dummy get_response for SessionMiddleware"""
|
"""Dummy get_response for SessionMiddleware"""
|
||||||
@ -28,17 +67,21 @@ class TestAuthNRequest(TestCase):
|
|||||||
"""Test AuthN Request generator and parser"""
|
"""Test AuthN Request generator and parser"""
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.provider = SAMLProvider.objects.create(
|
cert = CertificateKeyPair.objects.first()
|
||||||
|
self.provider: SAMLProvider = SAMLProvider.objects.create(
|
||||||
authorization_flow=Flow.objects.get(
|
authorization_flow=Flow.objects.get(
|
||||||
slug="default-provider-authorization-implicit-consent"
|
slug="default-provider-authorization-implicit-consent"
|
||||||
),
|
),
|
||||||
acs_url="http://testserver/source/saml/provider/acs/",
|
acs_url="http://testserver/source/saml/provider/acs/",
|
||||||
signing_kp=CertificateKeyPair.objects.first(),
|
signing_kp=cert,
|
||||||
|
verification_kp=cert,
|
||||||
)
|
)
|
||||||
|
self.provider.property_mappings.set(SAMLPropertyMapping.objects.all())
|
||||||
|
self.provider.save()
|
||||||
self.source = SAMLSource.objects.create(
|
self.source = SAMLSource.objects.create(
|
||||||
slug="provider",
|
slug="provider",
|
||||||
issuer="passbook",
|
issuer="passbook",
|
||||||
signing_kp=CertificateKeyPair.objects.first(),
|
signing_kp=cert,
|
||||||
)
|
)
|
||||||
self.factory = RequestFactory()
|
self.factory = RequestFactory()
|
||||||
|
|
||||||
@ -55,14 +98,15 @@ class TestAuthNRequest(TestCase):
|
|||||||
request = request_proc.build_auth_n()
|
request = request_proc.build_auth_n()
|
||||||
# Now we check the ID and signature
|
# Now we check the ID and signature
|
||||||
parsed_request = AuthNRequestParser(self.provider).parse(
|
parsed_request = AuthNRequestParser(self.provider).parse(
|
||||||
deflate_and_base64_encode(request), "test_state"
|
b64encode(request.encode()).decode(), "test_state"
|
||||||
)
|
)
|
||||||
self.assertEqual(parsed_request.id, request_proc.request_id)
|
self.assertEqual(parsed_request.id, request_proc.request_id)
|
||||||
self.assertEqual(parsed_request.relay_state, "test_state")
|
self.assertEqual(parsed_request.relay_state, "test_state")
|
||||||
|
|
||||||
def test_signed_valid_detached(self):
|
def test_request_full_signed(self):
|
||||||
"""Test generated AuthNRequest with valid signature (detached)"""
|
"""Test full SAML Request/Response flow, fully signed"""
|
||||||
http_request = self.factory.get("/")
|
http_request = self.factory.get("/")
|
||||||
|
http_request.user = get_anonymous_user()
|
||||||
|
|
||||||
middleware = SessionMiddleware(dummy_get_response)
|
middleware = SessionMiddleware(dummy_get_response)
|
||||||
middleware.process_request(http_request)
|
middleware.process_request(http_request)
|
||||||
@ -70,13 +114,22 @@ class TestAuthNRequest(TestCase):
|
|||||||
|
|
||||||
# First create an AuthNRequest
|
# First create an AuthNRequest
|
||||||
request_proc = RequestProcessor(self.source, http_request, "test_state")
|
request_proc = RequestProcessor(self.source, http_request, "test_state")
|
||||||
params = request_proc.build_auth_n_detached()
|
request = request_proc.build_auth_n()
|
||||||
# Now we check the ID and signature
|
|
||||||
parsed_request = AuthNRequestParser(self.provider).parse_detached(
|
# To get an assertion we need a parsed request (parsed by provider)
|
||||||
params["SAMLRequest"], "test_state", params["Signature"], params["SigAlg"]
|
parsed_request = AuthNRequestParser(self.provider).parse(
|
||||||
|
b64encode(request.encode()).decode(), "test_state"
|
||||||
)
|
)
|
||||||
self.assertEqual(parsed_request.id, request_proc.request_id)
|
# Now create a response and convert it to string (provider)
|
||||||
self.assertEqual(parsed_request.relay_state, "test_state")
|
response_proc = AssertionProcessor(self.provider, http_request, parsed_request)
|
||||||
|
response = response_proc.build_response()
|
||||||
|
|
||||||
|
# Now parse the response (source)
|
||||||
|
http_request.POST = QueryDict(mutable=True)
|
||||||
|
http_request.POST["SAMLResponse"] = b64encode(response.encode()).decode()
|
||||||
|
|
||||||
|
response_parser = ResponseProcessor(self.source)
|
||||||
|
response_parser.parse(http_request)
|
||||||
|
|
||||||
def test_request_id_invalid(self):
|
def test_request_id_invalid(self):
|
||||||
"""Test generated AuthNRequest with invalid request ID"""
|
"""Test generated AuthNRequest with invalid request ID"""
|
||||||
@ -97,7 +150,7 @@ class TestAuthNRequest(TestCase):
|
|||||||
|
|
||||||
# To get an assertion we need a parsed request (parsed by provider)
|
# To get an assertion we need a parsed request (parsed by provider)
|
||||||
parsed_request = AuthNRequestParser(self.provider).parse(
|
parsed_request = AuthNRequestParser(self.provider).parse(
|
||||||
deflate_and_base64_encode(request), "test_state"
|
b64encode(request.encode()).decode(), "test_state"
|
||||||
)
|
)
|
||||||
# Now create a response and convert it to string (provider)
|
# Now create a response and convert it to string (provider)
|
||||||
response_proc = AssertionProcessor(self.provider, http_request, parsed_request)
|
response_proc = AssertionProcessor(self.provider, http_request, parsed_request)
|
||||||
@ -105,9 +158,54 @@ class TestAuthNRequest(TestCase):
|
|||||||
|
|
||||||
# Now parse the response (source)
|
# Now parse the response (source)
|
||||||
http_request.POST = QueryDict(mutable=True)
|
http_request.POST = QueryDict(mutable=True)
|
||||||
http_request.POST["SAMLResponse"] = deflate_and_base64_encode(response)
|
http_request.POST["SAMLResponse"] = b64encode(response.encode()).decode()
|
||||||
|
|
||||||
response_parser = ResponseProcessor(self.source)
|
response_parser = ResponseProcessor(self.source)
|
||||||
|
|
||||||
with self.assertRaises(MismatchedRequestID):
|
with self.assertRaises(MismatchedRequestID):
|
||||||
response_parser.parse(http_request)
|
response_parser.parse(http_request)
|
||||||
|
|
||||||
|
def test_signed_valid_detached(self):
|
||||||
|
"""Test generated AuthNRequest with valid signature (detached)"""
|
||||||
|
http_request = self.factory.get("/")
|
||||||
|
|
||||||
|
middleware = SessionMiddleware(dummy_get_response)
|
||||||
|
middleware.process_request(http_request)
|
||||||
|
http_request.session.save()
|
||||||
|
|
||||||
|
# First create an AuthNRequest
|
||||||
|
request_proc = RequestProcessor(self.source, http_request, "test_state")
|
||||||
|
params = request_proc.build_auth_n_detached()
|
||||||
|
# Now we check the ID and signature
|
||||||
|
parsed_request = AuthNRequestParser(self.provider).parse_detached(
|
||||||
|
params["SAMLRequest"],
|
||||||
|
params["RelayState"],
|
||||||
|
params["Signature"],
|
||||||
|
params["SigAlg"],
|
||||||
|
)
|
||||||
|
self.assertEqual(parsed_request.id, request_proc.request_id)
|
||||||
|
self.assertEqual(parsed_request.relay_state, "test_state")
|
||||||
|
|
||||||
|
def test_signed_detached_static(self):
|
||||||
|
"""Test request with detached signature,
|
||||||
|
taken from https://www.samltool.com/generic_sso_req.php"""
|
||||||
|
static_keypair = CertificateKeyPair.objects.create(
|
||||||
|
name="samltool", certificate_data=REDIRECT_CERT
|
||||||
|
)
|
||||||
|
provider = SAMLProvider(
|
||||||
|
name="samltool",
|
||||||
|
authorization_flow=Flow.objects.get(
|
||||||
|
slug="default-provider-authorization-implicit-consent"
|
||||||
|
),
|
||||||
|
acs_url="https://10.120.20.200/saml-sp/SAML2/POST",
|
||||||
|
audience="https://10.120.20.200/saml-sp/SAML2/POST",
|
||||||
|
issuer="https://10.120.20.200/saml-sp/SAML2/POST",
|
||||||
|
signing_kp=static_keypair,
|
||||||
|
verification_kp=static_keypair,
|
||||||
|
)
|
||||||
|
parsed_request = AuthNRequestParser(provider).parse_detached(
|
||||||
|
REDIRECT_REQUEST, REDIRECT_RELAY_STATE, REDIRECT_SIGNATURE, REDIRECT_SIG_ALG
|
||||||
|
)
|
||||||
|
self.assertEqual(parsed_request.id, "_dcf55fcd27a887e60a7ef9ee6fd3adab")
|
||||||
|
self.assertEqual(parsed_request.name_id_policy, SAML_NAME_ID_FORMAT_EMAIL)
|
||||||
|
self.assertEqual(parsed_request.relay_state, REDIRECT_RELAY_STATE)
|
||||||
|
|||||||
@ -2,6 +2,9 @@
|
|||||||
import base64
|
import base64
|
||||||
import zlib
|
import zlib
|
||||||
|
|
||||||
|
PEM_HEADER = "-----BEGIN CERTIFICATE-----"
|
||||||
|
PEM_FOOTER = "-----END CERTIFICATE-----"
|
||||||
|
|
||||||
|
|
||||||
def decode_base64_and_inflate(encoded: str, encoding="utf-8") -> str:
|
def decode_base64_and_inflate(encoded: str, encoding="utf-8") -> str:
|
||||||
"""Base64 decode and ZLib decompress b64string"""
|
"""Base64 decode and ZLib decompress b64string"""
|
||||||
@ -22,3 +25,8 @@ def deflate_and_base64_encode(inflated: str, encoding="utf-8"):
|
|||||||
def nice64(src: str) -> str:
|
def nice64(src: str) -> str:
|
||||||
"""Returns src base64-encoded and formatted nicely for our XML. """
|
"""Returns src base64-encoded and formatted nicely for our XML. """
|
||||||
return base64.b64encode(src.encode()).decode("utf-8").replace("\n", "")
|
return base64.b64encode(src.encode()).decode("utf-8").replace("\n", "")
|
||||||
|
|
||||||
|
|
||||||
|
def strip_pem_header(cert: str) -> str:
|
||||||
|
"""Remove PEM Headers"""
|
||||||
|
return cert.replace(PEM_HEADER, "").replace(PEM_FOOTER, "").replace("\n", "")
|
||||||
|
|||||||
@ -127,7 +127,7 @@ class SAMLSSOBindingPOSTView(SAMLSSOView):
|
|||||||
def check_saml_request(self) -> Optional[HttpRequest]:
|
def check_saml_request(self) -> Optional[HttpRequest]:
|
||||||
"""Handle POST bindings"""
|
"""Handle POST bindings"""
|
||||||
if REQUEST_KEY_SAML_REQUEST not in self.request.POST:
|
if REQUEST_KEY_SAML_REQUEST not in self.request.POST:
|
||||||
LOGGER.info("handle_saml_request: SAML payload missing")
|
LOGGER.info("check_saml_request: SAML payload missing")
|
||||||
return bad_request_message(
|
return bad_request_message(
|
||||||
self.request, "The SAML request payload is missing."
|
self.request, "The SAML request payload is missing."
|
||||||
)
|
)
|
||||||
|
|||||||
@ -6,23 +6,27 @@ It exposes the ASGI callable as a module-level variable named ``application``.
|
|||||||
For more information on this file, see
|
For more information on this file, see
|
||||||
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
|
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
|
||||||
"""
|
"""
|
||||||
import os
|
|
||||||
import typing
|
import typing
|
||||||
from time import time
|
from time import time
|
||||||
from typing import Any, ByteString, Dict
|
from typing import Any, ByteString, Dict
|
||||||
|
|
||||||
import django
|
import django
|
||||||
from asgiref.compatibility import guarantee_single_callable
|
from asgiref.compatibility import guarantee_single_callable
|
||||||
from channels.routing import get_default_application
|
from channels.routing import ProtocolTypeRouter, URLRouter
|
||||||
from defusedxml import defuse_stdlib
|
from defusedxml import defuse_stdlib
|
||||||
|
from django.core.asgi import get_asgi_application
|
||||||
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
|
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
|
||||||
from structlog import get_logger
|
from structlog import get_logger
|
||||||
|
|
||||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.root.settings")
|
# DJANGO_SETTINGS_MODULE is set in gunicorn.conf.py
|
||||||
|
|
||||||
defuse_stdlib()
|
defuse_stdlib()
|
||||||
django.setup()
|
django.setup()
|
||||||
|
|
||||||
|
# pylint: disable=wrong-import-position
|
||||||
|
from passbook.root import websocket # noqa # isort:skip
|
||||||
|
|
||||||
|
|
||||||
# See https://github.com/encode/starlette/blob/master/starlette/types.py
|
# See https://github.com/encode/starlette/blob/master/starlette/types.py
|
||||||
Scope = typing.MutableMapping[str, typing.Any]
|
Scope = typing.MutableMapping[str, typing.Any]
|
||||||
Message = typing.MutableMapping[str, typing.Any]
|
Message = typing.MutableMapping[str, typing.Any]
|
||||||
@ -129,5 +133,14 @@ class ASGILogger:
|
|||||||
|
|
||||||
|
|
||||||
application = ASGILogger(
|
application = ASGILogger(
|
||||||
guarantee_single_callable(SentryAsgiMiddleware(get_default_application()))
|
guarantee_single_callable(
|
||||||
|
SentryAsgiMiddleware(
|
||||||
|
ProtocolTypeRouter(
|
||||||
|
{
|
||||||
|
"http": get_asgi_application(),
|
||||||
|
"websocket": URLRouter(websocket.websocket_urlpatterns),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|||||||
@ -1,12 +0,0 @@
|
|||||||
"""root Websocket URLS"""
|
|
||||||
from channels.routing import ProtocolTypeRouter, URLRouter
|
|
||||||
from django.urls import path
|
|
||||||
|
|
||||||
from passbook.outposts.channels import OutpostConsumer
|
|
||||||
|
|
||||||
application = ProtocolTypeRouter(
|
|
||||||
{
|
|
||||||
# (http->django views is added by default)
|
|
||||||
"websocket": URLRouter([path("ws/outpost/<uuid:pk>/", OutpostConsumer)]),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
@ -208,7 +208,7 @@ TEMPLATES = [
|
|||||||
},
|
},
|
||||||
]
|
]
|
||||||
|
|
||||||
ASGI_APPLICATION = "passbook.root.routing.application"
|
ASGI_APPLICATION = "passbook.root.asgi.application"
|
||||||
|
|
||||||
CHANNEL_LAYERS = {
|
CHANNEL_LAYERS = {
|
||||||
"default": {
|
"default": {
|
||||||
@ -322,7 +322,7 @@ if not DEBUG and _ERROR_REPORTING:
|
|||||||
],
|
],
|
||||||
before_send=before_send,
|
before_send=before_send,
|
||||||
release="passbook@%s" % __version__,
|
release="passbook@%s" % __version__,
|
||||||
traces_sample_rate=1.0,
|
traces_sample_rate=0.6,
|
||||||
environment=CONFIG.y("error_reporting.environment", "customer"),
|
environment=CONFIG.y("error_reporting.environment", "customer"),
|
||||||
send_default_pii=CONFIG.y_bool("error_reporting.send_pii", False),
|
send_default_pii=CONFIG.y_bool("error_reporting.send_pii", False),
|
||||||
)
|
)
|
||||||
|
|||||||
6
passbook/root/websocket.py
Normal file
6
passbook/root/websocket.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
"""root Websocket URLS"""
|
||||||
|
from django.urls import path
|
||||||
|
|
||||||
|
from passbook.outposts.channels import OutpostConsumer
|
||||||
|
|
||||||
|
websocket_urlpatterns = [path("ws/outpost/<uuid:pk>/", OutpostConsumer.as_asgi())]
|
||||||
@ -19,8 +19,10 @@ class SAMLSourceSerializer(ModelSerializer):
|
|||||||
"allow_idp_initiated",
|
"allow_idp_initiated",
|
||||||
"name_id_policy",
|
"name_id_policy",
|
||||||
"binding_type",
|
"binding_type",
|
||||||
"temporary_user_delete_after",
|
|
||||||
"signing_kp",
|
"signing_kp",
|
||||||
|
"digest_algorithm",
|
||||||
|
"signature_algorithm",
|
||||||
|
"temporary_user_delete_after",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -12,3 +12,7 @@ class UnsupportedNameIDFormat(SentryIgnoredException):
|
|||||||
|
|
||||||
class MismatchedRequestID(SentryIgnoredException):
|
class MismatchedRequestID(SentryIgnoredException):
|
||||||
"""Exception raised when the returned request ID doesn't match the saved ID."""
|
"""Exception raised when the returned request ID doesn't match the saved ID."""
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidSignature(SentryIgnoredException):
|
||||||
|
"""Signature of XML Object is either missing or invalid"""
|
||||||
|
|||||||
@ -35,8 +35,10 @@ class SAMLSourceForm(forms.ModelForm):
|
|||||||
"binding_type",
|
"binding_type",
|
||||||
"name_id_policy",
|
"name_id_policy",
|
||||||
"allow_idp_initiated",
|
"allow_idp_initiated",
|
||||||
"temporary_user_delete_after",
|
|
||||||
"signing_kp",
|
"signing_kp",
|
||||||
|
"digest_algorithm",
|
||||||
|
"signature_algorithm",
|
||||||
|
"temporary_user_delete_after",
|
||||||
]
|
]
|
||||||
widgets = {
|
widgets = {
|
||||||
"name": forms.TextInput(),
|
"name": forms.TextInput(),
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user