Compare commits

..

33 Commits

Author SHA1 Message Date
b00573bde2 new release: 0.12.4-stable 2020-10-20 22:31:31 +02:00
aeee3ad7f9 e2e: add @retry decorator to make e2e tests more reliable 2020-10-20 18:51:17 +02:00
ef021495ef flows: revert evaluate_on_call rename for backwards compatibility 2020-10-20 15:41:50 +02:00
061eab4b36 docs: fix keys for example flows 2020-10-20 15:14:41 +02:00
870e01f836 flows: rename re_evaluate_policies to evaluate_on_call, add evaluate_on_plan 2020-10-20 15:06:36 +02:00
e2ca72adf0 stages/user_login: only show successful login message at login stage 2020-10-20 12:11:59 +02:00
395ef43eae policies/expression: fix ip_network not being imported by default 2020-10-20 12:05:56 +02:00
a4cc653757 new release: 0.12.3-stable 2020-10-20 10:24:45 +02:00
db4ff20906 outposts: fix service using incorrect pod selector 2020-10-20 10:18:05 +02:00
1f0fbd33b6 build(deps): bump urllib3 from 1.25.10 to 1.25.11 (#287)
Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.25.10 to 1.25.11.
- [Release notes](https://github.com/urllib3/urllib3/releases)
- [Changelog](https://github.com/urllib3/urllib3/blob/master/CHANGES.rst)
- [Commits](https://github.com/urllib3/urllib3/compare/1.25.10...1.25.11)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-20 10:17:46 +02:00
5de8d2721e build(deps): bump uvicorn from 0.12.1 to 0.12.2 (#286)
Bumps [uvicorn](https://github.com/encode/uvicorn) from 0.12.1 to 0.12.2.
- [Release notes](https://github.com/encode/uvicorn/releases)
- [Changelog](https://github.com/encode/uvicorn/blob/master/CHANGELOG.md)
- [Commits](https://github.com/encode/uvicorn/compare/0.12.1...0.12.2)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-20 10:09:37 +02:00
0d65da9a9e build(deps): bump boto3 from 1.15.18 to 1.16.0 (#288)
Bumps [boto3](https://github.com/boto/boto3) from 1.15.18 to 1.16.0.
- [Release notes](https://github.com/boto/boto3/releases)
- [Changelog](https://github.com/boto/boto3/blob/develop/CHANGELOG.rst)
- [Commits](https://github.com/boto/boto3/compare/1.15.18...1.16.0)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-20 09:34:55 +02:00
4316ee4330 root: implement db backups with monitored task, update docs 2020-10-19 22:17:47 +02:00
2ed9a1dbe3 */tasks: update phrasing 2020-10-19 21:35:31 +02:00
8e03824d20 lib: always set task's UID, even for unexpected errors 2020-10-19 21:30:21 +02:00
754dbdd0e5 outpost: fix logs for kubernetes controller 2020-10-19 21:29:58 +02:00
e13d348315 new release: 0.12.2-stable 2020-10-19 19:36:36 +02:00
169f3ebe5b outposts: fix logger again 2020-10-19 18:52:17 +02:00
f8ad604e85 outposts: add more tests 2020-10-19 17:47:51 +02:00
774b9c8a61 outposts: update kubernetes controller to use pk as identifier instead of name 2020-10-19 17:39:12 +02:00
d8c522233e outposts: fix outpost mangling log output 2020-10-19 16:54:11 +02:00
82d50f7eaa outposts: fix list showing questionmark when only one outpost is registered 2020-10-19 16:34:16 +02:00
1c426c5136 outposts: trigger deployment re-create when selector changes 2020-10-19 16:21:39 +02:00
d6e14cc551 proxy: show version on startup 2020-10-19 16:21:13 +02:00
c3917ebc2e lifecycle: fix formatting 2020-10-19 16:13:45 +02:00
7203bd37a3 outposts: replace migration with string backup handler 2020-10-19 16:04:38 +02:00
597188c7ee lifecycle: fix migration trying to load all classes 2020-10-19 15:55:16 +02:00
ac4c314042 new release: 0.12.1-stable 2020-10-19 15:30:27 +02:00
05866d3544 providers/proxy: fix creation of ingress 2020-10-19 15:06:50 +02:00
6596bc6034 helm: fix permissions for ingresses in networking 2020-10-19 14:55:14 +02:00
c6661ef4d2 lifecycle: add migration to 0.12 which removes old outpost state from cache 2020-10-19 14:35:38 +02:00
386e23dfac core: fix api signature for view_key 2020-10-19 14:35:22 +02:00
5d7220ca70 helm: fix keys for s3 backup 2020-10-19 14:30:44 +02:00
62 changed files with 429 additions and 214 deletions

View File

@ -1,5 +1,5 @@
[bumpversion] [bumpversion]
current_version = 0.12.0-stable current_version = 0.12.4-stable
tag = True tag = True
commit = True commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*) parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)

View File

@ -18,11 +18,11 @@ jobs:
- name: Building Docker Image - name: Building Docker Image
run: docker build run: docker build
--no-cache --no-cache
-t beryju/passbook:0.12.0-stable -t beryju/passbook:0.12.4-stable
-t beryju/passbook:latest -t beryju/passbook:latest
-f Dockerfile . -f Dockerfile .
- name: Push Docker Container to Registry (versioned) - name: Push Docker Container to Registry (versioned)
run: docker push beryju/passbook:0.12.0-stable run: docker push beryju/passbook:0.12.4-stable
- name: Push Docker Container to Registry (latest) - name: Push Docker Container to Registry (latest)
run: docker push beryju/passbook:latest run: docker push beryju/passbook:latest
build-proxy: build-proxy:
@ -48,11 +48,11 @@ jobs:
cd proxy cd proxy
docker build \ docker build \
--no-cache \ --no-cache \
-t beryju/passbook-proxy:0.12.0-stable \ -t beryju/passbook-proxy:0.12.4-stable \
-t beryju/passbook-proxy:latest \ -t beryju/passbook-proxy:latest \
-f Dockerfile . -f Dockerfile .
- name: Push Docker Container to Registry (versioned) - name: Push Docker Container to Registry (versioned)
run: docker push beryju/passbook-proxy:0.12.0-stable run: docker push beryju/passbook-proxy:0.12.4-stable
- name: Push Docker Container to Registry (latest) - name: Push Docker Container to Registry (latest)
run: docker push beryju/passbook-proxy:latest run: docker push beryju/passbook-proxy:latest
build-static: build-static:
@ -77,11 +77,11 @@ jobs:
run: docker build run: docker build
--no-cache --no-cache
--network=$(docker network ls | grep github | awk '{print $1}') --network=$(docker network ls | grep github | awk '{print $1}')
-t beryju/passbook-static:0.12.0-stable -t beryju/passbook-static:0.12.4-stable
-t beryju/passbook-static:latest -t beryju/passbook-static:latest
-f static.Dockerfile . -f static.Dockerfile .
- name: Push Docker Container to Registry (versioned) - name: Push Docker Container to Registry (versioned)
run: docker push beryju/passbook-static:0.12.0-stable run: docker push beryju/passbook-static:0.12.4-stable
- name: Push Docker Container to Registry (latest) - name: Push Docker Container to Registry (latest)
run: docker push beryju/passbook-static:latest run: docker push beryju/passbook-static:latest
test-release: test-release:
@ -114,5 +114,5 @@ jobs:
SENTRY_PROJECT: passbook SENTRY_PROJECT: passbook
SENTRY_URL: https://sentry.beryju.org SENTRY_URL: https://sentry.beryju.org
with: with:
tagName: 0.12.0-stable tagName: 0.12.4-stable
environment: beryjuorg-prod environment: beryjuorg-prod

36
Pipfile.lock generated
View File

@ -74,18 +74,18 @@
}, },
"boto3": { "boto3": {
"hashes": [ "hashes": [
"sha256:9ab957090f7893172768bb8b8d2c5cce0afd36a9d36d73a9fb14168f72d75a8b", "sha256:2e16f02c8b832d401d958d7ca0a14c5bc7da17827918e6b24e5bc43dce8f496e",
"sha256:f56148e2c6b9a2d704218da42f07d72f00270bfddb13bc1bdea20d3327daa51e" "sha256:ab5353a968a4e664b9da2dd950169b755066525fcbfdfc90e7e49c8333d95c19"
], ],
"index": "pypi", "index": "pypi",
"version": "==1.15.18" "version": "==1.16.0"
}, },
"botocore": { "botocore": {
"hashes": [ "hashes": [
"sha256:de5f9fc0c7e88ee7ba831fa27475be258ae09ece99143ed623d3618a3c84ee2c", "sha256:226effa72e3ddd0a802e812c0e204999393ca7982fee754cc0c770a7a1caef3a",
"sha256:e224754230e7e015836ba20037cac6321e8e2ce9b8627c14d579fcb37249decd" "sha256:9bf8586b69f20cf0a8ed1e27338cd10ce847751d1a2fd98b92662565c8a2df24"
], ],
"version": "==1.18.18" "version": "==1.19.0"
}, },
"cachetools": { "cachetools": {
"hashes": [ "hashes": [
@ -1100,23 +1100,23 @@
"secure" "secure"
], ],
"hashes": [ "hashes": [
"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a", "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2",
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"
], ],
"index": "pypi", "index": "pypi",
"markers": null, "markers": null,
"version": "==1.25.10" "version": "==1.25.11"
}, },
"uvicorn": { "uvicorn": {
"extras": [ "extras": [
"standard" "standard"
], ],
"hashes": [ "hashes": [
"sha256:a461e76406088f448f36323f5ac774d50e5a552b6ccb54e4fca8d83ef614a7c2", "sha256:8ff7495c74b8286a341526ff9efa3988ebab9a4b2f561c7438c3cb420992d7dd",
"sha256:d06a25caa8dc680ad92eb3ec67363f5281c092059613a1cc0100acba37fc0f45" "sha256:e5dbed4a8a44c7b04376021021d63798d6a7bcfae9c654a0b153577b93854fba"
], ],
"index": "pypi", "index": "pypi",
"version": "==0.12.1" "version": "==0.12.2"
}, },
"uvloop": { "uvloop": {
"hashes": [ "hashes": [
@ -1476,10 +1476,10 @@
}, },
"pbr": { "pbr": {
"hashes": [ "hashes": [
"sha256:14bfd98f51c78a3dd22a1ef45cf194ad79eee4a19e8e1a0d5c7f8e81ffe182ea", "sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9",
"sha256:5adc0f9fc64319d8df5ca1e4e06eea674c26b80e6f00c530b18ce6a6592ead15" "sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00"
], ],
"version": "==5.5.0" "version": "==5.5.1"
}, },
"pep8-naming": { "pep8-naming": {
"hashes": [ "hashes": [
@ -1745,12 +1745,12 @@
"secure" "secure"
], ],
"hashes": [ "hashes": [
"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a", "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2",
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461" "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"
], ],
"index": "pypi", "index": "pypi",
"markers": null, "markers": null,
"version": "==1.25.10" "version": "==1.25.11"
}, },
"wrapt": { "wrapt": {
"hashes": [ "hashes": [

View File

@ -179,13 +179,13 @@ stages:
- task: CmdLine@2 - task: CmdLine@2
inputs: inputs:
script: | script: |
export PB_TEST_K8S=true
sudo pip install -U wheel pipenv sudo pip install -U wheel pipenv
pipenv install --dev pipenv install --dev
- task: CmdLine@2 - task: CmdLine@2
displayName: Run full test suite displayName: Run full test suite
inputs: inputs:
script: | script: |
export PB_TEST_K8S=true
pipenv run coverage run ./manage.py test passbook -v 3 pipenv run coverage run ./manage.py test passbook -v 3
- task: CmdLine@2 - task: CmdLine@2
inputs: inputs:
@ -221,7 +221,6 @@ stages:
- task: CmdLine@2 - task: CmdLine@2
inputs: inputs:
script: | script: |
export PB_TEST_K8S=true
sudo pip install -U wheel pipenv sudo pip install -U wheel pipenv
pipenv install --dev pipenv install --dev
- task: DockerCompose@0 - task: DockerCompose@0
@ -241,6 +240,7 @@ stages:
displayName: Run full test suite displayName: Run full test suite
inputs: inputs:
script: | script: |
export PB_TEST_K8S=true
pipenv run coverage run ./manage.py test e2e -v 3 --failfast pipenv run coverage run ./manage.py test e2e -v 3 --failfast
- task: CmdLine@2 - task: CmdLine@2
condition: always() condition: always()

View File

@ -19,7 +19,7 @@ services:
networks: networks:
- internal - internal
server: server:
image: beryju/passbook:${PASSBOOK_TAG:-0.12.0-stable} image: beryju/passbook:${PASSBOOK_TAG:-0.12.4-stable}
command: server command: server
environment: environment:
PASSBOOK_REDIS__HOST: redis PASSBOOK_REDIS__HOST: redis
@ -40,7 +40,7 @@ services:
env_file: env_file:
- .env - .env
worker: worker:
image: beryju/passbook:${PASSBOOK_TAG:-0.12.0-stable} image: beryju/passbook:${PASSBOOK_TAG:-0.12.4-stable}
command: worker command: worker
networks: networks:
- internal - internal
@ -54,7 +54,7 @@ services:
env_file: env_file:
- .env - .env
static: static:
image: beryju/passbook-static:${PASSBOOK_TAG:-0.12.0-stable} image: beryju/passbook-static:${PASSBOOK_TAG:-0.12.4-stable}
networks: networks:
- internal - internal
labels: labels:

View File

@ -95,7 +95,8 @@
}, },
"model": "passbook_flows.flowstagebinding", "model": "passbook_flows.flowstagebinding",
"attrs": { "attrs": {
"re_evaluate_policies": false "evaluate_on_plan": false,
"re_evaluate_policies": true
} }
}, },
{ {

View File

@ -13,7 +13,7 @@ Download the latest `docker-compose.yml` from [here](https://raw.githubuserconte
To optionally enable error-reporting, run `echo PASSBOOK_ERROR_REPORTING__ENABLED=true >> .env` To optionally enable error-reporting, run `echo PASSBOOK_ERROR_REPORTING__ENABLED=true >> .env`
To optionally deploy a different version run `echo PASSBOOK_TAG=0.12.0-stable >> .env` To optionally deploy a different version run `echo PASSBOOK_TAG=0.12.4-stable >> .env`
If this is a fresh passbook install, run the following commands to generate a password: If this is a fresh passbook install, run the following commands to generate a password:

View File

@ -11,7 +11,7 @@ This installation automatically applies database migrations on startup. After th
image: image:
name: beryju/passbook name: beryju/passbook
name_static: beryju/passbook-static name_static: beryju/passbook-static
tag: 0.12.0-stable tag: 0.12.4-stable
nameOverride: "" nameOverride: ""
@ -35,8 +35,8 @@ config:
# Enable Database Backups to S3 # Enable Database Backups to S3
# backup: # backup:
# access_key: access-key # accessKey: access-key
# secret_key: secret-key # secretKey: secret-key
# bucket: s3-bucket # bucket: s3-bucket
# region: eu-central-1 # region: eu-central-1
# host: s3-host # host: s3-host

View File

@ -6,6 +6,10 @@
### Backup ### Backup
!!! notice
Local backups are **enabled** by default, and will be run daily at 00:00
Local backups can be created by running the following command in your passbook installation directory Local backups can be created by running the following command in your passbook installation directory
``` ```
@ -14,15 +18,6 @@ docker-compose run --rm worker backup
This will dump the current database into the `./backups` folder. By default, the last 10 backups are kept. This will dump the current database into the `./backups` folder. By default, the last 10 backups are kept.
To schedule these backups, use the following snippet in a crontab
```
0 0 * * * bash -c "cd <passbook install location> && docker-compose run --rm worker backup" >/dev/null
```
!!! notice
passbook does support automatic backups on a schedule, however this is currently not recommended, as there is no way to monitor these scheduled tasks.
### Restore ### Restore
@ -42,11 +37,7 @@ After you've restored the backup, it is recommended to restart all services with
### S3 Configuration ### S3 Configuration
!!! notice #### Preparation
To trigger backups with S3 enabled, use the same commands as above.
#### S3 Preparation
passbook expects the bucket you select to already exist. The IAM User given to passbook should have the following permissions passbook expects the bucket you select to already exist. The IAM User given to passbook should have the following permissions
@ -101,11 +92,11 @@ Simply enable these options in your values.yaml file
```yaml ```yaml
# Enable Database Backups to S3 # Enable Database Backups to S3
backup: backup:
access_key: access-key accessKey: access-key
secret_key: secret-key secretKey: secret-key
bucket: s3-bucket bucket: s3-bucket
region: eu-central-1 region: eu-central-1
host: s3-host host: s3-host
``` ```
Afterwards, run a `helm upgrade` to update the ConfigMap. Because passbook-scheduled backups are not recommended currently, a Kubernetes CronJob is created that runs the backup daily. Afterwards, run a `helm upgrade` to update the ConfigMap. Backups are done automatically as above, at 00:00 every day.

View File

@ -26,7 +26,11 @@ return False
- `request.obj`: A Django Model instance. This is only set if the policy is run against an object. - `request.obj`: A Django Model instance. This is only set if the policy is run against an object.
- `request.context`: A dictionary with dynamic data. This depends on the origin of the execution. - `request.context`: A dictionary with dynamic data. This depends on the origin of the execution.
- `pb_is_sso_flow`: Boolean which is true if request was initiated by authenticating through an external provider. - `pb_is_sso_flow`: Boolean which is true if request was initiated by authenticating through an external provider.
- `pb_client_ip`: Client's IP Address or '255.255.255.255' if no IP Address could be extracted. Can be [compared](../expressions/index.md#comparing-ip-addresses) - `pb_client_ip`: Client's IP Address or 255.255.255.255 if no IP Address could be extracted. Can be [compared](../expressions/index.md#comparing-ip-addresses), for example
```python
return pb_client_ip in ip_network('10.0.0.0/24')
```
Additionally, when the policy is executed from a flow, every variable from the flow's current context is accessible under the `context` object. Additionally, when the policy is executed from a flow, every variable from the flow's current context is accessible under the `context` object.

View File

@ -8,7 +8,7 @@ from docker.types import Healthcheck
from selenium.webdriver.common.by import By from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as ec from selenium.webdriver.support import expected_conditions as ec
from e2e.utils import USER, SeleniumTestCase from e2e.utils import USER, SeleniumTestCase, retry
from passbook.flows.models import Flow, FlowDesignation, FlowStageBinding from passbook.flows.models import Flow, FlowDesignation, FlowStageBinding
from passbook.stages.email.models import EmailStage, EmailTemplates from passbook.stages.email.models import EmailStage, EmailTemplates
from passbook.stages.identification.models import IdentificationStage from passbook.stages.identification.models import IdentificationStage
@ -34,6 +34,7 @@ class TestFlowsEnroll(SeleniumTestCase):
), ),
} }
@retry()
def test_enroll_2_step(self): def test_enroll_2_step(self):
"""Test 2-step enroll flow""" """Test 2-step enroll flow"""
# First stage fields # First stage fields
@ -119,6 +120,7 @@ class TestFlowsEnroll(SeleniumTestCase):
"foo@bar.baz", "foo@bar.baz",
) )
@retry()
@override_settings(EMAIL_BACKEND="django.core.mail.backends.smtp.EmailBackend") @override_settings(EMAIL_BACKEND="django.core.mail.backends.smtp.EmailBackend")
def test_enroll_email(self): def test_enroll_email(self):
"""Test enroll with Email verification""" """Test enroll with Email verification"""

View File

@ -5,13 +5,14 @@ from unittest.case import skipUnless
from selenium.webdriver.common.by import By from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys from selenium.webdriver.common.keys import Keys
from e2e.utils import USER, SeleniumTestCase from e2e.utils import USER, SeleniumTestCase, retry
@skipUnless(platform.startswith("linux"), "requires local docker") @skipUnless(platform.startswith("linux"), "requires local docker")
class TestFlowsLogin(SeleniumTestCase): class TestFlowsLogin(SeleniumTestCase):
"""test default login flow""" """test default login flow"""
@retry()
def test_login(self): def test_login(self):
"""test default login flow""" """test default login flow"""
self.driver.get(f"{self.live_server_url}/flows/default-authentication-flow/") self.driver.get(f"{self.live_server_url}/flows/default-authentication-flow/")

View File

@ -12,7 +12,7 @@ from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec from selenium.webdriver.support import expected_conditions as ec
from e2e.utils import USER, SeleniumTestCase from e2e.utils import USER, SeleniumTestCase, retry
from passbook.flows.models import Flow, FlowStageBinding from passbook.flows.models import Flow, FlowStageBinding
from passbook.stages.otp_validate.models import OTPValidateStage from passbook.stages.otp_validate.models import OTPValidateStage
@ -21,6 +21,7 @@ from passbook.stages.otp_validate.models import OTPValidateStage
class TestFlowsOTP(SeleniumTestCase): class TestFlowsOTP(SeleniumTestCase):
"""test flow with otp stages""" """test flow with otp stages"""
@retry()
def test_otp_validate(self): def test_otp_validate(self):
"""test flow with otp stages""" """test flow with otp stages"""
sleep(1) sleep(1)
@ -52,6 +53,7 @@ class TestFlowsOTP(SeleniumTestCase):
USER().username, USER().username,
) )
@retry()
def test_otp_totp_setup(self): def test_otp_totp_setup(self):
"""test TOTP Setup stage""" """test TOTP Setup stage"""
flow: Flow = Flow.objects.get(slug="default-authentication-flow") flow: Flow = Flow.objects.get(slug="default-authentication-flow")
@ -98,6 +100,7 @@ class TestFlowsOTP(SeleniumTestCase):
self.assertTrue(TOTPDevice.objects.filter(user=USER(), confirmed=True).exists()) self.assertTrue(TOTPDevice.objects.filter(user=USER(), confirmed=True).exists())
@retry()
def test_otp_static_setup(self): def test_otp_static_setup(self):
"""test Static OTP Setup stage""" """test Static OTP Setup stage"""
flow: Flow = Flow.objects.get(slug="default-authentication-flow") flow: Flow = Flow.objects.get(slug="default-authentication-flow")

View File

@ -5,7 +5,7 @@ from unittest.case import skipUnless
from selenium.webdriver.common.by import By from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys from selenium.webdriver.common.keys import Keys
from e2e.utils import USER, SeleniumTestCase from e2e.utils import USER, SeleniumTestCase, retry
from passbook.core.models import User from passbook.core.models import User
from passbook.flows.models import Flow, FlowDesignation from passbook.flows.models import Flow, FlowDesignation
from passbook.providers.oauth2.generators import generate_client_secret from passbook.providers.oauth2.generators import generate_client_secret
@ -16,6 +16,7 @@ from passbook.stages.password.models import PasswordStage
class TestFlowsStageSetup(SeleniumTestCase): class TestFlowsStageSetup(SeleniumTestCase):
"""test stage setup flows""" """test stage setup flows"""
@retry()
def test_password_change(self): def test_password_change(self):
"""test password change flow""" """test password change flow"""
# Ensure that password stage has change_flow set # Ensure that password stage has change_flow set

View File

@ -9,7 +9,7 @@ from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec from selenium.webdriver.support import expected_conditions as ec
from e2e.utils import USER, SeleniumTestCase from e2e.utils import USER, SeleniumTestCase, retry
from passbook.core.models import Application from passbook.core.models import Application
from passbook.flows.models import Flow from passbook.flows.models import Flow
from passbook.policies.expression.models import ExpressionPolicy from passbook.policies.expression.models import ExpressionPolicy
@ -61,6 +61,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
}, },
} }
@retry()
def test_authorization_consent_implied(self): def test_authorization_consent_implied(self):
"""test OAuth Provider flow (default authorization flow with implied consent)""" """test OAuth Provider flow (default authorization flow with implied consent)"""
# Bootstrap all needed objects # Bootstrap all needed objects
@ -115,6 +116,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
USER().username, USER().username,
) )
@retry()
def test_authorization_consent_explicit(self): def test_authorization_consent_explicit(self):
"""test OAuth Provider flow (default authorization flow with explicit consent)""" """test OAuth Provider flow (default authorization flow with explicit consent)"""
# Bootstrap all needed objects # Bootstrap all needed objects
@ -184,6 +186,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
USER().username, USER().username,
) )
@retry()
def test_denied(self): def test_denied(self):
"""test OAuth Provider flow (default authorization flow, denied)""" """test OAuth Provider flow (default authorization flow, denied)"""
# Bootstrap all needed objects # Bootstrap all needed objects

View File

@ -10,7 +10,7 @@ from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec from selenium.webdriver.support import expected_conditions as ec
from structlog import get_logger from structlog import get_logger
from e2e.utils import USER, SeleniumTestCase from e2e.utils import USER, SeleniumTestCase, retry
from passbook.core.models import Application from passbook.core.models import Application
from passbook.crypto.models import CertificateKeyPair from passbook.crypto.models import CertificateKeyPair
from passbook.flows.models import Flow from passbook.flows.models import Flow
@ -80,6 +80,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
}, },
} }
@retry()
def test_redirect_uri_error(self): def test_redirect_uri_error(self):
"""test OpenID Provider flow (invalid redirect URI, check error message)""" """test OpenID Provider flow (invalid redirect URI, check error message)"""
sleep(1) sleep(1)
@ -122,6 +123,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
"Redirect URI Error", "Redirect URI Error",
) )
@retry()
def test_authorization_consent_implied(self): def test_authorization_consent_implied(self):
"""test OpenID Provider flow (default authorization flow with implied consent)""" """test OpenID Provider flow (default authorization flow with implied consent)"""
sleep(1) sleep(1)
@ -183,6 +185,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
USER().email, USER().email,
) )
@retry()
def test_authorization_logout(self): def test_authorization_logout(self):
"""test OpenID Provider flow with logout""" """test OpenID Provider flow with logout"""
sleep(1) sleep(1)
@ -252,6 +255,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
) )
self.driver.find_element(By.ID, "logout").click() self.driver.find_element(By.ID, "logout").click()
@retry()
def test_authorization_consent_explicit(self): def test_authorization_consent_explicit(self):
"""test OpenID Provider flow (default authorization flow with explicit consent)""" """test OpenID Provider flow (default authorization flow with explicit consent)"""
sleep(1) sleep(1)
@ -325,6 +329,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
USER().email, USER().email,
) )
@retry()
def test_authorization_denied(self): def test_authorization_denied(self):
"""test OpenID Provider flow (default authorization with access deny)""" """test OpenID Provider flow (default authorization with access deny)"""
sleep(1) sleep(1)

View File

@ -12,7 +12,7 @@ from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec from selenium.webdriver.support import expected_conditions as ec
from structlog import get_logger from structlog import get_logger
from e2e.utils import USER, SeleniumTestCase from e2e.utils import USER, SeleniumTestCase, retry
from passbook.core.models import Application from passbook.core.models import Application
from passbook.crypto.models import CertificateKeyPair from passbook.crypto.models import CertificateKeyPair
from passbook.flows.models import Flow from passbook.flows.models import Flow
@ -76,6 +76,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
LOGGER.info("Container failed healthcheck") LOGGER.info("Container failed healthcheck")
sleep(1) sleep(1)
@retry()
def test_redirect_uri_error(self): def test_redirect_uri_error(self):
"""test OpenID Provider flow (invalid redirect URI, check error message)""" """test OpenID Provider flow (invalid redirect URI, check error message)"""
sleep(1) sleep(1)
@ -119,6 +120,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
"Redirect URI Error", "Redirect URI Error",
) )
@retry()
def test_authorization_consent_implied(self): def test_authorization_consent_implied(self):
"""test OpenID Provider flow (default authorization flow with implied consent)""" """test OpenID Provider flow (default authorization flow with implied consent)"""
sleep(1) sleep(1)
@ -169,6 +171,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
self.assertEqual(body["IDTokenClaims"]["email"], USER().email) self.assertEqual(body["IDTokenClaims"]["email"], USER().email)
self.assertEqual(body["UserInfo"]["email"], USER().email) self.assertEqual(body["UserInfo"]["email"], USER().email)
@retry()
def test_authorization_consent_explicit(self): def test_authorization_consent_explicit(self):
"""test OpenID Provider flow (default authorization flow with explicit consent)""" """test OpenID Provider flow (default authorization flow with explicit consent)"""
sleep(1) sleep(1)
@ -229,6 +232,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
self.assertEqual(body["IDTokenClaims"]["email"], USER().email) self.assertEqual(body["IDTokenClaims"]["email"], USER().email)
self.assertEqual(body["UserInfo"]["email"], USER().email) self.assertEqual(body["UserInfo"]["email"], USER().email)
@retry()
def test_authorization_denied(self): def test_authorization_denied(self):
"""test OpenID Provider flow (default authorization with access deny)""" """test OpenID Provider flow (default authorization with access deny)"""
sleep(1) sleep(1)

View File

@ -11,7 +11,7 @@ from docker.models.containers import Container
from selenium.webdriver.common.by import By from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys from selenium.webdriver.common.keys import Keys
from e2e.utils import USER, SeleniumTestCase from e2e.utils import USER, SeleniumTestCase, retry
from passbook import __version__ from passbook import __version__
from passbook.core.models import Application from passbook.core.models import Application
from passbook.flows.models import Flow from passbook.flows.models import Flow
@ -57,6 +57,7 @@ class TestProviderProxy(SeleniumTestCase):
) )
return container return container
@retry()
def test_proxy_simple(self): def test_proxy_simple(self):
"""Test simple outpost setup with single provider""" """Test simple outpost setup with single provider"""
proxy: ProxyProvider = ProxyProvider.objects.create( proxy: ProxyProvider = ProxyProvider.objects.create(
@ -110,6 +111,7 @@ class TestProviderProxy(SeleniumTestCase):
class TestProviderProxyConnect(ChannelsLiveServerTestCase): class TestProviderProxyConnect(ChannelsLiveServerTestCase):
"""Test Proxy connectivity over websockets""" """Test Proxy connectivity over websockets"""
@retry()
def test_proxy_connectivity(self): def test_proxy_connectivity(self):
"""Test proxy connectivity over websocket""" """Test proxy connectivity over websocket"""
SeleniumTestCase().apply_default_data() SeleniumTestCase().apply_default_data()

View File

@ -12,7 +12,7 @@ from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec from selenium.webdriver.support import expected_conditions as ec
from structlog import get_logger from structlog import get_logger
from e2e.utils import USER, SeleniumTestCase from e2e.utils import USER, SeleniumTestCase, retry
from passbook.core.models import Application from passbook.core.models import Application
from passbook.crypto.models import CertificateKeyPair from passbook.crypto.models import CertificateKeyPair
from passbook.flows.models import Flow from passbook.flows.models import Flow
@ -66,6 +66,7 @@ class TestProviderSAML(SeleniumTestCase):
LOGGER.info("Container failed healthcheck") LOGGER.info("Container failed healthcheck")
sleep(1) sleep(1)
@retry()
def test_sp_initiated_implicit(self): def test_sp_initiated_implicit(self):
"""test SAML Provider flow SP-initiated flow (implicit consent)""" """test SAML Provider flow SP-initiated flow (implicit consent)"""
# Bootstrap all needed objects # Bootstrap all needed objects
@ -105,6 +106,7 @@ class TestProviderSAML(SeleniumTestCase):
self.assertEqual(body["attr"]["mail"], [USER().email]) self.assertEqual(body["attr"]["mail"], [USER().email])
self.assertEqual(body["attr"]["uid"], [str(USER().pk)]) self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
@retry()
def test_sp_initiated_explicit(self): def test_sp_initiated_explicit(self):
"""test SAML Provider flow SP-initiated flow (explicit consent)""" """test SAML Provider flow SP-initiated flow (explicit consent)"""
# Bootstrap all needed objects # Bootstrap all needed objects
@ -150,6 +152,7 @@ class TestProviderSAML(SeleniumTestCase):
self.assertEqual(body["attr"]["mail"], [USER().email]) self.assertEqual(body["attr"]["mail"], [USER().email])
self.assertEqual(body["attr"]["uid"], [str(USER().pk)]) self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
@retry()
def test_idp_initiated_implicit(self): def test_idp_initiated_implicit(self):
"""test SAML Provider flow IdP-initiated flow (implicit consent)""" """test SAML Provider flow IdP-initiated flow (implicit consent)"""
# Bootstrap all needed objects # Bootstrap all needed objects
@ -195,6 +198,7 @@ class TestProviderSAML(SeleniumTestCase):
self.assertEqual(body["attr"]["mail"], [USER().email]) self.assertEqual(body["attr"]["mail"], [USER().email])
self.assertEqual(body["attr"]["uid"], [str(USER().pk)]) self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
@retry()
def test_sp_initiated_denied(self): def test_sp_initiated_denied(self):
"""test SAML Provider flow SP-initiated flow (Policy denies access)""" """test SAML Provider flow SP-initiated flow (Policy denies access)"""
# Bootstrap all needed objects # Bootstrap all needed objects

View File

@ -14,7 +14,7 @@ from selenium.webdriver.support import expected_conditions as ec
from structlog import get_logger from structlog import get_logger
from yaml import safe_dump from yaml import safe_dump
from e2e.utils import SeleniumTestCase from e2e.utils import SeleniumTestCase, retry
from passbook.flows.models import Flow from passbook.flows.models import Flow
from passbook.providers.oauth2.generators import ( from passbook.providers.oauth2.generators import (
generate_client_id, generate_client_id,
@ -106,6 +106,7 @@ class TestSourceOAuth2(SeleniumTestCase):
consumer_secret=self.client_secret, consumer_secret=self.client_secret,
) )
@retry()
def test_oauth_enroll(self): def test_oauth_enroll(self):
"""test OAuth Source With With OIDC""" """test OAuth Source With With OIDC"""
self.create_objects() self.create_objects()
@ -159,6 +160,7 @@ class TestSourceOAuth2(SeleniumTestCase):
"admin@example.com", "admin@example.com",
) )
@retry()
@override_settings(SESSION_COOKIE_SAMESITE="strict") @override_settings(SESSION_COOKIE_SAMESITE="strict")
def test_oauth_samesite_strict(self): def test_oauth_samesite_strict(self):
"""test OAuth Source With SameSite set to strict """test OAuth Source With SameSite set to strict
@ -195,6 +197,7 @@ class TestSourceOAuth2(SeleniumTestCase):
"Authentication Failed.", "Authentication Failed.",
) )
@retry()
def test_oauth_enroll_auth(self): def test_oauth_enroll_auth(self):
"""test OAuth Source With With OIDC (enroll and authenticate again)""" """test OAuth Source With With OIDC (enroll and authenticate again)"""
self.test_oauth_enroll() self.test_oauth_enroll()
@ -291,6 +294,7 @@ class TestSourceOAuth1(SeleniumTestCase):
consumer_secret=self.client_secret, consumer_secret=self.client_secret,
) )
@retry()
def test_oauth_enroll(self): def test_oauth_enroll(self):
"""test OAuth Source With With OIDC""" """test OAuth Source With With OIDC"""
self.create_objects() self.create_objects()
@ -317,6 +321,7 @@ class TestSourceOAuth1(SeleniumTestCase):
self.driver.find_element(By.CSS_SELECTOR, "[name='confirm']").click() self.driver.find_element(By.CSS_SELECTOR, "[name='confirm']").click()
# Wait until we've loaded the user info page # Wait until we've loaded the user info page
sleep(2)
self.wait.until(ec.presence_of_element_located((By.ID, "user-settings"))) self.wait.until(ec.presence_of_element_located((By.ID, "user-settings")))
self.driver.get(self.url("passbook_core:user-settings")) self.driver.get(self.url("passbook_core:user-settings"))

View File

@ -10,7 +10,7 @@ from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec from selenium.webdriver.support import expected_conditions as ec
from structlog import get_logger from structlog import get_logger
from e2e.utils import SeleniumTestCase from e2e.utils import SeleniumTestCase, retry
from passbook.crypto.models import CertificateKeyPair from passbook.crypto.models import CertificateKeyPair
from passbook.flows.models import Flow from passbook.flows.models import Flow
from passbook.sources.saml.models import SAMLBindingTypes, SAMLSource from passbook.sources.saml.models import SAMLBindingTypes, SAMLSource
@ -92,6 +92,7 @@ class TestSourceSAML(SeleniumTestCase):
}, },
} }
@retry()
def test_idp_redirect(self): def test_idp_redirect(self):
"""test SAML Source With redirect binding""" """test SAML Source With redirect binding"""
# Bootstrap all needed objects # Bootstrap all needed objects
@ -141,6 +142,7 @@ class TestSourceSAML(SeleniumTestCase):
self.driver.find_element(By.ID, "id_username").get_attribute("value"), "" self.driver.find_element(By.ID, "id_username").get_attribute("value"), ""
) )
@retry()
def test_idp_post(self): def test_idp_post(self):
"""test SAML Source With post binding""" """test SAML Source With post binding"""
# Bootstrap all needed objects # Bootstrap all needed objects
@ -192,6 +194,7 @@ class TestSourceSAML(SeleniumTestCase):
self.driver.find_element(By.ID, "id_username").get_attribute("value"), "" self.driver.find_element(By.ID, "id_username").get_attribute("value"), ""
) )
@retry()
def test_idp_post_auto(self): def test_idp_post_auto(self):
"""test SAML Source With post binding (auto redirect)""" """test SAML Source With post binding (auto redirect)"""
# Bootstrap all needed objects # Bootstrap all needed objects

View File

@ -1,19 +1,22 @@
"""passbook e2e testing utilities""" """passbook e2e testing utilities"""
from functools import wraps
from glob import glob from glob import glob
from importlib.util import module_from_spec, spec_from_file_location from importlib.util import module_from_spec, spec_from_file_location
from inspect import getmembers, isfunction from inspect import getmembers, isfunction
from os import environ, makedirs from os import environ, makedirs
from time import sleep, time from time import sleep, time
from typing import Any, Dict, Optional from typing import Any, Callable, Dict, Optional
from django.apps import apps from django.apps import apps
from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.db import connection, transaction from django.db import connection, transaction
from django.db.utils import IntegrityError from django.db.utils import IntegrityError
from django.shortcuts import reverse from django.shortcuts import reverse
from django.test.testcases import TestCase
from docker import DockerClient, from_env from docker import DockerClient, from_env
from docker.models.containers import Container from docker.models.containers import Container
from selenium import webdriver from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.remote.webdriver import WebDriver from selenium.webdriver.remote.webdriver import WebDriver
from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support.ui import WebDriverWait
@ -123,3 +126,35 @@ class SeleniumTestCase(StaticLiveServerTestCase):
func(apps, schema_editor) func(apps, schema_editor)
except IntegrityError: except IntegrityError:
pass pass
def retry(max_retries=3, exceptions=None):
    """Decorator that retries a flaky e2e test up to ``max_retries`` times.

    Between attempts the test's ``tearDown`` and ``setUp`` are re-run so every
    retry starts from a clean state. By default only selenium's
    ``TimeoutException`` is retried; pass ``exceptions`` (a list of exception
    classes) to retry on other errors.

    Fix: parameter was misspelled ``max_retires`` (and compared as
    ``count > max_retires``); renamed to ``max_retries``. All call sites in
    this changeset use ``@retry()`` with no arguments, so this is compatible.
    """
    if not exceptions:
        exceptions = [TimeoutException]

    def retry_actual(func: Callable):
        """Inner decorator wrapping the actual test method."""
        # NOTE(review): this counter is created once per decorated method and
        # shared (via nonlocal) across all invocations in the same process —
        # presumably fine for test methods that run once per test session.
        count = 1

        @wraps(func)
        def wrapper(self: "TestCase", *args, **kwargs):
            """Run test again if we're below max_retries, including tearDown and
            setUp. Otherwise raise the error"""
            nonlocal count
            try:
                return func(self, *args, **kwargs)
            # pylint: disable=catching-non-exception
            except tuple(exceptions) as exc:
                count += 1
                if count > max_retries:
                    # Retries exhausted: surface the original exception.
                    # pylint: disable=raising-non-exception
                    raise exc
                # Reset test state before retrying, then recurse into the
                # wrapper so the retry itself is also guarded.
                self.tearDown()
                self.setUp()
                return wrapper(self, *args, **kwargs)

        return wrapper

    return retry_actual

View File

@ -1,8 +1,8 @@
apiVersion: v2 apiVersion: v2
appVersion: "0.12.0-stable" appVersion: "0.12.4-stable"
description: A Helm chart for passbook. description: A Helm chart for passbook.
name: passbook name: passbook
version: "0.12.0-stable" version: "0.12.4-stable"
icon: https://github.com/BeryJu/passbook/blob/master/docs/images/logo.svg icon: https://github.com/BeryJu/passbook/blob/master/docs/images/logo.svg
dependencies: dependencies:
- name: postgresql - name: postgresql

View File

@ -7,8 +7,8 @@ data:
POSTGRESQL__NAME: "{{ .Values.postgresql.postgresqlDatabase }}" POSTGRESQL__NAME: "{{ .Values.postgresql.postgresqlDatabase }}"
POSTGRESQL__USER: "{{ .Values.postgresql.postgresqlUsername }}" POSTGRESQL__USER: "{{ .Values.postgresql.postgresqlUsername }}"
{{- if .Values.backup }} {{- if .Values.backup }}
POSTGRESQL__S3_BACKUP__ACCESS_KEY: "{{ .Values.backup.access_key }}" POSTGRESQL__S3_BACKUP__ACCESS_KEY: "{{ .Values.backup.accessKey }}"
POSTGRESQL__S3_BACKUP__SECRET_KEY: "{{ .Values.backup.secret_key }}" POSTGRESQL__S3_BACKUP__SECRET_KEY: "{{ .Values.backup.secretKey }}"
POSTGRESQL__S3_BACKUP__BUCKET: "{{ .Values.backup.bucket }}" POSTGRESQL__S3_BACKUP__BUCKET: "{{ .Values.backup.bucket }}"
POSTGRESQL__S3_BACKUP__REGION: "{{ .Values.backup.region }}" POSTGRESQL__S3_BACKUP__REGION: "{{ .Values.backup.region }}"
POSTGRESQL__S3_BACKUP__HOST: "{{ .Values.backup.host }}" POSTGRESQL__S3_BACKUP__HOST: "{{ .Values.backup.host }}"

View File

@ -1,42 +0,0 @@
{{- if .Values.backup }}
apiVersion: batch/v1beta1
kind: CronJob
metadata:
name: {{ include "passbook.fullname" . }}-backup
labels:
app.kubernetes.io/name: {{ include "passbook.name" . }}
helm.sh/chart: {{ include "passbook.chart" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
spec:
schedule: "0 0 * * *"
jobTemplate:
spec:
template:
spec:
restartPolicy: Never
containers:
- name: {{ .Chart.Name }}
image: "{{ .Values.image.name }}:{{ .Values.image.tag }}"
args: [server]
envFrom:
- configMapRef:
name: {{ include "passbook.fullname" . }}-config
prefix: PASSBOOK_
env:
- name: PASSBOOK_SECRET_KEY
valueFrom:
secretKeyRef:
name: "{{ include "passbook.fullname" . }}-secret-key"
key: "secret_key"
- name: PASSBOOK_REDIS__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-redis"
key: "redis-password"
- name: PASSBOOK_POSTGRESQL__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-postgresql"
key: "postgresql-password"
{{- end}}

View File

@ -28,9 +28,9 @@ rules:
- "patch" - "patch"
- apiGroups: - apiGroups:
- "extensions" - "extensions"
- "networking" - "networking.k8s.io"
resources: resources:
- "ingress" - "ingresses"
verbs: verbs:
- "get" - "get"
- "create" - "create"

View File

@ -4,7 +4,7 @@
image: image:
name: beryju/passbook name: beryju/passbook
name_static: beryju/passbook-static name_static: beryju/passbook-static
tag: 0.12.0-stable tag: 0.12.4-stable
nameOverride: "" nameOverride: ""
@ -28,8 +28,8 @@ config:
# Enable Database Backups to S3 # Enable Database Backups to S3
# backup: # backup:
# access_key: access-key # accessKey: access-key
# secret_key: secret-key # secretKey: secret-key
# bucket: s3-bucket # bucket: s3-bucket
# region: eu-central-1 # region: eu-central-1
# host: s3-host # host: s3-host

View File

@ -47,7 +47,9 @@ if __name__ == "__main__":
# pyright: reportGeneralTypeIssues=false # pyright: reportGeneralTypeIssues=false
spec.loader.exec_module(mod) spec.loader.exec_module(mod)
for _, sub in getmembers(mod, isclass): for name, sub in getmembers(mod, isclass):
if name != "Migration":
continue
migration = sub(curr, conn) migration = sub(curr, conn)
if migration.needs_migration(): if migration.needs_migration():
LOGGER.info("Migration needs to be applied", migration=sub) LOGGER.info("Migration needs to be applied", migration=sub)

View File

@ -25,7 +25,7 @@ delete from django_migrations where app = 'passbook_stages_password' and
name = '0002_passwordstage_change_flow';""" name = '0002_passwordstage_change_flow';"""
class To010Migration(BaseMigration): class Migration(BaseMigration):
def needs_migration(self) -> bool: def needs_migration(self) -> bool:
self.cur.execute( self.cur.execute(
"select * from information_schema.tables where table_name='oidc_provider_client'" "select * from information_schema.tables where table_name='oidc_provider_client'"

View File

@ -1,2 +1,2 @@
"""passbook""" """passbook"""
__version__ = "0.12.0-stable" __version__ = "0.12.4-stable"

View File

@ -49,7 +49,7 @@
</span> </span>
</td> </td>
{% with states=outpost.state %} {% with states=outpost.state %}
{% if states|length > 1 %} {% if states|length > 0 %}
<td role="cell"> <td role="cell">
{% for state in states %} {% for state in states %}
<div> <div>

View File

@ -1,6 +1,4 @@
"""Tokens API Viewset""" """Tokens API Viewset"""
from uuid import UUID
from django.http.response import Http404 from django.http.response import Http404
from rest_framework.decorators import action from rest_framework.decorators import action
from rest_framework.request import Request from rest_framework.request import Request
@ -29,10 +27,9 @@ class TokenViewSet(ModelViewSet):
serializer_class = TokenSerializer serializer_class = TokenSerializer
@action(detail=True) @action(detail=True)
# pylint: disable=invalid-name def view_key(self, request: Request, identifier: str) -> Response:
def view_key(self, request: Request, pk: UUID) -> Response:
"""Return token key and log access""" """Return token key and log access"""
tokens = Token.filter_not_expired(pk=pk) tokens = Token.filter_not_expired(identifier=identifier)
if not tokens.exists(): if not tokens.exists():
raise Http404 raise Http404
token = tokens.first() token = tokens.first()

View File

@ -27,7 +27,15 @@ class FlowStageBindingSerializer(ModelSerializer):
class Meta: class Meta:
model = FlowStageBinding model = FlowStageBinding
fields = ["pk", "target", "stage", "re_evaluate_policies", "order", "policies"] fields = [
"pk",
"target",
"stage",
"evaluate_on_plan",
"re_evaluate_policies",
"order",
"policies",
]
class FlowStageBindingViewSet(ModelViewSet): class FlowStageBindingViewSet(ModelViewSet):

View File

@ -50,12 +50,10 @@ class FlowStageBindingForm(forms.ModelForm):
fields = [ fields = [
"target", "target",
"stage", "stage",
"evaluate_on_plan",
"re_evaluate_policies", "re_evaluate_policies",
"order", "order",
] ]
labels = {
"re_evaluate_policies": _("Re-evaluate Policies"),
}
widgets = { widgets = {
"name": forms.TextInput(), "name": forms.TextInput(),
} }

View File

@ -2,6 +2,7 @@
from dataclasses import dataclass from dataclasses import dataclass
from typing import TYPE_CHECKING, Optional from typing import TYPE_CHECKING, Optional
from django.http.request import HttpRequest
from structlog import get_logger from structlog import get_logger
from passbook.core.models import User from passbook.core.models import User
@ -20,7 +21,9 @@ class StageMarker:
"""Base stage marker class, no extra attributes, and has no special handler.""" """Base stage marker class, no extra attributes, and has no special handler."""
# pylint: disable=unused-argument # pylint: disable=unused-argument
def process(self, plan: "FlowPlan", stage: Stage) -> Optional[Stage]: def process(
self, plan: "FlowPlan", stage: Stage, http_request: Optional[HttpRequest]
) -> Optional[Stage]:
"""Process callback for this marker. This should be overridden by sub-classes. """Process callback for this marker. This should be overridden by sub-classes.
If a stage should be removed, return None.""" If a stage should be removed, return None."""
return stage return stage
@ -33,10 +36,14 @@ class ReevaluateMarker(StageMarker):
binding: PolicyBinding binding: PolicyBinding
user: User user: User
def process(self, plan: "FlowPlan", stage: Stage) -> Optional[Stage]: def process(
self, plan: "FlowPlan", stage: Stage, http_request: Optional[HttpRequest]
) -> Optional[Stage]:
"""Re-evaluate policies bound to stage, and if they fail, remove from plan""" """Re-evaluate policies bound to stage, and if they fail, remove from plan"""
engine = PolicyEngine(self.binding, self.user) engine = PolicyEngine(self.binding, self.user)
engine.use_cache = False engine.use_cache = False
if http_request:
engine.request.http_request = http_request
engine.request.context = plan.context engine.request.context = plan.context
engine.build() engine.build()
result = engine.result result = engine.result

View File

@ -0,0 +1,29 @@
# Generated by Django 3.1.2 on 2020-10-20 12:42
from django.db import migrations, models
class Migration(migrations.Migration):
    """Split FlowStageBinding policy evaluation into two flags:

    - ``re_evaluate_policies`` is repurposed: it now means "evaluate policies
      when the Stage is presented to the user" (help text updated, default
      stays False).
    - New ``evaluate_on_plan`` (default True) controls the original behavior
      of evaluating policies during flow planning.
    """

    dependencies = [
        ("passbook_flows", "0014_auto_20200925_2332"),
    ]

    operations = [
        # Only the help text changes here; column type/default are untouched.
        migrations.AlterField(
            model_name="flowstagebinding",
            name="re_evaluate_policies",
            field=models.BooleanField(
                default=False,
                help_text="Evaluate policies when the Stage is present to the user.",
            ),
        ),
        # Default True preserves existing planner behavior for old rows.
        migrations.AddField(
            model_name="flowstagebinding",
            name="evaluate_on_plan",
            field=models.BooleanField(
                default=True,
                help_text="Evaluate policies during the Flow planning process. Disable this for input-based policies.",
            ),
        ),
    ]

View File

@ -154,15 +154,19 @@ class FlowStageBinding(SerializerModel, PolicyBindingModel):
target = models.ForeignKey("Flow", on_delete=models.CASCADE) target = models.ForeignKey("Flow", on_delete=models.CASCADE)
stage = InheritanceForeignKey(Stage, on_delete=models.CASCADE) stage = InheritanceForeignKey(Stage, on_delete=models.CASCADE)
re_evaluate_policies = models.BooleanField( evaluate_on_plan = models.BooleanField(
default=False, default=True,
help_text=_( help_text=_(
( (
"When this option is enabled, the planner will re-evaluate " "Evaluate policies during the Flow planning process. "
"policies bound to this binding." "Disable this for input-based policies."
) )
), ),
) )
re_evaluate_policies = models.BooleanField(
default=False,
help_text=_("Evaluate policies when the Stage is present to the user."),
)
order = models.IntegerField() order = models.IntegerField()

View File

@ -46,7 +46,7 @@ class FlowPlan:
self.stages.append(stage) self.stages.append(stage)
self.markers.append(marker or StageMarker()) self.markers.append(marker or StageMarker())
def next(self) -> Optional[Stage]: def next(self, http_request: Optional[HttpRequest]) -> Optional[Stage]:
"""Return next pending stage from the bottom of the list""" """Return next pending stage from the bottom of the list"""
if not self.has_stages: if not self.has_stages:
return None return None
@ -55,7 +55,7 @@ class FlowPlan:
if marker.__class__ is not StageMarker: if marker.__class__ is not StageMarker:
LOGGER.debug("f(plan_inst): stage has marker", stage=stage, marker=marker) LOGGER.debug("f(plan_inst): stage has marker", stage=stage, marker=marker)
marked_stage = marker.process(self, stage) marked_stage = marker.process(self, stage, http_request)
if not marked_stage: if not marked_stage:
LOGGER.debug("f(plan_inst): marker returned none, next stage", stage=stage) LOGGER.debug("f(plan_inst): marker returned none, next stage", stage=stage)
self.stages.remove(stage) self.stages.remove(stage)
@ -63,7 +63,7 @@ class FlowPlan:
if not self.has_stages: if not self.has_stages:
return None return None
# pylint: disable=not-callable # pylint: disable=not-callable
return self.next() return self.next(http_request)
return marked_stage return marked_stage
def pop(self): def pop(self):
@ -159,23 +159,41 @@ class FlowPlanner:
for binding in FlowStageBinding.objects.filter( for binding in FlowStageBinding.objects.filter(
target__pk=self.flow.pk target__pk=self.flow.pk
).order_by("order"): ).order_by("order"):
binding: FlowStageBinding
stage = binding.stage
marker = StageMarker()
if binding.evaluate_on_plan:
LOGGER.debug(
"f(plan): evaluating on plan",
stage=binding.stage,
flow=self.flow,
)
engine = PolicyEngine(binding, user, request) engine = PolicyEngine(binding, user, request)
engine.request.context = plan.context engine.request.context = plan.context
engine.build() engine.build()
if engine.passing: if engine.passing:
LOGGER.debug( LOGGER.debug(
"f(plan): Stage passing", stage=binding.stage, flow=self.flow "f(plan): Stage passing",
stage=binding.stage,
flow=self.flow,
) )
plan.stages.append(binding.stage) else:
marker = StageMarker() stage = None
if binding.re_evaluate_policies: else:
LOGGER.debug(
"f(plan): not evaluating on plan",
stage=binding.stage,
flow=self.flow,
)
if binding.re_evaluate_policies and stage:
LOGGER.debug( LOGGER.debug(
"f(plan): Stage has re-evaluate marker", "f(plan): Stage has re-evaluate marker",
stage=binding.stage, stage=binding.stage,
flow=self.flow, flow=self.flow,
) )
marker = ReevaluateMarker(binding=binding, user=user) marker = ReevaluateMarker(binding=binding, user=user)
plan.markers.append(marker) if stage:
plan.append(stage, marker)
LOGGER.debug( LOGGER.debug(
"f(plan): Finished building", "f(plan): Finished building",
flow=self.flow, flow=self.flow,

View File

@ -86,7 +86,7 @@ class FlowExecutorView(View):
return to_stage_response(self.request, self.handle_invalid_flow(exc)) return to_stage_response(self.request, self.handle_invalid_flow(exc))
# We don't save the Plan after getting the next stage # We don't save the Plan after getting the next stage
# as it hasn't been successfully passed yet # as it hasn't been successfully passed yet
next_stage = self.plan.next() next_stage = self.plan.next(self.request)
if not next_stage: if not next_stage:
LOGGER.debug("f(exec): no more stages, flow is done.") LOGGER.debug("f(exec): no more stages, flow is done.")
return self._flow_done() return self._flow_done()

View File

@ -79,11 +79,18 @@ class MonitoredTask(Task):
_result: TaskResult _result: TaskResult
_uid: Optional[str]
def __init__(self, *args, **kwargs) -> None: def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.save_on_success = True self.save_on_success = True
self._uid = None
self._result = TaskResult(status=TaskResultStatus.ERROR, messages=[]) self._result = TaskResult(status=TaskResultStatus.ERROR, messages=[])
def set_uid(self, uid: str):
"""Set UID, so in the case of an unexpected error its saved correctly"""
self._uid = uid
def set_status(self, result: TaskResult): def set_status(self, result: TaskResult):
"""Set result for current run, will overwrite previous result.""" """Set result for current run, will overwrite previous result."""
self._result = result self._result = result
@ -92,6 +99,8 @@ class MonitoredTask(Task):
def after_return( def after_return(
self, status, retval, task_id, args: List[Any], kwargs: Dict[str, Any], einfo self, status, retval, task_id, args: List[Any], kwargs: Dict[str, Any], einfo
): ):
if not self._result.uid:
self._result.uid = self._uid
if self.save_on_success: if self.save_on_success:
TaskInfo( TaskInfo(
task_name=self.__name__, task_name=self.__name__,
@ -107,6 +116,8 @@ class MonitoredTask(Task):
# pylint: disable=too-many-arguments # pylint: disable=too-many-arguments
def on_failure(self, exc, task_id, args, kwargs, einfo): def on_failure(self, exc, task_id, args, kwargs, einfo):
if not self._result.uid:
self._result.uid = self._uid
TaskInfo( TaskInfo(
task_name=self.__name__, task_name=self.__name__,
task_description=self.__doc__, task_description=self.__doc__,

View File

@ -0,0 +1,34 @@
"""Database backup task"""
from datetime import datetime
from io import StringIO
from botocore.exceptions import BotoCoreError, ClientError
from django.contrib.humanize.templatetags.humanize import naturaltime
from django.core import management
from structlog import get_logger
from passbook.lib.tasks import MonitoredTask, TaskResult, TaskResultStatus
from passbook.root.celery import CELERY_APP
LOGGER = get_logger()
@CELERY_APP.task(bind=True, base=MonitoredTask)
def backup_database(self: MonitoredTask):  # pragma: no cover
    """Database backup"""
    try:
        # Record when the backup started so the status message can show a
        # human-readable relative time.
        start = datetime.now()
        # Capture the management command's output so it can be attached to
        # the task result.
        out = StringIO()
        management.call_command("dbbackup", quiet=True, stdout=out)
        messages = [
            f"Successfully finished database backup {naturaltime(start)}",
            out.getvalue(),
        ]
        self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, messages))
        LOGGER.info("Successfully backed up database.")
    except (IOError, BotoCoreError, ClientError) as exc:
        # Record the failure on the monitored task instead of raising.
        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))

View File

@ -21,9 +21,7 @@ class BaseController:
def __init__(self, outpost: Outpost): def __init__(self, outpost: Outpost):
self.outpost = outpost self.outpost = outpost
self.logger = get_logger( self.logger = get_logger()
controller=self.__class__.__name__, outpost=self.outpost
)
self.deployment_ports = {} self.deployment_ports = {}
# pylint: disable=invalid-name # pylint: disable=invalid-name
@ -35,7 +33,7 @@ class BaseController:
"""Call .up() but capture all log output and return it.""" """Call .up() but capture all log output and return it."""
with capture_logs() as logs: with capture_logs() as logs:
self.up() self.up()
return [f"{x['controller']}: {x['event']}" for x in logs] return [x["event"] for x in logs]
def down(self): def down(self):
"""Handler to delete everything we've created""" """Handler to delete everything we've created"""

View File

@ -35,9 +35,7 @@ class KubernetesObjectReconciler(Generic[T]):
def __init__(self, controller: "KubernetesController"): def __init__(self, controller: "KubernetesController"):
self.controller = controller self.controller = controller
self.namespace = controller.outpost.config.kubernetes_namespace self.namespace = controller.outpost.config.kubernetes_namespace
self.logger = get_logger( self.logger = get_logger()
controller=self.__class__.__name__, outpost=controller.outpost
)
@property @property
def name(self) -> str: def name(self) -> str:

View File

@ -1,5 +1,5 @@
"""Kubernetes Deployment Reconciler""" """Kubernetes Deployment Reconciler"""
from typing import TYPE_CHECKING from typing import TYPE_CHECKING, Dict
from kubernetes.client import ( from kubernetes.client import (
AppsV1Api, AppsV1Api,
@ -41,7 +41,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
@property @property
def name(self) -> str: def name(self) -> str:
return f"passbook-outpost-{self.outpost.name}" return f"passbook-outpost-{self.controller.outpost.uuid.hex}"
def reconcile(self, current: V1Deployment, reference: V1Deployment): def reconcile(self, current: V1Deployment, reference: V1Deployment):
if current.spec.replicas != reference.spec.replicas: if current.spec.replicas != reference.spec.replicas:
@ -52,6 +52,14 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
): ):
raise NeedsUpdate() raise NeedsUpdate()
def get_pod_meta(self) -> Dict[str, str]:
"""Get common object metadata"""
return {
"app.kubernetes.io/name": "passbook-outpost",
"app.kubernetes.io/managed-by": "passbook.beryju.org",
"passbook.beryju.org/outpost-uuid": self.controller.outpost.uuid.hex,
}
def get_reference_object(self) -> V1Deployment: def get_reference_object(self) -> V1Deployment:
"""Get deployment object for outpost""" """Get deployment object for outpost"""
# Generate V1ContainerPort objects # Generate V1ContainerPort objects
@ -59,17 +67,18 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
for port_name, port in self.controller.deployment_ports.items(): for port_name, port in self.controller.deployment_ports.items():
container_ports.append(V1ContainerPort(container_port=port, name=port_name)) container_ports.append(V1ContainerPort(container_port=port, name=port_name))
meta = self.get_object_meta(name=self.name) meta = self.get_object_meta(name=self.name)
secret_name = f"passbook-outpost-{self.controller.outpost.uuid.hex}-api"
return V1Deployment( return V1Deployment(
metadata=meta, metadata=meta,
spec=V1DeploymentSpec( spec=V1DeploymentSpec(
replicas=self.outpost.config.kubernetes_replicas, replicas=self.outpost.config.kubernetes_replicas,
selector=V1LabelSelector(match_labels=meta.labels), selector=V1LabelSelector(match_labels=self.get_pod_meta()),
template=V1PodTemplateSpec( template=V1PodTemplateSpec(
metadata=V1ObjectMeta(labels=meta.labels), metadata=V1ObjectMeta(labels=self.get_pod_meta()),
spec=V1PodSpec( spec=V1PodSpec(
containers=[ containers=[
V1Container( V1Container(
name=self.outpost.type, name=str(self.outpost.type),
image=f"{self.image_base}-{self.outpost.type}:{__version__}", image=f"{self.image_base}-{self.outpost.type}:{__version__}",
ports=container_ports, ports=container_ports,
env=[ env=[
@ -77,7 +86,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
name="PASSBOOK_HOST", name="PASSBOOK_HOST",
value_from=V1EnvVarSource( value_from=V1EnvVarSource(
secret_key_ref=V1SecretKeySelector( secret_key_ref=V1SecretKeySelector(
name=f"passbook-outpost-{self.outpost.name}-api", name=secret_name,
key="passbook_host", key="passbook_host",
) )
), ),
@ -86,7 +95,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
name="PASSBOOK_TOKEN", name="PASSBOOK_TOKEN",
value_from=V1EnvVarSource( value_from=V1EnvVarSource(
secret_key_ref=V1SecretKeySelector( secret_key_ref=V1SecretKeySelector(
name=f"passbook-outpost-{self.outpost.name}-api", name=secret_name,
key="token", key="token",
) )
), ),
@ -95,7 +104,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
name="PASSBOOK_INSECURE", name="PASSBOOK_INSECURE",
value_from=V1EnvVarSource( value_from=V1EnvVarSource(
secret_key_ref=V1SecretKeySelector( secret_key_ref=V1SecretKeySelector(
name=f"passbook-outpost-{self.outpost.name}-api", name=secret_name,
key="passbook_host_insecure", key="passbook_host_insecure",
) )
), ),
@ -117,9 +126,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
) )
def retrieve(self) -> V1Deployment: def retrieve(self) -> V1Deployment:
return self.api.read_namespaced_deployment( return self.api.read_namespaced_deployment(self.name, self.namespace)
f"passbook-outpost-{self.outpost.name}", self.namespace
)
def update(self, current: V1Deployment, reference: V1Deployment): def update(self, current: V1Deployment, reference: V1Deployment):
return self.api.patch_namespaced_deployment( return self.api.patch_namespaced_deployment(

View File

@ -27,7 +27,7 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
@property @property
def name(self) -> str: def name(self) -> str:
return f"passbook-outpost-{self.controller.outpost.name}-api" return f"passbook-outpost-{self.controller.outpost.uuid.hex}-api"
def reconcile(self, current: V1Secret, reference: V1Secret): def reconcile(self, current: V1Secret, reference: V1Secret):
for key in reference.data.keys(): for key in reference.data.keys():
@ -59,9 +59,7 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
) )
def retrieve(self) -> V1Secret: def retrieve(self) -> V1Secret:
return self.api.read_namespaced_secret( return self.api.read_namespaced_secret(self.name, self.namespace)
f"passbook-outpost-{self.controller.outpost.name}-api", self.namespace
)
def update(self, current: V1Secret, reference: V1Secret): def update(self, current: V1Secret, reference: V1Secret):
return self.api.patch_namespaced_secret( return self.api.patch_namespaced_secret(

View File

@ -7,6 +7,7 @@ from passbook.outposts.controllers.k8s.base import (
KubernetesObjectReconciler, KubernetesObjectReconciler,
NeedsUpdate, NeedsUpdate,
) )
from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
if TYPE_CHECKING: if TYPE_CHECKING:
from passbook.outposts.controllers.kubernetes import KubernetesController from passbook.outposts.controllers.kubernetes import KubernetesController
@ -21,7 +22,7 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
@property @property
def name(self) -> str: def name(self) -> str:
return f"passbook-outpost-{self.controller.outpost.name}" return f"passbook-outpost-{self.controller.outpost.uuid.hex}"
def reconcile(self, current: V1Service, reference: V1Service): def reconcile(self, current: V1Service, reference: V1Service):
if len(current.spec.ports) != len(reference.spec.ports): if len(current.spec.ports) != len(reference.spec.ports):
@ -36,9 +37,10 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
ports = [] ports = []
for port_name, port in self.controller.deployment_ports.items(): for port_name, port in self.controller.deployment_ports.items():
ports.append(V1ServicePort(name=port_name, port=port)) ports.append(V1ServicePort(name=port_name, port=port))
selector_labels = DeploymentReconciler(self.controller).get_pod_meta()
return V1Service( return V1Service(
metadata=meta, metadata=meta,
spec=V1ServiceSpec(ports=ports, selector=meta.labels, type="ClusterIP"), spec=V1ServiceSpec(ports=ports, selector=selector_labels, type="ClusterIP"),
) )
def create(self, reference: V1Service): def create(self, reference: V1Service):
@ -50,9 +52,7 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
) )
def retrieve(self) -> V1Service: def retrieve(self) -> V1Service:
return self.api.read_namespaced_service( return self.api.read_namespaced_service(self.name, self.namespace)
f"passbook-outpost-{self.controller.outpost.name}", self.namespace
)
def update(self, current: V1Service, reference: V1Service): def update(self, current: V1Service, reference: V1Service):
return self.api.patch_namespaced_service( return self.api.patch_namespaced_service(

View File

@ -5,6 +5,7 @@ from typing import Dict, List, Type
from kubernetes.client import OpenApiException from kubernetes.client import OpenApiException
from kubernetes.config import load_incluster_config, load_kube_config from kubernetes.config import load_incluster_config, load_kube_config
from kubernetes.config.config_exception import ConfigException from kubernetes.config.config_exception import ConfigException
from structlog.testing import capture_logs
from yaml import dump_all from yaml import dump_all
from passbook.outposts.controllers.base import BaseController, ControllerException from passbook.outposts.controllers.base import BaseController, ControllerException
@ -43,6 +44,18 @@ class KubernetesController(BaseController):
except OpenApiException as exc: except OpenApiException as exc:
raise ControllerException from exc raise ControllerException from exc
def up_with_logs(self) -> List[str]:
try:
all_logs = []
for reconcile_key in self.reconcile_order:
with capture_logs() as logs:
reconciler = self.reconcilers[reconcile_key](self)
reconciler.up()
all_logs += [f"{reconcile_key.title()}: {x['event']}" for x in logs]
return all_logs
except OpenApiException as exc:
raise ControllerException from exc
def down(self): def down(self):
try: try:
for reconcile_key in self.reconcile_order: for reconcile_key in self.reconcile_order:
@ -56,7 +69,6 @@ class KubernetesController(BaseController):
documents = [] documents = []
for reconcile_key in self.reconcile_order: for reconcile_key in self.reconcile_order:
reconciler = self.reconcilers[reconcile_key](self) reconciler = self.reconcilers[reconcile_key](self)
reconciler.up()
documents.append(reconciler.get_reference_object().to_dict()) documents.append(reconciler.get_reference_object().to_dict())
with StringIO() as _str: with StringIO() as _str:

View File

@ -204,7 +204,11 @@ class OutpostState:
def for_channel(outpost: Outpost, channel: str) -> "OutpostState": def for_channel(outpost: Outpost, channel: str) -> "OutpostState":
"""Get state for a single channel""" """Get state for a single channel"""
key = f"{outpost.state_cache_prefix}_{channel}" key = f"{outpost.state_cache_prefix}_{channel}"
data = cache.get(key, {"uid": channel}) default_data = {"uid": channel}
data = cache.get(key, default_data)
if isinstance(data, str):
cache.delete(key)
data = default_data
state = from_dict(OutpostState, data) state = from_dict(OutpostState, data)
state.uid = channel state.uid = channel
# pylint: disable=protected-access # pylint: disable=protected-access

View File

@ -35,9 +35,10 @@ def outpost_controller_all():
@CELERY_APP.task(bind=True, base=MonitoredTask) @CELERY_APP.task(bind=True, base=MonitoredTask)
def outpost_controller(self: MonitoredTask, outpost_pk: str): def outpost_controller(self: MonitoredTask, outpost_pk: str):
"""Launch controller deployment of Outpost""" """Create/update/monitor the deployment of an Outpost"""
logs = [] logs = []
outpost: Outpost = Outpost.objects.get(pk=outpost_pk) outpost: Outpost = Outpost.objects.get(pk=outpost_pk)
self.set_uid(slugify(outpost.name))
try: try:
if outpost.type == OutpostType.PROXY: if outpost.type == OutpostType.PROXY:
if outpost.deployment_type == OutpostDeploymentType.KUBERNETES: if outpost.deployment_type == OutpostDeploymentType.KUBERNETES:
@ -45,15 +46,9 @@ def outpost_controller(self: MonitoredTask, outpost_pk: str):
if outpost.deployment_type == OutpostDeploymentType.DOCKER: if outpost.deployment_type == OutpostDeploymentType.DOCKER:
logs = ProxyDockerController(outpost).up_with_logs() logs = ProxyDockerController(outpost).up_with_logs()
except ControllerException as exc: except ControllerException as exc:
self.set_status( self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
TaskResult(TaskResultStatus.ERROR, uid=slugify(outpost.name)).with_error(
exc
)
)
else: else:
self.set_status( self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, logs))
TaskResult(TaskResultStatus.SUCCESSFUL, logs, uid=slugify(outpost.name))
)
@CELERY_APP.task() @CELERY_APP.task()

View File

@ -1,9 +1,16 @@
"""outpost tests""" """outpost tests"""
from os import environ
from unittest.case import skipUnless
from unittest.mock import patch
from django.test import TestCase from django.test import TestCase
from guardian.models import UserObjectPermission from guardian.models import UserObjectPermission
from passbook.crypto.models import CertificateKeyPair from passbook.crypto.models import CertificateKeyPair
from passbook.flows.models import Flow from passbook.flows.models import Flow
from passbook.outposts.controllers.k8s.base import NeedsUpdate
from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
from passbook.outposts.controllers.kubernetes import KubernetesController
from passbook.outposts.models import Outpost, OutpostDeploymentType, OutpostType from passbook.outposts.models import Outpost, OutpostDeploymentType, OutpostType
from passbook.providers.proxy.models import ProxyProvider from passbook.providers.proxy.models import ProxyProvider
@ -58,3 +65,50 @@ class OutpostTests(TestCase):
permissions = UserObjectPermission.objects.filter(user=outpost.user) permissions = UserObjectPermission.objects.filter(user=outpost.user)
self.assertEqual(len(permissions), 1) self.assertEqual(len(permissions), 1)
self.assertEqual(permissions[0].object_pk, str(outpost.pk)) self.assertEqual(permissions[0].object_pk, str(outpost.pk))
@skipUnless("PB_TEST_K8S" in environ, "Kubernetes test cluster required")
class OutpostKubernetesTests(TestCase):
"""Test Kubernetes Controllers"""
def setUp(self):
super().setUp()
self.provider: ProxyProvider = ProxyProvider.objects.create(
name="test",
internal_host="http://localhost",
external_host="http://localhost",
authorization_flow=Flow.objects.first(),
)
self.outpost: Outpost = Outpost.objects.create(
name="test",
type=OutpostType.PROXY,
deployment_type=OutpostDeploymentType.KUBERNETES,
)
self.outpost.providers.add(self.provider)
self.outpost.save()
def test_deployment_reconciler(self):
"""test that deployment requires update"""
controller = KubernetesController(self.outpost)
deployment_reconciler = DeploymentReconciler(controller)
self.assertIsNotNone(deployment_reconciler.retrieve())
config = self.outpost.config
config.kubernetes_replicas = 3
self.outpost.config = config
with self.assertRaises(NeedsUpdate):
deployment_reconciler.reconcile(
deployment_reconciler.retrieve(),
deployment_reconciler.get_reference_object(),
)
with patch.object(deployment_reconciler, "image_base", "test"):
with self.assertRaises(NeedsUpdate):
deployment_reconciler.reconcile(
deployment_reconciler.retrieve(),
deployment_reconciler.get_reference_object(),
)
deployment_reconciler.delete(deployment_reconciler.get_reference_object())

View File

@ -1,5 +1,5 @@
"""passbook expression policy evaluator""" """passbook expression policy evaluator"""
from ipaddress import ip_address from ipaddress import ip_address, ip_network
from typing import List from typing import List
from django.http import HttpRequest from django.http import HttpRequest
@ -22,6 +22,8 @@ class PolicyEvaluator(BaseEvaluator):
super().__init__() super().__init__()
self._messages = [] self._messages = []
self._context["pb_message"] = self.expr_func_message self._context["pb_message"] = self.expr_func_message
self._context["ip_address"] = ip_address
self._context["ip_network"] = ip_network
self._filename = policy_name or "PolicyEvaluator" self._filename = policy_name or "PolicyEvaluator"
def expr_func_message(self, message: str): def expr_func_message(self, message: str):

View File

@ -34,7 +34,7 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
@property @property
def name(self) -> str: def name(self) -> str:
return f"passbook-outpost-{self.controller.outpost.name}" return f"passbook-outpost-{self.controller.outpost.uuid.hex}"
def reconcile( def reconcile(
self, current: NetworkingV1beta1Ingress, reference: NetworkingV1beta1Ingress self, current: NetworkingV1beta1Ingress, reference: NetworkingV1beta1Ingress
@ -56,7 +56,10 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
have_hosts = [rule.host for rule in reference.spec.rules] have_hosts = [rule.host for rule in reference.spec.rules]
have_hosts.sort() have_hosts.sort()
have_hosts_tls = reference.spec.tls.hosts have_hosts_tls = []
for tls_config in reference.spec.tls:
if tls_config:
have_hosts_tls += tls_config.hosts
have_hosts_tls.sort() have_hosts_tls.sort()
if have_hosts != expected_hosts: if have_hosts != expected_hosts:
@ -102,7 +105,7 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
) )
return NetworkingV1beta1Ingress( return NetworkingV1beta1Ingress(
metadata=meta, metadata=meta,
spec=NetworkingV1beta1IngressSpec(rules=rules, tls=tls_config), spec=NetworkingV1beta1IngressSpec(rules=rules, tls=[tls_config]),
) )
def create(self, reference: NetworkingV1beta1Ingress): def create(self, reference: NetworkingV1beta1Ingress):
@ -114,9 +117,7 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
) )
def retrieve(self) -> NetworkingV1beta1Ingress: def retrieve(self) -> NetworkingV1beta1Ingress:
return self.api.read_namespaced_ingress( return self.api.read_namespaced_ingress(self.name, self.namespace)
f"passbook-outpost-{self.controller.outpost.name}", self.namespace
)
def update( def update(
self, current: NetworkingV1beta1Ingress, reference: NetworkingV1beta1Ingress self, current: NetworkingV1beta1Ingress, reference: NetworkingV1beta1Ingress

View File

@ -31,9 +31,9 @@ class TestControllers(TestCase):
outpost.providers.add(provider) outpost.providers.add(provider)
outpost.save() outpost.save()
controller = ProxyKubernetesController(outpost.pk) controller = ProxyKubernetesController(outpost)
manifest = controller.get_static_deployment() manifest = controller.get_static_deployment()
self.assertEqual(len(list(yaml.load_all(manifest, Loader=yaml.SafeLoader))), 3) self.assertEqual(len(list(yaml.load_all(manifest, Loader=yaml.SafeLoader))), 4)
def test_kubernetes_controller_deploy(self): def test_kubernetes_controller_deploy(self):
"""Test Kubernetes Controller""" """Test Kubernetes Controller"""
@ -51,5 +51,6 @@ class TestControllers(TestCase):
outpost.providers.add(provider) outpost.providers.add(provider)
outpost.save() outpost.save()
controller = ProxyKubernetesController(outpost.pk) controller = ProxyKubernetesController(outpost)
controller.up() controller.up()
controller.down()

View File

@ -269,9 +269,14 @@ CELERY_TASK_SOFT_TIME_LIMIT = 600
CELERY_BEAT_SCHEDULE = { CELERY_BEAT_SCHEDULE = {
"clean_expired_models": { "clean_expired_models": {
"task": "passbook.core.tasks.clean_expired_models", "task": "passbook.core.tasks.clean_expired_models",
"schedule": crontab(minute="*/5"), # Run every 5 minutes "schedule": crontab(minute="*/5"),
"options": {"queue": "passbook_scheduled"}, "options": {"queue": "passbook_scheduled"},
} },
"db_backup": {
"task": "passbook.lib.tasks.backup.backup_database",
"schedule": crontab(minute=0, hour=0),
"options": {"queue": "passbook_scheduled"},
},
} }
CELERY_TASK_CREATE_MISSING_QUEUES = True CELERY_TASK_CREATE_MISSING_QUEUES = True
CELERY_TASK_DEFAULT_QUEUE = "passbook" CELERY_TASK_DEFAULT_QUEUE = "passbook"
@ -404,6 +409,7 @@ _LOGGING_HANDLER_MAP = {
"websockets": "WARNING", "websockets": "WARNING",
"daphne": "WARNING", "daphne": "WARNING",
"dbbackup": "ERROR", "dbbackup": "ERROR",
"kubernetes": "INFO",
} }
for handler_name, level in _LOGGING_HANDLER_MAP.items(): for handler_name, level in _LOGGING_HANDLER_MAP.items():
# pyright: reportGeneralTypeIssues=false # pyright: reportGeneralTypeIssues=false
@ -444,6 +450,7 @@ for _app in INSTALLED_APPS:
if DEBUG: if DEBUG:
INSTALLED_APPS.append("debug_toolbar") INSTALLED_APPS.append("debug_toolbar")
MIDDLEWARE.append("debug_toolbar.middleware.DebugToolbarMiddleware") MIDDLEWARE.append("debug_toolbar.middleware.DebugToolbarMiddleware")
CELERY_TASK_ALWAYS_EAGER = True
INSTALLED_APPS.append("passbook.core.apps.PassbookCoreConfig") INSTALLED_APPS.append("passbook.core.apps.PassbookCoreConfig")

View File

@ -20,8 +20,9 @@ def ldap_sync_all():
@CELERY_APP.task(bind=True, base=MonitoredTask) @CELERY_APP.task(bind=True, base=MonitoredTask)
def ldap_sync(self: MonitoredTask, source_pk: int): def ldap_sync(self: MonitoredTask, source_pk: int):
"""Sync a single source""" """Synchronization of an LDAP Source"""
source: LDAPSource = LDAPSource.objects.get(pk=source_pk) source: LDAPSource = LDAPSource.objects.get(pk=source_pk)
self.set_uid(slugify(source.name))
try: try:
syncer = LDAPSynchronizer(source) syncer = LDAPSynchronizer(source)
user_count = syncer.sync_users() user_count = syncer.sync_users()
@ -33,10 +34,7 @@ def ldap_sync(self: MonitoredTask, source_pk: int):
TaskResult( TaskResult(
TaskResultStatus.SUCCESSFUL, TaskResultStatus.SUCCESSFUL,
[f"Synced {user_count} users", f"Synced {group_count} groups"], [f"Synced {user_count} users", f"Synced {group_count} groups"],
uid=slugify(source.name),
) )
) )
except LDAPException as exc: except LDAPException as exc:
self.set_status( self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
TaskResult(TaskResultStatus.ERROR, uid=slugify(source.name)).with_error(exc)
)

View File

@ -13,7 +13,7 @@ LOGGER = get_logger()
@CELERY_APP.task(bind=True, base=MonitoredTask) @CELERY_APP.task(bind=True, base=MonitoredTask)
def clean_temporary_users(self: MonitoredTask): def clean_temporary_users(self: MonitoredTask):
"""Remove old temporary users""" """Remove temporary users created by SAML Sources"""
_now = now() _now = now()
messages = [] messages = []
deleted_users = 0 deleted_users = 0

View File

@ -37,6 +37,8 @@ def send_mails(stage: EmailStage, *messages: List[EmailMultiAlternatives]):
def send_mail(self: MonitoredTask, email_stage_pk: int, message: Dict[Any, Any]): def send_mail(self: MonitoredTask, email_stage_pk: int, message: Dict[Any, Any]):
"""Send Email for Email Stage. Retries are scheduled automatically.""" """Send Email for Email Stage. Retries are scheduled automatically."""
self.save_on_success = False self.save_on_success = False
message_id = make_msgid(domain=DNS_NAME)
self.set_uid(message_id)
try: try:
stage: EmailStage = EmailStage.objects.get(pk=email_stage_pk) stage: EmailStage = EmailStage.objects.get(pk=email_stage_pk)
backend = stage.backend backend = stage.backend
@ -48,7 +50,6 @@ def send_mail(self: MonitoredTask, email_stage_pk: int, message: Dict[Any, Any])
setattr(message_object, key, value) setattr(message_object, key, value)
message_object.from_email = stage.from_address message_object.from_email = stage.from_address
# Because we use the Message-ID as UID for the task, manually assign it # Because we use the Message-ID as UID for the task, manually assign it
message_id = make_msgid(domain=DNS_NAME)
message_object.extra_headers["Message-ID"] = message_id message_object.extra_headers["Message-ID"] = message_id
LOGGER.debug("Sending mail", to=message_object.to) LOGGER.debug("Sending mail", to=message_object.to)
@ -57,7 +58,6 @@ def send_mail(self: MonitoredTask, email_stage_pk: int, message: Dict[Any, Any])
TaskResult( TaskResult(
TaskResultStatus.SUCCESSFUL, TaskResultStatus.SUCCESSFUL,
messages=["Successfully sent Mail."], messages=["Successfully sent Mail."],
uid=message_id,
) )
) )
except (SMTPException, ConnectionError) as exc: except (SMTPException, ConnectionError) as exc:

View File

@ -1,7 +1,6 @@
"""passbook password stage""" """passbook password stage"""
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional
from django.contrib import messages
from django.contrib.auth import _clean_credentials from django.contrib.auth import _clean_credentials
from django.contrib.auth.backends import BaseBackend from django.contrib.auth.backends import BaseBackend
from django.contrib.auth.signals import user_login_failed from django.contrib.auth.signals import user_login_failed
@ -122,5 +121,4 @@ class PasswordStageView(FormView, StageView):
self.executor.plan.context[ self.executor.plan.context[
PLAN_CONTEXT_AUTHENTICATION_BACKEND PLAN_CONTEXT_AUTHENTICATION_BACKEND
] = user.backend ] = user.backend
messages.success(self.request, _("Successfully logged in!"))
return self.executor.stage_ok() return self.executor.stage_ok()

View File

@ -39,4 +39,5 @@ class UserLoginStageView(StageView):
flow_slug=self.executor.flow.slug, flow_slug=self.executor.flow.slug,
session_duration=self.executor.current_stage.session_duration, session_duration=self.executor.current_stage.session_duration,
) )
messages.success(self.request, _("Successfully logged in!"))
return self.executor.stage_ok() return self.executor.stage_ok()

View File

@ -10,6 +10,7 @@ import (
"strings" "strings"
"time" "time"
"github.com/BeryJu/passbook/proxy/pkg"
"github.com/BeryJu/passbook/proxy/pkg/client" "github.com/BeryJu/passbook/proxy/pkg/client"
"github.com/BeryJu/passbook/proxy/pkg/client/outposts" "github.com/BeryJu/passbook/proxy/pkg/client/outposts"
"github.com/getsentry/sentry-go" "github.com/getsentry/sentry-go"
@ -70,6 +71,7 @@ func doGlobalSetup(config map[string]interface{}) {
default: default:
log.SetLevel(log.DebugLevel) log.SetLevel(log.DebugLevel)
} }
log.WithField("version", pkg.VERSION).Info("Starting passbook proxy")
var dsn string var dsn string
if config[ConfigErrorReportingEnabled].(bool) { if config[ConfigErrorReportingEnabled].(bool) {

View File

@ -1,3 +1,3 @@
package pkg package pkg
const VERSION = "0.12.0-stable" const VERSION = "0.12.4-stable"

View File

@ -833,6 +833,11 @@ paths:
description: '' description: ''
required: false required: false
type: string type: string
- name: evaluate_on_plan
in: query
description: ''
required: false
type: string
- name: re_evaluate_policies - name: re_evaluate_policies
in: query in: query
description: '' description: ''
@ -6337,10 +6342,14 @@ definitions:
title: Stage title: Stage
type: string type: string
format: uuid format: uuid
evaluate_on_plan:
title: Evaluate on plan
description: Evaluate policies during the Flow planning process. Disable this
for input-based policies.
type: boolean
re_evaluate_policies: re_evaluate_policies:
title: Re evaluate policies title: Re evaluate policies
description: When this option is enabled, the planner will re-evaluate policies description: Evaluate policies when the Stage is present to the user.
bound to this binding.
type: boolean type: boolean
order: order:
title: Order title: Order