Compare commits


48 Commits

Author SHA1 Message Date
73497a27cc new release: 0.12.6-stable 2020-10-23 18:42:29 +02:00
f3098418f2 core: fix backup task not being registered, add fallback for api to remove info on ImportError
Celery only discovers tasks from installed apps, which `lib` is not, so the schedule never triggered it
2020-10-23 18:32:28 +02:00
a5197963b2 build(deps-dev): bump pytest-django from 4.0.0 to 4.1.0 (#293)
Bumps [pytest-django](https://github.com/pytest-dev/pytest-django) from 4.0.0 to 4.1.0.
- [Release notes](https://github.com/pytest-dev/pytest-django/releases)
- [Changelog](https://github.com/pytest-dev/pytest-django/blob/master/docs/changelog.rst)
- [Commits](https://github.com/pytest-dev/pytest-django/compare/v4.0.0...v4.1.0)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-23 09:38:49 +02:00
e4634bcc78 build(deps): bump boto3 from 1.16.2 to 1.16.3 (#294)
Bumps [boto3](https://github.com/boto/boto3) from 1.16.2 to 1.16.3.
- [Release notes](https://github.com/boto/boto3/releases)
- [Changelog](https://github.com/boto/boto3/blob/develop/CHANGELOG.rst)
- [Commits](https://github.com/boto/boto3/compare/1.16.2...1.16.3)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-23 08:01:43 +02:00
74da44a6a9 helm: add readme, general cleanup 2020-10-22 17:25:30 +02:00
3324473cd0 new release: 0.12.5-stable 2020-10-22 14:22:32 +02:00
39d8038533 e2e: Fix @retry decorator not truncating database 2020-10-22 14:05:29 +02:00
bbcf58705f lib: add configurable avatars, set to none mode for tests 2020-10-22 14:03:31 +02:00
7b5a0964b2 outposts: handle docker connection error on init 2020-10-22 12:50:06 +02:00
8eca76e464 root: fix docker permission error 2020-10-22 11:54:23 +02:00
fb9ab368f8 root: fix typo in docker-compose 2020-10-22 11:30:53 +02:00
877279b2ee build(deps): bump rollup in /passbook/static/static (#292)
Bumps [rollup](https://github.com/rollup/rollup) from 2.32.0 to 2.32.1.
- [Release notes](https://github.com/rollup/rollup/releases)
- [Changelog](https://github.com/rollup/rollup/blob/master/CHANGELOG.md)
- [Commits](https://github.com/rollup/rollup/compare/v2.32.0...v2.32.1)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-22 11:30:03 +02:00
301be4b411 build(deps): bump boto3 from 1.16.1 to 1.16.2 (#291)
Bumps [boto3](https://github.com/boto/boto3) from 1.16.1 to 1.16.2.
- [Release notes](https://github.com/boto/boto3/releases)
- [Changelog](https://github.com/boto/boto3/blob/develop/CHANGELOG.rst)
- [Commits](https://github.com/boto/boto3/compare/1.16.1...1.16.2)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-22 08:05:29 +02:00
728f527ccb build(deps): bump drf-yasg2 from 1.19.2 to 1.19.3 (#290)
Bumps [drf-yasg2](https://github.com/JoelLefkowitz/drf-yasg) from 1.19.2 to 1.19.3.
- [Release notes](https://github.com/JoelLefkowitz/drf-yasg/releases)
- [Changelog](https://github.com/JoelLefkowitz/drf-yasg/blob/master/docs/changelog.rst)
- [Commits](https://github.com/JoelLefkowitz/drf-yasg/compare/1.19.2...1.19.3)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-21 09:51:39 +02:00
3f1c790b1d build(deps): bump boto3 from 1.16.0 to 1.16.1 (#289)
Bumps [boto3](https://github.com/boto/boto3) from 1.16.0 to 1.16.1.
- [Release notes](https://github.com/boto/boto3/releases)
- [Changelog](https://github.com/boto/boto3/blob/develop/CHANGELOG.rst)
- [Commits](https://github.com/boto/boto3/compare/1.16.0...1.16.1)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-21 09:12:13 +02:00
b00573bde2 new release: 0.12.4-stable 2020-10-20 22:31:31 +02:00
aeee3ad7f9 e2e: add @retry decorator to make e2e tests more reliable 2020-10-20 18:51:17 +02:00
ef021495ef flows: revert evaluate_on_call rename for backwards compatibility 2020-10-20 15:41:50 +02:00
061eab4b36 docs: fix keys for example flows 2020-10-20 15:14:41 +02:00
870e01f836 flows: rename re_evaluate_policies to evaluate_on_call, add evaluate_on_plan 2020-10-20 15:06:36 +02:00
e2ca72adf0 stages/user_login: only show successful login message at login stage 2020-10-20 12:11:59 +02:00
395ef43eae policies/expression: fix ip_network not being imported by default 2020-10-20 12:05:56 +02:00
a4cc653757 new release: 0.12.3-stable 2020-10-20 10:24:45 +02:00
db4ff20906 outposts: fix service using incorrect pod selector 2020-10-20 10:18:05 +02:00
1f0fbd33b6 build(deps): bump urllib3 from 1.25.10 to 1.25.11 (#287)
Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.25.10 to 1.25.11.
- [Release notes](https://github.com/urllib3/urllib3/releases)
- [Changelog](https://github.com/urllib3/urllib3/blob/master/CHANGES.rst)
- [Commits](https://github.com/urllib3/urllib3/compare/1.25.10...1.25.11)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-20 10:17:46 +02:00
5de8d2721e build(deps): bump uvicorn from 0.12.1 to 0.12.2 (#286)
Bumps [uvicorn](https://github.com/encode/uvicorn) from 0.12.1 to 0.12.2.
- [Release notes](https://github.com/encode/uvicorn/releases)
- [Changelog](https://github.com/encode/uvicorn/blob/master/CHANGELOG.md)
- [Commits](https://github.com/encode/uvicorn/compare/0.12.1...0.12.2)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-20 10:09:37 +02:00
0d65da9a9e build(deps): bump boto3 from 1.15.18 to 1.16.0 (#288)
Bumps [boto3](https://github.com/boto/boto3) from 1.15.18 to 1.16.0.
- [Release notes](https://github.com/boto/boto3/releases)
- [Changelog](https://github.com/boto/boto3/blob/develop/CHANGELOG.rst)
- [Commits](https://github.com/boto/boto3/compare/1.15.18...1.16.0)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-10-20 09:34:55 +02:00
4316ee4330 root: implement db backups with monitored task, update docs 2020-10-19 22:17:47 +02:00
2ed9a1dbe3 */tasks: update phrasing 2020-10-19 21:35:31 +02:00
8e03824d20 lib: always set task's UID, even for unexpected errors 2020-10-19 21:30:21 +02:00
754dbdd0e5 outpost: fix logs for kubernetes controller 2020-10-19 21:29:58 +02:00
e13d348315 new release: 0.12.2-stable 2020-10-19 19:36:36 +02:00
169f3ebe5b outposts: fix logger again 2020-10-19 18:52:17 +02:00
f8ad604e85 outposts: add more tests 2020-10-19 17:47:51 +02:00
774b9c8a61 outposts: update kubernetes controller to use pk as identifier instead of name 2020-10-19 17:39:12 +02:00
d8c522233e outposts: fix outpost mangling log output 2020-10-19 16:54:11 +02:00
82d50f7eaa outposts: fix list showing questionmark when only one outpost is registered 2020-10-19 16:34:16 +02:00
1c426c5136 outposts: trigger deployment re-create when selector changes 2020-10-19 16:21:39 +02:00
d6e14cc551 proxy: show version on startup 2020-10-19 16:21:13 +02:00
c3917ebc2e lifecycle: fix formatting 2020-10-19 16:13:45 +02:00
7203bd37a3 outposts: replace migration with string backup handler 2020-10-19 16:04:38 +02:00
597188c7ee lifecycle: fix migration trying to load all classes 2020-10-19 15:55:16 +02:00
ac4c314042 new release: 0.12.1-stable 2020-10-19 15:30:27 +02:00
05866d3544 providers/proxy: fix creation of ingress 2020-10-19 15:06:50 +02:00
6596bc6034 helm: fix permissions for ingresses in networking 2020-10-19 14:55:14 +02:00
c6661ef4d2 lifecycle: add migration to 0.12 which removes old outpost state from cache 2020-10-19 14:35:38 +02:00
386e23dfac core: fix api signature for view_key 2020-10-19 14:35:22 +02:00
5d7220ca70 helm: fix keys for s3 backup 2020-10-19 14:30:44 +02:00
78 changed files with 572 additions and 329 deletions

View File

@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.12.0-stable
current_version = 0.12.6-stable
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)

View File

@ -18,11 +18,11 @@ jobs:
- name: Building Docker Image
run: docker build
--no-cache
-t beryju/passbook:0.12.0-stable
-t beryju/passbook:0.12.6-stable
-t beryju/passbook:latest
-f Dockerfile .
- name: Push Docker Container to Registry (versioned)
run: docker push beryju/passbook:0.12.0-stable
run: docker push beryju/passbook:0.12.6-stable
- name: Push Docker Container to Registry (latest)
run: docker push beryju/passbook:latest
build-proxy:
@ -48,11 +48,11 @@ jobs:
cd proxy
docker build \
--no-cache \
-t beryju/passbook-proxy:0.12.0-stable \
-t beryju/passbook-proxy:0.12.6-stable \
-t beryju/passbook-proxy:latest \
-f Dockerfile .
- name: Push Docker Container to Registry (versioned)
run: docker push beryju/passbook-proxy:0.12.0-stable
run: docker push beryju/passbook-proxy:0.12.6-stable
- name: Push Docker Container to Registry (latest)
run: docker push beryju/passbook-proxy:latest
build-static:
@ -77,11 +77,11 @@ jobs:
run: docker build
--no-cache
--network=$(docker network ls | grep github | awk '{print $1}')
-t beryju/passbook-static:0.12.0-stable
-t beryju/passbook-static:0.12.6-stable
-t beryju/passbook-static:latest
-f static.Dockerfile .
- name: Push Docker Container to Registry (versioned)
run: docker push beryju/passbook-static:0.12.0-stable
run: docker push beryju/passbook-static:0.12.6-stable
- name: Push Docker Container to Registry (latest)
run: docker push beryju/passbook-static:latest
test-release:
@ -114,5 +114,5 @@ jobs:
SENTRY_PROJECT: passbook
SENTRY_URL: https://sentry.beryju.org
with:
tagName: 0.12.0-stable
tagName: 0.12.6-stable
environment: beryjuorg-prod

View File

@ -25,7 +25,14 @@ RUN apt-get update && \
pip install -r /requirements.txt --no-cache-dir && \
apt-get remove --purge -y build-essential && \
apt-get autoremove --purge -y && \
adduser --system --no-create-home --uid 1000 --group --home /passbook passbook
# This is quite hacky, but docker has no guaranteed Group ID
# we could instead check for the GID of the socket and add the user dynamically,
# but then we have to drop permissions later
groupadd -g 998 docker_998 && \
groupadd -g 999 docker_999 && \
adduser --system --no-create-home --uid 1000 --group --home /passbook passbook && \
usermod -a -G docker_998 passbook && \
usermod -a -G docker_999 passbook
COPY ./passbook/ /passbook
COPY ./manage.py /
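The comment in this hunk names the alternative it rejects: instead of pre-creating groups for the GIDs Docker hosts commonly use (998/999), an entrypoint could read the GID that actually owns `/var/run/docker.sock` and add the `passbook` user to that group at startup. A rough sketch of that alternative, not part of this changeset:

```python
#!/usr/bin/env python3
# Illustrative entrypoint sketch (not part of this changeset): grant the
# "passbook" user access to the mounted Docker socket by joining whatever
# group owns it on this particular host.
import grp
import os
import stat
import subprocess

SOCKET = "/var/run/docker.sock"

if os.path.exists(SOCKET) and stat.S_ISSOCK(os.stat(SOCKET).st_mode):
    gid = os.stat(SOCKET).st_gid
    try:
        group = grp.getgrgid(gid).gr_name
    except KeyError:
        # No group with that GID exists inside the container yet; create one.
        group = f"docker_{gid}"
        subprocess.run(["groupadd", "-g", str(gid), group], check=True)
    # Requires running as root before dropping privileges, which is exactly
    # the trade-off the Dockerfile comment mentions.
    subprocess.run(["usermod", "-a", "-G", group, "passbook"], check=True)
```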

Pipfile.lock (generated)
View File

@ -74,18 +74,18 @@
},
"boto3": {
"hashes": [
"sha256:9ab957090f7893172768bb8b8d2c5cce0afd36a9d36d73a9fb14168f72d75a8b",
"sha256:f56148e2c6b9a2d704218da42f07d72f00270bfddb13bc1bdea20d3327daa51e"
"sha256:270ac22a66ce3313e908946193df6e0fb3e81cdf60f5113d62da1d8991b75030",
"sha256:e2857738affb394bbe96473de2ed01331685d6e313bb1a3328fd5f47841429cc"
],
"index": "pypi",
"version": "==1.15.18"
"version": "==1.16.3"
},
"botocore": {
"hashes": [
"sha256:de5f9fc0c7e88ee7ba831fa27475be258ae09ece99143ed623d3618a3c84ee2c",
"sha256:e224754230e7e015836ba20037cac6321e8e2ce9b8627c14d579fcb37249decd"
"sha256:4ea4c74d244c1b4701387fd1abe6a5e1833dc621c6d39f8888f0bfa95ddd82f5",
"sha256:f5084376a8519332a200737f5cd80e87f47868b7da4d57fc192397670e0af022"
],
"version": "==1.18.18"
"version": "==1.19.3"
},
"cachetools": {
"hashes": [
@ -373,11 +373,11 @@
},
"drf-yasg2": {
"hashes": [
"sha256:c4aa21d52f3964f99748eed68eb24be0fdad65e55bb56b99ae85c950718bac64",
"sha256:e880b3fa298a614360f4d882e8bc1712b51e1b28696acbd2684ac0ab18275a62"
"sha256:65826bf19e5222d38b84380468303c8c389d0b9e2335ee6efa4151ba87ca0a3f",
"sha256:6c662de6e0ffd4f74c49c06a88b8a9d1eb4bc9d7bfe82dac9f80a51a23cacecb"
],
"index": "pypi",
"version": "==1.19.2"
"version": "==1.19.3"
},
"eight": {
"hashes": [
@ -1100,23 +1100,23 @@
"secure"
],
"hashes": [
"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a",
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"
"sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2",
"sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"
],
"index": "pypi",
"markers": null,
"version": "==1.25.10"
"version": "==1.25.11"
},
"uvicorn": {
"extras": [
"standard"
],
"hashes": [
"sha256:a461e76406088f448f36323f5ac774d50e5a552b6ccb54e4fca8d83ef614a7c2",
"sha256:d06a25caa8dc680ad92eb3ec67363f5281c092059613a1cc0100acba37fc0f45"
"sha256:8ff7495c74b8286a341526ff9efa3988ebab9a4b2f561c7438c3cb420992d7dd",
"sha256:e5dbed4a8a44c7b04376021021d63798d6a7bcfae9c654a0b153577b93854fba"
],
"index": "pypi",
"version": "==0.12.1"
"version": "==0.12.2"
},
"uvloop": {
"hashes": [
@ -1400,10 +1400,10 @@
},
"gitpython": {
"hashes": [
"sha256:138016d519bf4dd55b22c682c904ed2fd0235c3612b2f8f65ce218ff358deed8",
"sha256:a03f728b49ce9597a6655793207c6ab0da55519368ff5961e4a74ae475b9fa8e"
"sha256:58483ad99811321e3c0b52c8b2229ff517499229a4854752b7d128005986e409",
"sha256:f488d43600d7299567b59fe41497d313e7c1253a9f2a8ebd2df8af2a1151c71d"
],
"version": "==3.1.9"
"version": "==3.1.10"
},
"iniconfig": {
"hashes": [
@ -1476,10 +1476,10 @@
},
"pbr": {
"hashes": [
"sha256:14bfd98f51c78a3dd22a1ef45cf194ad79eee4a19e8e1a0d5c7f8e81ffe182ea",
"sha256:5adc0f9fc64319d8df5ca1e4e06eea674c26b80e6f00c530b18ce6a6592ead15"
"sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9",
"sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00"
],
"version": "==5.5.0"
"version": "==5.5.1"
},
"pep8-naming": {
"hashes": [
@ -1582,11 +1582,11 @@
},
"pytest-django": {
"hashes": [
"sha256:0e91003fdd41ac0322c1978682be2ca180bc564203dd53c698f99242bf513614",
"sha256:5f964ccda1f551e00589ab0679a7c45c36c509a44b5bfb5ad07954e0ae3f4bed"
"sha256:10e384e6b8912ded92db64c58be8139d9ae23fb8361e5fc139d8e4f8fc601bc2",
"sha256:26f02c16d36fd4c8672390deebe3413678d89f30720c16efb8b2a6bf63b9041f"
],
"index": "pypi",
"version": "==4.0.0"
"version": "==4.1.0"
},
"pytz": {
"hashes": [
@ -1614,35 +1614,35 @@
},
"regex": {
"hashes": [
"sha256:02686a2f0b1a4be0facdd0d3ad4dc6c23acaa0f38fb5470d892ae88584ba705c",
"sha256:137da580d1e6302484be3ef41d72cf5c3ad22a076070051b7449c0e13ab2c482",
"sha256:20cdd7e1736f4f61a5161aa30d05ac108ab8efc3133df5eb70fe1e6a23ea1ca6",
"sha256:25991861c6fef1e5fd0a01283cf5658c5e7f7aa644128e85243bc75304e91530",
"sha256:26b85672275d8c7a9d4ff93dbc4954f5146efdb2ecec89ad1de49439984dea14",
"sha256:2f60ba5c33f00ce9be29a140e6f812e39880df8ba9cb92ad333f0016dbc30306",
"sha256:3dd952f3f8dc01b72c0cf05b3631e05c50ac65ddd2afdf26551638e97502107b",
"sha256:578ac6379e65eb8e6a85299b306c966c852712c834dc7eef0ba78d07a828f67b",
"sha256:5d4a3221f37520bb337b64a0632716e61b26c8ae6aaffceeeb7ad69c009c404b",
"sha256:608d6c05452c0e6cc49d4d7407b4767963f19c4d2230fa70b7201732eedc84f2",
"sha256:65b6b018b07e9b3b6a05c2c3bb7710ed66132b4df41926c243887c4f1ff303d5",
"sha256:698f8a5a2815e1663d9895830a063098ae2f8f2655ae4fdc5dfa2b1f52b90087",
"sha256:6c72adb85adecd4522a488a751e465842cdd2a5606b65464b9168bf029a54272",
"sha256:6d4cdb6c20e752426b2e569128488c5046fb1b16b1beadaceea9815c36da0847",
"sha256:6e9f72e0ee49f7d7be395bfa29e9533f0507a882e1e6bf302c0a204c65b742bf",
"sha256:828618f3c3439c5e6ef8621e7c885ca561bbaaba90ddbb6a7dfd9e1ec8341103",
"sha256:85b733a1ef2b2e7001aff0e204a842f50ad699c061856a214e48cfb16ace7d0c",
"sha256:8958befc139ac4e3f16d44ec386c490ea2121ed8322f4956f83dd9cad8e9b922",
"sha256:a51e51eecdac39a50ede4aeed86dbef4776e3b73347d31d6ad0bc9648ba36049",
"sha256:aeac7c9397480450016bc4a840eefbfa8ca68afc1e90648aa6efbfe699e5d3bb",
"sha256:aef23aed9d4017cc74d37f703d57ce254efb4c8a6a01905f40f539220348abf9",
"sha256:af1f5e997dd1ee71fb6eb4a0fb6921bf7a778f4b62f1f7ef0d7445ecce9155d6",
"sha256:b5eeaf4b5ef38fab225429478caf71f44d4a0b44d39a1aa4d4422cda23a9821b",
"sha256:d25f5cca0f3af6d425c9496953445bf5b288bb5b71afc2b8308ad194b714c159",
"sha256:d81be22d5d462b96a2aa5c512f741255ba182995efb0114e5a946fe254148df1",
"sha256:e935a166a5f4c02afe3f7e4ce92ce5a786f75c6caa0c4ce09c922541d74b77e8",
"sha256:ef3a55b16c6450574734db92e0a3aca283290889934a23f7498eaf417e3af9f0"
"sha256:0cb23ed0e327c18fb7eac61ebbb3180ebafed5b9b86ca2e15438201e5903b5dd",
"sha256:1a065e7a6a1b4aa851a0efa1a2579eabc765246b8b3a5fd74000aaa3134b8b4e",
"sha256:1a511470db3aa97432ac8c1bf014fcc6c9fbfd0f4b1313024d342549cf86bcd6",
"sha256:1c447b0d108cddc69036b1b3910fac159f2b51fdeec7f13872e059b7bc932be1",
"sha256:2278453c6a76280b38855a263198961938108ea2333ee145c5168c36b8e2b376",
"sha256:240509721a663836b611fa13ca1843079fc52d0b91ef3f92d9bba8da12e768a0",
"sha256:4e21340c07090ddc8c16deebfd82eb9c9e1ec5e62f57bb86194a2595fd7b46e0",
"sha256:570e916a44a361d4e85f355aacd90e9113319c78ce3c2d098d2ddf9631b34505",
"sha256:59d5c6302d22c16d59611a9fd53556554010db1d47e9df5df37be05007bebe75",
"sha256:6a46eba253cedcbe8a6469f881f014f0a98819d99d341461630885139850e281",
"sha256:6f567df0601e9c7434958143aebea47a9c4b45434ea0ae0286a4ec19e9877169",
"sha256:781906e45ef1d10a0ed9ec8ab83a09b5e0d742de70e627b20d61ccb1b1d3964d",
"sha256:8469377a437dbc31e480993399fd1fd15fe26f382dc04c51c9cb73e42965cc06",
"sha256:8cd0d587aaac74194ad3e68029124c06245acaeddaae14cb45844e5c9bebeea4",
"sha256:97a023f97cddf00831ba04886d1596ef10f59b93df7f855856f037190936e868",
"sha256:a973d5a7a324e2a5230ad7c43f5e1383cac51ef4903bf274936a5634b724b531",
"sha256:af360e62a9790e0a96bc9ac845d87bfa0e4ee0ee68547ae8b5a9c1030517dbef",
"sha256:b706c70070eea03411b1761fff3a2675da28d042a1ab7d0863b3efe1faa125c9",
"sha256:bfd7a9fddd11d116a58b62ee6c502fd24cfe22a4792261f258f886aa41c2a899",
"sha256:c30d8766a055c22e39dd7e1a4f98f6266169f2de05db737efe509c2fb9c8a3c8",
"sha256:c53dc8ee3bb7b7e28ee9feb996a0c999137be6c1d3b02cb6b3c4cba4f9e5ed09",
"sha256:c95d514093b80e5309bdca5dd99e51bcf82c44043b57c34594d9d7556bd04d05",
"sha256:d43cf21df524283daa80ecad551c306b7f52881c8d0fe4e3e76a96b626b6d8d8",
"sha256:d62205f00f461fe8b24ade07499454a3b7adf3def1225e258b994e2215fd15c5",
"sha256:e289a857dca3b35d3615c3a6a438622e20d1bf0abcb82c57d866c8d0be3f44c4",
"sha256:e5f6aa56dda92472e9d6f7b1e6331f4e2d51a67caafff4d4c5121cadac03941e",
"sha256:f4b1c65ee86bfbf7d0c3dfd90592a9e3d6e9ecd36c367c884094c050d4c35d04"
],
"version": "==2020.10.15"
"version": "==2020.10.23"
},
"requirements-detector": {
"hashes": [
@ -1745,12 +1745,12 @@
"secure"
],
"hashes": [
"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a",
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"
"sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2",
"sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"
],
"index": "pypi",
"markers": null,
"version": "==1.25.10"
"version": "==1.25.11"
},
"wrapt": {
"hashes": [

View File

@ -179,13 +179,13 @@ stages:
- task: CmdLine@2
inputs:
script: |
export PB_TEST_K8S=true
sudo pip install -U wheel pipenv
pipenv install --dev
- task: CmdLine@2
displayName: Run full test suite
inputs:
script: |
export PB_TEST_K8S=true
pipenv run coverage run ./manage.py test passbook -v 3
- task: CmdLine@2
inputs:
@ -221,7 +221,6 @@ stages:
- task: CmdLine@2
inputs:
script: |
export PB_TEST_K8S=true
sudo pip install -U wheel pipenv
pipenv install --dev
- task: DockerCompose@0
@ -241,6 +240,7 @@ stages:
displayName: Run full test suite
inputs:
script: |
export PB_TEST_K8S=true
pipenv run coverage run ./manage.py test e2e -v 3 --failfast
- task: CmdLine@2
condition: always()

View File

@ -19,7 +19,7 @@ services:
networks:
- internal
server:
image: beryju/passbook:${PASSBOOK_TAG:-0.12.0-stable}
image: beryju/passbook:${PASSBOOK_TAG:-0.12.6-stable}
command: server
environment:
PASSBOOK_REDIS__HOST: redis
@ -40,7 +40,7 @@ services:
env_file:
- .env
worker:
image: beryju/passbook:${PASSBOOK_TAG:-0.12.0-stable}
image: beryju/passbook:${PASSBOOK_TAG:-0.12.6-stable}
command: worker
networks:
- internal
@ -50,11 +50,11 @@ services:
PASSBOOK_POSTGRESQL__PASSWORD: ${PG_PASS}
volumes:
- ./backups:/backups
- /var/run/docker.socket:/var/run/docker.socket
- /var/run/docker.sock:/var/run/docker.sock
env_file:
- .env
static:
image: beryju/passbook-static:${PASSBOOK_TAG:-0.12.0-stable}
image: beryju/passbook-static:${PASSBOOK_TAG:-0.12.6-stable}
networks:
- internal
labels:

View File

@ -95,7 +95,8 @@
},
"model": "passbook_flows.flowstagebinding",
"attrs": {
"re_evaluate_policies": false
"evaluate_on_plan": false,
"re_evaluate_policies": true
}
},
{

View File

@ -13,7 +13,7 @@ Download the latest `docker-compose.yml` from [here](https://raw.githubuserconte
To optionally enable error-reporting, run `echo PASSBOOK_ERROR_REPORTING__ENABLED=true >> .env`
To optionally deploy a different version run `echo PASSBOOK_TAG=0.12.0-stable >> .env`
To optionally deploy a different version run `echo PASSBOOK_TAG=0.12.6-stable >> .env`
If this is a fresh passbook install run the following commands to generate a password:

View File

@ -11,9 +11,7 @@ This installation automatically applies database migrations on startup. After th
image:
name: beryju/passbook
name_static: beryju/passbook-static
tag: 0.12.0-stable
nameOverride: ""
tag: 0.12.6-stable
serverReplicas: 1
workerReplicas: 1
@ -35,8 +33,8 @@ config:
# Enable Database Backups to S3
# backup:
# access_key: access-key
# secret_key: secret-key
# accessKey: access-key
# secretKey: secret-key
# bucket: s3-bucket
# region: eu-central-1
# host: s3-host
@ -45,7 +43,6 @@ ingress:
annotations: {}
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
path: /
hosts:
- passbook.k8s.local
tls: []

View File

@ -6,6 +6,10 @@
### Backup
!!! notice
Local backups are **enabled** by default, and will be run daily at 00:00
Local backups can be created by running the following command in your passbook installation directory
```
@ -14,15 +18,6 @@ docker-compose run --rm worker backup
This will dump the current database into the `./backups` folder. By default, the last 10 backups are kept.
To schedule these backups, use the following snippet in a crontab
```
0 0 * * * bash -c "cd <passbook install location> && docker-compose run --rm worker backup" >/dev/null
```
!!! notice
passbook does support automatic backups on a schedule, however this is currently not recommended, as there is no way to monitor these scheduled tasks.
### Restore
@ -42,11 +37,7 @@ After you've restored the backup, it is recommended to restart all services with
### S3 Configuration
!!! notice
To trigger backups with S3 enabled, use the same commands as above.
#### S3 Preparation
#### Preparation
passbook expects the bucket you select to already exist. The IAM User given to passbook should have the following permissions
@ -101,11 +92,11 @@ Simply enable these options in your values.yaml file
```yaml
# Enable Database Backups to S3
backup:
access_key: access-key
secret_key: secret-key
accessKey: access-key
secretKey: secret-key
bucket: s3-bucket
region: eu-central-1
host: s3-host
```
Afterwards, run a `helm upgrade` to update the ConfigMap. Because passbook-scheduled backups are not recommended currently, a Kubernetes CronJob is created that runs the backup daily.
Afterwards, run a `helm upgrade` to update the ConfigMap. Backups are done automatically as above, at 00:00 every day.

View File

@ -26,7 +26,11 @@ return False
- `request.obj`: A Django Model instance. This is only set if the policy is ran against an object.
- `request.context`: A dictionary with dynamic data. This depends on the origin of the execution.
- `pb_is_sso_flow`: Boolean which is true if request was initiated by authenticating through an external provider.
- `pb_client_ip`: Client's IP Address or '255.255.255.255' if no IP Address could be extracted. Can be [compared](../expressions/index.md#comparing-ip-addresses)
- `pb_client_ip`: Client's IP Address or 255.255.255.255 if no IP Address could be extracted. Can be [compared](../expressions/index.md#comparing-ip-addresses), for example
```python
return pb_client_ip in ip_network('10.0.0.0/24')
```
Additionally, when the policy is executed from a flow, every variable from the flow's current context is accessible under the `context` object.

View File

@ -8,7 +8,7 @@ from docker.types import Healthcheck
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as ec
from e2e.utils import USER, SeleniumTestCase
from e2e.utils import USER, SeleniumTestCase, retry
from passbook.flows.models import Flow, FlowDesignation, FlowStageBinding
from passbook.stages.email.models import EmailStage, EmailTemplates
from passbook.stages.identification.models import IdentificationStage
@ -34,6 +34,7 @@ class TestFlowsEnroll(SeleniumTestCase):
),
}
@retry()
def test_enroll_2_step(self):
"""Test 2-step enroll flow"""
# First stage fields
@ -119,6 +120,7 @@ class TestFlowsEnroll(SeleniumTestCase):
"foo@bar.baz",
)
@retry()
@override_settings(EMAIL_BACKEND="django.core.mail.backends.smtp.EmailBackend")
def test_enroll_email(self):
"""Test enroll with Email verification"""

View File

@ -5,13 +5,14 @@ from unittest.case import skipUnless
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from e2e.utils import USER, SeleniumTestCase
from e2e.utils import USER, SeleniumTestCase, retry
@skipUnless(platform.startswith("linux"), "requires local docker")
class TestFlowsLogin(SeleniumTestCase):
"""test default login flow"""
@retry()
def test_login(self):
"""test default login flow"""
self.driver.get(f"{self.live_server_url}/flows/default-authentication-flow/")

View File

@ -12,7 +12,7 @@ from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec
from e2e.utils import USER, SeleniumTestCase
from e2e.utils import USER, SeleniumTestCase, retry
from passbook.flows.models import Flow, FlowStageBinding
from passbook.stages.otp_validate.models import OTPValidateStage
@ -21,6 +21,7 @@ from passbook.stages.otp_validate.models import OTPValidateStage
class TestFlowsOTP(SeleniumTestCase):
"""test flow with otp stages"""
@retry()
def test_otp_validate(self):
"""test flow with otp stages"""
sleep(1)
@ -52,6 +53,7 @@ class TestFlowsOTP(SeleniumTestCase):
USER().username,
)
@retry()
def test_otp_totp_setup(self):
"""test TOTP Setup stage"""
flow: Flow = Flow.objects.get(slug="default-authentication-flow")
@ -98,6 +100,7 @@ class TestFlowsOTP(SeleniumTestCase):
self.assertTrue(TOTPDevice.objects.filter(user=USER(), confirmed=True).exists())
@retry()
def test_otp_static_setup(self):
"""test Static OTP Setup stage"""
flow: Flow = Flow.objects.get(slug="default-authentication-flow")

View File

@ -5,7 +5,7 @@ from unittest.case import skipUnless
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from e2e.utils import USER, SeleniumTestCase
from e2e.utils import USER, SeleniumTestCase, retry
from passbook.core.models import User
from passbook.flows.models import Flow, FlowDesignation
from passbook.providers.oauth2.generators import generate_client_secret
@ -16,6 +16,7 @@ from passbook.stages.password.models import PasswordStage
class TestFlowsStageSetup(SeleniumTestCase):
"""test stage setup flows"""
@retry()
def test_password_change(self):
"""test password change flow"""
# Ensure that password stage has change_flow set

View File

@ -9,7 +9,7 @@ from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec
from e2e.utils import USER, SeleniumTestCase
from e2e.utils import USER, SeleniumTestCase, retry
from passbook.core.models import Application
from passbook.flows.models import Flow
from passbook.policies.expression.models import ExpressionPolicy
@ -61,6 +61,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
},
}
@retry()
def test_authorization_consent_implied(self):
"""test OAuth Provider flow (default authorization flow with implied consent)"""
# Bootstrap all needed objects
@ -115,6 +116,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
USER().username,
)
@retry()
def test_authorization_consent_explicit(self):
"""test OAuth Provider flow (default authorization flow with explicit consent)"""
# Bootstrap all needed objects
@ -184,6 +186,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
USER().username,
)
@retry()
def test_denied(self):
"""test OAuth Provider flow (default authorization flow, denied)"""
# Bootstrap all needed objects

View File

@ -10,7 +10,7 @@ from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec
from structlog import get_logger
from e2e.utils import USER, SeleniumTestCase
from e2e.utils import USER, SeleniumTestCase, retry
from passbook.core.models import Application
from passbook.crypto.models import CertificateKeyPair
from passbook.flows.models import Flow
@ -80,6 +80,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
},
}
@retry()
def test_redirect_uri_error(self):
"""test OpenID Provider flow (invalid redirect URI, check error message)"""
sleep(1)
@ -122,6 +123,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
"Redirect URI Error",
)
@retry()
def test_authorization_consent_implied(self):
"""test OpenID Provider flow (default authorization flow with implied consent)"""
sleep(1)
@ -183,6 +185,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
USER().email,
)
@retry()
def test_authorization_logout(self):
"""test OpenID Provider flow with logout"""
sleep(1)
@ -252,6 +255,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
)
self.driver.find_element(By.ID, "logout").click()
@retry()
def test_authorization_consent_explicit(self):
"""test OpenID Provider flow (default authorization flow with explicit consent)"""
sleep(1)
@ -325,6 +329,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
USER().email,
)
@retry()
def test_authorization_denied(self):
"""test OpenID Provider flow (default authorization with access deny)"""
sleep(1)

View File

@ -12,7 +12,7 @@ from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec
from structlog import get_logger
from e2e.utils import USER, SeleniumTestCase
from e2e.utils import USER, SeleniumTestCase, retry
from passbook.core.models import Application
from passbook.crypto.models import CertificateKeyPair
from passbook.flows.models import Flow
@ -76,6 +76,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
LOGGER.info("Container failed healthcheck")
sleep(1)
@retry()
def test_redirect_uri_error(self):
"""test OpenID Provider flow (invalid redirect URI, check error message)"""
sleep(1)
@ -119,6 +120,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
"Redirect URI Error",
)
@retry()
def test_authorization_consent_implied(self):
"""test OpenID Provider flow (default authorization flow with implied consent)"""
sleep(1)
@ -169,6 +171,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
self.assertEqual(body["IDTokenClaims"]["email"], USER().email)
self.assertEqual(body["UserInfo"]["email"], USER().email)
@retry()
def test_authorization_consent_explicit(self):
"""test OpenID Provider flow (default authorization flow with explicit consent)"""
sleep(1)
@ -229,6 +232,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
self.assertEqual(body["IDTokenClaims"]["email"], USER().email)
self.assertEqual(body["UserInfo"]["email"], USER().email)
@retry()
def test_authorization_denied(self):
"""test OpenID Provider flow (default authorization with access deny)"""
sleep(1)

View File

@ -11,7 +11,7 @@ from docker.models.containers import Container
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from e2e.utils import USER, SeleniumTestCase
from e2e.utils import USER, SeleniumTestCase, retry
from passbook import __version__
from passbook.core.models import Application
from passbook.flows.models import Flow
@ -57,6 +57,7 @@ class TestProviderProxy(SeleniumTestCase):
)
return container
@retry()
def test_proxy_simple(self):
"""Test simple outpost setup with single provider"""
proxy: ProxyProvider = ProxyProvider.objects.create(
@ -110,6 +111,7 @@ class TestProviderProxy(SeleniumTestCase):
class TestProviderProxyConnect(ChannelsLiveServerTestCase):
"""Test Proxy connectivity over websockets"""
@retry()
def test_proxy_connectivity(self):
"""Test proxy connectivity over websocket"""
SeleniumTestCase().apply_default_data()

View File

@ -12,7 +12,7 @@ from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec
from structlog import get_logger
from e2e.utils import USER, SeleniumTestCase
from e2e.utils import USER, SeleniumTestCase, retry
from passbook.core.models import Application
from passbook.crypto.models import CertificateKeyPair
from passbook.flows.models import Flow
@ -66,6 +66,7 @@ class TestProviderSAML(SeleniumTestCase):
LOGGER.info("Container failed healthcheck")
sleep(1)
@retry()
def test_sp_initiated_implicit(self):
"""test SAML Provider flow SP-initiated flow (implicit consent)"""
# Bootstrap all needed objects
@ -105,6 +106,7 @@ class TestProviderSAML(SeleniumTestCase):
self.assertEqual(body["attr"]["mail"], [USER().email])
self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
@retry()
def test_sp_initiated_explicit(self):
"""test SAML Provider flow SP-initiated flow (explicit consent)"""
# Bootstrap all needed objects
@ -150,6 +152,7 @@ class TestProviderSAML(SeleniumTestCase):
self.assertEqual(body["attr"]["mail"], [USER().email])
self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
@retry()
def test_idp_initiated_implicit(self):
"""test SAML Provider flow IdP-initiated flow (implicit consent)"""
# Bootstrap all needed objects
@ -195,6 +198,7 @@ class TestProviderSAML(SeleniumTestCase):
self.assertEqual(body["attr"]["mail"], [USER().email])
self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
@retry()
def test_sp_initiated_denied(self):
"""test SAML Provider flow SP-initiated flow (Policy denies access)"""
# Bootstrap all needed objects

View File

@ -14,7 +14,7 @@ from selenium.webdriver.support import expected_conditions as ec
from structlog import get_logger
from yaml import safe_dump
from e2e.utils import SeleniumTestCase
from e2e.utils import SeleniumTestCase, retry
from passbook.flows.models import Flow
from passbook.providers.oauth2.generators import (
generate_client_id,
@ -106,6 +106,7 @@ class TestSourceOAuth2(SeleniumTestCase):
consumer_secret=self.client_secret,
)
@retry()
def test_oauth_enroll(self):
"""test OAuth Source With With OIDC"""
self.create_objects()
@ -159,6 +160,7 @@ class TestSourceOAuth2(SeleniumTestCase):
"admin@example.com",
)
@retry()
@override_settings(SESSION_COOKIE_SAMESITE="strict")
def test_oauth_samesite_strict(self):
"""test OAuth Source With SameSite set to strict
@ -195,6 +197,7 @@ class TestSourceOAuth2(SeleniumTestCase):
"Authentication Failed.",
)
@retry()
def test_oauth_enroll_auth(self):
"""test OAuth Source With With OIDC (enroll and authenticate again)"""
self.test_oauth_enroll()
@ -291,6 +294,7 @@ class TestSourceOAuth1(SeleniumTestCase):
consumer_secret=self.client_secret,
)
@retry()
def test_oauth_enroll(self):
"""test OAuth Source With With OIDC"""
self.create_objects()
@ -317,6 +321,7 @@ class TestSourceOAuth1(SeleniumTestCase):
self.driver.find_element(By.CSS_SELECTOR, "[name='confirm']").click()
# Wait until we've loaded the user info page
sleep(2)
self.wait.until(ec.presence_of_element_located((By.ID, "user-settings")))
self.driver.get(self.url("passbook_core:user-settings"))

View File

@ -10,7 +10,7 @@ from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as ec
from structlog import get_logger
from e2e.utils import SeleniumTestCase
from e2e.utils import SeleniumTestCase, retry
from passbook.crypto.models import CertificateKeyPair
from passbook.flows.models import Flow
from passbook.sources.saml.models import SAMLBindingTypes, SAMLSource
@ -92,6 +92,7 @@ class TestSourceSAML(SeleniumTestCase):
},
}
@retry()
def test_idp_redirect(self):
"""test SAML Source With redirect binding"""
# Bootstrap all needed objects
@ -141,6 +142,7 @@ class TestSourceSAML(SeleniumTestCase):
self.driver.find_element(By.ID, "id_username").get_attribute("value"), ""
)
@retry()
def test_idp_post(self):
"""test SAML Source With post binding"""
# Bootstrap all needed objects
@ -192,6 +194,7 @@ class TestSourceSAML(SeleniumTestCase):
self.driver.find_element(By.ID, "id_username").get_attribute("value"), ""
)
@retry()
def test_idp_post_auto(self):
"""test SAML Source With post binding (auto redirect)"""
# Bootstrap all needed objects

View File

@ -1,19 +1,22 @@
"""passbook e2e testing utilities"""
from functools import wraps
from glob import glob
from importlib.util import module_from_spec, spec_from_file_location
from inspect import getmembers, isfunction
from os import environ, makedirs
from time import sleep, time
from typing import Any, Dict, Optional
from typing import Any, Callable, Dict, Optional
from django.apps import apps
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.db import connection, transaction
from django.db.utils import IntegrityError
from django.shortcuts import reverse
from django.test.testcases import TransactionTestCase
from docker import DockerClient, from_env
from docker.models.containers import Container
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.remote.webdriver import WebDriver
from selenium.webdriver.support.ui import WebDriverWait
@ -123,3 +126,41 @@ class SeleniumTestCase(StaticLiveServerTestCase):
func(apps, schema_editor)
except IntegrityError:
pass
def retry(max_retires=3, exceptions=None):
"""Retry test multiple times. Default to catching Selenium Timeout Exception"""
if not exceptions:
exceptions = [TimeoutException]
logger = get_logger()
def retry_actual(func: Callable):
"""Retry test multiple times"""
count = 1
@wraps(func)
def wrapper(self: TransactionTestCase, *args, **kwargs):
"""Run test again if we're below max_retries, including tearDown and
setUp. Otherwise raise the error"""
nonlocal count
try:
return func(self, *args, **kwargs)
# pylint: disable=catching-non-exception
except tuple(exceptions) as exc:
count += 1
if count > max_retires:
logger.debug("Exceeded retry count", exc=exc, test=self)
# pylint: disable=raising-non-exception
raise exc
logger.debug("Retrying on error", exc=exc, test=self)
self.tearDown()
# pylint: disable=protected-access
self._post_teardown()
self.setUp()
return wrapper(self, *args, **kwargs)
return wrapper
return retry_actual
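For reference, a hypothetical test showing how the decorator above is meant to be applied (the class name and element lookups are made up; the real usages are the `@retry()` lines in the e2e diffs above):

```python
# Hypothetical usage of the retry() decorator defined above; the test
# class and selectors are made up for illustration.
from selenium.webdriver.common.by import By

from e2e.utils import USER, SeleniumTestCase, retry


class TestExampleLogin(SeleniumTestCase):
    """example login flow test"""

    @retry()  # re-runs the test (tearDown + setUp included) on TimeoutException
    def test_example_login(self):
        self.driver.get(f"{self.live_server_url}/flows/default-authentication-flow/")
        self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
```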

View File

@ -1,9 +1,11 @@
apiVersion: v2
appVersion: "0.12.0-stable"
description: A Helm chart for passbook.
description: passbook is an open-source Identity Provider focused on flexibility and versatility. You can use passbook in an existing environment to add support for new protocols. passbook is also a great solution for implementing signup/recovery/etc in your application, so you don't have to deal with it.
name: passbook
version: "0.12.0-stable"
icon: https://github.com/BeryJu/passbook/blob/master/docs/images/logo.svg
home: https://passbook.beryju.org
sources:
- https://github.com/BeryJu/passbook
version: "0.12.6-stable"
icon: https://raw.githubusercontent.com/BeryJu/passbook/master/docs/images/logo.svg
dependencies:
- name: postgresql
version: 9.4.1

helm/README.md (new file)
View File

@ -0,0 +1,28 @@
# passbook Helm Chart
| Name | Default | Description |
|-----------------------------------|-------------------------|-------------|
| image.name | beryju/passbook | Image used to run the passbook server and worker |
| image.name_static | beryju/passbook-static | Image used to run the passbook static server (CSS and JS Files) |
| image.tag | 0.12.5-stable | Image tag |
| serverReplicas | 1 | Replicas for the Server deployment |
| workerReplicas | 1 | Replicas for the Worker deployment |
| kubernetesIntegration | true | Enable/disable the Kubernetes integration for passbook. This will create a service account for passbook to create and update outposts in passbook |
| config.secretKey | | Secret key used to sign session cookies, generate with `pwgen 50 1` for example. |
| config.errorReporting.enabled | false | Enable/disable error reporting |
| config.errorReporting.environment | customer | Environment sent with the error reporting |
| config.errorReporting.sendPii | false | Whether to send Personally-identifiable data with the error reporting |
| config.logLevel | warning | Log level of passbook |
| backup.accessKey | | Optionally enable S3 Backup, Access Key |
| backup.secretKey | | Optionally enable S3 Backup, Secret Key |
| backup.bucket | | Optionally enable S3 Backup, Bucket |
| backup.region | | Optionally enable S3 Backup, Region |
| backup.host | | Optionally enable S3 Backup, to custom Endpoint like minio |
| ingress.annotations | {} | Annotations for the ingress object |
| ingress.hosts | [passbook.k8s.local] | Hosts which the ingress will match |
| ingress.tls | [] | TLS Configuration, same as Ingress objects |
| install.postgresql | true | Enables/disables the packaged PostgreSQL Chart
| install.redis | true | Enables/disables the packaged Redis Chart
| postgresql.postgresqlPassword | | Password used for PostgreSQL, generated automatically.
For more info, see https://passbook.beryju.org/ and https://passbook.beryju.org/installation/kubernetes/

View File

@ -3,7 +3,7 @@
Expand the name of the chart.
*/}}
{{- define "passbook.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}}
{{- default .Chart.Name | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
@ -12,17 +12,13 @@ We truncate at 63 chars because some Kubernetes name fields are limited to this
If release name contains chart name it will be used as a full name.
*/}}
{{- define "passbook.fullname" -}}
{{- if .Values.fullnameOverride -}}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}}
{{- else -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- $name := default .Chart.Name -}}
{{- if contains $name .Release.Name -}}
{{- .Release.Name | trunc 63 | trimSuffix "-" -}}
{{- else -}}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{/*
Create chart name and version as used by the chart label.

View File

@ -7,8 +7,8 @@ data:
POSTGRESQL__NAME: "{{ .Values.postgresql.postgresqlDatabase }}"
POSTGRESQL__USER: "{{ .Values.postgresql.postgresqlUsername }}"
{{- if .Values.backup }}
POSTGRESQL__S3_BACKUP__ACCESS_KEY: "{{ .Values.backup.access_key }}"
POSTGRESQL__S3_BACKUP__SECRET_KEY: "{{ .Values.backup.secret_key }}"
POSTGRESQL__S3_BACKUP__ACCESS_KEY: "{{ .Values.backup.accessKey }}"
POSTGRESQL__S3_BACKUP__SECRET_KEY: "{{ .Values.backup.secretKey }}"
POSTGRESQL__S3_BACKUP__BUCKET: "{{ .Values.backup.bucket }}"
POSTGRESQL__S3_BACKUP__REGION: "{{ .Values.backup.region }}"
POSTGRESQL__S3_BACKUP__HOST: "{{ .Values.backup.host }}"

View File

@ -1,42 +0,0 @@
{{- if .Values.backup }}
apiVersion: batch/v1beta1
kind: CronJob
metadata:
name: {{ include "passbook.fullname" . }}-backup
labels:
app.kubernetes.io/name: {{ include "passbook.name" . }}
helm.sh/chart: {{ include "passbook.chart" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
spec:
schedule: "0 0 * * *"
jobTemplate:
spec:
template:
spec:
restartPolicy: Never
containers:
- name: {{ .Chart.Name }}
image: "{{ .Values.image.name }}:{{ .Values.image.tag }}"
args: [server]
envFrom:
- configMapRef:
name: {{ include "passbook.fullname" . }}-config
prefix: PASSBOOK_
env:
- name: PASSBOOK_SECRET_KEY
valueFrom:
secretKeyRef:
name: "{{ include "passbook.fullname" . }}-secret-key"
key: "secret_key"
- name: PASSBOOK_REDIS__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-redis"
key: "redis-password"
- name: PASSBOOK_POSTGRESQL__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-postgresql"
key: "postgresql-password"
{{- end}}

View File

@ -28,9 +28,9 @@ rules:
- "patch"
- apiGroups:
- "extensions"
- "networking"
- "networking.k8s.io"
resources:
- "ingress"
- "ingresses"
verbs:
- "get"
- "create"

View File

@ -4,9 +4,7 @@
image:
name: beryju/passbook
name_static: beryju/passbook-static
tag: 0.12.0-stable
nameOverride: ""
tag: 0.12.6-stable
serverReplicas: 1
workerReplicas: 1
@ -28,8 +26,8 @@ config:
# Enable Database Backups to S3
# backup:
# access_key: access-key
# secret_key: secret-key
# accessKey: access-key
# secretKey: secret-key
# bucket: s3-bucket
# region: eu-central-1
# host: s3-host
@ -38,7 +36,6 @@ ingress:
annotations: {}
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
path: /
hosts:
- passbook.k8s.local
tls: []
@ -57,12 +54,3 @@ install:
# These values influence the bundled postgresql and redis charts, but are also used by passbook to connect
postgresql:
postgresqlDatabase: passbook
redis:
cluster:
enabled: false
master:
persistence:
enabled: false
# https://stackoverflow.com/a/59189742
disableCommands: []

View File

@ -47,7 +47,9 @@ if __name__ == "__main__":
# pyright: reportGeneralTypeIssues=false
spec.loader.exec_module(mod)
for _, sub in getmembers(mod, isclass):
for name, sub in getmembers(mod, isclass):
if name != "Migration":
continue
migration = sub(curr, conn)
if migration.needs_migration():
LOGGER.info("Migration needs to be applied", migration=sub)

View File

@ -25,7 +25,7 @@ delete from django_migrations where app = 'passbook_stages_password' and
name = '0002_passwordstage_change_flow';"""
class To010Migration(BaseMigration):
class Migration(BaseMigration):
def needs_migration(self) -> bool:
self.cur.execute(
"select * from information_schema.tables where table_name='oidc_provider_client'"

View File

@ -1,2 +1,2 @@
"""passbook"""
__version__ = "0.12.0-stable"
__version__ = "0.12.6-stable"

View File

@ -50,15 +50,23 @@ class TaskViewSet(ViewSet):
task = TaskInfo.by_name(pk)
if not task:
raise Http404
task_module = import_module(task.task_call_module)
task_func = getattr(task_module, task.task_call_func)
task_func.delay(*task.task_call_args, **task.task_call_kwargs)
messages.success(
self.request,
_("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
)
return Response(
{
"successful": True,
}
)
try:
task_module = import_module(task.task_call_module)
task_func = getattr(task_module, task.task_call_func)
task_func.delay(*task.task_call_args, **task.task_call_kwargs)
messages.success(
self.request,
_(
"Successfully re-scheduled Task %(name)s!"
% {"name": task.task_name}
),
)
return Response(
{
"successful": True,
}
)
except ImportError:
# if we get an import error, the module path has probably changed
task.delete()
return Response({"successful": False})

View File

@ -49,7 +49,7 @@
</span>
</td>
{% with states=outpost.state %}
{% if states|length > 1 %}
{% if states|length > 0 %}
<td role="cell">
{% for state in states %}
<div>

View File

@ -21,7 +21,7 @@
<tr role="row">
<th role="columnheader" scope="col">{% trans 'Identifier' %}</th>
<th role="columnheader" scope="col">{% trans 'Description' %}</th>
<th role="columnheader" scope="col">{% trans 'Last Status' %}</th>
<th role="columnheader" scope="col">{% trans 'Last Run' %}</th>
<th role="columnheader" scope="col">{% trans 'Status' %}</th>
<th role="columnheader" scope="col">{% trans 'Messages' %}</th>
<th role="cell"></th>

View File

@ -68,6 +68,9 @@ router.register("core/tokens", TokenViewSet)
router.register("outposts/outposts", OutpostViewSet)
router.register("outposts/proxy", OutpostConfigViewSet)
router.register("flows/instances", FlowViewSet)
router.register("flows/bindings", FlowStageBindingViewSet)
router.register("crypto/certificatekeypairs", CertificateKeyPairViewSet)
router.register("audit/events", EventViewSet)
@ -114,9 +117,6 @@ router.register("stages/user_login", UserLoginStageViewSet)
router.register("stages/user_logout", UserLogoutStageViewSet)
router.register("stages/user_write", UserWriteStageViewSet)
router.register("flows/instances", FlowViewSet)
router.register("flows/bindings", FlowStageBindingViewSet)
router.register("stages/dummy", DummyStageViewSet)
router.register("policies/dummy", DummyPolicyViewSet)

View File

@ -1,6 +1,4 @@
"""Tokens API Viewset"""
from uuid import UUID
from django.http.response import Http404
from rest_framework.decorators import action
from rest_framework.request import Request
@ -29,10 +27,9 @@ class TokenViewSet(ModelViewSet):
serializer_class = TokenSerializer
@action(detail=True)
# pylint: disable=invalid-name
def view_key(self, request: Request, pk: UUID) -> Response:
def view_key(self, request: Request, identifier: str) -> Response:
"""Return token key and log access"""
tokens = Token.filter_not_expired(pk=pk)
tokens = Token.filter_not_expired(identifier=identifier)
if not tokens.exists():
raise Http404
token = tokens.first()

View File

@ -1,4 +1,11 @@
"""passbook core tasks"""
from datetime import datetime
from io import StringIO
from boto3.exceptions import Boto3Error
from botocore.exceptions import BotoCoreError, ClientError
from django.contrib.humanize.templatetags.humanize import naturaltime
from django.core import management
from django.utils.timezone import now
from structlog import get_logger
@ -24,3 +31,24 @@ def clean_expired_models(self: MonitoredTask):
LOGGER.debug("Deleted expired models", model=cls, amount=amount)
messages.append(f"Deleted {amount} expired {cls._meta.verbose_name_plural}")
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, messages))
@CELERY_APP.task(bind=True, base=MonitoredTask)
def backup_database(self: MonitoredTask): # pragma: no cover
"""Database backup"""
try:
start = datetime.now()
out = StringIO()
management.call_command("dbbackup", quiet=True, stdout=out)
self.set_status(
TaskResult(
TaskResultStatus.SUCCESSFUL,
[
f"Successfully finished database backup {naturaltime(start)}",
out.getvalue(),
],
)
)
LOGGER.info("Successfully backed up database.")
except (IOError, BotoCoreError, ClientError, Boto3Error) as exc:
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
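Commit f3098418f2 in the log above explains why this task landed here: Celery's `autodiscover_tasks()` only scans packages listed in `INSTALLED_APPS` for a `tasks.py`, so a task defined under the plain `lib` package was never registered and the beat schedule silently did nothing. A minimal sketch of that behaviour, assuming the task is importable as `passbook.core.tasks.backup_database` as this hunk suggests; everything else is illustrative:

```python
# A minimal sketch of why the schedule only fires for registered tasks;
# names other than passbook.core.tasks are illustrative.
from celery import Celery
from celery.schedules import crontab

app = Celery("passbook")

# With the Django integration, autodiscover_tasks() only scans packages
# listed in INSTALLED_APPS for a tasks.py module. A tasks.py inside a
# plain utility package such as `lib` is never imported, so a beat entry
# naming one of its tasks points at something Celery never registered.
app.autodiscover_tasks()

app.conf.beat_schedule = {
    "database-backup": {
        # Resolvable only because the task now lives in an installed app
        # (passbook.core) instead of `lib`.
        "task": "passbook.core.tasks.backup_database",
        "schedule": crontab(minute=0, hour=0),  # daily at 00:00
    },
}
```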

View File

@ -53,7 +53,7 @@
{{ user.username }}
</a>
</div>
<img class="pf-c-avatar" src="{% gravatar user.email %}" alt="">
<img class="pf-c-avatar" src="{% avatar user %}" alt="">
</div>
</header>
{% block page_content %}

View File

@ -7,7 +7,7 @@
<div class="pf-c-form__group">
<div class="form-control-static">
<div class="left">
<img class="pf-c-avatar" src="{% gravatar user.email %}" alt="">
<img class="pf-c-avatar" src="{% avatar user %}" alt="">
{{ user.username }}
</div>
<div class="right">

View File

@ -27,7 +27,15 @@ class FlowStageBindingSerializer(ModelSerializer):
class Meta:
model = FlowStageBinding
fields = ["pk", "target", "stage", "re_evaluate_policies", "order", "policies"]
fields = [
"pk",
"target",
"stage",
"evaluate_on_plan",
"re_evaluate_policies",
"order",
"policies",
]
class FlowStageBindingViewSet(ModelViewSet):

View File

@ -50,12 +50,10 @@ class FlowStageBindingForm(forms.ModelForm):
fields = [
"target",
"stage",
"evaluate_on_plan",
"re_evaluate_policies",
"order",
]
labels = {
"re_evaluate_policies": _("Re-evaluate Policies"),
}
widgets = {
"name": forms.TextInput(),
}

View File

@ -2,6 +2,7 @@
from dataclasses import dataclass
from typing import TYPE_CHECKING, Optional
from django.http.request import HttpRequest
from structlog import get_logger
from passbook.core.models import User
@ -20,7 +21,9 @@ class StageMarker:
"""Base stage marker class, no extra attributes, and has no special handler."""
# pylint: disable=unused-argument
def process(self, plan: "FlowPlan", stage: Stage) -> Optional[Stage]:
def process(
self, plan: "FlowPlan", stage: Stage, http_request: Optional[HttpRequest]
) -> Optional[Stage]:
"""Process callback for this marker. This should be overridden by sub-classes.
If a stage should be removed, return None."""
return stage
@ -33,10 +36,14 @@ class ReevaluateMarker(StageMarker):
binding: PolicyBinding
user: User
def process(self, plan: "FlowPlan", stage: Stage) -> Optional[Stage]:
def process(
self, plan: "FlowPlan", stage: Stage, http_request: Optional[HttpRequest]
) -> Optional[Stage]:
"""Re-evaluate policies bound to stage, and if they fail, remove from plan"""
engine = PolicyEngine(self.binding, self.user)
engine.use_cache = False
if http_request:
engine.request.http_request = http_request
engine.request.context = plan.context
engine.build()
result = engine.result

View File

@ -0,0 +1,29 @@
# Generated by Django 3.1.2 on 2020-10-20 12:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("passbook_flows", "0014_auto_20200925_2332"),
]
operations = [
migrations.AlterField(
model_name="flowstagebinding",
name="re_evaluate_policies",
field=models.BooleanField(
default=False,
help_text="Evaluate policies when the Stage is present to the user.",
),
),
migrations.AddField(
model_name="flowstagebinding",
name="evaluate_on_plan",
field=models.BooleanField(
default=True,
help_text="Evaluate policies during the Flow planning process. Disable this for input-based policies.",
),
),
]

View File

@ -154,15 +154,19 @@ class FlowStageBinding(SerializerModel, PolicyBindingModel):
target = models.ForeignKey("Flow", on_delete=models.CASCADE)
stage = InheritanceForeignKey(Stage, on_delete=models.CASCADE)
re_evaluate_policies = models.BooleanField(
default=False,
evaluate_on_plan = models.BooleanField(
default=True,
help_text=_(
(
"When this option is enabled, the planner will re-evaluate "
"policies bound to this binding."
"Evaluate policies during the Flow planning process. "
"Disable this for input-based policies."
)
),
)
re_evaluate_policies = models.BooleanField(
default=False,
help_text=_("Evaluate policies when the Stage is present to the user."),
)
order = models.IntegerField()
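Taken together, the two flags split policy evaluation into two moments: `evaluate_on_plan` controls whether a binding's policies run while the planner builds the plan, and `re_evaluate_policies` attaches a ReevaluateMarker so they run again, with the live request and flow context, when the stage is actually reached. A hypothetical binding for an input-based policy, matching the example-flow fixture above, would disable the first and enable the second:

```python
# Hypothetical binding configuration; the flow/stage lookups are
# illustrative, only the two flags mirror this changeset.
from passbook.flows.models import Flow, FlowStageBinding, Stage

flow = Flow.objects.get(slug="default-authentication-flow")
stage = Stage.objects.first()

FlowStageBinding.objects.create(
    target=flow,
    stage=stage,
    order=10,
    # Skip the policies while the plan is built...
    evaluate_on_plan=False,
    # ...and evaluate them only when the user reaches the stage, so the
    # policy can see request data that does not exist at planning time.
    re_evaluate_policies=True,
)
```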

View File

@ -46,7 +46,7 @@ class FlowPlan:
self.stages.append(stage)
self.markers.append(marker or StageMarker())
def next(self) -> Optional[Stage]:
def next(self, http_request: Optional[HttpRequest]) -> Optional[Stage]:
"""Return next pending stage from the bottom of the list"""
if not self.has_stages:
return None
@ -55,7 +55,7 @@ class FlowPlan:
if marker.__class__ is not StageMarker:
LOGGER.debug("f(plan_inst): stage has marker", stage=stage, marker=marker)
marked_stage = marker.process(self, stage)
marked_stage = marker.process(self, stage, http_request)
if not marked_stage:
LOGGER.debug("f(plan_inst): marker returned none, next stage", stage=stage)
self.stages.remove(stage)
@ -63,7 +63,7 @@ class FlowPlan:
if not self.has_stages:
return None
# pylint: disable=not-callable
return self.next()
return self.next(http_request)
return marked_stage
def pop(self):
@ -159,23 +159,41 @@ class FlowPlanner:
for binding in FlowStageBinding.objects.filter(
target__pk=self.flow.pk
).order_by("order"):
engine = PolicyEngine(binding, user, request)
engine.request.context = plan.context
engine.build()
if engine.passing:
binding: FlowStageBinding
stage = binding.stage
marker = StageMarker()
if binding.evaluate_on_plan:
LOGGER.debug(
"f(plan): Stage passing", stage=binding.stage, flow=self.flow
"f(plan): evaluating on plan",
stage=binding.stage,
flow=self.flow,
)
plan.stages.append(binding.stage)
marker = StageMarker()
if binding.re_evaluate_policies:
engine = PolicyEngine(binding, user, request)
engine.request.context = plan.context
engine.build()
if engine.passing:
LOGGER.debug(
"f(plan): Stage has re-evaluate marker",
"f(plan): Stage passing",
stage=binding.stage,
flow=self.flow,
)
marker = ReevaluateMarker(binding=binding, user=user)
plan.markers.append(marker)
else:
stage = None
else:
LOGGER.debug(
"f(plan): not evaluating on plan",
stage=binding.stage,
flow=self.flow,
)
if binding.re_evaluate_policies and stage:
LOGGER.debug(
"f(plan): Stage has re-evaluate marker",
stage=binding.stage,
flow=self.flow,
)
marker = ReevaluateMarker(binding=binding, user=user)
if stage:
plan.append(stage, marker)
LOGGER.debug(
"f(plan): Finished building",
flow=self.flow,

View File

@ -86,7 +86,7 @@ class FlowExecutorView(View):
return to_stage_response(self.request, self.handle_invalid_flow(exc))
# We don't save the Plan after getting the next stage
# as it hasn't been successfully passed yet
next_stage = self.plan.next()
next_stage = self.plan.next(self.request)
if not next_stage:
LOGGER.debug("f(exec): no more stages, flow is done.")
return self._flow_done()

View File

@ -22,6 +22,7 @@ error_reporting:
send_pii: false
passbook:
avatars: gravatar # gravatar or none
branding:
title: passbook
title_show: true

View File

@ -62,6 +62,10 @@ class TaskInfo:
"""Get TaskInfo Object by name"""
return cache.get(f"task_{name}")
def delete(self):
"""Delete task info from cache"""
return cache.delete(f"task_{self.task_name}")
def save(self):
"""Save task into cache"""
key = f"task_{self.task_name}"
@ -79,11 +83,18 @@ class MonitoredTask(Task):
_result: TaskResult
_uid: Optional[str]
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.save_on_success = True
self._uid = None
self._result = TaskResult(status=TaskResultStatus.ERROR, messages=[])
def set_uid(self, uid: str):
"""Set UID, so in the case of an unexpected error its saved correctly"""
self._uid = uid
def set_status(self, result: TaskResult):
"""Set result for current run, will overwrite previous result."""
self._result = result
@ -92,6 +103,8 @@ class MonitoredTask(Task):
def after_return(
self, status, retval, task_id, args: List[Any], kwargs: Dict[str, Any], einfo
):
if not self._result.uid:
self._result.uid = self._uid
if self.save_on_success:
TaskInfo(
task_name=self.__name__,
@ -107,6 +120,8 @@ class MonitoredTask(Task):
# pylint: disable=too-many-arguments
def on_failure(self, exc, task_id, args, kwargs, einfo):
if not self._result.uid:
self._result.uid = self._uid
TaskInfo(
task_name=self.__name__,
task_description=self.__doc__,
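set_uid exists so a task can attach its identifier before doing any work; after_return and on_failure then fall back to it whenever the final TaskResult carries no uid, which keeps failed runs attributable. A minimal usage sketch; the import paths, model and exception names are assumptions, not part of this changeset:

from django.utils.text import slugify
from passbook.lib.tasks import MonitoredTask, TaskResult, TaskResultStatus  # assumed path
from passbook.root.celery import CELERY_APP  # assumed path

@CELERY_APP.task(bind=True, base=MonitoredTask)
def sync_something(self: MonitoredTask, obj_pk: int):
    """Example monitored task (illustrative)"""
    obj = SomeModel.objects.get(pk=obj_pk)   # placeholder model
    self.set_uid(slugify(obj.name))          # uid is kept even if the task fails below
    try:
        do_work(obj)                         # placeholder
        self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, ["done"]))
    except ValueError as exc:                # placeholder exception type
        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))

The same pattern appears in the outpost, LDAP and email tasks further down in this changeset.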

View File

@ -6,15 +6,19 @@ from django import template
from django.db.models import Model
from django.http.request import HttpRequest
from django.template import Context
from django.templatetags.static import static
from django.utils.html import escape, mark_safe
from structlog import get_logger
from passbook.core.models import User
from passbook.lib.config import CONFIG
from passbook.lib.utils.urls import is_url_absolute
register = template.Library()
LOGGER = get_logger()
GRAVATAR_URL = "https://secure.gravatar.com"
@register.simple_tag(takes_context=True)
def back(context: Context) -> str:
@ -54,37 +58,23 @@ def css_class(field, css):
@register.simple_tag
def gravatar(email, size=None, rating=None):
"""
Generates a Gravatar URL for the given email address.
Syntax::
{% gravatar <email> [size] [rating] %}
Example::
{% gravatar someone@example.com 48 pg %}
"""
# gravatar uses md5 for their URLs, so md5 can't be avoided
gravatar_url = "%savatar/%s" % (
"https://secure.gravatar.com/",
md5(email.encode("utf-8")).hexdigest(), # nosec
)
parameters = [
p
for p in (
("s", size or "158"),
("r", rating or "g"),
def avatar(user: User) -> str:
"""Get avatar, depending on passbook.avatar setting"""
mode = CONFIG.raw.get("passbook").get("avatars")
if mode == "none":
return static("passbook/user-default.png")
if mode == "gravatar":
parameters = [
("s", "158"),
("r", "g"),
]
# gravatar uses md5 for their URLs, so md5 can't be avoided
mail_hash = md5(user.email.encode("utf-8")).hexdigest() # nosec
gravatar_url = (
f"{GRAVATAR_URL}/avatar/{mail_hash}?{urlencode(parameters, doseq=True)}"
)
if p[1]
]
if parameters:
gravatar_url += "?" + urlencode(parameters, doseq=True)
return escape(gravatar_url)
return escape(gravatar_url)
raise ValueError(f"Invalid avatar mode {mode}")
@register.filter
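The new avatar tag replaces the free-form gravatar tag and is driven by the passbook.avatars setting added to the default config above. A rough sketch of the two modes, assuming a User instance named user; the templatetag module path is not shown in this diff:

from passbook.lib.config import CONFIG

CONFIG.raw.get("passbook")["avatars"] = "none"      # same override the test runner applies
avatar(user)   # -> "/static/passbook/user-default.png"

CONFIG.raw.get("passbook")["avatars"] = "gravatar"
avatar(user)   # -> "https://secure.gravatar.com/avatar/<md5 of email>?s=158&r=g"

In templates, usage would presumably change from {% gravatar user.email %} to {% avatar user %}.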

View File

@ -21,9 +21,7 @@ class BaseController:
def __init__(self, outpost: Outpost):
self.outpost = outpost
self.logger = get_logger(
controller=self.__class__.__name__, outpost=self.outpost
)
self.logger = get_logger()
self.deployment_ports = {}
# pylint: disable=invalid-name
@ -35,7 +33,7 @@ class BaseController:
"""Call .up() but capture all log output and return it."""
with capture_logs() as logs:
self.up()
return [f"{x['controller']}: {x['event']}" for x in logs]
return [x["event"] for x in logs]
def down(self):
"""Handler to delete everything we've created"""

View File

@ -24,7 +24,10 @@ class DockerController(BaseController):
def __init__(self, outpost: Outpost) -> None:
super().__init__(outpost)
self.client = from_env()
try:
self.client = from_env()
except DockerException as exc:
raise ControllerException from exc
def _get_labels(self) -> Dict[str, str]:
return {}

View File

@ -35,9 +35,7 @@ class KubernetesObjectReconciler(Generic[T]):
def __init__(self, controller: "KubernetesController"):
self.controller = controller
self.namespace = controller.outpost.config.kubernetes_namespace
self.logger = get_logger(
controller=self.__class__.__name__, outpost=controller.outpost
)
self.logger = get_logger()
@property
def name(self) -> str:

View File

@ -1,5 +1,5 @@
"""Kubernetes Deployment Reconciler"""
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Dict
from kubernetes.client import (
AppsV1Api,
@ -41,7 +41,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
@property
def name(self) -> str:
return f"passbook-outpost-{self.outpost.name}"
return f"passbook-outpost-{self.controller.outpost.uuid.hex}"
def reconcile(self, current: V1Deployment, reference: V1Deployment):
if current.spec.replicas != reference.spec.replicas:
@ -52,6 +52,14 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
):
raise NeedsUpdate()
def get_pod_meta(self) -> Dict[str, str]:
"""Get common object metadata"""
return {
"app.kubernetes.io/name": "passbook-outpost",
"app.kubernetes.io/managed-by": "passbook.beryju.org",
"passbook.beryju.org/outpost-uuid": self.controller.outpost.uuid.hex,
}
def get_reference_object(self) -> V1Deployment:
"""Get deployment object for outpost"""
# Generate V1ContainerPort objects
@ -59,17 +67,18 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
for port_name, port in self.controller.deployment_ports.items():
container_ports.append(V1ContainerPort(container_port=port, name=port_name))
meta = self.get_object_meta(name=self.name)
secret_name = f"passbook-outpost-{self.controller.outpost.uuid.hex}-api"
return V1Deployment(
metadata=meta,
spec=V1DeploymentSpec(
replicas=self.outpost.config.kubernetes_replicas,
selector=V1LabelSelector(match_labels=meta.labels),
selector=V1LabelSelector(match_labels=self.get_pod_meta()),
template=V1PodTemplateSpec(
metadata=V1ObjectMeta(labels=meta.labels),
metadata=V1ObjectMeta(labels=self.get_pod_meta()),
spec=V1PodSpec(
containers=[
V1Container(
name=self.outpost.type,
name=str(self.outpost.type),
image=f"{self.image_base}-{self.outpost.type}:{__version__}",
ports=container_ports,
env=[
@ -77,7 +86,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
name="PASSBOOK_HOST",
value_from=V1EnvVarSource(
secret_key_ref=V1SecretKeySelector(
name=f"passbook-outpost-{self.outpost.name}-api",
name=secret_name,
key="passbook_host",
)
),
@ -86,7 +95,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
name="PASSBOOK_TOKEN",
value_from=V1EnvVarSource(
secret_key_ref=V1SecretKeySelector(
name=f"passbook-outpost-{self.outpost.name}-api",
name=secret_name,
key="token",
)
),
@ -95,7 +104,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
name="PASSBOOK_INSECURE",
value_from=V1EnvVarSource(
secret_key_ref=V1SecretKeySelector(
name=f"passbook-outpost-{self.outpost.name}-api",
name=secret_name,
key="passbook_host_insecure",
)
),
@ -117,9 +126,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
)
def retrieve(self) -> V1Deployment:
return self.api.read_namespaced_deployment(
f"passbook-outpost-{self.outpost.name}", self.namespace
)
return self.api.read_namespaced_deployment(self.name, self.namespace)
def update(self, current: V1Deployment, reference: V1Deployment):
return self.api.patch_namespaced_deployment(
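Two related changes run through the Kubernetes reconcilers in this changeset: object names switch from outpost.name (free-form and user-editable) to outpost.uuid.hex (stable and safe as a Kubernetes identifier), and the pod labels move into get_pod_meta() so the Deployment selector, the pod template and the Service selector below all share the same label set. A sketch of the derived names; the values are made up:

outpost.name                                   # "My Proxy Outpost" - free-form
outpost.uuid.hex                               # "1f0c4e2da9e14b8aa1e2b3c4d5e6f708"
f"passbook-outpost-{outpost.uuid.hex}"         # Deployment / Service / Ingress name
f"passbook-outpost-{outpost.uuid.hex}-api"     # Secret referenced by the Deployment env vars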

View File

@ -27,7 +27,7 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
@property
def name(self) -> str:
return f"passbook-outpost-{self.controller.outpost.name}-api"
return f"passbook-outpost-{self.controller.outpost.uuid.hex}-api"
def reconcile(self, current: V1Secret, reference: V1Secret):
for key in reference.data.keys():
@ -59,9 +59,7 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
)
def retrieve(self) -> V1Secret:
return self.api.read_namespaced_secret(
f"passbook-outpost-{self.controller.outpost.name}-api", self.namespace
)
return self.api.read_namespaced_secret(self.name, self.namespace)
def update(self, current: V1Secret, reference: V1Secret):
return self.api.patch_namespaced_secret(

View File

@ -7,6 +7,7 @@ from passbook.outposts.controllers.k8s.base import (
KubernetesObjectReconciler,
NeedsUpdate,
)
from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
if TYPE_CHECKING:
from passbook.outposts.controllers.kubernetes import KubernetesController
@ -21,7 +22,7 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
@property
def name(self) -> str:
return f"passbook-outpost-{self.controller.outpost.name}"
return f"passbook-outpost-{self.controller.outpost.uuid.hex}"
def reconcile(self, current: V1Service, reference: V1Service):
if len(current.spec.ports) != len(reference.spec.ports):
@ -36,9 +37,10 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
ports = []
for port_name, port in self.controller.deployment_ports.items():
ports.append(V1ServicePort(name=port_name, port=port))
selector_labels = DeploymentReconciler(self.controller).get_pod_meta()
return V1Service(
metadata=meta,
spec=V1ServiceSpec(ports=ports, selector=meta.labels, type="ClusterIP"),
spec=V1ServiceSpec(ports=ports, selector=selector_labels, type="ClusterIP"),
)
def create(self, reference: V1Service):
@ -50,9 +52,7 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
)
def retrieve(self) -> V1Service:
return self.api.read_namespaced_service(
f"passbook-outpost-{self.controller.outpost.name}", self.namespace
)
return self.api.read_namespaced_service(self.name, self.namespace)
def update(self, current: V1Service, reference: V1Service):
return self.api.patch_namespaced_service(

View File

@ -5,6 +5,7 @@ from typing import Dict, List, Type
from kubernetes.client import OpenApiException
from kubernetes.config import load_incluster_config, load_kube_config
from kubernetes.config.config_exception import ConfigException
from structlog.testing import capture_logs
from yaml import dump_all
from passbook.outposts.controllers.base import BaseController, ControllerException
@ -43,6 +44,18 @@ class KubernetesController(BaseController):
except OpenApiException as exc:
raise ControllerException from exc
def up_with_logs(self) -> List[str]:
try:
all_logs = []
for reconcile_key in self.reconcile_order:
with capture_logs() as logs:
reconciler = self.reconcilers[reconcile_key](self)
reconciler.up()
all_logs += [f"{reconcile_key.title()}: {x['event']}" for x in logs]
return all_logs
except OpenApiException as exc:
raise ControllerException from exc
def down(self):
try:
for reconcile_key in self.reconcile_order:
@ -56,7 +69,6 @@ class KubernetesController(BaseController):
documents = []
for reconcile_key in self.reconcile_order:
reconciler = self.reconcilers[reconcile_key](self)
reconciler.up()
documents.append(reconciler.get_reference_object().to_dict())
with StringIO() as _str:
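The Kubernetes controller now overrides up_with_logs() and wraps each reconciler in its own capture_logs() block, so the output can be prefixed with the reconciler name instead of relying on a controller key that the base controller no longer binds; get_static_deployment() also stops calling reconciler.up() as a side effect. A minimal sketch of the capture pattern in isolation; the reconciler mapping is a placeholder:

from structlog.testing import capture_logs

all_logs = []
for name, reconciler in {"secret": secret_rec, "deployment": deploy_rec}.items():
    with capture_logs() as logs:     # captured structlog events, as dicts
        reconciler.up()
    all_logs += [f"{name.title()}: {event['event']}" for event in logs]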

View File

@ -204,7 +204,11 @@ class OutpostState:
def for_channel(outpost: Outpost, channel: str) -> "OutpostState":
"""Get state for a single channel"""
key = f"{outpost.state_cache_prefix}_{channel}"
data = cache.get(key, {"uid": channel})
default_data = {"uid": channel}
data = cache.get(key, default_data)
if isinstance(data, str):
cache.delete(key)
data = default_data
state = from_dict(OutpostState, data)
state.uid = channel
# pylint: disable=protected-access
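The isinstance guard protects against cache entries written in an older format (or otherwise corrupted) where the value is a plain string; from_dict, presumably dacite's, expects a mapping and would fail on it. A short sketch of the situation it handles; key and channel are placeholders:

cache.set(key, "proxy-outpost-1")        # legacy value written as a bare string
data = cache.get(key, {"uid": channel})  # -> "proxy-outpost-1", a str
if isinstance(data, str):                # stale entry: discard and fall back
    cache.delete(key)
    data = {"uid": channel}
state = from_dict(OutpostState, data)    # now guaranteed to receive a dict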

View File

@ -35,9 +35,10 @@ def outpost_controller_all():
@CELERY_APP.task(bind=True, base=MonitoredTask)
def outpost_controller(self: MonitoredTask, outpost_pk: str):
"""Launch controller deployment of Outpost"""
"""Create/update/monitor the deployment of an Outpost"""
logs = []
outpost: Outpost = Outpost.objects.get(pk=outpost_pk)
self.set_uid(slugify(outpost.name))
try:
if outpost.type == OutpostType.PROXY:
if outpost.deployment_type == OutpostDeploymentType.KUBERNETES:
@ -45,15 +46,9 @@ def outpost_controller(self: MonitoredTask, outpost_pk: str):
if outpost.deployment_type == OutpostDeploymentType.DOCKER:
logs = ProxyDockerController(outpost).up_with_logs()
except ControllerException as exc:
self.set_status(
TaskResult(TaskResultStatus.ERROR, uid=slugify(outpost.name)).with_error(
exc
)
)
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
else:
self.set_status(
TaskResult(TaskResultStatus.SUCCESSFUL, logs, uid=slugify(outpost.name))
)
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, logs))
@CELERY_APP.task()

View File

@ -24,6 +24,7 @@
<label class="pf-c-form__label" for="help-text-simple-form-name">
<span class="pf-c-form__label-text">PASSBOOK_TOKEN</span>
</label>
{# TODO: Only load key on modal open #}
<input class="pf-c-form-control" data-pb-fetch-key="key" data-pb-fetch-fill="{% url 'passbook_api:token-view-key' identifier=outpost.token_identifier %}" readonly type="text" value="" />
</div>
<h3>{% trans 'If your passbook Instance is using a self-signed certificate, set this value.' %}</h3>

View File

@ -1,9 +1,16 @@
"""outpost tests"""
from os import environ
from unittest.case import skipUnless
from unittest.mock import patch
from django.test import TestCase
from guardian.models import UserObjectPermission
from passbook.crypto.models import CertificateKeyPair
from passbook.flows.models import Flow
from passbook.outposts.controllers.k8s.base import NeedsUpdate
from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
from passbook.outposts.controllers.kubernetes import KubernetesController
from passbook.outposts.models import Outpost, OutpostDeploymentType, OutpostType
from passbook.providers.proxy.models import ProxyProvider
@ -58,3 +65,50 @@ class OutpostTests(TestCase):
permissions = UserObjectPermission.objects.filter(user=outpost.user)
self.assertEqual(len(permissions), 1)
self.assertEqual(permissions[0].object_pk, str(outpost.pk))
@skipUnless("PB_TEST_K8S" in environ, "Kubernetes test cluster required")
class OutpostKubernetesTests(TestCase):
"""Test Kubernetes Controllers"""
def setUp(self):
super().setUp()
self.provider: ProxyProvider = ProxyProvider.objects.create(
name="test",
internal_host="http://localhost",
external_host="http://localhost",
authorization_flow=Flow.objects.first(),
)
self.outpost: Outpost = Outpost.objects.create(
name="test",
type=OutpostType.PROXY,
deployment_type=OutpostDeploymentType.KUBERNETES,
)
self.outpost.providers.add(self.provider)
self.outpost.save()
def test_deployment_reconciler(self):
"""test that deployment requires update"""
controller = KubernetesController(self.outpost)
deployment_reconciler = DeploymentReconciler(controller)
self.assertIsNotNone(deployment_reconciler.retrieve())
config = self.outpost.config
config.kubernetes_replicas = 3
self.outpost.config = config
with self.assertRaises(NeedsUpdate):
deployment_reconciler.reconcile(
deployment_reconciler.retrieve(),
deployment_reconciler.get_reference_object(),
)
with patch.object(deployment_reconciler, "image_base", "test"):
with self.assertRaises(NeedsUpdate):
deployment_reconciler.reconcile(
deployment_reconciler.retrieve(),
deployment_reconciler.get_reference_object(),
)
deployment_reconciler.delete(deployment_reconciler.get_reference_object())

View File

@ -1,5 +1,5 @@
"""passbook expression policy evaluator"""
from ipaddress import ip_address
from ipaddress import ip_address, ip_network
from typing import List
from django.http import HttpRequest
@ -22,6 +22,8 @@ class PolicyEvaluator(BaseEvaluator):
super().__init__()
self._messages = []
self._context["pb_message"] = self.expr_func_message
self._context["ip_address"] = ip_address
self._context["ip_network"] = ip_network
self._filename = policy_name or "PolicyEvaluator"
def expr_func_message(self, message: str):
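With ip_network exposed next to ip_address, expression policies can do subnet membership checks directly in the policy body. An illustrative expression (not part of this changeset), assuming the policy request exposes the client address via request.http_request:

# allow only clients coming from the internal 10.0.0.0/8 range
return ip_address(request.http_request.META["REMOTE_ADDR"]) in ip_network("10.0.0.0/8")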

View File

@ -34,7 +34,7 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
@property
def name(self) -> str:
return f"passbook-outpost-{self.controller.outpost.name}"
return f"passbook-outpost-{self.controller.outpost.uuid.hex}"
def reconcile(
self, current: NetworkingV1beta1Ingress, reference: NetworkingV1beta1Ingress
@ -56,7 +56,10 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
have_hosts = [rule.host for rule in reference.spec.rules]
have_hosts.sort()
have_hosts_tls = reference.spec.tls.hosts
have_hosts_tls = []
for tls_config in reference.spec.tls:
if tls_config:
have_hosts_tls += tls_config.hosts
have_hosts_tls.sort()
if have_hosts != expected_hosts:
@ -102,7 +105,7 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
)
return NetworkingV1beta1Ingress(
metadata=meta,
spec=NetworkingV1beta1IngressSpec(rules=rules, tls=tls_config),
spec=NetworkingV1beta1IngressSpec(rules=rules, tls=[tls_config]),
)
def create(self, reference: NetworkingV1beta1Ingress):
@ -114,9 +117,7 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
)
def retrieve(self) -> NetworkingV1beta1Ingress:
return self.api.read_namespaced_ingress(
f"passbook-outpost-{self.controller.outpost.name}", self.namespace
)
return self.api.read_namespaced_ingress(self.name, self.namespace)
def update(
self, current: NetworkingV1beta1Ingress, reference: NetworkingV1beta1Ingress
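spec.tls on an Ingress is a list of TLS blocks, so the reference object now wraps tls_config in a list and reconcile() flattens hosts across every entry before comparing. A sketch of what the reference spec ends up containing; host names and the secret name are placeholders, and the TLS class name is assumed from the v1beta1 client:

NetworkingV1beta1IngressSpec(
    rules=rules,
    tls=[
        NetworkingV1beta1IngressTLS(
            hosts=["app1.example.com", "app2.example.com"],
            secret_name="placeholder-tls-secret",
        )
    ],
)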

View File

@ -31,9 +31,9 @@ class TestControllers(TestCase):
outpost.providers.add(provider)
outpost.save()
controller = ProxyKubernetesController(outpost.pk)
controller = ProxyKubernetesController(outpost)
manifest = controller.get_static_deployment()
self.assertEqual(len(list(yaml.load_all(manifest, Loader=yaml.SafeLoader))), 3)
self.assertEqual(len(list(yaml.load_all(manifest, Loader=yaml.SafeLoader))), 4)
def test_kubernetes_controller_deploy(self):
"""Test Kubernetes Controller"""
@ -51,5 +51,6 @@ class TestControllers(TestCase):
outpost.providers.add(provider)
outpost.save()
controller = ProxyKubernetesController(outpost.pk)
controller = ProxyKubernetesController(outpost)
controller.up()
controller.down()

View File

@ -269,9 +269,14 @@ CELERY_TASK_SOFT_TIME_LIMIT = 600
CELERY_BEAT_SCHEDULE = {
"clean_expired_models": {
"task": "passbook.core.tasks.clean_expired_models",
"schedule": crontab(minute="*/5"), # Run every 5 minutes
"schedule": crontab(minute="*/5"),
"options": {"queue": "passbook_scheduled"},
}
},
"db_backup": {
"task": "passbook.core.tasks.backup_database",
"schedule": crontab(minute=0, hour=0),
"options": {"queue": "passbook_scheduled"},
},
}
CELERY_TASK_CREATE_MISSING_QUEUES = True
CELERY_TASK_DEFAULT_QUEUE = "passbook"
@ -404,6 +409,7 @@ _LOGGING_HANDLER_MAP = {
"websockets": "WARNING",
"daphne": "WARNING",
"dbbackup": "ERROR",
"kubernetes": "INFO",
}
for handler_name, level in _LOGGING_HANDLER_MAP.items():
# pyright: reportGeneralTypeIssues=false
@ -444,6 +450,7 @@ for _app in INSTALLED_APPS:
if DEBUG:
INSTALLED_APPS.append("debug_toolbar")
MIDDLEWARE.append("debug_toolbar.middleware.DebugToolbarMiddleware")
CELERY_TASK_ALWAYS_EAGER = True
INSTALLED_APPS.append("passbook.core.apps.PassbookCoreConfig")
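The new db_backup entry schedules the database backup task once a day on the dedicated passbook_scheduled queue. For reference, how the crontab expressions used here read (illustrative):

from celery.schedules import crontab

crontab(minute="*/5")       # every 5 minutes (clean_expired_models)
crontab(minute=0, hour=0)   # once a day at 00:00 (db_backup)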

View File

@ -1,6 +1,8 @@
"""Integrate ./manage.py test with pytest"""
from django.conf import settings
from passbook.lib.config import CONFIG
class PytestTestRunner:
"""Runs pytest to discover and run tests."""
@ -11,6 +13,7 @@ class PytestTestRunner:
self.keepdb = keepdb
settings.TEST = True
settings.CELERY_TASK_ALWAYS_EAGER = True
CONFIG.raw.get("passbook")["avatars"] = "none"
def run_tests(self, test_labels):
"""Run pytest and return the exitcode.

View File

@ -20,8 +20,9 @@ def ldap_sync_all():
@CELERY_APP.task(bind=True, base=MonitoredTask)
def ldap_sync(self: MonitoredTask, source_pk: int):
"""Sync a single source"""
"""Synchronization of an LDAP Source"""
source: LDAPSource = LDAPSource.objects.get(pk=source_pk)
self.set_uid(slugify(source.name))
try:
syncer = LDAPSynchronizer(source)
user_count = syncer.sync_users()
@ -33,10 +34,7 @@ def ldap_sync(self: MonitoredTask, source_pk: int):
TaskResult(
TaskResultStatus.SUCCESSFUL,
[f"Synced {user_count} users", f"Synced {group_count} groups"],
uid=slugify(source.name),
)
)
except LDAPException as exc:
self.set_status(
TaskResult(TaskResultStatus.ERROR, uid=slugify(source.name)).with_error(exc)
)
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))

View File

@ -13,7 +13,7 @@ LOGGER = get_logger()
@CELERY_APP.task(bind=True, base=MonitoredTask)
def clean_temporary_users(self: MonitoredTask):
"""Remove old temporary users"""
"""Remove temporary users created by SAML Sources"""
_now = now()
messages = []
deleted_users = 0

View File

@ -37,6 +37,8 @@ def send_mails(stage: EmailStage, *messages: List[EmailMultiAlternatives]):
def send_mail(self: MonitoredTask, email_stage_pk: int, message: Dict[Any, Any]):
"""Send Email for Email Stage. Retries are scheduled automatically."""
self.save_on_success = False
message_id = make_msgid(domain=DNS_NAME)
self.set_uid(message_id)
try:
stage: EmailStage = EmailStage.objects.get(pk=email_stage_pk)
backend = stage.backend
@ -48,7 +50,6 @@ def send_mail(self: MonitoredTask, email_stage_pk: int, message: Dict[Any, Any])
setattr(message_object, key, value)
message_object.from_email = stage.from_address
# Because we use the Message-ID as UID for the task, manually assign it
message_id = make_msgid(domain=DNS_NAME)
message_object.extra_headers["Message-ID"] = message_id
LOGGER.debug("Sending mail", to=message_object.to)
@ -57,7 +58,6 @@ def send_mail(self: MonitoredTask, email_stage_pk: int, message: Dict[Any, Any])
TaskResult(
TaskResultStatus.SUCCESSFUL,
messages=["Successfully sent Mail."],
uid=message_id,
)
)
except (SMTPException, ConnectionError) as exc:

View File

@ -1,7 +1,6 @@
"""passbook password stage"""
from typing import Any, Dict, List, Optional
from django.contrib import messages
from django.contrib.auth import _clean_credentials
from django.contrib.auth.backends import BaseBackend
from django.contrib.auth.signals import user_login_failed
@ -122,5 +121,4 @@ class PasswordStageView(FormView, StageView):
self.executor.plan.context[
PLAN_CONTEXT_AUTHENTICATION_BACKEND
] = user.backend
messages.success(self.request, _("Successfully logged in!"))
return self.executor.stage_ok()

View File

@ -39,4 +39,5 @@ class UserLoginStageView(StageView):
flow_slug=self.executor.flow.slug,
session_duration=self.executor.current_stage.session_duration,
)
messages.success(self.request, _("Successfully logged in!"))
return self.executor.stage_ok()

View File

@ -442,9 +442,9 @@
}
},
"rollup": {
"version": "2.32.0",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-2.32.0.tgz",
"integrity": "sha512-0FIG1jY88uhCP2yP4CfvtKEqPDRmsUwfY1kEOOM+DH/KOGATgaIFd/is1+fQOxsvh62ELzcFfKonwKWnHhrqmw==",
"version": "2.32.1",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-2.32.1.tgz",
"integrity": "sha512-Op2vWTpvK7t6/Qnm1TTh7VjEZZkN8RWgf0DHbkKzQBwNf748YhXbozHVefqpPp/Fuyk/PQPAnYsBxAEtlMvpUw==",
"requires": {
"fsevents": "~2.1.2"
}

View File

@ -11,7 +11,7 @@
"codemirror": "^5.58.1",
"lit-element": "^2.4.0",
"lit-html": "^1.3.0",
"rollup": "^2.32.0"
"rollup": "^2.32.1"
},
"devDependencies": {
"rollup-plugin-commonjs": "^10.1.0",

Binary file not shown (new image added, 68 B).

View File

@ -10,6 +10,7 @@ import (
"strings"
"time"
"github.com/BeryJu/passbook/proxy/pkg"
"github.com/BeryJu/passbook/proxy/pkg/client"
"github.com/BeryJu/passbook/proxy/pkg/client/outposts"
"github.com/getsentry/sentry-go"
@ -70,6 +71,7 @@ func doGlobalSetup(config map[string]interface{}) {
default:
log.SetLevel(log.DebugLevel)
}
log.WithField("version", pkg.VERSION).Info("Starting passbook proxy")
var dsn string
if config[ConfigErrorReportingEnabled].(bool) {

View File

@ -1,3 +1,3 @@
package pkg
const VERSION = "0.12.0-stable"
const VERSION = "0.12.6-stable"

View File

@ -833,6 +833,11 @@ paths:
description: ''
required: false
type: string
- name: evaluate_on_plan
in: query
description: ''
required: false
type: string
- name: re_evaluate_policies
in: query
description: ''
@ -6337,10 +6342,14 @@ definitions:
title: Stage
type: string
format: uuid
evaluate_on_plan:
title: Evaluate on plan
description: Evaluate policies during the Flow planning process. Disable this
for input-based policies.
type: boolean
re_evaluate_policies:
title: Re evaluate policies
description: When this option is enabled, the planner will re-evaluate policies
bound to this binding.
description: Evaluate policies when the Stage is present to the user.
type: boolean
order:
title: Order