Compare commits
121 Commits
version/0. ... version/0.
| SHA1 | Author | Date |
|---|---|---|
| 7393d8720b | |||
| 287cb72d6f | |||
| c5eff4bdd6 | |||
| e9a33ed8ab | |||
| 875173a86e | |||
| df7642b365 | |||
| 3bc1c0aa8b | |||
| 8951f5695e | |||
| 7401278707 | |||
| e99f6e289b | |||
| 07da6ffa69 | |||
| dc18730094 | |||
| a202679bfb | |||
| 1edcda58ba | |||
| 5cb7f0794e | |||
| 7e8e3893eb | |||
| e91e286ebc | |||
| ef4a115b61 | |||
| b79b73f5c6 | |||
| 056e3ed15b | |||
| fb5e210af8 | |||
| e5e2615f15 | |||
| 6c72a9e2e8 | |||
| c04d0a373a | |||
| bd74e518a7 | |||
| 3b76af4eaa | |||
| 706448dc14 | |||
| 34793f7cef | |||
| ba96c9526e | |||
| 617432deaa | |||
| 36bf2be16d | |||
| 912ed343e6 | |||
| 2e15df295a | |||
| eaab3f62cb | |||
| aa615b0fd6 | |||
| b775f2788c | |||
| 9c28db3d89 | |||
| 67360bd6e9 | |||
| 4f6f8c7cae | |||
| 3b82ad798b | |||
| 8827f06ac1 | |||
| 251672a67d | |||
| 4ffc0e2a08 | |||
| 4e1808632d | |||
| 791627d3ce | |||
| f3df3a0157 | |||
| 6aaae53a19 | |||
| 4d84f6d598 | |||
| 4e2349b6d9 | |||
| cd57b8f7f3 | |||
| 40b1fc06b0 | |||
| 02fa217e28 | |||
| 6652514358 | |||
| dcd3dc9744 | |||
| d6afdc575e | |||
| 287b38efee | |||
| e805fb62fb | |||
| c92dda77f1 | |||
| f12fd78822 | |||
| caba183c9b | |||
| 3aeaa121a3 | |||
| a9f3118a7d | |||
| 054b819262 | |||
| 6b3411f63b | |||
| 6a8000ea0d | |||
| 352d4db0d7 | |||
| 4b665cfb8f | |||
| 4e12003944 | |||
| 6bfd465855 | |||
| e8670aa693 | |||
| 5263e750b1 | |||
| a2a9d73296 | |||
| 6befc9d627 | |||
| 73497a27cc | |||
| f3098418f2 | |||
| a5197963b2 | |||
| e4634bcc78 | |||
| 74da44a6a9 | |||
| 3324473cd0 | |||
| 39d8038533 | |||
| bbcf58705f | |||
| 7b5a0964b2 | |||
| 8eca76e464 | |||
| fb9ab368f8 | |||
| 877279b2ee | |||
| 301be4b411 | |||
| 728f527ccb | |||
| 3f1c790b1d | |||
| b00573bde2 | |||
| aeee3ad7f9 | |||
| ef021495ef | |||
| 061eab4b36 | |||
| 870e01f836 | |||
| e2ca72adf0 | |||
| 395ef43eae | |||
| a4cc653757 | |||
| db4ff20906 | |||
| 1f0fbd33b6 | |||
| 5de8d2721e | |||
| 0d65da9a9e | |||
| 4316ee4330 | |||
| 2ed9a1dbe3 | |||
| 8e03824d20 | |||
| 754dbdd0e5 | |||
| e13d348315 | |||
| 169f3ebe5b | |||
| f8ad604e85 | |||
| 774b9c8a61 | |||
| d8c522233e | |||
| 82d50f7eaa | |||
| 1c426c5136 | |||
| d6e14cc551 | |||
| c3917ebc2e | |||
| 7203bd37a3 | |||
| 597188c7ee | |||
| ac4c314042 | |||
| 05866d3544 | |||
| 6596bc6034 | |||
| c6661ef4d2 | |||
| 386e23dfac | |||
| 5d7220ca70 |
```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.12.0-stable
+current_version = 0.12.10-stable
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)
```
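The `parse` line in the `[bumpversion]` block above is a regular expression with named groups for each version component. As a minimal sketch (plain Python `re`, independent of the bumpversion tool itself), it decomposes the new version string like this:

```python
import re

# The parse pattern from the [bumpversion] config above, reproduced verbatim.
PARSE = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)"

match = re.match(PARSE, "0.12.10-stable")
assert match is not None
print(match.groupdict())
# {'major': '0', 'minor': '12', 'patch': '10', 'release': 'stable'}
```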
```diff
@@ -3,4 +3,4 @@ helm
 passbook-ui
 static
 *.env.yml
-node_modules/
+**/node_modules
```
.github/dependabot.yml: 16 changes (vendored)

```diff
@@ -24,3 +24,19 @@ updates:
     open-pull-requests-limit: 10
     assignees:
       - BeryJu
+  - package-ecosystem: docker
+    directory: "/"
+    schedule:
+      interval: daily
+      time: "04:00"
+    open-pull-requests-limit: 10
+    assignees:
+      - BeryJu
+  - package-ecosystem: docker
+    directory: "/proxy"
+    schedule:
+      interval: daily
+      time: "04:00"
+    open-pull-requests-limit: 10
+    assignees:
+      - BeryJu
```
.github/workflows/release.yml: 14 changes (vendored)

```diff
@@ -18,11 +18,11 @@ jobs:
       - name: Building Docker Image
         run: docker build
           --no-cache
-          -t beryju/passbook:0.12.0-stable
+          -t beryju/passbook:0.12.10-stable
           -t beryju/passbook:latest
           -f Dockerfile .
       - name: Push Docker Container to Registry (versioned)
-        run: docker push beryju/passbook:0.12.0-stable
+        run: docker push beryju/passbook:0.12.10-stable
       - name: Push Docker Container to Registry (latest)
         run: docker push beryju/passbook:latest
   build-proxy:
@@ -48,11 +48,11 @@ jobs:
           cd proxy
           docker build \
           --no-cache \
-          -t beryju/passbook-proxy:0.12.0-stable \
+          -t beryju/passbook-proxy:0.12.10-stable \
           -t beryju/passbook-proxy:latest \
           -f Dockerfile .
       - name: Push Docker Container to Registry (versioned)
-        run: docker push beryju/passbook-proxy:0.12.0-stable
+        run: docker push beryju/passbook-proxy:0.12.10-stable
       - name: Push Docker Container to Registry (latest)
         run: docker push beryju/passbook-proxy:latest
   build-static:
@@ -77,11 +77,11 @@ jobs:
         run: docker build
           --no-cache
           --network=$(docker network ls | grep github | awk '{print $1}')
-          -t beryju/passbook-static:0.12.0-stable
+          -t beryju/passbook-static:0.12.10-stable
           -t beryju/passbook-static:latest
           -f static.Dockerfile .
       - name: Push Docker Container to Registry (versioned)
-        run: docker push beryju/passbook-static:0.12.0-stable
+        run: docker push beryju/passbook-static:0.12.10-stable
       - name: Push Docker Container to Registry (latest)
         run: docker push beryju/passbook-static:latest
   test-release:
@@ -114,5 +114,5 @@ jobs:
           SENTRY_PROJECT: passbook
           SENTRY_URL: https://sentry.beryju.org
         with:
-          tagName: 0.12.0-stable
+          tagName: 0.12.10-stable
           environment: beryjuorg-prod
```
Dockerfile: 11 changes

```diff
@@ -25,7 +25,16 @@ RUN apt-get update && \
     pip install -r /requirements.txt --no-cache-dir && \
     apt-get remove --purge -y build-essential && \
     apt-get autoremove --purge -y && \
-    adduser --system --no-create-home --uid 1000 --group --home /passbook passbook
+    # This is quite hacky, but docker has no guaranteed Group ID
+    # we could instead check for the GID of the socket and add the user dynamically,
+    # but then we have to drop permmissions later
+    groupadd -g 998 docker_998 && \
+    groupadd -g 999 docker_999 && \
+    adduser --system --no-create-home --uid 1000 --group --home /passbook passbook && \
+    usermod -a -G docker_998 passbook && \
+    usermod -a -G docker_999 passbook && \
+    mkdir /backups && \
+    chown passbook:passbook /backups
 
 COPY ./passbook/ /passbook
 COPY ./manage.py /
```
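The added comment explains the design choice: the Docker socket's group has no guaranteed GID, so two fixed groups (998 and 999) are created and the `passbook` user joins both. A minimal sketch of the alternative the comment mentions, reading the GID off the mounted socket at start-up instead of hard-coding it; the socket path and the idea of running this from an entrypoint are assumptions, not part of the diff:

```python
import grp
import os
import subprocess

SOCKET = "/var/run/docker.sock"  # assumed mount point of the Docker socket

# Read the group ID that actually owns the socket instead of hard-coding 998/999.
gid = os.stat(SOCKET).st_gid

# Reuse an existing group with that GID, or create one, then add the service user.
try:
    group_name = grp.getgrgid(gid).gr_name
except KeyError:
    group_name = f"docker_{gid}"
    subprocess.run(["groupadd", "-g", str(gid), group_name], check=True)

subprocess.run(["usermod", "-a", "-G", group_name, "passbook"], check=True)
```

As the comment notes, doing this dynamically would require dropping privileges after container start, which is why the fixed-GID approach was taken in the diff.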
Makefile: 2 changes

```diff
@@ -12,7 +12,7 @@ lint-fix:
 
 lint:
 	pyright passbook e2e lifecycle
-	bandit -r passbook e2e lifecycle
+	bandit -r passbook e2e lifecycle -x node_modules
 	pylint passbook e2e lifecycle
 	prospector
 
```
556
Pipfile.lock
generated
556
Pipfile.lock
generated
@ -25,17 +25,17 @@
|
|||||||
},
|
},
|
||||||
"amqp": {
|
"amqp": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:9881f8e6fe23e3db9faa6cfd8c05390213e1d1b95c0162bc50552cad75bffa5f",
|
"sha256:5b9062d5c0812335c75434bf17ce33d7a20ecfedaa0733faec7379868eb4068a",
|
||||||
"sha256:a8fb8151eb9d12204c9f1784c0da920476077609fa0a70f2468001e3a4258484"
|
"sha256:fcd5b3baeeb7fc19b3486ff6d10543099d40ae1f5c9196eae695d1cde1b2f784"
|
||||||
],
|
],
|
||||||
"version": "==5.0.1"
|
"version": "==5.0.2"
|
||||||
},
|
},
|
||||||
"asgiref": {
|
"asgiref": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:7e51911ee147dd685c3c8b805c0ad0cb58d360987b56953878f8c06d2d1c6f1a",
|
"sha256:5ee950735509d04eb673bd7f7120f8fa1c9e2df495394992c73234d526907e17",
|
||||||
"sha256:9fc6fb5d39b8af147ba40765234fa822b39818b12cc80b35ad9b0cef3a476aed"
|
"sha256:7162a3cb30ab0609f1a4c95938fd73e8604f63bdba516a7f7d64b83ff09478f0"
|
||||||
],
|
],
|
||||||
"version": "==3.2.10"
|
"version": "==3.3.1"
|
||||||
},
|
},
|
||||||
"async-timeout": {
|
"async-timeout": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -46,10 +46,10 @@
|
|||||||
},
|
},
|
||||||
"attrs": {
|
"attrs": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594",
|
"sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6",
|
||||||
"sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc"
|
"sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"
|
||||||
],
|
],
|
||||||
"version": "==20.2.0"
|
"version": "==20.3.0"
|
||||||
},
|
},
|
||||||
"autobahn": {
|
"autobahn": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -74,18 +74,18 @@
|
|||||||
},
|
},
|
||||||
"boto3": {
|
"boto3": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:9ab957090f7893172768bb8b8d2c5cce0afd36a9d36d73a9fb14168f72d75a8b",
|
"sha256:60cc37e027d8911f4890275bcd8d1e3f9f5bdb18b3506641a343ae7e60a1d41a",
|
||||||
"sha256:f56148e2c6b9a2d704218da42f07d72f00270bfddb13bc1bdea20d3327daa51e"
|
"sha256:904d2f1935241c4437781769f9c0d90826470c59eef0d62ea7df4aaf63295d7c"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==1.15.18"
|
"version": "==1.16.15"
|
||||||
},
|
},
|
||||||
"botocore": {
|
"botocore": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:de5f9fc0c7e88ee7ba831fa27475be258ae09ece99143ed623d3618a3c84ee2c",
|
"sha256:4e9dc37fb3cc47425c6480dc22999d556ca3cf71714f2937df0fc3db2a7f6581",
|
||||||
"sha256:e224754230e7e015836ba20037cac6321e8e2ce9b8627c14d579fcb37249decd"
|
"sha256:a2d789c8bed5bf1165cc57c95e2db1e74ec50508beb770a89f7c89bc68523281"
|
||||||
],
|
],
|
||||||
"version": "==1.18.18"
|
"version": "==1.19.15"
|
||||||
},
|
},
|
||||||
"cachetools": {
|
"cachetools": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -96,18 +96,18 @@
|
|||||||
},
|
},
|
||||||
"celery": {
|
"celery": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:7aa4ee46ed318bc177900ae7c01500354aee62d723255b0925db0754bcd4d390",
|
"sha256:012c814967fe89e3f5d2cf49df2dba3de5f29253a7f4f2270e8fce6b901b4ebf",
|
||||||
"sha256:e3e8956d74af986b1e9770e0a294338b259618bf70283d6157416328e50c2bd6"
|
"sha256:930c3acd55349d028c4e7104a7d377729cbcca19d9fce470c17172d9e7f9a8b6"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==5.0.1"
|
"version": "==5.0.2"
|
||||||
},
|
},
|
||||||
"certifi": {
|
"certifi": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3",
|
"sha256:1f422849db327d534e3d0c5f02a263458c3955ec0aae4ff09b95f195c59f4edd",
|
||||||
"sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"
|
"sha256:f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4"
|
||||||
],
|
],
|
||||||
"version": "==2020.6.20"
|
"version": "==2020.11.8"
|
||||||
},
|
},
|
||||||
"cffi": {
|
"cffi": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -152,19 +152,19 @@
|
|||||||
},
|
},
|
||||||
"channels": {
|
"channels": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:08e756406d7165cb32f6fc3090c0643f41ca9f7e0f7fada0b31194662f20f414",
|
"sha256:74db79c9eca616be69d38013b22083ab5d3f9ccda1ab5e69096b1bb7da2d9b18",
|
||||||
"sha256:80a5ad1962ae039a3dcc0a5cb5212413e66e2f11ad9e9db8004834436daf3400"
|
"sha256:f50a6e79757a64c1e45e95e144a2ac5f1e99ee44a0718ab182c501f5e5abd268"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==2.4.0"
|
"version": "==3.0.2"
|
||||||
},
|
},
|
||||||
"channels-redis": {
|
"channels-redis": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:3ce9832b64a2d7f950dd11e4f0dca784de7cbee99e95a3c345a1460c8878b682",
|
"sha256:18d63f6462a58011740dc8eeb57ea4b31ec220eb551cb71b27de9c6779a549de",
|
||||||
"sha256:41ee0af352d3b6b31a6b613985b51dc5695d2da60688c38e6caa0a1772735a9f"
|
"sha256:2fb31a63b05373f6402da2e6a91a22b9e66eb8b56626c6bfc93e156c734c5ae6"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==3.1.0"
|
"version": "==3.2.0"
|
||||||
},
|
},
|
||||||
"chardet": {
|
"chardet": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -216,27 +216,30 @@
|
|||||||
},
|
},
|
||||||
"cryptography": {
|
"cryptography": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:091d31c42f444c6f519485ed528d8b451d1a0c7bf30e8ca583a0cac44b8a0df6",
|
"sha256:07ca431b788249af92764e3be9a488aa1d39a0bc3be313d826bbec690417e538",
|
||||||
"sha256:18452582a3c85b96014b45686af264563e3e5d99d226589f057ace56196ec78b",
|
"sha256:13b88a0bd044b4eae1ef40e265d006e34dbcde0c2f1e15eb9896501b2d8f6c6f",
|
||||||
"sha256:1dfa985f62b137909496e7fc182dac687206d8d089dd03eaeb28ae16eec8e7d5",
|
"sha256:32434673d8505b42c0de4de86da8c1620651abd24afe91ae0335597683ed1b77",
|
||||||
"sha256:1e4014639d3d73fbc5ceff206049c5a9a849cefd106a49fa7aaaa25cc0ce35cf",
|
"sha256:3cd75a683b15576cfc822c7c5742b3276e50b21a06672dc3a800a2d5da4ecd1b",
|
||||||
"sha256:22e91636a51170df0ae4dcbd250d318fd28c9f491c4e50b625a49964b24fe46e",
|
"sha256:4e7268a0ca14536fecfdf2b00297d4e407da904718658c1ff1961c713f90fd33",
|
||||||
"sha256:3b3eba865ea2754738616f87292b7f29448aec342a7c720956f8083d252bf28b",
|
"sha256:545a8550782dda68f8cdc75a6e3bf252017aa8f75f19f5a9ca940772fc0cb56e",
|
||||||
"sha256:651448cd2e3a6bc2bb76c3663785133c40d5e1a8c1a9c5429e4354201c6024ae",
|
"sha256:55d0b896631412b6f0c7de56e12eb3e261ac347fbaa5d5e705291a9016e5f8cb",
|
||||||
"sha256:726086c17f94747cedbee6efa77e99ae170caebeb1116353c6cf0ab67ea6829b",
|
"sha256:5849d59358547bf789ee7e0d7a9036b2d29e9a4ddf1ce5e06bb45634f995c53e",
|
||||||
"sha256:844a76bc04472e5135b909da6aed84360f522ff5dfa47f93e3dd2a0b84a89fa0",
|
"sha256:6dc59630ecce8c1f558277ceb212c751d6730bd12c80ea96b4ac65637c4f55e7",
|
||||||
"sha256:88c881dd5a147e08d1bdcf2315c04972381d026cdb803325c03fe2b4a8ed858b",
|
"sha256:7117319b44ed1842c617d0a452383a5a052ec6aa726dfbaffa8b94c910444297",
|
||||||
"sha256:96c080ae7118c10fcbe6229ab43eb8b090fccd31a09ef55f83f690d1ef619a1d",
|
"sha256:75e8e6684cf0034f6bf2a97095cb95f81537b12b36a8fedf06e73050bb171c2d",
|
||||||
"sha256:a0c30272fb4ddda5f5ffc1089d7405b7a71b0b0f51993cb4e5dbb4590b2fc229",
|
"sha256:7b8d9d8d3a9bd240f453342981f765346c87ade811519f98664519696f8e6ab7",
|
||||||
"sha256:bb1f0281887d89617b4c68e8db9a2c42b9efebf2702a3c5bf70599421a8623e3",
|
"sha256:a035a10686532b0587d58a606004aa20ad895c60c4d029afa245802347fab57b",
|
||||||
"sha256:c447cf087cf2dbddc1add6987bbe2f767ed5317adb2d08af940db517dd704365",
|
"sha256:a4e27ed0b2504195f855b52052eadcc9795c59909c9d84314c5408687f933fc7",
|
||||||
"sha256:c4fd17d92e9d55b84707f4fd09992081ba872d1a0c610c109c18e062e06a2e55",
|
"sha256:a733671100cd26d816eed39507e585c156e4498293a907029969234e5e634bc4",
|
||||||
"sha256:d0d5aeaedd29be304848f1c5059074a740fa9f6f26b84c5b63e8b29e73dfc270",
|
"sha256:a75f306a16d9f9afebfbedc41c8c2351d8e61e818ba6b4c40815e2b5740bb6b8",
|
||||||
"sha256:daf54a4b07d67ad437ff239c8a4080cfd1cc7213df57d33c97de7b4738048d5e",
|
"sha256:bd717aa029217b8ef94a7d21632a3bb5a4e7218a4513d2521c2a2fd63011e98b",
|
||||||
"sha256:e993468c859d084d5579e2ebee101de8f5a27ce8e2159959b6673b418fd8c785",
|
"sha256:d25cecbac20713a7c3bc544372d42d8eafa89799f492a43b79e1dfd650484851",
|
||||||
"sha256:f118a95c7480f5be0df8afeb9a11bd199aa20afab7a96bcf20409b411a3a85f0"
|
"sha256:d26a2557d8f9122f9bf445fc7034242f4375bd4e95ecda007667540270965b13",
|
||||||
|
"sha256:d3545829ab42a66b84a9aaabf216a4dce7f16dbc76eb69be5c302ed6b8f4a29b",
|
||||||
|
"sha256:d3d5e10be0cf2a12214ddee45c6bd203dab435e3d83b4560c03066eda600bfe3",
|
||||||
|
"sha256:efe15aca4f64f3a7ea0c09c87826490e50ed166ce67368a68f315ea0807a20df"
|
||||||
],
|
],
|
||||||
"version": "==2.9.2"
|
"version": "==3.2.1"
|
||||||
},
|
},
|
||||||
"dacite": {
|
"dacite": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -248,10 +251,10 @@
|
|||||||
},
|
},
|
||||||
"daphne": {
|
"daphne": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:1ca46d7419103958bbc9576fb7ba3b25b053006e22058bc97084ee1a7d44f4ba",
|
"sha256:60856f7efa0b1e1b969efa074e8698bd09de4713ecc06e6a4d19d04c66c4a3bd",
|
||||||
"sha256:aa64840015709bbc9daa3c4464a4a4d437937d6cda10a9b51e913eb319272553"
|
"sha256:b43e70d74ff832a634ff6c92badd208824e4530e08b340116517e5aad0aca774"
|
||||||
],
|
],
|
||||||
"version": "==2.5.0"
|
"version": "==3.0.0"
|
||||||
},
|
},
|
||||||
"defusedxml": {
|
"defusedxml": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -263,11 +266,11 @@
|
|||||||
},
|
},
|
||||||
"django": {
|
"django": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:a2127ad0150ec6966655bedf15dbbff9697cc86d61653db2da1afa506c0b04cc",
|
"sha256:14a4b7cd77297fba516fc0d92444cc2e2e388aa9de32d7a68d4a83d58f5a4927",
|
||||||
"sha256:c93c28ccf1d094cbd00d860e83128a39e45d2c571d3b54361713aaaf9a94cac4"
|
"sha256:14b87775ffedab2ef6299b73343d1b4b41e5d4e2aa58c6581f114dbec01e3f8f"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==3.1.2"
|
"version": "==3.1.3"
|
||||||
},
|
},
|
||||||
"django-cors-middleware": {
|
"django-cors-middleware": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -310,11 +313,11 @@
|
|||||||
},
|
},
|
||||||
"django-otp": {
|
"django-otp": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:2fb1c8dbd7e7ae76a65b63d89d3d8c3e1105a48bc29830b81c6e417a89380658",
|
"sha256:8ba5ab9bd2738c7321376c349d7cce49cf4404e79f6804e0a3cc462a91728e18",
|
||||||
"sha256:fef1f2de9a52bc37e16211b98b4323e5b34fa24739116fbe3d1ff018c17ebea8"
|
"sha256:f523fb9dec420f28a29d3e2ad72ac06f64588956ed4f2b5b430d8e957ebb8287"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==1.0.1"
|
"version": "==1.0.2"
|
||||||
},
|
},
|
||||||
"django-prometheus": {
|
"django-prometheus": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -349,11 +352,10 @@
|
|||||||
},
|
},
|
||||||
"djangorestframework": {
|
"djangorestframework": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:5c5071fcbad6dce16f566d492015c829ddb0df42965d488b878594aabc3aed21",
|
"sha256:0209bafcb7b5010fdfec784034f059d512256424de2a0f084cb82b096d6dd6a7"
|
||||||
"sha256:d54452aedebb4b650254ca092f9f4f5df947cb1de6ab245d817b08b4f4156249"
|
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==3.12.1"
|
"version": "==3.12.2"
|
||||||
},
|
},
|
||||||
"djangorestframework-guardian": {
|
"djangorestframework-guardian": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -373,11 +375,11 @@
|
|||||||
},
|
},
|
||||||
"drf-yasg2": {
|
"drf-yasg2": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:c4aa21d52f3964f99748eed68eb24be0fdad65e55bb56b99ae85c950718bac64",
|
"sha256:7037a8041eb5d1073fa504a284fc889685f93d0bfd008a963db1b366db786734",
|
||||||
"sha256:e880b3fa298a614360f4d882e8bc1712b51e1b28696acbd2684ac0ab18275a62"
|
"sha256:75e661ca5cf15eb44fcfab408c7b864f87c20794f564aa08b3a31817a857f19d"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==1.19.2"
|
"version": "==1.19.4"
|
||||||
},
|
},
|
||||||
"eight": {
|
"eight": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -402,10 +404,10 @@
|
|||||||
},
|
},
|
||||||
"google-auth": {
|
"google-auth": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:712dd7d140a9a1ea218e5688c7fcb04af71b431a29ec9ce433e384c60e387b98",
|
"sha256:5176db85f1e7e837a646cd9cede72c3c404ccf2e3373d9ee14b2db88febad440",
|
||||||
"sha256:9c0f71789438d703f77b94aad4ea545afaec9a65f10e6cc1bc8b89ce242244bb"
|
"sha256:b728625ff5dfce8f9e56a499c8a4eb51443a67f20f6d28b67d5774c310ec4b6b"
|
||||||
],
|
],
|
||||||
"version": "==1.22.1"
|
"version": "==1.23.0"
|
||||||
},
|
},
|
||||||
"gunicorn": {
|
"gunicorn": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -571,6 +573,7 @@
|
|||||||
},
|
},
|
||||||
"lxml": {
|
"lxml": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
|
"sha256:098fb713b31050463751dcc694878e1d39f316b86366fb9fe3fbbe5396ac9fab",
|
||||||
"sha256:0e89f5d422988c65e6936e4ec0fe54d6f73f3128c80eb7ecc3b87f595523607b",
|
"sha256:0e89f5d422988c65e6936e4ec0fe54d6f73f3128c80eb7ecc3b87f595523607b",
|
||||||
"sha256:189ad47203e846a7a4951c17694d845b6ade7917c47c64b29b86526eefc3adf5",
|
"sha256:189ad47203e846a7a4951c17694d845b6ade7917c47c64b29b86526eefc3adf5",
|
||||||
"sha256:1d87936cb5801c557f3e981c9c193861264c01209cb3ad0964a16310ca1b3301",
|
"sha256:1d87936cb5801c557f3e981c9c193861264c01209cb3ad0964a16310ca1b3301",
|
||||||
@ -706,9 +709,11 @@
|
|||||||
"sha256:0deac2af1a587ae12836aa07970f5cb91964f05a7c6cdb69d8425ff4c15d4e2c",
|
"sha256:0deac2af1a587ae12836aa07970f5cb91964f05a7c6cdb69d8425ff4c15d4e2c",
|
||||||
"sha256:0e4dc3d5996760104746e6cfcdb519d9d2cd27c738296525d5867ea695774e67",
|
"sha256:0e4dc3d5996760104746e6cfcdb519d9d2cd27c738296525d5867ea695774e67",
|
||||||
"sha256:11b9c0ebce097180129e422379b824ae21c8f2a6596b159c7659e2e5a00e1aa0",
|
"sha256:11b9c0ebce097180129e422379b824ae21c8f2a6596b159c7659e2e5a00e1aa0",
|
||||||
|
"sha256:15978a1fbd225583dd8cdaf37e67ccc278b5abecb4caf6b2d6b8e2b948e953f6",
|
||||||
"sha256:1fabed9ea2acc4efe4671b92c669a213db744d2af8a9fc5d69a8e9bc14b7a9db",
|
"sha256:1fabed9ea2acc4efe4671b92c669a213db744d2af8a9fc5d69a8e9bc14b7a9db",
|
||||||
"sha256:2dac98e85565d5688e8ab7bdea5446674a83a3945a8f416ad0110018d1501b94",
|
"sha256:2dac98e85565d5688e8ab7bdea5446674a83a3945a8f416ad0110018d1501b94",
|
||||||
"sha256:42ec1035841b389e8cc3692277a0bd81cdfe0b65d575a2c8862cec7a80e62e52",
|
"sha256:42ec1035841b389e8cc3692277a0bd81cdfe0b65d575a2c8862cec7a80e62e52",
|
||||||
|
"sha256:6422f2ff0919fd720195f64ffd8f924c1395d30f9a495f31e2392c2efafb5056",
|
||||||
"sha256:6a32f3a4cb2f6e1a0b15215f448e8ce2da192fd4ff35084d80d5e39da683e79b",
|
"sha256:6a32f3a4cb2f6e1a0b15215f448e8ce2da192fd4ff35084d80d5e39da683e79b",
|
||||||
"sha256:7312e931b90fe14f925729cde58022f5d034241918a5c4f9797cac62f6b3a9dd",
|
"sha256:7312e931b90fe14f925729cde58022f5d034241918a5c4f9797cac62f6b3a9dd",
|
||||||
"sha256:7d92a09b788cbb1aec325af5fcba9fed7203897bbd9269d5691bb1e3bce29550",
|
"sha256:7d92a09b788cbb1aec325af5fcba9fed7203897bbd9269d5691bb1e3bce29550",
|
||||||
@ -763,84 +768,84 @@
|
|||||||
},
|
},
|
||||||
"pycryptodome": {
|
"pycryptodome": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:02e51e1d5828d58f154896ddfd003e2e7584869c275e5acbe290443575370fba",
|
"sha256:19cb674df6c74a14b8b408aa30ba8a89bd1c01e23505100fb45f930fbf0ed0d9",
|
||||||
"sha256:03d5cca8618620f45fd40f827423f82b86b3a202c8d44108601b0f5f56b04299",
|
"sha256:1cfdb92dca388e27e732caa72a1cc624520fe93752a665c3b6cd8f1a91b34916",
|
||||||
"sha256:0e24171cf01021bc5dc17d6a9d4f33a048f09d62cc3f62541e95ef104588bda4",
|
"sha256:27397aee992af69d07502126561d851ba3845aa808f0e55c71ad0efa264dd7d4",
|
||||||
"sha256:132a56abba24e2e06a479d8e5db7a48271a73a215f605017bbd476d31f8e71c1",
|
"sha256:28f75e58d02019a7edc7d4135203d2501dfc47256d175c72c9798f9a129a49a7",
|
||||||
"sha256:1e655746f539421d923fd48df8f6f40b3443d80b75532501c0085b64afed9df5",
|
"sha256:2a68df525b387201a43b27b879ce8c08948a430e883a756d6c9e3acdaa7d7bd8",
|
||||||
"sha256:2b998dc45ef5f4e5cf5248a6edfcd8d8e9fb5e35df8e4259b13a1b10eda7b16b",
|
"sha256:411745c6dce4eff918906eebcde78771d44795d747e194462abb120d2e537cd9",
|
||||||
"sha256:360955eece2cd0fa694a708d10303c6abd7b39614fa2547b6bd245da76198beb",
|
"sha256:46e96aeb8a9ca8b1edf9b1fd0af4bf6afcf3f1ca7fa35529f5d60b98f3e4e959",
|
||||||
"sha256:39ef9fb52d6ec7728fce1f1693cb99d60ce302aeebd59bcedea70ca3203fda60",
|
"sha256:4ed27951b0a17afd287299e2206a339b5b6d12de9321e1a1575261ef9c4a851b",
|
||||||
"sha256:4350a42028240c344ee855f032c7d4ad6ff4f813bfbe7121547b7dc579ecc876",
|
"sha256:50826b49fbca348a61529693b0031cdb782c39060fb9dca5ac5dff858159dc5a",
|
||||||
"sha256:50348edd283afdccddc0938cdc674484533912ba8a99a27c7bfebb75030aa856",
|
"sha256:5598dc6c9dbfe882904e54584322893eff185b98960bbe2cdaaa20e8a437b6e5",
|
||||||
"sha256:54bdedd28476dea8a3cd86cb67c0df1f0e3d71cae8022354b0f879c41a3d27b2",
|
"sha256:5c3c4865730dfb0263f822b966d6d58429d8b1e560d1ddae37685fd9e7c63161",
|
||||||
"sha256:55eb61aca2c883db770999f50d091ff7c14016f2769ad7bca3d9b75d1d7c1b68",
|
"sha256:5f19e6ef750f677d924d9c7141f54bade3cd56695bbfd8a9ef15d0378557dfe4",
|
||||||
"sha256:6276478ada411aca97c0d5104916354b3d740d368407912722bd4d11aa9ee4c2",
|
"sha256:60febcf5baf70c566d9d9351c47fbd8321da9a4edf2eff45c4c31c86164ca794",
|
||||||
"sha256:663f8de2b3df2e744d6e1610506e0ea4e213bde906795953c1e82279c169f0a7",
|
"sha256:62c488a21c253dadc9f731a32f0ac61e4e436d81a1ea6f7d1d9146ed4d20d6bd",
|
||||||
"sha256:67dcad1b8b201308586a8ca2ffe89df1e4f731d5a4cdd0610cc4ea790351c739",
|
"sha256:6d3baaf82681cfb1a842f1c8f77beac791ceedd99af911e4f5fabec32bae2259",
|
||||||
"sha256:709b9f144d23e290b9863121d1ace14a72e01f66ea9c903fbdc690520dfdfcf0",
|
"sha256:6e4227849e4231a3f5b35ea5bdedf9a82b3883500e5624f00a19156e9a9ef861",
|
||||||
"sha256:8063a712fba642f78d3c506b0896846601b6de7f5c3d534e388ad0cc07f5a149",
|
"sha256:6e89bb3826e6f84501e8e3b205c22595d0c5492c2f271cbb9ee1c48eb1866645",
|
||||||
"sha256:80d57177a0b7c14d4594c62bbb47fe2f6309ad3b0a34348a291d570925c97a82",
|
"sha256:70d807d11d508433daf96244ec1c64e55039e8a35931fc5ea9eee94dbe3cb6b5",
|
||||||
"sha256:87006cf0d81505408f1ae4f55cf8a5d95a8e029a4793360720ae17c6500f7ecc",
|
"sha256:76b1a34d74bb2c91bce460cdc74d1347592045627a955e9a252554481c17c52f",
|
||||||
"sha256:9f62d21bc693f3d7d444f17ed2ad7a913b4c37c15cd807895d013c39c0517dfd",
|
"sha256:7798e73225a699651888489fbb1dbc565e03a509942a8ce6194bbe6fb582a41f",
|
||||||
"sha256:a207231a52426de3ff20f5608f0687261a3329d97a036c51f7d4c606a6f30c23",
|
"sha256:834b790bbb6bd18956f625af4004d9c15eed12d5186d8e57851454ae76d52215",
|
||||||
"sha256:abc2e126c9490e58a36a0f83516479e781d83adfb134576a5cbe5c6af2a3e93c",
|
"sha256:843e5f10ecdf9d307032b8b91afe9da1d6ed5bb89d0bbec5c8dcb4ba44008e11",
|
||||||
"sha256:b56638d58a3a4be13229c6a815cd448f9e3ce40c00880a5398471b42ee86f50e",
|
"sha256:8f9f84059039b672a5a705b3c5aa21747867bacc30a72e28bf0d147cc8ef85ed",
|
||||||
"sha256:bcd5b8416e73e4b0d48afba3704d8c826414764dafaed7a1a93c442188d90ccc",
|
"sha256:9000877383e2189dafd1b2fc68c6c726eca9a3cfb6d68148fbb72ccf651959b6",
|
||||||
"sha256:bec2bcdf7c9ce7f04d718e51887f3b05dc5c1cfaf5d2c2e9065ecddd1b2f6c9a",
|
"sha256:910e202a557e1131b1c1b3f17a63914d57aac55cf9fb9b51644962841c3995c4",
|
||||||
"sha256:c8bf40cf6e281a4378e25846924327e728a887e8bf0ee83b2604a0f4b61692e8",
|
"sha256:946399d15eccebafc8ce0257fc4caffe383c75e6b0633509bd011e357368306c",
|
||||||
"sha256:cecbf67e81d6144a50dc615629772859463b2e4f815d0c082fa421db362f040e",
|
"sha256:a199e9ca46fc6e999e5f47fce342af4b56c7de85fae893c69ab6aa17531fb1e1",
|
||||||
"sha256:d8074c8448cfd0705dfa71ca333277fce9786d0b9cac75d120545de6253f996a",
|
"sha256:a3d8a9efa213be8232c59cdc6b65600276508e375e0a119d710826248fd18d37",
|
||||||
"sha256:dd302b6ae3965afeb5ef1b0d92486f986c0e65183cd7835973f0b593800590e6",
|
"sha256:a4599c0ca0fc027c780c1c45ed996d5bef03e571470b7b1c7171ec1e1a90914c",
|
||||||
"sha256:de6e1cd75677423ff64712c337521e62e3a7a4fc84caabbd93207752e831a85a",
|
"sha256:b4e6b269a8ddaede774e5c3adbef6bf452ee144e6db8a716d23694953348cd86",
|
||||||
"sha256:ef39c98d9b8c0736d91937d193653e47c3b19ddf4fc3bccdc5e09aaa4b0c5d21",
|
"sha256:b68794fba45bdb367eeb71249c26d23e61167510a1d0c3d6cf0f2f14636e62ee",
|
||||||
"sha256:f2e045224074d5664dc9cbabbf4f4d4d46f1ee90f24780e3a9a668fd096ff17f",
|
"sha256:d7ec2bd8f57c559dd24e71891c51c25266a8deb66fc5f02cc97c7fb593d1780a",
|
||||||
"sha256:f521178e5a991ffd04182ed08f552daca1affcb826aeda0e1945cd989a9d4345",
|
"sha256:e15bde67ccb7d4417f627dd16ffe2f5a4c2941ce5278444e884cb26d73ecbc61",
|
||||||
"sha256:f78a68c2c820e4731e510a2df3eef0322f24fde1781ced970bf497b6c7d92982",
|
"sha256:eb01f9997e4d6a8ec8a1ad1f676ba5a362781ff64e8189fe2985258ba9cb9706",
|
||||||
"sha256:fbe65d5cfe04ff2f7684160d50f5118bdefb01e3af4718eeb618bfed40f19d94"
|
"sha256:faa682c404c218e8788c3126c9a4b8fbcc54dc245b5b6e8ea5b46f3b63bd0c84"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==3.9.8"
|
"version": "==3.9.9"
|
||||||
},
|
},
|
||||||
"pycryptodomex": {
|
"pycryptodomex": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:06f5a458624c9b0e04c0086c7f84bcc578567dab0ddc816e0476b3057b18339f",
|
"sha256:15c03ffdac17731b126880622823d30d0a3cc7203cd219e6b9814140a44e7fab",
|
||||||
"sha256:1714675fb4ac29a26ced38ca22eb8ffd923ac851b7a6140563863194d7158422",
|
"sha256:20fb7f4efc494016eab1bc2f555bc0a12dd5ca61f35c95df8061818ffb2c20a3",
|
||||||
"sha256:17272d06e4b2f6455ee2cbe93e8eb50d9450a5dc6223d06862ee1ea5d1235861",
|
"sha256:28ee3bcb4d609aea3040cad995a8e2c9c6dc57c12183dadd69e53880c35333b9",
|
||||||
"sha256:2199708ebeed4b82eb45b10e1754292677f5a0df7d627ee91ea01290b9bab7e6",
|
"sha256:305e3c46f20d019cd57543c255e7ba49e432e275d7c0de8913b6dbe57a851bc8",
|
||||||
"sha256:2275a663c9e744ee4eace816ef2d446b3060554c5773a92fbc79b05bf47debda",
|
"sha256:3547b87b16aad6afb28c9b3a9cd870e11b5e7b5ac649b74265258d96d8de1130",
|
||||||
"sha256:2710fc8d83b3352b370db932b3710033b9d630b970ff5aaa3e7458b5336e3b32",
|
"sha256:3642252d7bfc4403a42050e18ba748bedebd5a998a8cba89665a4f42aea4c380",
|
||||||
"sha256:35b9c9177a9fe7288b19dd41554c9c8ca1063deb426dd5a02e7e2a7416b6bd11",
|
"sha256:404faa3e518f8bea516aae2aac47d4d960397199a15b4bd6f66cad97825469a0",
|
||||||
"sha256:3b23d63030819b7d9ac7db9360305fd1241e6870ca5b7e8d59fee4db4674a490",
|
"sha256:42669638e4f7937b7141044a2fbd1019caca62bd2cdd8b535f731426ab07bde1",
|
||||||
"sha256:3caa32cf807422adf33c10c88c22e9e2e08b9d9d042f12e1e25fe23113dd618f",
|
"sha256:4632d55a140b28e20be3cd7a3057af52fb747298ff0fd3290d4e9f245b5004ba",
|
||||||
"sha256:48cc2cfc251f04a6142badeb666d1ff49ca6fdfc303fd72579f62b768aaa52b9",
|
"sha256:4a88c9383d273bdce3afc216020282c9c5c39ec0bd9462b1a206af6afa377cf0",
|
||||||
"sha256:4ae6379350a09339109e9b6f419bb2c3f03d3e441f4b0f5b8ca699d47cc9ff7e",
|
"sha256:4ce1fc1e6d2fd2d6dc197607153327989a128c093e0e94dca63408f506622c3e",
|
||||||
"sha256:4e0b27697fa1621c6d3d3b4edeec723c2e841285de6a8d378c1962da77b349be",
|
"sha256:55cf4e99b3ba0122dee570dc7661b97bf35c16aab3e2ccb5070709d282a1c7ab",
|
||||||
"sha256:58e19560814dabf5d788b95a13f6b98279cf41a49b1e49ee6cf6c79a57adb4c9",
|
"sha256:5e486cab2dfcfaec934dd4f5d5837f4a9428b690f4d92a3b020fd31d1497ca64",
|
||||||
"sha256:8044eae59301dd392fbb4a7c5d64e1aea8ef0be2540549807ecbe703d6233d68",
|
"sha256:65ec88c8271448d2ea109d35c1f297b09b872c57214ab7e832e413090d3469a9",
|
||||||
"sha256:85c108b42e47d4073344ff61d4e019f1d95bb7725ca0fe87d0a2deb237c10e49",
|
"sha256:6c95a3361ce70068cf69526a58751f73ddac5ba27a3c2379b057efa2f5338c8c",
|
||||||
"sha256:89be1bf55e50116fe7e493a7c0c483099770dd7f81b87ac8d04a43b1a203e259",
|
"sha256:73240335f4a1baf12880ebac6df66ab4d3a9212db9f3efe809c36a27280d16f8",
|
||||||
"sha256:8fcdda24dddf47f716400d54fc7f75cadaaba1dd47cc127e59d752c9c0fc3c48",
|
"sha256:7651211e15109ac0058a49159265d9f6e6423c8a81c65434d3c56d708417a05b",
|
||||||
"sha256:914fbb18e29c54585e6aa39d300385f90d0fa3b3cc02ed829b08f95c1acf60c2",
|
"sha256:7b5b7c5896f8172ea0beb283f7f9428e0ab88ec248ce0a5b8c98d73e26267d51",
|
||||||
"sha256:93a75d1acd54efed314b82c952b39eac96ce98d241ad7431547442e5c56138aa",
|
"sha256:836fe39282e75311ce4c38468be148f7fac0df3d461c5de58c5ff1ddb8966bac",
|
||||||
"sha256:9fd758e5e2fe02d57860b85da34a1a1e7037155c4eadc2326fc7af02f9cae214",
|
"sha256:871852044f55295449fbf225538c2c4118525093c32f0a6c43c91bed0452d7e3",
|
||||||
"sha256:a2bc4e1a2e6ca3a18b2e0be6131a23af76fecb37990c159df6edc7da6df913e3",
|
"sha256:892e93f3e7e10c751d6c17fa0dc422f7984cfd5eb6690011f9264dc73e2775fc",
|
||||||
"sha256:a2ee8ba99d33e1a434fcd27d7d0aa7964163efeee0730fe2efc9d60edae1fc71",
|
"sha256:934e460c5058346c6f1d62fdf3db5680fbdfbfd212722d24d8277bf47cd9ebdc",
|
||||||
"sha256:b2d756620078570d3f940c84bc94dd30aa362b795cce8b2723300a8800b87f1c",
|
"sha256:9736f3f3e1761024200637a080a4f922f5298ad5d780e10dbb5634fe8c65b34c",
|
||||||
"sha256:c0d085c8187a1e4d3402f626c9e438b5861151ab132d8761d9c5ce6491a87761",
|
"sha256:a1d38a96da57e6103423a446079ead600b450cf0f8ebf56a231895abf77e7ffc",
|
||||||
"sha256:c315262e26d54a9684e323e37ac9254f481d57fcc4fd94002992460898ef5c04",
|
"sha256:a385fceaa0cdb97f0098f1c1e9ec0b46cc09186ddf60ec23538e871b1dddb6dc",
|
||||||
"sha256:c990f2c58f7c67688e9e86e6557ed05952669ff6f1343e77b459007d85f7df00",
|
"sha256:a7cf1c14e47027d9fb9d26aa62e5d603994227bd635e58a8df4b1d2d1b6a8ed7",
|
||||||
"sha256:ccbbec59bf4b74226170c54476da5780c9176bae084878fc94d9a2c841218e34",
|
"sha256:a9aac1a30b00b5038d3d8e48248f3b58ea15c827b67325c0d18a447552e30fc8",
|
||||||
"sha256:dc2bed32c7b138f1331794e454a953360c8cedf3ee62ae31f063822da6007489",
|
"sha256:b696876ee583d15310be57311e90e153a84b7913ac93e6b99675c0c9867926d0",
|
||||||
"sha256:ddb1ae2891c8cb83a25da87a3e00111a9654fc5f0b70f18879c41aece45d6182",
|
"sha256:bef9e9d39393dc7baec39ba4bac6c73826a4db02114cdeade2552a9d6afa16e2",
|
||||||
"sha256:e070a1f91202ed34c396be5ea842b886f6fa2b90d2db437dc9fb35a26c80c060",
|
"sha256:c885fe4d5f26ce8ca20c97d02e88f5fdd92c01e1cc771ad0951b21e1641faf6d",
|
||||||
"sha256:e42860fbe1292668b682f6dabd225fbe2a7a4fa1632f0c39881c019e93dea594",
|
"sha256:d2d1388595cb5d27d9220d5cbaff4f37c6ec696a25882eb06d224d241e6e93fb",
|
||||||
"sha256:e4e1c486bf226822c8dceac81d0ec59c0a2399dbd1b9e04f03c3efa3605db677",
|
"sha256:d2e853e0f9535e693fade97768cf7293f3febabecc5feb1e9b2ffdfe1044ab96",
|
||||||
"sha256:ea4d4b58f9bc34e224ef4b4604a6be03d72ef1f8c486391f970205f6733dbc46",
|
"sha256:d62fbab185a6b01c5469eda9f0795f3d1a5bba24f5a5813f362e4b73a3c4dc70",
|
||||||
"sha256:f5bd6891380e0fb5467251daf22525644fdf6afd9ae8bc2fe065c78ea1882e0d",
|
"sha256:f20a62397e09704049ce9007bea4f6bad965ba9336a760c6f4ef1b4192e12d6d",
|
||||||
"sha256:f60b3484ce4be04f5da3777c51c5140d3fe21cdd6674f2b6568f41c8130bcdeb"
|
"sha256:f81f7311250d9480e36dec819127897ae772e7e8de07abfabe931b8566770b8e"
|
||||||
],
|
],
|
||||||
"version": "==3.9.8"
|
"version": "==3.9.9"
|
||||||
},
|
},
|
||||||
"pyhamcrest": {
|
"pyhamcrest": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -885,17 +890,17 @@
|
|||||||
},
|
},
|
||||||
"python-dotenv": {
|
"python-dotenv": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:8c10c99a1b25d9a68058a1ad6f90381a62ba68230ca93966882a4dbc3bc9c33d",
|
"sha256:0c8d1b80d1a1e91717ea7d526178e3882732420b03f08afea0406db6402e220e",
|
||||||
"sha256:c10863aee750ad720f4f43436565e4c1698798d763b63234fb5021b6c616e423"
|
"sha256:587825ed60b1711daea4832cf37524dfd404325b7db5e25ebe88c495c9f807a0"
|
||||||
],
|
],
|
||||||
"version": "==0.14.0"
|
"version": "==0.15.0"
|
||||||
},
|
},
|
||||||
"pytz": {
|
"pytz": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed",
|
"sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268",
|
||||||
"sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"
|
"sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd"
|
||||||
],
|
],
|
||||||
"version": "==2020.1"
|
"version": "==2020.4"
|
||||||
},
|
},
|
||||||
"pyyaml": {
|
"pyyaml": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -998,11 +1003,11 @@
|
|||||||
},
|
},
|
||||||
"sentry-sdk": {
|
"sentry-sdk": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:0eea248408d36e8e7037c7b73827bea20b13a4375bf1719c406cae6fcbc094e3",
|
"sha256:17b725df2258354ccb39618ae4ead29651aa92c01a92acf72f98efe06ee2e45a",
|
||||||
"sha256:5cf36eb6b1dc62d55f3c64289792cbaebc8ffa5a9da14474f49b46d20caa7fc8"
|
"sha256:9040539485226708b5cad0401d76628fba4eed9154bf301c50579767afe344fd"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==0.19.1"
|
"version": "==0.19.2"
|
||||||
},
|
},
|
||||||
"service-identity": {
|
"service-identity": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -1014,11 +1019,11 @@
|
|||||||
},
|
},
|
||||||
"signxml": {
|
"signxml": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:4c996153153c9b1eb7ff40cf624722946f8c2ab059febfa641e54cd59725acd9",
|
"sha256:b70e151d10d99cbc74a50a3344f508ee481fe3c376d61cd1cae850912d303d19",
|
||||||
"sha256:d116c283f2c940bc2b4edf011330107ba02f197650a4878466987e04142d43b1"
|
"sha256:bab03a6823c9a5b225d1e6266ce66b5d08c4ebfb42029fdb5d3e588b8128c86d"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==2.8.0"
|
"version": "==2.8.1"
|
||||||
},
|
},
|
||||||
"six": {
|
"six": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -1100,23 +1105,23 @@
|
|||||||
"secure"
|
"secure"
|
||||||
],
|
],
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a",
|
"sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2",
|
||||||
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"
|
"sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"markers": null,
|
"markers": null,
|
||||||
"version": "==1.25.10"
|
"version": "==1.25.11"
|
||||||
},
|
},
|
||||||
"uvicorn": {
|
"uvicorn": {
|
||||||
"extras": [
|
"extras": [
|
||||||
"standard"
|
"standard"
|
||||||
],
|
],
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:a461e76406088f448f36323f5ac774d50e5a552b6ccb54e4fca8d83ef614a7c2",
|
"sha256:8ff7495c74b8286a341526ff9efa3988ebab9a4b2f561c7438c3cb420992d7dd",
|
||||||
"sha256:d06a25caa8dc680ad92eb3ec67363f5281c092059613a1cc0100acba37fc0f45"
|
"sha256:e5dbed4a8a44c7b04376021021d63798d6a7bcfae9c654a0b153577b93854fba"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==0.12.1"
|
"version": "==0.12.2"
|
||||||
},
|
},
|
||||||
"uvloop": {
|
"uvloop": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -1189,48 +1194,60 @@
|
|||||||
},
|
},
|
||||||
"zope.interface": {
|
"zope.interface": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:040f833694496065147e76581c0bf32b229a8b8c5eda120a0293afb008222387",
|
"sha256:05a97ba92c1c7c26f25c9f671aa1ef85ffead6cdad13770e5b689cf983adc7e1",
|
||||||
"sha256:11198b44e4a3d8c7a80cc20bbdd65522258a4d82fe467cd310c9fcce8ffe2ed2",
|
"sha256:07d61722dd7d85547b7c6b0f5486b4338001fab349f2ac5cabc0b7182eb3425d",
|
||||||
"sha256:121a9dccfe0c34be9c33b2c28225f0284f9b8e090580ffdff26c38fa16c7ffe1",
|
"sha256:0a990dcc97806e5980bbb54b2e46b9cde9e48932d8e6984daf71ef1745516123",
|
||||||
"sha256:15f3082575e7e19581a80b866664f843719b647a7f7189c811ba7f9ab3309f83",
|
"sha256:150e8bcb7253a34a4535aeea3de36c0bb3b1a6a47a183a95d65a194b3e07f232",
|
||||||
"sha256:1d73d8986f948525536956ddd902e8a587a6846ebf4492117db16daba2865ddf",
|
"sha256:1743bcfe45af8846b775086471c28258f4c6e9ee8ef37484de4495f15a98b549",
|
||||||
"sha256:208e82f73b242275b8566ac07a25158e7b21fa2f14e642a7881048430612d1a6",
|
"sha256:1b5f6c8fff4ed32aa2dd43e84061bc8346f32d3ba6ad6e58f088fe109608f102",
|
||||||
"sha256:2557833df892558123d791d6ff80ac4a2a0351f69c7421c7d5f0c07db72c8865",
|
"sha256:21e49123f375703cf824214939d39df0af62c47d122d955b2a8d9153ea08cfd5",
|
||||||
"sha256:25ea6906f9987d42546329d06f9750e69f0ee62307a2e7092955ed0758e64f09",
|
"sha256:21f579134a47083ffb5ddd1307f0405c91aa8b61ad4be6fd5af0171474fe0c45",
|
||||||
"sha256:2c867914f7608674a555ac8daf20265644ac7be709e1da7d818089eebdfe544e",
|
"sha256:27c267dc38a0f0079e96a2945ee65786d38ef111e413c702fbaaacbab6361d00",
|
||||||
"sha256:2eadac20711a795d3bb7a2bfc87c04091cb5274d9c3281b43088a1227099b662",
|
"sha256:299bde0ab9e5c4a92f01a152b7fbabb460f31343f1416f9b7b983167ab1e33bc",
|
||||||
"sha256:37999d5ebd5d7bcd32438b725ca3470df05a7de8b1e9c0395bef24296b31ca99",
|
"sha256:2ab88d8f228f803fcb8cb7d222c579d13dab2d3622c51e8cf321280da01102a7",
|
||||||
"sha256:3ae8946d51789779f76e4fa326fd6676d8c19c1c3b4c4c5e9342807185264875",
|
"sha256:2ced4c35061eea623bc84c7711eedce8ecc3c2c51cd9c6afa6290df3bae9e104",
|
||||||
"sha256:5636cd7e60583b1608044ae4405e91575399430e66a5e1812f4bf30bcc55864e",
|
"sha256:2dcab01c660983ba5e5a612e0c935141ccbee67d2e2e14b833e01c2354bd8034",
|
||||||
"sha256:570e637cb6509998555f7e4af13006d89fad6c09cfc5c4795855385391063e4b",
|
"sha256:32546af61a9a9b141ca38d971aa6eb9800450fa6620ce6323cc30eec447861f3",
|
||||||
"sha256:590a40447ff3803c44050ce3c17c3958f11ca028dae3eacdd7b96775184394fa",
|
"sha256:32b40a4c46d199827d79c86bb8cb88b1bbb764f127876f2cb6f3a47f63dbada3",
|
||||||
"sha256:5aab51b9c1af1b8a84f40aa49ffe1684d41810b18d6c3e94aa50194e0a563f01",
|
"sha256:3cc94c69f6bd48ed86e8e24f358cb75095c8129827df1298518ab860115269a4",
|
||||||
"sha256:5ffe4e0753393bcbcfc9a58133ed3d3a584634cc7cc2e667f8e3e6fbcbb2155d",
|
"sha256:42b278ac0989d6f5cf58d7e0828ea6b5951464e3cf2ff229dd09a96cb6ba0c86",
|
||||||
"sha256:663982381bd428a275a841009e52983cc69c471a4979ce01344fadbf72cf353d",
|
"sha256:495b63fd0302f282ee6c1e6ea0f1c12cb3d1a49c8292d27287f01845ff252a96",
|
||||||
"sha256:6d06bf8e24dd6c473c4fbd8e16a83bd2e6d74add6ba25169043deb46d497b211",
|
"sha256:4af87cdc0d4b14e600e6d3d09793dce3b7171348a094ba818e2a68ae7ee67546",
|
||||||
"sha256:6e5b9a4bf133cf1887b4a04c21c10ca9f548114f19c83957b2820d5c84254940",
|
"sha256:4b94df9f2fdde7b9314321bab8448e6ad5a23b80542dcab53e329527d4099dcb",
|
||||||
"sha256:70a2aed9615645bbe9d82c0f52bc7e676d2c0f8a63933d68418e0cb307f30536",
|
"sha256:4c48ddb63e2b20fba4c6a2bf81b4d49e99b6d4587fb67a6cd33a2c1f003af3e3",
|
||||||
"sha256:7750746421c4395e3d2cc3d805919f4f57bb9f2a9a0ccd955566a9341050a1b4",
|
"sha256:4df9afd17bd5477e9f8c8b6bb8507e18dd0f8b4efe73bb99729ff203279e9e3b",
|
||||||
"sha256:7fc8708bc996e50fc7a9a2ad394e1f015348e389da26789fa6916630237143d7",
|
"sha256:518950fe6a5d56f94ba125107895f938a4f34f704c658986eae8255edb41163b",
|
||||||
"sha256:91abd2f080065a7c007540f6bbd93ef7bdbbffa6df4a4cfab3892d8623b83c98",
|
"sha256:538298e4e113ccb8b41658d5a4b605bebe75e46a30ceca22a5a289cf02c80bec",
|
||||||
"sha256:988f8b2281f3d95c66c01bdb141cefef1cc97db0d473c25c3fe2927ef00293b9",
|
"sha256:55465121e72e208a7b69b53de791402affe6165083b2ea71b892728bd19ba9ae",
|
||||||
"sha256:9f56121d8a676802044584e6cc41250bbcde069d8adf725b9b817a6b0fd87f09",
|
"sha256:588384d70a0f19b47409cfdb10e0c27c20e4293b74fc891df3d8eb47782b8b3e",
|
||||||
"sha256:a0f51536ce6e817a7aa25b0dca8b62feb210d4dc22cabfe8d1a92d47979372cd",
|
"sha256:6278c080d4afffc9016e14325f8734456831124e8c12caa754fd544435c08386",
|
||||||
"sha256:a1cdd7390d7f66ddcebf545203ca3728c4890d605f9f2697bc8e31437906e8e7",
|
"sha256:64ea6c221aeee4796860405e1aedec63424cda4202a7ad27a5066876db5b0fd2",
|
||||||
"sha256:b10eb4d0a77609679bf5f23708e20b1cd461a1643bd8ea42b1ca4149b1a5406c",
|
"sha256:681dbb33e2b40262b33fd383bae63c36d33fd79fa1a8e4092945430744ffd34a",
|
||||||
"sha256:b274ac8e511b55ffb62e8292316bd2baa80c10e9fe811b1aa5ce81da6b6697d8",
|
"sha256:6936aa9da390402d646a32a6a38d5409c2d2afb2950f045a7d02ab25a4e7d08d",
|
||||||
"sha256:c75b502af2c83fcfa2ee9c2257c1ba5806634a91a50db6129ff70e67c42c7e7b",
|
"sha256:778d0ec38bbd288b150a3ae363c8ffd88d2207a756842495e9bffd8a8afbc89a",
|
||||||
"sha256:c9c8e53a5472b77f6a391b515c771105011f4b40740ce53af8428d1c8ca20004",
|
"sha256:8251f06a77985a2729a8bdbefbae79ee78567dddc3acbd499b87e705ca59fe24",
|
||||||
"sha256:d867998a56c5133b9d31992beb699892e33b72150a8bf40f86cb52b8c606c83f",
|
"sha256:83b4aa5344cce005a9cff5d0321b2e318e871cc1dfc793b66c32dd4f59e9770d",
|
||||||
"sha256:eb566cab630ec176b2d6115ed08b2cf4d921b47caa7f02cca1b4a9525223ee94",
|
"sha256:844fad925ac5c2ad4faaceb3b2520ad016b5280105c6e16e79838cf951903a7b",
|
||||||
"sha256:f61e6b95b414431ffe9dc460928fe9f351095fde074e2c2f5c6dda7b67a2192d",
|
"sha256:8ceb3667dd13b8133f2e4d637b5b00f240f066448e2aa89a41f4c2d78a26ce50",
|
||||||
"sha256:f718675fd071bcce4f7cbf9250cbaaf64e2e91ef1b0b32a1af596e7412647556",
|
"sha256:92dc0fb79675882d0b6138be4bf0cec7ea7c7eede60aaca78303d8e8dbdaa523",
|
||||||
"sha256:f9d4bfbd015e4b80dbad11c97049975f94592a6a0440e903ee647309f6252a1f",
|
"sha256:9789bd945e9f5bd026ed3f5b453d640befb8b1fc33a779c1fe8d3eb21fe3fb4a",
|
||||||
"sha256:fae50fc12a5e8541f6f1cc4ed744ca8f76a9543876cf63f618fb0e6aca8f8375",
|
"sha256:a2b6d6eb693bc2fc6c484f2e5d93bd0b0da803fa77bf974f160533e555e4d095",
|
||||||
"sha256:fcf9c8edda7f7b2fd78069e97f4197815df5e871ec47b0f22580d330c6dec561",
|
"sha256:aab9f1e34d810feb00bf841993552b8fcc6ae71d473c505381627143d0018a6a",
|
||||||
"sha256:fdedce3bc5360bd29d4bb90396e8d4d3c09af49bc0383909fe84c7233c5ee675"
|
"sha256:abb61afd84f23099ac6099d804cdba9bd3b902aaaded3ffff47e490b0a495520",
|
||||||
|
"sha256:adf9ee115ae8ff8b6da4b854b4152f253b390ba64407a22d75456fe07dcbda65",
|
||||||
|
"sha256:aedc6c672b351afe6dfe17ff83ee5e7eb6ed44718f879a9328a68bdb20b57e11",
|
||||||
|
"sha256:b7a00ecb1434f8183395fac5366a21ee73d14900082ca37cf74993cf46baa56c",
|
||||||
|
"sha256:ba32f4a91c1cb7314c429b03afbf87b1fff4fb1c8db32260e7310104bd77f0c7",
|
||||||
|
"sha256:cbd0f2cbd8689861209cd89141371d3a22a11613304d1f0736492590aa0ab332",
|
||||||
|
"sha256:e4bc372b953bf6cec65a8d48482ba574f6e051621d157cf224227dbb55486b1e",
|
||||||
|
"sha256:eccac3d9aadc68e994b6d228cb0c8919fc47a5350d85a1b4d3d81d1e98baf40c",
|
||||||
|
"sha256:efd550b3da28195746bb43bd1d815058181a7ca6d9d6aa89dd37f5eefe2cacb7",
|
||||||
|
"sha256:efef581c8ba4d990770875e1a2218e856849d32ada2680e53aebc5d154a17e20",
|
||||||
|
"sha256:f057897711a630a0b7a6a03f1acf379b6ba25d37dc5dc217a97191984ba7f2fc",
|
||||||
|
"sha256:f37d45fab14ffef9d33a0dc3bc59ce0c5313e2253323312d47739192da94f5fd",
|
||||||
|
"sha256:f44906f70205d456d503105023041f1e63aece7623b31c390a0103db4de17537"
|
||||||
],
|
],
|
||||||
"version": "==5.1.2"
|
"version": "==5.2.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"develop": {
|
"develop": {
|
||||||
@ -1243,10 +1260,10 @@
|
|||||||
},
|
},
|
||||||
"asgiref": {
|
"asgiref": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:7e51911ee147dd685c3c8b805c0ad0cb58d360987b56953878f8c06d2d1c6f1a",
|
"sha256:5ee950735509d04eb673bd7f7120f8fa1c9e2df495394992c73234d526907e17",
|
||||||
"sha256:9fc6fb5d39b8af147ba40765234fa822b39818b12cc80b35ad9b0cef3a476aed"
|
"sha256:7162a3cb30ab0609f1a4c95938fd73e8604f63bdba516a7f7d64b83ff09478f0"
|
||||||
],
|
],
|
||||||
"version": "==3.2.10"
|
"version": "==3.3.1"
|
||||||
},
|
},
|
||||||
"astroid": {
|
"astroid": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -1257,10 +1274,10 @@
|
|||||||
},
|
},
|
||||||
"attrs": {
|
"attrs": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594",
|
"sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6",
|
||||||
"sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc"
|
"sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"
|
||||||
],
|
],
|
||||||
"version": "==20.2.0"
|
"version": "==20.3.0"
|
||||||
},
|
},
|
||||||
"autopep8": {
|
"autopep8": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -1356,11 +1373,11 @@
|
|||||||
},
|
},
|
||||||
"django": {
|
"django": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:a2127ad0150ec6966655bedf15dbbff9697cc86d61653db2da1afa506c0b04cc",
|
"sha256:14a4b7cd77297fba516fc0d92444cc2e2e388aa9de32d7a68d4a83d58f5a4927",
|
||||||
"sha256:c93c28ccf1d094cbd00d860e83128a39e45d2c571d3b54361713aaaf9a94cac4"
|
"sha256:14b87775ffedab2ef6299b73343d1b4b41e5d4e2aa58c6581f114dbec01e3f8f"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==3.1.2"
|
"version": "==3.1.3"
|
||||||
},
|
},
|
||||||
"django-debug-toolbar": {
|
"django-debug-toolbar": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -1400,10 +1417,10 @@
|
|||||||
},
|
},
|
||||||
"gitpython": {
|
"gitpython": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:138016d519bf4dd55b22c682c904ed2fd0235c3612b2f8f65ce218ff358deed8",
|
"sha256:6eea89b655917b500437e9668e4a12eabdcf00229a0df1762aabd692ef9b746b",
|
||||||
"sha256:a03f728b49ce9597a6655793207c6ab0da55519368ff5961e4a74ae475b9fa8e"
|
"sha256:befa4d101f91bad1b632df4308ec64555db684c360bd7d2130b4807d49ce86b8"
|
||||||
],
|
],
|
||||||
"version": "==3.1.9"
|
"version": "==3.1.11"
|
||||||
},
|
},
|
||||||
"iniconfig": {
|
"iniconfig": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -1469,17 +1486,17 @@
|
|||||||
},
|
},
|
||||||
"pathspec": {
|
"pathspec": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0",
|
"sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd",
|
||||||
"sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"
|
"sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"
|
||||||
],
|
],
|
||||||
"version": "==0.8.0"
|
"version": "==0.8.1"
|
||||||
},
|
},
|
||||||
"pbr": {
|
"pbr": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:14bfd98f51c78a3dd22a1ef45cf194ad79eee4a19e8e1a0d5c7f8e81ffe182ea",
|
"sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9",
|
||||||
"sha256:5adc0f9fc64319d8df5ca1e4e06eea674c26b80e6f00c530b18ce6a6592ead15"
|
"sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00"
|
||||||
],
|
],
|
||||||
"version": "==5.5.0"
|
"version": "==5.5.1"
|
||||||
},
|
},
|
||||||
"pep8-naming": {
|
"pep8-naming": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -1574,26 +1591,26 @@
|
|||||||
},
|
},
|
||||||
"pytest": {
|
"pytest": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:7a8190790c17d79a11f847fba0b004ee9a8122582ebff4729a082c109e81a4c9",
|
"sha256:4288fed0d9153d9646bfcdf0c0428197dba1ecb27a33bb6e031d002fa88653fe",
|
||||||
"sha256:8f593023c1a0f916110285b6efd7f99db07d59546e3d8c36fc60e2ab05d3be92"
|
"sha256:c0a7e94a8cdbc5422a51ccdad8e6f1024795939cc89159a0ae7f0b316ad3823e"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==6.1.1"
|
"version": "==6.1.2"
|
||||||
},
|
},
|
||||||
"pytest-django": {
|
"pytest-django": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:0e91003fdd41ac0322c1978682be2ca180bc564203dd53c698f99242bf513614",
|
"sha256:10e384e6b8912ded92db64c58be8139d9ae23fb8361e5fc139d8e4f8fc601bc2",
|
||||||
"sha256:5f964ccda1f551e00589ab0679a7c45c36c509a44b5bfb5ad07954e0ae3f4bed"
|
"sha256:26f02c16d36fd4c8672390deebe3413678d89f30720c16efb8b2a6bf63b9041f"
|
||||||
],
|
],
|
||||||
"index": "pypi",
|
"index": "pypi",
|
||||||
"version": "==4.0.0"
|
"version": "==4.1.0"
|
||||||
},
|
},
|
||||||
"pytz": {
|
"pytz": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed",
|
"sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268",
|
||||||
"sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"
|
"sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd"
|
||||||
],
|
],
|
||||||
"version": "==2020.1"
|
"version": "==2020.4"
|
||||||
},
|
},
|
||||||
"pyyaml": {
|
"pyyaml": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@ -1614,35 +1631,51 @@
|
|||||||
},
|
},
|
||||||
"regex": {
|
"regex": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:02686a2f0b1a4be0facdd0d3ad4dc6c23acaa0f38fb5470d892ae88584ba705c",
|
"sha256:03855ee22980c3e4863dc84c42d6d2901133362db5daf4c36b710dd895d78f0a",
|
||||||
"sha256:137da580d1e6302484be3ef41d72cf5c3ad22a076070051b7449c0e13ab2c482",
|
"sha256:06b52815d4ad38d6524666e0d50fe9173533c9cc145a5779b89733284e6f688f",
|
||||||
"sha256:20cdd7e1736f4f61a5161aa30d05ac108ab8efc3133df5eb70fe1e6a23ea1ca6",
|
"sha256:11116d424734fe356d8777f89d625f0df783251ada95d6261b4c36ad27a394bb",
|
||||||
"sha256:25991861c6fef1e5fd0a01283cf5658c5e7f7aa644128e85243bc75304e91530",
|
"sha256:119e0355dbdd4cf593b17f2fc5dbd4aec2b8899d0057e4957ba92f941f704bf5",
|
||||||
"sha256:26b85672275d8c7a9d4ff93dbc4954f5146efdb2ecec89ad1de49439984dea14",
|
"sha256:127a9e0c0d91af572fbb9e56d00a504dbd4c65e574ddda3d45b55722462210de",
|
||||||
"sha256:2f60ba5c33f00ce9be29a140e6f812e39880df8ba9cb92ad333f0016dbc30306",
|
"sha256:1ec66700a10e3c75f1f92cbde36cca0d3aaee4c73dfa26699495a3a30b09093c",
|
||||||
"sha256:3dd952f3f8dc01b72c0cf05b3631e05c50ac65ddd2afdf26551638e97502107b",
|
"sha256:227a8d2e5282c2b8346e7f68aa759e0331a0b4a890b55a5cfbb28bd0261b84c0",
|
||||||
"sha256:578ac6379e65eb8e6a85299b306c966c852712c834dc7eef0ba78d07a828f67b",
|
"sha256:2564def9ce0710d510b1fc7e5178ce2d20f75571f788b5197b3c8134c366f50c",
|
||||||
"sha256:5d4a3221f37520bb337b64a0632716e61b26c8ae6aaffceeeb7ad69c009c404b",
|
"sha256:297116e79074ec2a2f885d22db00ce6e88b15f75162c5e8b38f66ea734e73c64",
|
||||||
"sha256:608d6c05452c0e6cc49d4d7407b4767963f19c4d2230fa70b7201732eedc84f2",
|
"sha256:2dc522e25e57e88b4980d2bdd334825dbf6fa55f28a922fc3bfa60cc09e5ef53",
|
||||||
"sha256:65b6b018b07e9b3b6a05c2c3bb7710ed66132b4df41926c243887c4f1ff303d5",
|
"sha256:3a5f08039eee9ea195a89e180c5762bfb55258bfb9abb61a20d3abee3b37fd12",
|
||||||
"sha256:698f8a5a2815e1663d9895830a063098ae2f8f2655ae4fdc5dfa2b1f52b90087",
|
"sha256:3dfca201fa6b326239e1bccb00b915e058707028809b8ecc0cf6819ad233a740",
|
||||||
"sha256:6c72adb85adecd4522a488a751e465842cdd2a5606b65464b9168bf029a54272",
|
"sha256:49461446b783945597c4076aea3f49aee4b4ce922bd241e4fcf62a3e7c61794c",
|
||||||
"sha256:6d4cdb6c20e752426b2e569128488c5046fb1b16b1beadaceea9815c36da0847",
|
"sha256:4afa350f162551cf402bfa3cd8302165c8e03e689c897d185f16a167328cc6dd",
|
||||||
"sha256:6e9f72e0ee49f7d7be395bfa29e9533f0507a882e1e6bf302c0a204c65b742bf",
|
"sha256:4b5a9bcb56cc146c3932c648603b24514447eafa6ce9295234767bf92f69b504",
|
||||||
"sha256:828618f3c3439c5e6ef8621e7c885ca561bbaaba90ddbb6a7dfd9e1ec8341103",
|
"sha256:52e83a5f28acd621ba8e71c2b816f6541af7144b69cc5859d17da76c436a5427",
|
||||||
"sha256:85b733a1ef2b2e7001aff0e204a842f50ad699c061856a214e48cfb16ace7d0c",
|
"sha256:625116aca6c4b57c56ea3d70369cacc4d62fead4930f8329d242e4fe7a58ce4b",
|
||||||
"sha256:8958befc139ac4e3f16d44ec386c490ea2121ed8322f4956f83dd9cad8e9b922",
|
"sha256:654c1635f2313d0843028487db2191530bca45af61ca85d0b16555c399625b0e",
|
||||||
"sha256:a51e51eecdac39a50ede4aeed86dbef4776e3b73347d31d6ad0bc9648ba36049",
|
"sha256:8092a5a06ad9a7a247f2a76ace121183dc4e1a84c259cf9c2ce3bbb69fac3582",
|
||||||
"sha256:aeac7c9397480450016bc4a840eefbfa8ca68afc1e90648aa6efbfe699e5d3bb",
|
"sha256:832339223b9ce56b7b15168e691ae654d345ac1635eeb367ade9ecfe0e66bee0",
|
||||||
"sha256:aef23aed9d4017cc74d37f703d57ce254efb4c8a6a01905f40f539220348abf9",
|
"sha256:8ca9dca965bd86ea3631b975d63b0693566d3cc347e55786d5514988b6f5b84c",
|
||||||
"sha256:af1f5e997dd1ee71fb6eb4a0fb6921bf7a778f4b62f1f7ef0d7445ecce9155d6",
|
"sha256:96f99219dddb33e235a37283306834700b63170d7bb2a1ee17e41c6d589c8eb9",
|
||||||
"sha256:b5eeaf4b5ef38fab225429478caf71f44d4a0b44d39a1aa4d4422cda23a9821b",
|
"sha256:9b6305295b6591e45f069d3553c54d50cc47629eb5c218aac99e0f7fafbf90a1",
|
||||||
"sha256:d25f5cca0f3af6d425c9496953445bf5b288bb5b71afc2b8308ad194b714c159",
|
"sha256:a62162be05edf64f819925ea88d09d18b09bebf20971b363ce0c24e8b4aa14c0",
|
||||||
"sha256:d81be22d5d462b96a2aa5c512f741255ba182995efb0114e5a946fe254148df1",
|
"sha256:aacc8623ffe7999a97935eeabbd24b1ae701d08ea8f874a6ff050e93c3e658cf",
|
||||||
"sha256:e935a166a5f4c02afe3f7e4ce92ce5a786f75c6caa0c4ce09c922541d74b77e8",
|
"sha256:b45bab9f224de276b7bc916f6306b86283f6aa8afe7ed4133423efb42015a898",
|
||||||
"sha256:ef3a55b16c6450574734db92e0a3aca283290889934a23f7498eaf417e3af9f0"
|
"sha256:b88fa3b8a3469f22b4f13d045d9bd3eda797aa4e406fde0a2644bc92bbdd4bdd",
|
||||||
|
"sha256:b8a686a6c98872007aa41fdbb2e86dc03b287d951ff4a7f1da77fb7f14113e4d",
|
||||||
|
"sha256:bd904c0dec29bbd0769887a816657491721d5f545c29e30fd9d7a1a275dc80ab",
|
||||||
|
"sha256:bf4f896c42c63d1f22039ad57de2644c72587756c0cfb3cc3b7530cfe228277f",
|
||||||
|
"sha256:c13d311a4c4a8d671f5860317eb5f09591fbe8259676b86a85769423b544451e",
|
||||||
|
"sha256:c2c6c56ee97485a127555c9595c069201b5161de9d05495fbe2132b5ac104786",
|
||||||
|
"sha256:c32c91a0f1ac779cbd73e62430de3d3502bbc45ffe5bb6c376015acfa848144b",
|
||||||
|
"sha256:c3466a84fce42c2016113101018a9981804097bacbab029c2d5b4fcb224b89de",
|
||||||
|
"sha256:c454ad88e56e80e44f824ef8366bb7e4c3def12999151fd5c0ea76a18fe9aa3e",
|
||||||
|
"sha256:c8a2b7ccff330ae4c460aff36626f911f918555660cc28163417cb84ffb25789",
|
||||||
|
"sha256:cb905f3d2e290a8b8f1579d3984f2cfa7c3a29cc7cba608540ceeed18513f520",
|
||||||
|
"sha256:cfcf28ed4ce9ced47b9b9670a4f0d3d3c0e4d4779ad4dadb1ad468b097f808aa",
|
||||||
|
"sha256:dd3e6547ecf842a29cf25123fbf8d2461c53c8d37aa20d87ecee130c89b7079b",
|
||||||
|
"sha256:de7fd57765398d141949946c84f3590a68cf5887dac3fc52388df0639b01eda4",
|
||||||
|
"sha256:ea37320877d56a7f0a1e6a625d892cf963aa7f570013499f5b8d5ab8402b5625",
|
||||||
|
"sha256:f1fce1e4929157b2afeb4bb7069204d4370bab9f4fc03ca1fbec8bd601f8c87d",
|
||||||
|
"sha256:f43109822df2d3faac7aad79613f5f02e4eab0fc8ad7932d2e70e2a83bd49c26"
|
||||||
],
|
],
|
||||||
"version": "==2020.10.15"
|
"version": "==2020.10.28"
|
||||||
},
|
},
|
||||||
"requirements-detector": {
|
"requirements-detector": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
@@ -1701,33 +1734,42 @@
 },
 "toml": {
 "hashes": [
-"sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f",
-"sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"
+"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
+"sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
 ],
-"version": "==0.10.1"
+"version": "==0.10.2"
 },
 "typed-ast": {
 "hashes": [
 "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355",
 "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919",
+"sha256:0d8110d78a5736e16e26213114a38ca35cb15b6515d535413b090bd50951556d",
 "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa",
 "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652",
 "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75",
+"sha256:3742b32cf1c6ef124d57f95be609c473d7ec4c14d0090e5a5e05a15269fb4d0c",
 "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01",
 "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d",
 "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1",
 "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907",
 "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c",
 "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3",
+"sha256:7e4c9d7658aaa1fc80018593abdf8598bf91325af6af5cce4ce7c73bc45ea53d",
 "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b",
 "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614",
+"sha256:92c325624e304ebf0e025d1224b77dd4e6393f18aab8d829b5b7e04afe9b7a2c",
 "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb",
+"sha256:b52ccf7cfe4ce2a1064b18594381bccf4179c2ecf7f513134ec2f993dd4ab395",
 "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b",
 "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41",
 "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6",
 "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34",
 "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe",
+"sha256:d648b8e3bf2fe648745c8ffcee3db3ff903d0817a01a12dd6a6ea7a8f4889072",
+"sha256:f208eb7aff048f6bea9586e61af041ddf7f9ade7caed625742af423f6bae3298",
+"sha256:fac11badff8313e23717f3dada86a15389d0708275bddf766cca67a84ead3e91",
 "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4",
+"sha256:fcf135e17cc74dbfbc05894ebca928ffeb23d9790b3167a674921db19082401f",
 "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"
 ],
 "version": "==1.4.1"
@@ -1745,12 +1787,12 @@
 "secure"
 ],
 "hashes": [
-"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a",
-"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"
+"sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2",
+"sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"
 ],
 "index": "pypi",
 "markers": null,
-"version": "==1.25.10"
+"version": "==1.25.11"
 },
 "wrapt": {
 "hashes": [
@@ -179,13 +179,13 @@ stages:
 - task: CmdLine@2
 inputs:
 script: |
-export PB_TEST_K8S=true
 sudo pip install -U wheel pipenv
 pipenv install --dev
 - task: CmdLine@2
 displayName: Run full test suite
 inputs:
 script: |
+export PB_TEST_K8S=true
 pipenv run coverage run ./manage.py test passbook -v 3
 - task: CmdLine@2
 inputs:
@@ -221,7 +221,6 @@ stages:
 - task: CmdLine@2
 inputs:
 script: |
-export PB_TEST_K8S=true
 sudo pip install -U wheel pipenv
 pipenv install --dev
 - task: DockerCompose@0
@@ -241,6 +240,7 @@ stages:
 displayName: Run full test suite
 inputs:
 script: |
+export PB_TEST_K8S=true
 pipenv run coverage run ./manage.py test e2e -v 3 --failfast
 - task: CmdLine@2
 condition: always()
@@ -19,7 +19,7 @@ services:
 networks:
 - internal
 server:
-image: beryju/passbook:${PASSBOOK_TAG:-0.12.0-stable}
+image: beryju/passbook:${PASSBOOK_TAG:-0.12.10-stable}
 command: server
 environment:
 PASSBOOK_REDIS__HOST: redis
@@ -40,7 +40,7 @@ services:
 env_file:
 - .env
 worker:
-image: beryju/passbook:${PASSBOOK_TAG:-0.12.0-stable}
+image: beryju/passbook:${PASSBOOK_TAG:-0.12.10-stable}
 command: worker
 networks:
 - internal
@@ -50,11 +50,11 @@ services:
 PASSBOOK_POSTGRESQL__PASSWORD: ${PG_PASS}
 volumes:
 - ./backups:/backups
-- /var/run/docker.socket:/var/run/docker.socket
+- /var/run/docker.sock:/var/run/docker.sock
 env_file:
 - .env
 static:
-image: beryju/passbook-static:${PASSBOOK_TAG:-0.12.0-stable}
+image: beryju/passbook-static:${PASSBOOK_TAG:-0.12.10-stable}
 networks:
 - internal
 labels:
@@ -68,7 +68,7 @@ services:
 traefik:
 image: traefik:2.3
 command:
-- "--accesslog=true"
+- "--log.format=json"
 - "--api.insecure=true"
 - "--providers.docker=true"
 - "--providers.docker.exposedbydefault=false"
@@ -117,7 +117,7 @@
 },
 "model": "passbook_stages_user_login.userloginstage",
 "attrs": {
-"session_duration": 0
+"session_duration": "seconds=-1"
 }
 },
 {
@@ -136,7 +136,7 @@
 },
 "model": "passbook_stages_user_login.userloginstage",
 "attrs": {
-"session_duration": 0
+"session_duration": "seconds=-1"
 }
 },
 {
@@ -20,7 +20,7 @@
 },
 "model": "passbook_stages_user_login.userloginstage",
 "attrs": {
-"session_duration": 0
+"session_duration": "seconds=-1"
 }
 },
 {
@@ -20,7 +20,7 @@
 },
 "model": "passbook_stages_user_login.userloginstage",
 "attrs": {
-"session_duration": 0
+"session_duration": "seconds=-1"
 }
 },
 {
@@ -95,7 +95,8 @@
 },
 "model": "passbook_flows.flowstagebinding",
 "attrs": {
-"re_evaluate_policies": false
+"evaluate_on_plan": false,
+"re_evaluate_policies": true
 }
 },
 {
@@ -118,7 +118,7 @@
 },
 "model": "passbook_stages_user_login.userloginstage",
 "attrs": {
-"session_duration": 0
+"session_duration": "seconds=-1"
 }
 },
 {
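The fixture hunks above replace the integer `session_duration` with a timedelta-style string such as `seconds=-1`. As a rough, illustrative sketch only (the helper name and the exact format accepted by passbook's validator are assumptions here), such a string maps onto `datetime.timedelta` like this:

```python
from datetime import timedelta


def parse_timedelta_string(value: str) -> timedelta:
    """Hypothetical helper: turn 'seconds=-1' or 'hours=1;minutes=30' into a timedelta."""
    kwargs = {}
    for part in value.split(";"):
        key, _, raw = part.partition("=")
        kwargs[key.strip()] = float(raw)
    return timedelta(**kwargs)


print(parse_timedelta_string("seconds=-1"))          # -1 day, 23:59:59
print(parse_timedelta_string("hours=1;minutes=30"))  # 1:30:00
```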
@@ -13,7 +13,7 @@ Download the latest `docker-compose.yml` from [here](https://raw.githubuserconte
 
 To optionally enable error-reporting, run `echo PASSBOOK_ERROR_REPORTING__ENABLED=true >> .env`
 
-To optionally deploy a different version run `echo PASSBOOK_TAG=0.12.0-stable >> .env`
+To optionally deploy a different version run `echo PASSBOOK_TAG=0.12.10-stable >> .env`
 
 If this is a fresh passbook install run the following commands to generate a password:
 
@@ -11,9 +11,7 @@ This installation automatically applies database migrations on startup. After th
 image:
 name: beryju/passbook
 name_static: beryju/passbook-static
-tag: 0.12.0-stable
+tag: 0.12.10-stable
 
-nameOverride: ""
-
 serverReplicas: 1
 workerReplicas: 1
@@ -35,8 +33,8 @@ config:
 
 # Enable Database Backups to S3
 # backup:
-# access_key: access-key
-# secret_key: secret-key
+# accessKey: access-key
+# secretKey: secret-key
 # bucket: s3-bucket
 # region: eu-central-1
 # host: s3-host
@@ -45,7 +43,6 @@ ingress:
 annotations: {}
 # kubernetes.io/ingress.class: nginx
 # kubernetes.io/tls-acme: "true"
-path: /
 hosts:
 - passbook.k8s.local
 tls: []
@@ -34,7 +34,8 @@ server {
 proxy_set_header X-Forwarded-Proto https;
 proxy_set_header X-Forwarded-Port 443;
 proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-proxy_set_header Host $http_host;
+# This needs to be set inside the location block, very important.
+proxy_set_header Host $host;
 proxy_set_header Upgrade $http_upgrade;
 proxy_set_header Connection $connection_upgrade;
 }
docs/integrations/services/home-assistant/index.md (new file)
@@ -0,0 +1,59 @@
# Home-Assistant Integration

## What is Home-Assistant

From https://www.home-assistant.io/

!!! note ""
    Open source home automation that puts local control and privacy first. Powered by a worldwide community of tinkerers and DIY enthusiasts. Perfect to run on a Raspberry Pi or a local server.

## Preparation

The following placeholders will be used:

- `hass.company` is the FQDN of the Home-Assistant install.
- `passbook.company` is the FQDN of the passbook install.

!!! note
    This setup uses https://github.com/BeryJu/hass-auth-header and the passbook proxy for authentication. When this [PR](https://github.com/home-assistant/core/pull/32926) is merged, this will no longer be necessary.

## Home-Assistant

This guide requires https://github.com/BeryJu/hass-auth-header, which can be installed as described in the Readme.

Afterwards, make sure the `trusted_proxies` setting contains the IP(s) of the Host(s) passbook is running on.

With the default Header of `X-Forwarded-Preferred-Username` matching is done on a username basis, so your Name in Home-Assistant and your username in passbook have to match.

If this is not the case, you can simply add an additional header for your user, which contains the Home-Assistant Name and authenticate based on that.

For example add this to your user's properties and set the Header to `X-pb-hass-user`.

```yaml
additionalHeaders:
    X-pb-hass-user: some other name
```

## passbook

Create a Proxy Provider with the following values

- Internal host

    If Home-Assistant is running in docker, and you're deploying the passbook proxy on the same host, set the value to `http://homeassistant:8123`, where Home-Assistant is the name of your container.

    If Home-Assistant is running on a different server than where you are deploying the passbook proxy, set the value to `http://hass.company:8123`.

- External host

    Set this to the external URL you will be accessing Home-Assistant from.

Create an application in passbook and select the provider you've created above.

## Deployment

Create an outpost deployment for the provider you've created above, as described [here](../../../outposts/outposts.md). Deploy this Outpost either on the same host or a different host that can access Home-Assistant.

The outpost will connect to passbook and configure itself.
@@ -18,7 +18,7 @@ The following placeholders will be used:
 - `sonarr.company` is the FQDN of the Sonarr install.
 - `passbook.company` is the FQDN of the passbook install.
 
-Create an application in passbook. Create a Proxy Provider with the following values
+Create a Proxy Provider with the following values
 
 - Internal host
 
@@ -30,6 +30,8 @@ Create an application in passbook. Create a Proxy Provider with the following va
 
 Set this to the external URL you will be accessing Sonarr from.
 
+Create an application in passbook and select the provider you've created above.
+
 ## Deployment
 
 Create an outpost deployment for the provider you've created above, as described [here](../../../outposts/outposts.md). Deploy this Outpost either on the same host or a different host that can access Sonarr.
@@ -6,6 +6,10 @@
 
 ### Backup
 
+!!! notice
+
+    Local backups are **enabled** by default, and will be run daily at 00:00
+
 Local backups can be created by running the following command in your passbook installation directory
 
 ```
@@ -14,15 +18,6 @@ docker-compose run --rm worker backup
 
 This will dump the current database into the `./backups` folder. By defaults, the last 10 Backups are kept.
 
-To schedule these backups, use the following snippet in a crontab
-
-```
-0 0 * * * bash -c "cd <passbook install location> && docker-compose run --rm worker backup" >/dev/null
-```
-
-!!! notice
-
-    passbook does support automatic backups on a schedule, however this is currently not recommended, as there is no way to monitor these scheduled tasks.
-
 ### Restore
 
@@ -42,11 +37,7 @@ After you've restored the backup, it is recommended to restart all services with
 
 ### S3 Configuration
 
-!!! notice
-
-    To trigger backups with S3 enabled, use the same commands as above.
-
-#### S3 Preparation
+#### Preparation
 
 passbook expects the bucket you select to already exist. The IAM User given to passbook should have the following permissions
 
@@ -101,11 +92,11 @@ Simply enable these options in your values.yaml file
 ```yaml
 # Enable Database Backups to S3
 backup:
-  access_key: access-key
-  secret_key: secret-key
+  accessKey: access-key
+  secretKey: secret-key
   bucket: s3-bucket
   region: eu-central-1
   host: s3-host
 ```
 
-Afterwards, run a `helm upgrade` to update the ConfigMap. Because passbook-scheduled backups are not recommended currently, a Kubernetes CronJob is created that runs the backup daily.
+Afterwards, run a `helm upgrade` to update the ConfigMap. Backups are done automatically as above, at 00:00 every day.
@@ -26,7 +26,11 @@ return False
 - `request.obj`: A Django Model instance. This is only set if the policy is ran against an object.
 - `request.context`: A dictionary with dynamic data. This depends on the origin of the execution.
 - `pb_is_sso_flow`: Boolean which is true if request was initiated by authenticating through an external provider.
-- `pb_client_ip`: Client's IP Address or '255.255.255.255' if no IP Address could be extracted. Can be [compared](../expressions/index.md#comparing-ip-addresses)
+- `pb_client_ip`: Client's IP Address or 255.255.255.255 if no IP Address could be extracted. Can be [compared](../expressions/index.md#comparing-ip-addresses), for example
 
+    ```python
+    return pb_client_ip in ip_network('10.0.0.0/24')
+    ```
+
 Additionally, when the policy is executed from a flow, every variable from the flow's current context is accessible under the `context` object.
 
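The `pb_is_sso_flow` and `pb_client_ip` variables described above can be combined in a single expression policy. A minimal sketch, reusing the example network from the snippet above (the network range and the allow/deny decision are illustrative):

```python
# Deny requests that were initiated through an external SSO provider,
# otherwise only allow clients coming from the internal network.
if pb_is_sso_flow:
    return False
return pb_client_ip in ip_network("10.0.0.0/24")
```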
@@ -11,6 +11,14 @@ The Proxy these extra headers to the application:
 
 Header Name | Value
 -------------|-------
-X-Auth-Request-User | The user's unique identifier
-X-Auth-Request-Email | The user's email address
-X-Auth-Request-Preferred-Username | The user's username
+X-Forwarded-User | The user's unique identifier (**not the username**)
+X-Forwarded-Email | The user's email address
+X-Forwarded-Preferred-Username | The user's username
+X-Auth-Username | The user's username
+
+Additionally, you can add more custom headers using `additionalHeaders` in the User or Group Properties, for example
+
+```yaml
+additionalHeaders:
+    X-additional-header: bar
+```
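An application running behind the passbook proxy receives these values as ordinary request headers. A minimal, framework-free WSGI sketch of reading them (purely illustrative; only the header names come from the table above):

```python
from wsgiref.simple_server import make_server


def application(environ, start_response):
    # WSGI exposes "X-Forwarded-Preferred-Username" as HTTP_X_FORWARDED_PREFERRED_USERNAME.
    username = environ.get("HTTP_X_FORWARDED_PREFERRED_USERNAME", "anonymous")
    email = environ.get("HTTP_X_FORWARDED_EMAIL", "")
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [f"Hello {username} <{email}>\n".encode()]


if __name__ == "__main__":
    # Hypothetical local port; in practice the proxy forwards to the app's real address.
    make_server("127.0.0.1", 8000, application).serve_forever()
```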
@@ -8,7 +8,7 @@ from docker.types import Healthcheck
 from selenium.webdriver.common.by import By
 from selenium.webdriver.support import expected_conditions as ec
 
-from e2e.utils import USER, SeleniumTestCase
+from e2e.utils import USER, SeleniumTestCase, retry
 from passbook.flows.models import Flow, FlowDesignation, FlowStageBinding
 from passbook.stages.email.models import EmailStage, EmailTemplates
 from passbook.stages.identification.models import IdentificationStage
@@ -23,7 +23,7 @@ class TestFlowsEnroll(SeleniumTestCase):
 
 def get_container_specs(self) -> Optional[Dict[str, Any]]:
 return {
-"image": "mailhog/mailhog:v1.0.1",
+"image": "docker.beryju.org/proxy/mailhog/mailhog:v1.0.1",
 "detach": True,
 "network_mode": "host",
 "auto_remove": True,
@@ -34,6 +34,7 @@ class TestFlowsEnroll(SeleniumTestCase):
 ),
 }
 
+@retry()
 def test_enroll_2_step(self):
 """Test 2-step enroll flow"""
 # First stage fields
@@ -119,6 +120,7 @@ class TestFlowsEnroll(SeleniumTestCase):
 "foo@bar.baz",
 )
 
+@retry()
 @override_settings(EMAIL_BACKEND="django.core.mail.backends.smtp.EmailBackend")
 def test_enroll_email(self):
 """Test enroll with Email verification"""
@@ -5,13 +5,14 @@ from unittest.case import skipUnless
 from selenium.webdriver.common.by import By
 from selenium.webdriver.common.keys import Keys
 
-from e2e.utils import USER, SeleniumTestCase
+from e2e.utils import USER, SeleniumTestCase, retry
 
 
 @skipUnless(platform.startswith("linux"), "requires local docker")
 class TestFlowsLogin(SeleniumTestCase):
 """test default login flow"""
 
+@retry()
 def test_login(self):
 """test default login flow"""
 self.driver.get(f"{self.live_server_url}/flows/default-authentication-flow/")
@@ -12,7 +12,7 @@ from selenium.webdriver.common.by import By
 from selenium.webdriver.common.keys import Keys
 from selenium.webdriver.support import expected_conditions as ec
 
-from e2e.utils import USER, SeleniumTestCase
+from e2e.utils import USER, SeleniumTestCase, retry
 from passbook.flows.models import Flow, FlowStageBinding
 from passbook.stages.otp_validate.models import OTPValidateStage
 
@@ -21,6 +21,7 @@ from passbook.stages.otp_validate.models import OTPValidateStage
 class TestFlowsOTP(SeleniumTestCase):
 """test flow with otp stages"""
 
+@retry()
 def test_otp_validate(self):
 """test flow with otp stages"""
 sleep(1)
@@ -52,6 +53,7 @@ class TestFlowsOTP(SeleniumTestCase):
 USER().username,
 )
 
+@retry()
 def test_otp_totp_setup(self):
 """test TOTP Setup stage"""
 flow: Flow = Flow.objects.get(slug="default-authentication-flow")
@@ -98,6 +100,7 @@ class TestFlowsOTP(SeleniumTestCase):
 
 self.assertTrue(TOTPDevice.objects.filter(user=USER(), confirmed=True).exists())
 
+@retry()
 def test_otp_static_setup(self):
 """test Static OTP Setup stage"""
 flow: Flow = Flow.objects.get(slug="default-authentication-flow")
@@ -5,7 +5,7 @@ from unittest.case import skipUnless
 from selenium.webdriver.common.by import By
 from selenium.webdriver.common.keys import Keys
 
-from e2e.utils import USER, SeleniumTestCase
+from e2e.utils import USER, SeleniumTestCase, retry
 from passbook.core.models import User
 from passbook.flows.models import Flow, FlowDesignation
 from passbook.providers.oauth2.generators import generate_client_secret
@@ -16,6 +16,7 @@ from passbook.stages.password.models import PasswordStage
 class TestFlowsStageSetup(SeleniumTestCase):
 """test stage setup flows"""
 
+@retry()
 def test_password_change(self):
 """test password change flow"""
 # Ensure that password stage has change_flow set
@@ -9,7 +9,7 @@ from selenium.webdriver.common.by import By
 from selenium.webdriver.common.keys import Keys
 from selenium.webdriver.support import expected_conditions as ec
 
-from e2e.utils import USER, SeleniumTestCase
+from e2e.utils import USER, SeleniumTestCase, retry
 from passbook.core.models import Application
 from passbook.flows.models import Flow
 from passbook.policies.expression.models import ExpressionPolicy
@@ -33,7 +33,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
 def get_container_specs(self) -> Optional[Dict[str, Any]]:
 """Setup client grafana container which we test OAuth against"""
 return {
-"image": "grafana/grafana:7.1.0",
+"image": "docker.beryju.org/proxy/grafana/grafana:7.1.0",
 "detach": True,
 "network_mode": "host",
 "auto_remove": True,
@@ -61,6 +61,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
 },
 }
 
+@retry()
 def test_authorization_consent_implied(self):
 """test OAuth Provider flow (default authorization flow with implied consent)"""
 # Bootstrap all needed objects
@@ -115,6 +116,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
 USER().username,
 )
 
+@retry()
 def test_authorization_consent_explicit(self):
 """test OAuth Provider flow (default authorization flow with explicit consent)"""
 # Bootstrap all needed objects
@@ -184,6 +186,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
 USER().username,
 )
 
+@retry()
 def test_denied(self):
 """test OAuth Provider flow (default authorization flow, denied)"""
 # Bootstrap all needed objects
@@ -10,7 +10,7 @@ from selenium.webdriver.common.keys import Keys
 from selenium.webdriver.support import expected_conditions as ec
 from structlog import get_logger
 
-from e2e.utils import USER, SeleniumTestCase
+from e2e.utils import USER, SeleniumTestCase, retry
 from passbook.core.models import Application
 from passbook.crypto.models import CertificateKeyPair
 from passbook.flows.models import Flow
@@ -47,7 +47,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
 
 def get_container_specs(self) -> Optional[Dict[str, Any]]:
 return {
-"image": "grafana/grafana:7.1.0",
+"image": "docker.beryju.org/proxy/grafana/grafana:7.1.0",
 "detach": True,
 "network_mode": "host",
 "auto_remove": True,
@@ -80,6 +80,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
 },
 }
 
+@retry()
 def test_redirect_uri_error(self):
 """test OpenID Provider flow (invalid redirect URI, check error message)"""
 sleep(1)
@@ -122,6 +123,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
 "Redirect URI Error",
 )
 
+@retry()
 def test_authorization_consent_implied(self):
 """test OpenID Provider flow (default authorization flow with implied consent)"""
 sleep(1)
@@ -183,6 +185,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
 USER().email,
 )
 
+@retry()
 def test_authorization_logout(self):
 """test OpenID Provider flow with logout"""
 sleep(1)
@@ -252,6 +255,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
 )
 self.driver.find_element(By.ID, "logout").click()
 
+@retry()
 def test_authorization_consent_explicit(self):
 """test OpenID Provider flow (default authorization flow with explicit consent)"""
 sleep(1)
@@ -325,6 +329,7 @@ class TestProviderOAuth2OAuth(SeleniumTestCase):
 USER().email,
 )
 
+@retry()
 def test_authorization_denied(self):
 """test OpenID Provider flow (default authorization with access deny)"""
 sleep(1)
@@ -12,7 +12,7 @@ from selenium.webdriver.common.keys import Keys
 from selenium.webdriver.support import expected_conditions as ec
 from structlog import get_logger
 
-from e2e.utils import USER, SeleniumTestCase
+from e2e.utils import USER, SeleniumTestCase, retry
 from passbook.core.models import Application
 from passbook.crypto.models import CertificateKeyPair
 from passbook.flows.models import Flow
@@ -53,7 +53,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
 client: DockerClient = from_env()
 client.images.pull("beryju/oidc-test-client")
 container = client.containers.run(
-image="beryju/oidc-test-client",
+image="docker.beryju.org/proxy/beryju/oidc-test-client",
 detach=True,
 network_mode="host",
 auto_remove=True,
@@ -76,6 +76,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
 LOGGER.info("Container failed healthcheck")
 sleep(1)
 
+@retry()
 def test_redirect_uri_error(self):
 """test OpenID Provider flow (invalid redirect URI, check error message)"""
 sleep(1)
@@ -119,6 +120,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
 "Redirect URI Error",
 )
 
+@retry()
 def test_authorization_consent_implied(self):
 """test OpenID Provider flow (default authorization flow with implied consent)"""
 sleep(1)
@@ -169,6 +171,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
 self.assertEqual(body["IDTokenClaims"]["email"], USER().email)
 self.assertEqual(body["UserInfo"]["email"], USER().email)
 
+@retry()
 def test_authorization_consent_explicit(self):
 """test OpenID Provider flow (default authorization flow with explicit consent)"""
 sleep(1)
@@ -229,6 +232,7 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
 self.assertEqual(body["IDTokenClaims"]["email"], USER().email)
 self.assertEqual(body["UserInfo"]["email"], USER().email)
 
+@retry()
 def test_authorization_denied(self):
 """test OpenID Provider flow (default authorization with access deny)"""
 sleep(1)
@@ -11,14 +11,14 @@ from docker.models.containers import Container
 from selenium.webdriver.common.by import By
 from selenium.webdriver.common.keys import Keys
 
-from e2e.utils import USER, SeleniumTestCase
+from e2e.utils import USER, SeleniumTestCase, retry
 from passbook import __version__
 from passbook.core.models import Application
 from passbook.flows.models import Flow
 from passbook.outposts.models import (
+DockerServiceConnection,
 Outpost,
 OutpostConfig,
-OutpostDeploymentType,
 OutpostType,
 )
 from passbook.providers.proxy.models import ProxyProvider
@@ -36,7 +36,7 @@ class TestProviderProxy(SeleniumTestCase):
 
 def get_container_specs(self) -> Optional[Dict[str, Any]]:
 return {
-"image": "traefik/whoami:latest",
+"image": "docker.beryju.org/proxy/traefik/whoami:latest",
 "detach": True,
 "network_mode": "host",
 "auto_remove": True,
@@ -57,6 +57,7 @@ class TestProviderProxy(SeleniumTestCase):
 )
 return container
 
+@retry()
 def test_proxy_simple(self):
 """Test simple outpost setup with single provider"""
 proxy: ProxyProvider = ProxyProvider.objects.create(
@@ -75,7 +76,6 @@ class TestProviderProxy(SeleniumTestCase):
 outpost: Outpost = Outpost.objects.create(
 name="proxy_outpost",
 type=OutpostType.PROXY,
-deployment_type=OutpostDeploymentType.CUSTOM,
 )
 outpost.providers.add(proxy)
 outpost.save()
@@ -110,6 +110,7 @@ class TestProviderProxy(SeleniumTestCase):
 class TestProviderProxyConnect(ChannelsLiveServerTestCase):
 """Test Proxy connectivity over websockets"""
 
+@retry()
 def test_proxy_connectivity(self):
 """Test proxy connectivity over websocket"""
 SeleniumTestCase().apply_default_data()
@@ -126,10 +127,11 @@ class TestProviderProxyConnect(ChannelsLiveServerTestCase):
 proxy.save()
 # we need to create an application to actually access the proxy
 Application.objects.create(name="proxy", slug="proxy", provider=proxy)
+service_connection = DockerServiceConnection.objects.get(local=True)
 outpost: Outpost = Outpost.objects.create(
 name="proxy_outpost",
 type=OutpostType.PROXY,
-deployment_type=OutpostDeploymentType.DOCKER,
+service_connection=service_connection,
 _config=asdict(
 OutpostConfig(passbook_host=self.live_server_url, log_level="debug")
 ),
@@ -12,7 +12,7 @@ from selenium.webdriver.common.keys import Keys
 from selenium.webdriver.support import expected_conditions as ec
 from structlog import get_logger
 
-from e2e.utils import USER, SeleniumTestCase
+from e2e.utils import USER, SeleniumTestCase, retry
 from passbook.core.models import Application
 from passbook.crypto.models import CertificateKeyPair
 from passbook.flows.models import Flow
@@ -38,7 +38,7 @@ class TestProviderSAML(SeleniumTestCase):
 client: DockerClient = from_env()
 client.images.pull("beryju/oidc-test-client")
 container = client.containers.run(
-image="beryju/saml-test-sp",
+image="docker.beryju.org/proxy/beryju/saml-test-sp",
 detach=True,
 network_mode="host",
 auto_remove=True,
@@ -66,6 +66,7 @@ class TestProviderSAML(SeleniumTestCase):
 LOGGER.info("Container failed healthcheck")
 sleep(1)
 
+@retry()
 def test_sp_initiated_implicit(self):
 """test SAML Provider flow SP-initiated flow (implicit consent)"""
 # Bootstrap all needed objects
@@ -105,6 +106,7 @@ class TestProviderSAML(SeleniumTestCase):
 self.assertEqual(body["attr"]["mail"], [USER().email])
 self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
 
+@retry()
 def test_sp_initiated_explicit(self):
 """test SAML Provider flow SP-initiated flow (explicit consent)"""
 # Bootstrap all needed objects
@@ -150,6 +152,7 @@ class TestProviderSAML(SeleniumTestCase):
 self.assertEqual(body["attr"]["mail"], [USER().email])
 self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
 
+@retry()
 def test_idp_initiated_implicit(self):
 """test SAML Provider flow IdP-initiated flow (implicit consent)"""
 # Bootstrap all needed objects
@@ -195,6 +198,7 @@ class TestProviderSAML(SeleniumTestCase):
 self.assertEqual(body["attr"]["mail"], [USER().email])
 self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
 
+@retry()
 def test_sp_initiated_denied(self):
 """test SAML Provider flow SP-initiated flow (Policy denies access)"""
 # Bootstrap all needed objects
@@ -14,7 +14,7 @@ from selenium.webdriver.support import expected_conditions as ec
 from structlog import get_logger
 from yaml import safe_dump
 
-from e2e.utils import SeleniumTestCase
+from e2e.utils import SeleniumTestCase, retry
 from passbook.flows.models import Flow
 from passbook.providers.oauth2.generators import (
 generate_client_id,
@@ -106,6 +106,7 @@ class TestSourceOAuth2(SeleniumTestCase):
 consumer_secret=self.client_secret,
 )
 
+@retry()
 def test_oauth_enroll(self):
 """test OAuth Source With With OIDC"""
 self.create_objects()
@@ -159,6 +160,7 @@ class TestSourceOAuth2(SeleniumTestCase):
 "admin@example.com",
 )
 
+@retry()
 @override_settings(SESSION_COOKIE_SAMESITE="strict")
 def test_oauth_samesite_strict(self):
 """test OAuth Source With SameSite set to strict
@@ -195,6 +197,7 @@ class TestSourceOAuth2(SeleniumTestCase):
 "Authentication Failed.",
 )
 
+@retry()
 def test_oauth_enroll_auth(self):
 """test OAuth Source With With OIDC (enroll and authenticate again)"""
 self.test_oauth_enroll()
@@ -255,7 +258,7 @@ class TestSourceOAuth1(SeleniumTestCase):
 
 def get_container_specs(self) -> Optional[Dict[str, Any]]:
 return {
-"image": "beryju/oauth1-test-server",
+"image": "docker.beryju.org/proxy/beryju/oauth1-test-server",
 "detach": True,
 "network_mode": "host",
 "auto_remove": True,
@@ -291,6 +294,7 @@ class TestSourceOAuth1(SeleniumTestCase):
 consumer_secret=self.client_secret,
 )
 
+@retry()
 def test_oauth_enroll(self):
 """test OAuth Source With With OIDC"""
 self.create_objects()
@@ -317,6 +321,7 @@ class TestSourceOAuth1(SeleniumTestCase):
 self.driver.find_element(By.CSS_SELECTOR, "[name='confirm']").click()
 
 # Wait until we've loaded the user info page
+sleep(2)
 self.wait.until(ec.presence_of_element_located((By.ID, "user-settings")))
 self.driver.get(self.url("passbook_core:user-settings"))
 
@@ -10,7 +10,7 @@ from selenium.webdriver.common.keys import Keys
 from selenium.webdriver.support import expected_conditions as ec
 from structlog import get_logger
 
-from e2e.utils import SeleniumTestCase
+from e2e.utils import SeleniumTestCase, retry
 from passbook.crypto.models import CertificateKeyPair
 from passbook.flows.models import Flow
 from passbook.sources.saml.models import SAMLBindingTypes, SAMLSource
@@ -75,7 +75,7 @@ class TestSourceSAML(SeleniumTestCase):
 
 def get_container_specs(self) -> Optional[Dict[str, Any]]:
 return {
-"image": "kristophjunge/test-saml-idp:1.15",
+"image": "docker.beryju.org/proxy/kristophjunge/test-saml-idp:1.15",
 "detach": True,
 "network_mode": "host",
 "auto_remove": True,
@@ -92,6 +92,7 @@ class TestSourceSAML(SeleniumTestCase):
 },
 }
 
+@retry()
 def test_idp_redirect(self):
 """test SAML Source With redirect binding"""
 # Bootstrap all needed objects
@@ -141,6 +142,7 @@ class TestSourceSAML(SeleniumTestCase):
 self.driver.find_element(By.ID, "id_username").get_attribute("value"), ""
 )
 
+@retry()
 def test_idp_post(self):
 """test SAML Source With post binding"""
 # Bootstrap all needed objects
@@ -192,6 +194,7 @@ class TestSourceSAML(SeleniumTestCase):
 self.driver.find_element(By.ID, "id_username").get_attribute("value"), ""
 )
 
+@retry()
 def test_idp_post_auto(self):
 """test SAML Source With post binding (auto redirect)"""
 # Bootstrap all needed objects
43
e2e/utils.py
43
e2e/utils.py
@@ -1,19 +1,22 @@
 """passbook e2e testing utilities"""
+from functools import wraps
 from glob import glob
 from importlib.util import module_from_spec, spec_from_file_location
 from inspect import getmembers, isfunction
 from os import environ, makedirs
 from time import sleep, time
-from typing import Any, Dict, Optional
+from typing import Any, Callable, Dict, Optional
 
 from django.apps import apps
 from django.contrib.staticfiles.testing import StaticLiveServerTestCase
 from django.db import connection, transaction
 from django.db.utils import IntegrityError
 from django.shortcuts import reverse
+from django.test.testcases import TransactionTestCase
 from docker import DockerClient, from_env
 from docker.models.containers import Container
 from selenium import webdriver
+from selenium.common.exceptions import NoSuchElementException, TimeoutException
 from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
 from selenium.webdriver.remote.webdriver import WebDriver
 from selenium.webdriver.support.ui import WebDriverWait
@@ -123,3 +126,41 @@ class SeleniumTestCase(StaticLiveServerTestCase):
                 func(apps, schema_editor)
             except IntegrityError:
                 pass
+
+
+def retry(max_retires=3, exceptions=None):
+    """Retry test multiple times. Default to catching Selenium Timeout Exception"""
+
+    if not exceptions:
+        exceptions = [TimeoutException, NoSuchElementException]
+
+    logger = get_logger()
+
+    def retry_actual(func: Callable):
+        """Retry test multiple times"""
+        count = 1
+
+        @wraps(func)
+        def wrapper(self: TransactionTestCase, *args, **kwargs):
+            """Run test again if we're below max_retries, including tearDown and
+            setUp. Otherwise raise the error"""
+            nonlocal count
+            try:
+                return func(self, *args, **kwargs)
+            # pylint: disable=catching-non-exception
+            except tuple(exceptions) as exc:
+                count += 1
+                if count > max_retires:
+                    logger.debug("Exceeded retry count", exc=exc, test=self)
+                    # pylint: disable=raising-non-exception
+                    raise exc
+                logger.debug("Retrying on error", exc=exc, test=self)
+                self.tearDown()
+                # pylint: disable=protected-access
+                self._post_teardown()
+                self.setUp()
+                return wrapper(self, *args, **kwargs)
+
+        return wrapper
+
+    return retry_actual
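As a usage sketch (not part of the diff; the test names are illustrative), the new decorator wraps a flaky Selenium test so that tearDown and setUp run again between attempts:

    from e2e.utils import SeleniumTestCase, retry

    class TestExample(SeleniumTestCase):
        """Hypothetical e2e test using the retry decorator"""

        @retry()  # up to 3 attempts, catching TimeoutException/NoSuchElementException
        def test_flaky_flow(self):
            self.driver.get(self.live_server_url)

        @retry(max_retires=5, exceptions=[AssertionError])
        def test_with_custom_retries(self):
            """Retries can also be tuned per test (note the upstream spelling max_retires)"""
            self.assertTrue(True)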
@@ -1,9 +1,11 @@
 apiVersion: v2
-appVersion: "0.12.0-stable"
-description: A Helm chart for passbook.
+description: passbook is an open-source Identity Provider focused on flexibility and versatility. You can use passbook in an existing environment to add support for new protocols. passbook is also a great solution for implementing signup/recovery/etc in your application, so you don't have to deal with it.
 name: passbook
-version: "0.12.0-stable"
-icon: https://github.com/BeryJu/passbook/blob/master/docs/images/logo.svg
+home: https://passbook.beryju.org
+sources:
+  - https://github.com/BeryJu/passbook
+version: "0.12.10-stable"
+icon: https://raw.githubusercontent.com/BeryJu/passbook/master/docs/images/logo.svg
 dependencies:
   - name: postgresql
     version: 9.4.1

helm/README.md  (new file, 28 lines)
@@ -0,0 +1,28 @@
+# passbook Helm Chart
+
+| Name | Default | Description |
+|-----------------------------------|-------------------------|-------------|
+| image.name | beryju/passbook | Image used to run the passbook server and worker |
+| image.name_static | beryju/passbook-static | Image used to run the passbook static server (CSS and JS Files) |
+| image.tag | 0.12.5-stable | Image tag |
+| serverReplicas | 1 | Replicas for the Server deployment |
+| workerReplicas | 1 | Replicas for the Worker deployment |
+| kubernetesIntegration | true | Enable/disable the Kubernetes integration for passbook. This will create a service account for passbook to create and update outposts in passbook |
+| config.secretKey | | Secret key used to sign session cookies, generate with `pwgen 50 1` for example. |
+| config.errorReporting.enabled | false | Enable/disable error reporting |
+| config.errorReporting.environment | customer | Environment sent with the error reporting |
+| config.errorReporting.sendPii | false | Whether to send Personally-identifiable data with the error reporting |
+| config.logLevel | warning | Log level of passbook |
+| backup.accessKey | | Optionally enable S3 Backup, Access Key |
+| backup.secretKey | | Optionally enable S3 Backup, Secret Key |
+| backup.bucket | | Optionally enable S3 Backup, Bucket |
+| backup.region | | Optionally enable S3 Backup, Region |
+| backup.host | | Optionally enable S3 Backup, to custom Endpoint like minio |
+| ingress.annotations | {} | Annotations for the ingress object |
+| ingress.hosts | [passbook.k8s.local] | Hosts which the ingress will match |
+| ingress.tls | [] | TLS Configuration, same as Ingress objects |
+| install.postgresql | true | Enables/disables the packaged PostgreSQL Chart |
+| install.redis | true | Enables/disables the packaged Redis Chart |
+| postgresql.postgresqlPassword | | Password used for PostgreSQL, generated automatically. |
+
+For more info, see https://passbook.beryju.org/ and https://passbook.beryju.org/installation/kubernetes/
@@ -3,7 +3,7 @@
 Expand the name of the chart.
 */}}
 {{- define "passbook.name" -}}
-{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}}
+{{- default .Chart.Name | trunc 63 | trimSuffix "-" -}}
 {{- end -}}
 
 {{/*
@@ -12,17 +12,13 @@ We truncate at 63 chars because some Kubernetes name fields are limited to this
 If release name contains chart name it will be used as a full name.
 */}}
 {{- define "passbook.fullname" -}}
-{{- if .Values.fullnameOverride -}}
-{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}}
-{{- else -}}
-{{- $name := default .Chart.Name .Values.nameOverride -}}
+{{- $name := default .Chart.Name -}}
 {{- if contains $name .Release.Name -}}
 {{- .Release.Name | trunc 63 | trimSuffix "-" -}}
 {{- else -}}
 {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}}
 {{- end -}}
 {{- end -}}
-{{- end -}}
 
 {{/*
 Create chart name and version as used by the chart label.
@@ -7,8 +7,8 @@ data:
   POSTGRESQL__NAME: "{{ .Values.postgresql.postgresqlDatabase }}"
   POSTGRESQL__USER: "{{ .Values.postgresql.postgresqlUsername }}"
   {{- if .Values.backup }}
-  POSTGRESQL__S3_BACKUP__ACCESS_KEY: "{{ .Values.backup.access_key }}"
-  POSTGRESQL__S3_BACKUP__SECRET_KEY: "{{ .Values.backup.secret_key }}"
+  POSTGRESQL__S3_BACKUP__ACCESS_KEY: "{{ .Values.backup.accessKey }}"
+  POSTGRESQL__S3_BACKUP__SECRET_KEY: "{{ .Values.backup.secretKey }}"
   POSTGRESQL__S3_BACKUP__BUCKET: "{{ .Values.backup.bucket }}"
   POSTGRESQL__S3_BACKUP__REGION: "{{ .Values.backup.region }}"
   POSTGRESQL__S3_BACKUP__HOST: "{{ .Values.backup.host }}"
@@ -1,42 +0,0 @@
-{{- if .Values.backup }}
-apiVersion: batch/v1beta1
-kind: CronJob
-metadata:
-  name: {{ include "passbook.fullname" . }}-backup
-  labels:
-    app.kubernetes.io/name: {{ include "passbook.name" . }}
-    helm.sh/chart: {{ include "passbook.chart" . }}
-    app.kubernetes.io/instance: {{ .Release.Name }}
-    app.kubernetes.io/managed-by: {{ .Release.Service }}
-spec:
-  schedule: "0 0 * * *"
-  jobTemplate:
-    spec:
-      template:
-        spec:
-          restartPolicy: Never
-          containers:
-            - name: {{ .Chart.Name }}
-              image: "{{ .Values.image.name }}:{{ .Values.image.tag }}"
-              args: [server]
-              envFrom:
-                - configMapRef:
-                    name: {{ include "passbook.fullname" . }}-config
-                  prefix: PASSBOOK_
-              env:
-                - name: PASSBOOK_SECRET_KEY
-                  valueFrom:
-                    secretKeyRef:
-                      name: "{{ include "passbook.fullname" . }}-secret-key"
-                      key: "secret_key"
-                - name: PASSBOOK_REDIS__PASSWORD
-                  valueFrom:
-                    secretKeyRef:
-                      name: "{{ .Release.Name }}-redis"
-                      key: "redis-password"
-                - name: PASSBOOK_POSTGRESQL__PASSWORD
-                  valueFrom:
-                    secretKeyRef:
-                      name: "{{ .Release.Name }}-postgresql"
-                      key: "postgresql-password"
-{{- end}}
@@ -28,9 +28,9 @@ rules:
       - "patch"
   - apiGroups:
       - "extensions"
-      - "networking"
+      - "networking.k8s.io"
    resources:
-      - "ingress"
+      - "ingresses"
    verbs:
      - "get"
      - "create"
@@ -4,9 +4,7 @@
 image:
   name: beryju/passbook
   name_static: beryju/passbook-static
-  tag: 0.12.0-stable
+  tag: 0.12.10-stable
 
-nameOverride: ""
-
 serverReplicas: 1
 workerReplicas: 1
@@ -28,8 +26,8 @@ config:
 
 # Enable Database Backups to S3
 # backup:
-#   access_key: access-key
-#   secret_key: secret-key
+#   accessKey: access-key
+#   secretKey: secret-key
 #   bucket: s3-bucket
 #   region: eu-central-1
 #   host: s3-host
@@ -38,7 +36,6 @@ ingress:
   annotations: {}
     # kubernetes.io/ingress.class: nginx
    # kubernetes.io/tls-acme: "true"
-  path: /
   hosts:
     - passbook.k8s.local
   tls: []
@@ -62,7 +59,5 @@ redis:
   cluster:
     enabled: false
   master:
-    persistence:
-      enabled: false
   # https://stackoverflow.com/a/59189742
   disableCommands: []
@@ -1,4 +1,6 @@
 """Gunicorn config"""
+import os
+import warnings
 from multiprocessing import cpu_count
 from pathlib import Path
 
@@ -13,6 +15,8 @@ worker_class = "uvicorn.workers.UvicornWorker"
 # Docker containers don't have /tmp as tmpfs
 worker_tmp_dir = "/dev/shm"
 
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.root.settings")
+
 logconfig_dict = {
     "version": 1,
     "disable_existing_loggers": False,
@@ -49,3 +53,5 @@ if Path("/var/run/secrets/kubernetes.io").exists():
 else:
     worker = cpu_count() * 2 + 1
     threads = 4
+
+warnings.simplefilter("once")
@@ -47,7 +47,9 @@ if __name__ == "__main__":
         # pyright: reportGeneralTypeIssues=false
         spec.loader.exec_module(mod)
 
-        for _, sub in getmembers(mod, isclass):
+        for name, sub in getmembers(mod, isclass):
+            if name != "Migration":
+                continue
             migration = sub(curr, conn)
             if migration.needs_migration():
                 LOGGER.info("Migration needs to be applied", migration=sub)
@@ -25,7 +25,7 @@ delete from django_migrations where app = 'passbook_stages_password' and
 name = '0002_passwordstage_change_flow';"""
 
 
-class To010Migration(BaseMigration):
+class Migration(BaseMigration):
     def needs_migration(self) -> bool:
         self.cur.execute(
             "select * from information_schema.tables where table_name='oidc_provider_client'"
@@ -1,2 +1,2 @@
 """passbook"""
-__version__ = "0.12.0-stable"
+__version__ = "0.12.10-stable"
@@ -50,15 +50,23 @@ class TaskViewSet(ViewSet):
         task = TaskInfo.by_name(pk)
         if not task:
             raise Http404
-        task_module = import_module(task.task_call_module)
-        task_func = getattr(task_module, task.task_call_func)
-        task_func.delay(*task.task_call_args, **task.task_call_kwargs)
-        messages.success(
-            self.request,
-            _("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
-        )
-        return Response(
-            {
-                "successful": True,
-            }
-        )
+        try:
+            task_module = import_module(task.task_call_module)
+            task_func = getattr(task_module, task.task_call_func)
+            task_func.delay(*task.task_call_args, **task.task_call_kwargs)
+            messages.success(
+                self.request,
+                _(
+                    "Successfully re-scheduled Task %(name)s!"
+                    % {"name": task.task_name}
+                ),
+            )
+            return Response(
+                {
+                    "successful": True,
+                }
+            )
+        except ImportError:
+            # if we get an import error, the module path has probably changed
+            task.delete()
+            return Response({"successful": False})
@@ -46,12 +46,29 @@
                     {% trans 'Providers' %}
                 </a>
             </li>
+            <li class="pf-c-nav__item pf-m-expanded">
+                <a href="#" class="pf-c-nav__link" aria-expanded="true">{% trans 'Outposts' %}
+                    <span class="pf-c-nav__toggle">
+                        <i class="fas fa-angle-right" aria-hidden="true"></i>
+                    </span>
+                </a>
+                <section class="pf-c-nav__subnav">
+                    <ul class="pf-c-nav__simple-list">
             <li class="pf-c-nav__item">
                 <a href="{% url 'passbook_admin:outposts' %}"
                     class="pf-c-nav__link {% is_active 'passbook_admin:outposts' 'passbook_admin:outpost-create' 'passbook_admin:outpost-update' 'passbook_admin:outpost-delete' %}">
                     {% trans 'Outposts' %}
                 </a>
             </li>
+            <li class="pf-c-nav__item">
+                <a href="{% url 'passbook_admin:outpost-service-connections' %}"
+                    class="pf-c-nav__link {% is_active 'passbook_admin:outpost-service-connections' 'passbook_admin:outpost-service-connections-create' 'passbook_admin:outpost-service-connections-update' 'passbook_admin:outpost-service-connections-delete' %}">
+                    {% trans 'Service Connections' %}
+                </a>
+            </li>
+                    </ul>
+                </section>
+            </li>
             <li class="pf-c-nav__item">
                 <a href="{% url 'passbook_admin:property-mappings' %}"
                     class="pf-c-nav__link {% is_active 'passbook_admin:property-mappings' 'passbook_admin:property-mapping-create' 'passbook_admin:property-mapping-update' 'passbook_admin:property-mapping-delete' %}">
@@ -49,7 +49,7 @@
                 </span>
             </td>
             {% with states=outpost.state %}
-            {% if states|length > 1 %}
+            {% if states|length > 0 %}
             <td role="cell">
                 {% for state in states %}
                 <div>
@@ -0,0 +1,135 @@
+{% extends "administration/base.html" %}
+
+{% load i18n %}
+{% load humanize %}
+{% load passbook_utils %}
+{% load admin_reflection %}
+
+{% block content %}
+<section class="pf-c-page__main-section pf-m-light">
+    <div class="pf-c-content">
+        <h1>
+            <i class="pf-icon-integration"></i>
+            {% trans 'Outpost Service-Connections' %}
+        </h1>
+        <p>{% trans "Outpost Service-Connections define how passbook connects to external platforms to manage and deploy Outposts." %}</p>
+    </div>
+</section>
+<section class="pf-c-page__main-section pf-m-no-padding-mobile">
+    <div class="pf-c-card">
+        {% if object_list %}
+        <div class="pf-c-toolbar">
+            <div class="pf-c-toolbar__content">
+                {% include 'partials/toolbar_search.html' %}
+                <div class="pf-c-toolbar__bulk-select">
+                    <div class="pf-c-dropdown">
+                        <button class="pf-m-primary pf-c-dropdown__toggle" type="button">
+                            <span class="pf-c-dropdown__toggle-text">{% trans 'Create' %}</span>
+                            <i class="fas fa-caret-down pf-c-dropdown__toggle-icon" aria-hidden="true"></i>
+                        </button>
+                        <ul class="pf-c-dropdown__menu" hidden>
+                            {% for type, name in types.items %}
+                            <li>
+                                <a class="pf-c-dropdown__menu-item" href="{% url 'passbook_admin:outpost-service-connection-create' %}?type={{ type }}&back={{ request.get_full_path }}">
+                                    {{ name|verbose_name }}<br>
+                                    <small>
+                                        {{ name|doc }}
+                                    </small>
+                                </a>
+                            </li>
+                            {% endfor %}
+                        </ul>
+                    </div>
+                </div>
+                {% include 'partials/pagination.html' %}
+            </div>
+        </div>
+        <table class="pf-c-table pf-m-compact pf-m-grid-xl" role="grid">
+            <thead>
+                <tr role="row">
+                    <th role="columnheader" scope="col">{% trans 'Name' %}</th>
+                    <th role="columnheader" scope="col">{% trans 'Type' %}</th>
+                    <th role="columnheader" scope="col">{% trans 'Local?' %}</th>
+                    <th role="columnheader" scope="col">{% trans 'Status' %}</th>
+                    <th role="cell"></th>
+                </tr>
+            </thead>
+            <tbody role="rowgroup">
+                {% for sc in object_list %}
+                <tr role="row">
+                    <th role="columnheader">
+                        <span>{{ sc.name }}</span>
+                    </th>
+                    <td role="cell">
+                        <span>
+                            {{ sc|verbose_name }}
+                        </span>
+                    </td>
+                    <td role="cell">
+                        <span>
+                            {{ sc.local|yesno:"Yes,No" }}
+                        </span>
+                    </td>
+                    <td role="cell">
+                        <span>
+                            {% if sc.state.healthy %}
+                            <i class="fas fa-check pf-m-success"></i> {{ sc.state.version }}
+                            {% else %}
+                            <i class="fas fa-times pf-m-danger"></i> {% trans 'Unhealthy' %}
+                            {% endif %}
+                        </span>
+                    </td>
+                    <td>
+                        <a class="pf-c-button pf-m-secondary" href="{% url 'passbook_admin:outpost-service-connection-update' pk=sc.pk %}?back={{ request.get_full_path }}">{% trans 'Edit' %}</a>
+                        <a class="pf-c-button pf-m-danger" href="{% url 'passbook_admin:outpost-service-connection-delete' pk=sc.pk %}?back={{ request.get_full_path }}">{% trans 'Delete' %}</a>
+                    </td>
+                </tr>
+                {% endfor %}
+            </tbody>
+        </table>
+        <div class="pf-c-pagination pf-m-bottom">
+            {% include 'partials/pagination.html' %}
+        </div>
+        {% else %}
+        <div class="pf-c-toolbar">
+            <div class="pf-c-toolbar__content">
+                {% include 'partials/toolbar_search.html' %}
+            </div>
+        </div>
+        <div class="pf-c-empty-state">
+            <div class="pf-c-empty-state__content">
+                <i class="fas fa-map-marker pf-c-empty-state__icon" aria-hidden="true"></i>
+                <h1 class="pf-c-title pf-m-lg">
+                    {% trans 'No Outpost Service Connections.' %}
+                </h1>
+                <div class="pf-c-empty-state__body">
+                    {% if request.GET.search != "" %}
+                    {% trans "Your search query doesn't match any outposts." %}
+                    {% else %}
+                    {% trans 'Currently no service connections exist. Click the button below to create one.' %}
+                    {% endif %}
+                </div>
+                <div class="pf-c-dropdown">
+                    <button class="pf-m-primary pf-c-dropdown__toggle" type="button">
+                        <span class="pf-c-dropdown__toggle-text">{% trans 'Create' %}</span>
+                        <i class="fas fa-caret-down pf-c-dropdown__toggle-icon" aria-hidden="true"></i>
+                    </button>
+                    <ul class="pf-c-dropdown__menu" hidden>
+                        {% for type, name in types.items %}
+                        <li>
+                            <a class="pf-c-dropdown__menu-item" href="{% url 'passbook_admin:outpost-service-connection-create' %}?type={{ type }}&back={{ request.get_full_path }}">
+                                {{ name|verbose_name }}<br>
+                                <small>
+                                    {{ name|doc }}
+                                </small>
+                            </a>
+                        </li>
+                        {% endfor %}
+                    </ul>
+                </div>
+            </div>
+        </div>
+        {% endif %}
+    </div>
+</section>
+{% endblock %}
@@ -21,7 +21,7 @@
             <tr role="row">
                 <th role="columnheader" scope="col">{% trans 'Identifier' %}</th>
                 <th role="columnheader" scope="col">{% trans 'Description' %}</th>
-                <th role="columnheader" scope="col">{% trans 'Last Status' %}</th>
+                <th role="columnheader" scope="col">{% trans 'Last Run' %}</th>
                 <th role="columnheader" scope="col">{% trans 'Status' %}</th>
                 <th role="columnheader" scope="col">{% trans 'Messages' %}</th>
                 <th role="cell"></th>
@@ -7,10 +7,11 @@ from passbook.admin.views import (
     flows,
     groups,
     outposts,
+    outposts_service_connections,
     overview,
     policies,
     policies_bindings,
-    property_mapping,
+    property_mappings,
     providers,
     sources,
     stages,
@@ -225,22 +226,22 @@ urlpatterns = [
     # Property Mappings
     path(
         "property-mappings/",
-        property_mapping.PropertyMappingListView.as_view(),
+        property_mappings.PropertyMappingListView.as_view(),
         name="property-mappings",
     ),
     path(
         "property-mappings/create/",
-        property_mapping.PropertyMappingCreateView.as_view(),
+        property_mappings.PropertyMappingCreateView.as_view(),
         name="property-mapping-create",
     ),
     path(
         "property-mappings/<uuid:pk>/update/",
-        property_mapping.PropertyMappingUpdateView.as_view(),
+        property_mappings.PropertyMappingUpdateView.as_view(),
         name="property-mapping-update",
     ),
     path(
         "property-mappings/<uuid:pk>/delete/",
-        property_mapping.PropertyMappingDeleteView.as_view(),
+        property_mappings.PropertyMappingDeleteView.as_view(),
         name="property-mapping-delete",
     ),
     # Users
@@ -312,6 +313,27 @@ urlpatterns = [
         outposts.OutpostDeleteView.as_view(),
         name="outpost-delete",
     ),
+    # Outpost Service Connections
+    path(
+        "outposts/service_connections/",
+        outposts_service_connections.OutpostServiceConnectionListView.as_view(),
+        name="outpost-service-connections",
+    ),
+    path(
+        "outposts/service_connections/create/",
+        outposts_service_connections.OutpostServiceConnectionCreateView.as_view(),
+        name="outpost-service-connection-create",
+    ),
+    path(
+        "outposts/service_connections/<uuid:pk>/update/",
+        outposts_service_connections.OutpostServiceConnectionUpdateView.as_view(),
+        name="outpost-service-connection-update",
+    ),
+    path(
+        "outposts/service_connections/<uuid:pk>/delete/",
+        outposts_service_connections.OutpostServiceConnectionDeleteView.as_view(),
+        name="outpost-service-connection-delete",
+    ),
     # Tasks
     path(
         "tasks/",

passbook/admin/views/outposts_service_connections.py  (new file, 83 lines)
@@ -0,0 +1,83 @@
+"""passbook OutpostServiceConnection administration"""
+from django.contrib.auth.mixins import LoginRequiredMixin
+from django.contrib.auth.mixins import (
+    PermissionRequiredMixin as DjangoPermissionRequiredMixin,
+)
+from django.contrib.messages.views import SuccessMessageMixin
+from django.urls import reverse_lazy
+from django.utils.translation import gettext as _
+from guardian.mixins import PermissionListMixin, PermissionRequiredMixin
+
+from passbook.admin.views.utils import (
+    BackSuccessUrlMixin,
+    DeleteMessageView,
+    InheritanceCreateView,
+    InheritanceListView,
+    InheritanceUpdateView,
+    SearchListMixin,
+    UserPaginateListMixin,
+)
+from passbook.outposts.models import OutpostServiceConnection
+
+
+class OutpostServiceConnectionListView(
+    LoginRequiredMixin,
+    PermissionListMixin,
+    UserPaginateListMixin,
+    SearchListMixin,
+    InheritanceListView,
+):
+    """Show list of all outpost-service-connections"""
+
+    model = OutpostServiceConnection
+    permission_required = "passbook_outposts.add_outpostserviceconnection"
+    template_name = "administration/outpost_service_connection/list.html"
+    ordering = "pk"
+    search_fields = ["pk", "name"]
+
+
+class OutpostServiceConnectionCreateView(
+    SuccessMessageMixin,
+    BackSuccessUrlMixin,
+    LoginRequiredMixin,
+    DjangoPermissionRequiredMixin,
+    InheritanceCreateView,
+):
+    """Create new OutpostServiceConnection"""
+
+    model = OutpostServiceConnection
+    permission_required = "passbook_outposts.add_outpostserviceconnection"
+
+    template_name = "generic/create.html"
+    success_url = reverse_lazy("passbook_admin:outpost-service-connections")
+    success_message = _("Successfully created OutpostServiceConnection")
+
+
+class OutpostServiceConnectionUpdateView(
+    SuccessMessageMixin,
+    BackSuccessUrlMixin,
+    LoginRequiredMixin,
+    PermissionRequiredMixin,
+    InheritanceUpdateView,
+):
+    """Update outpostserviceconnection"""
+
+    model = OutpostServiceConnection
+    permission_required = "passbook_outposts.change_outpostserviceconnection"
+
+    template_name = "generic/update.html"
+    success_url = reverse_lazy("passbook_admin:outpost-service-connections")
+    success_message = _("Successfully updated OutpostServiceConnection")
+
+
+class OutpostServiceConnectionDeleteView(
+    LoginRequiredMixin, PermissionRequiredMixin, DeleteMessageView
+):
+    """Delete outpostserviceconnection"""
+
+    model = OutpostServiceConnection
+    permission_required = "passbook_outposts.delete_outpostserviceconnection"
+
+    template_name = "generic/delete.html"
+    success_url = reverse_lazy("passbook_admin:outpost-service-connections")
+    success_message = _("Successfully deleted OutpostServiceConnection")
@@ -32,8 +32,8 @@ class ProviderListView(
     model = Provider
     permission_required = "passbook_core.add_provider"
     template_name = "administration/provider/list.html"
-    ordering = "id"
-    search_fields = ["id", "name"]
+    ordering = "pk"
+    search_fields = ["pk", "name"]
 
 
 class ProviderCreateView(
@@ -25,10 +25,7 @@ def token_from_header(raw_header: bytes) -> Optional[Token]:
     try:
         auth_credentials = b64decode(auth_credentials.encode()).decode()
     except UnicodeDecodeError:
-        # TODO: Remove this workaround
-        # temporary fallback for 0.11 to 0.12 upgrade
-        # 0.11 and below proxy sends authorization header not base64 encoded
-        pass
+        return None
     # Accept credentials with username and without
     if ":" in auth_credentials:
         _, password = auth_credentials.split(":")
@@ -19,7 +19,11 @@ from passbook.core.api.tokens import TokenViewSet
 from passbook.core.api.users import UserViewSet
 from passbook.crypto.api import CertificateKeyPairViewSet
 from passbook.flows.api import FlowStageBindingViewSet, FlowViewSet, StageViewSet
-from passbook.outposts.api import OutpostViewSet
+from passbook.outposts.api import (
+    DockerServiceConnectionViewSet,
+    KubernetesServiceConnectionViewSet,
+    OutpostViewSet,
+)
 from passbook.policies.api import PolicyBindingViewSet, PolicyViewSet
 from passbook.policies.dummy.api import DummyPolicyViewSet
 from passbook.policies.expiry.api import PasswordExpiryPolicyViewSet
@@ -29,7 +33,7 @@ from passbook.policies.hibp.api import HaveIBeenPwendPolicyViewSet
 from passbook.policies.password.api import PasswordPolicyViewSet
 from passbook.policies.reputation.api import ReputationPolicyViewSet
 from passbook.providers.oauth2.api import OAuth2ProviderViewSet, ScopeMappingViewSet
-from passbook.providers.proxy.api import OutpostConfigViewSet, ProxyProviderViewSet
+from passbook.providers.proxy.api import ProxyOutpostConfigViewSet, ProxyProviderViewSet
 from passbook.providers.saml.api import SAMLPropertyMappingViewSet, SAMLProviderViewSet
 from passbook.sources.ldap.api import LDAPPropertyMappingViewSet, LDAPSourceViewSet
 from passbook.sources.oauth.api import OAuthSourceViewSet
@@ -66,7 +70,14 @@ router.register("core/users", UserViewSet)
 router.register("core/tokens", TokenViewSet)
 
 router.register("outposts/outposts", OutpostViewSet)
-router.register("outposts/proxy", OutpostConfigViewSet)
+router.register("outposts/service_connections/docker", DockerServiceConnectionViewSet)
+router.register(
+    "outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet
+)
+router.register("outposts/proxy", ProxyOutpostConfigViewSet)
+
+router.register("flows/instances", FlowViewSet)
+router.register("flows/bindings", FlowStageBindingViewSet)
 
 router.register("crypto/certificatekeypairs", CertificateKeyPairViewSet)
 
@@ -114,9 +125,6 @@ router.register("stages/user_login", UserLoginStageViewSet)
 router.register("stages/user_logout", UserLogoutStageViewSet)
 router.register("stages/user_write", UserWriteStageViewSet)
 
-router.register("flows/instances", FlowViewSet)
-router.register("flows/bindings", FlowStageBindingViewSet)
-
 router.register("stages/dummy", DummyStageViewSet)
 router.register("policies/dummy", DummyPolicyViewSet)
 
@@ -1,6 +1,4 @@
 """Tokens API Viewset"""
-from uuid import UUID
-
 from django.http.response import Http404
 from rest_framework.decorators import action
 from rest_framework.request import Request
@@ -29,10 +27,9 @@ class TokenViewSet(ModelViewSet):
     serializer_class = TokenSerializer
 
     @action(detail=True)
-    # pylint: disable=invalid-name
-    def view_key(self, request: Request, pk: UUID) -> Response:
+    def view_key(self, request: Request, identifier: str) -> Response:
         """Return token key and log access"""
-        tokens = Token.filter_not_expired(pk=pk)
+        tokens = Token.filter_not_expired(identifier=identifier)
         if not tokens.exists():
             raise Http404
         token = tokens.first()
@@ -1,4 +1,12 @@
 """passbook core tasks"""
+from datetime import datetime
+from io import StringIO
+
+from boto3.exceptions import Boto3Error
+from botocore.exceptions import BotoCoreError, ClientError
+from dbbackup.db.exceptions import CommandConnectorError
+from django.contrib.humanize.templatetags.humanize import naturaltime
+from django.core import management
 from django.utils.timezone import now
 from structlog import get_logger
 
@@ -24,3 +32,32 @@ def clean_expired_models(self: MonitoredTask):
         LOGGER.debug("Deleted expired models", model=cls, amount=amount)
         messages.append(f"Deleted {amount} expired {cls._meta.verbose_name_plural}")
     self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, messages))
+
+
+@CELERY_APP.task(bind=True, base=MonitoredTask)
+def backup_database(self: MonitoredTask):  # pragma: no cover
+    """Database backup"""
+    self.result_timeout_hours = 25
+    try:
+        start = datetime.now()
+        out = StringIO()
+        management.call_command("dbbackup", quiet=True, stdout=out)
+        self.set_status(
+            TaskResult(
+                TaskResultStatus.SUCCESSFUL,
+                [
+                    f"Successfully finished database backup {naturaltime(start)}",
+                    out.getvalue(),
+                ],
+            )
+        )
+        LOGGER.info("Successfully backed up database.")
+    except (
+        IOError,
+        BotoCoreError,
+        ClientError,
+        Boto3Error,
+        PermissionError,
+        CommandConnectorError,
+    ) as exc:
+        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
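As a usage sketch (not part of the diff, assuming the module path matches the file above), the new backup task can be queued like any other Celery task:

    from passbook.core.tasks import backup_database

    # queue an immediate, monitored database backup;
    # its TaskInfo result is kept for 25 hours instead of the default 6
    backup_database.delay()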
@@ -53,7 +53,7 @@
                         {{ user.username }}
                     </a>
                 </div>
-                <img class="pf-c-avatar" src="{% gravatar user.email %}" alt="">
+                <img class="pf-c-avatar" src="{% avatar user %}" alt="">
             </div>
         </header>
         {% block page_content %}
@@ -7,7 +7,7 @@
 <div class="pf-c-form__group">
     <div class="form-control-static">
         <div class="left">
-            <img class="pf-c-avatar" src="{% gravatar user.email %}" alt="">
+            <img class="pf-c-avatar" src="{% avatar user %}" alt="">
             {{ user.username }}
         </div>
         <div class="right">
@@ -54,7 +54,7 @@ class CertificateKeyPair(CreatedUpdatedModel):
     @property
     def private_key(self) -> Optional[RSAPrivateKey]:
         """Get python cryptography PrivateKey instance"""
-        if not self._private_key:
+        if not self._private_key and self._private_key != "":
             self._private_key = load_pem_private_key(
                 str.encode("\n".join([x.strip() for x in self.key_data.split("\n")])),
                 password=None,
@@ -27,7 +27,15 @@ class FlowStageBindingSerializer(ModelSerializer):
     class Meta:
 
         model = FlowStageBinding
-        fields = ["pk", "target", "stage", "re_evaluate_policies", "order", "policies"]
+        fields = [
+            "pk",
+            "target",
+            "stage",
+            "evaluate_on_plan",
+            "re_evaluate_policies",
+            "order",
+            "policies",
+        ]
 
 
 class FlowStageBindingViewSet(ModelViewSet):
@@ -50,12 +50,10 @@ class FlowStageBindingForm(forms.ModelForm):
         fields = [
             "target",
             "stage",
+            "evaluate_on_plan",
             "re_evaluate_policies",
             "order",
         ]
-        labels = {
-            "re_evaluate_policies": _("Re-evaluate Policies"),
-        }
         widgets = {
             "name": forms.TextInput(),
         }
@@ -2,6 +2,7 @@
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Optional
 
+from django.http.request import HttpRequest
 from structlog import get_logger
 
 from passbook.core.models import User
@@ -20,7 +21,9 @@ class StageMarker:
     """Base stage marker class, no extra attributes, and has no special handler."""
 
     # pylint: disable=unused-argument
-    def process(self, plan: "FlowPlan", stage: Stage) -> Optional[Stage]:
+    def process(
+        self, plan: "FlowPlan", stage: Stage, http_request: Optional[HttpRequest]
+    ) -> Optional[Stage]:
         """Process callback for this marker. This should be overridden by sub-classes.
         If a stage should be removed, return None."""
         return stage
@@ -33,10 +36,14 @@ class ReevaluateMarker(StageMarker):
     binding: PolicyBinding
     user: User
 
-    def process(self, plan: "FlowPlan", stage: Stage) -> Optional[Stage]:
+    def process(
+        self, plan: "FlowPlan", stage: Stage, http_request: Optional[HttpRequest]
+    ) -> Optional[Stage]:
         """Re-evaluate policies bound to stage, and if they fail, remove from plan"""
         engine = PolicyEngine(self.binding, self.user)
         engine.use_cache = False
+        if http_request:
+            engine.request.http_request = http_request
         engine.request.context = plan.context
         engine.build()
         result = engine.result
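As an illustrative sketch (not part of the diff; the subclass and header name are invented), any marker now receives the live HTTP request in process(), so request data can decide whether a stage stays in the plan:

    from typing import Optional

    from django.http.request import HttpRequest

    from passbook.flows.markers import StageMarker
    from passbook.flows.models import Stage
    from passbook.flows.planner import FlowPlan

    class SkipForBotsMarker(StageMarker):
        """Hypothetical marker that drops its stage for requests flagged as bots"""

        def process(
            self, plan: "FlowPlan", stage: Stage, http_request: Optional[HttpRequest]
        ) -> Optional[Stage]:
            if http_request and http_request.headers.get("X-Is-Bot") == "1":
                return None  # returning None removes the stage from the plan
            return stage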
@@ -0,0 +1,29 @@
+# Generated by Django 3.1.2 on 2020-10-20 12:42
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("passbook_flows", "0014_auto_20200925_2332"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="flowstagebinding",
+            name="re_evaluate_policies",
+            field=models.BooleanField(
+                default=False,
+                help_text="Evaluate policies when the Stage is present to the user.",
+            ),
+        ),
+        migrations.AddField(
+            model_name="flowstagebinding",
+            name="evaluate_on_plan",
+            field=models.BooleanField(
+                default=True,
+                help_text="Evaluate policies during the Flow planning process. Disable this for input-based policies.",
+            ),
+        ),
+    ]
@@ -154,15 +154,19 @@ class FlowStageBinding(SerializerModel, PolicyBindingModel):
     target = models.ForeignKey("Flow", on_delete=models.CASCADE)
     stage = InheritanceForeignKey(Stage, on_delete=models.CASCADE)
 
-    re_evaluate_policies = models.BooleanField(
-        default=False,
+    evaluate_on_plan = models.BooleanField(
+        default=True,
         help_text=_(
             (
-                "When this option is enabled, the planner will re-evaluate "
-                "policies bound to this binding."
+                "Evaluate policies during the Flow planning process. "
+                "Disable this for input-based policies."
             )
         ),
     )
+    re_evaluate_policies = models.BooleanField(
+        default=False,
+        help_text=_("Evaluate policies when the Stage is present to the user."),
+    )
 
     order = models.IntegerField()
 
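As a usage sketch (not part of the diff; the flow slug and order value are illustrative), the two flags cover both evaluation points, planning time and stage presentation time:

    from passbook.flows.models import Flow, FlowStageBinding, Stage

    flow = Flow.objects.get(slug="default-authentication-flow")
    stage = Stage.objects.first()

    # Policies bound to this binding are skipped while the plan is built,
    # but re-checked when the user actually reaches the stage - the pattern
    # intended for input-based policies.
    FlowStageBinding.objects.create(
        target=flow,
        stage=stage,
        order=10,
        evaluate_on_plan=False,
        re_evaluate_policies=True,
    )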
@@ -46,7 +46,7 @@ class FlowPlan:
         self.stages.append(stage)
         self.markers.append(marker or StageMarker())
 
-    def next(self) -> Optional[Stage]:
+    def next(self, http_request: Optional[HttpRequest]) -> Optional[Stage]:
         """Return next pending stage from the bottom of the list"""
         if not self.has_stages:
             return None
@@ -55,7 +55,7 @@ class FlowPlan:
 
         if marker.__class__ is not StageMarker:
             LOGGER.debug("f(plan_inst): stage has marker", stage=stage, marker=marker)
-            marked_stage = marker.process(self, stage)
+            marked_stage = marker.process(self, stage, http_request)
             if not marked_stage:
                 LOGGER.debug("f(plan_inst): marker returned none, next stage", stage=stage)
                 self.stages.remove(stage)
@@ -63,7 +63,7 @@ class FlowPlan:
                 if not self.has_stages:
                     return None
                 # pylint: disable=not-callable
-                return self.next()
+                return self.next(http_request)
         return marked_stage
 
     def pop(self):
@@ -159,23 +159,41 @@ class FlowPlanner:
             for binding in FlowStageBinding.objects.filter(
                 target__pk=self.flow.pk
             ).order_by("order"):
-                engine = PolicyEngine(binding, user, request)
-                engine.request.context = plan.context
-                engine.build()
-                if engine.passing:
-                    LOGGER.debug(
-                        "f(plan): Stage passing", stage=binding.stage, flow=self.flow
-                    )
-                    plan.stages.append(binding.stage)
-                marker = StageMarker()
-                if binding.re_evaluate_policies:
-                    LOGGER.debug(
-                        "f(plan): Stage has re-evaluate marker",
-                        stage=binding.stage,
-                        flow=self.flow,
-                    )
-                    marker = ReevaluateMarker(binding=binding, user=user)
-                    plan.markers.append(marker)
+                binding: FlowStageBinding
+                stage = binding.stage
+                marker = StageMarker()
+                if binding.evaluate_on_plan:
+                    LOGGER.debug(
+                        "f(plan): evaluating on plan",
+                        stage=binding.stage,
+                        flow=self.flow,
+                    )
+                    engine = PolicyEngine(binding, user, request)
+                    engine.request.context = plan.context
+                    engine.build()
+                    if engine.passing:
+                        LOGGER.debug(
+                            "f(plan): Stage passing",
+                            stage=binding.stage,
+                            flow=self.flow,
+                        )
+                    else:
+                        stage = None
+                else:
+                    LOGGER.debug(
+                        "f(plan): not evaluating on plan",
+                        stage=binding.stage,
+                        flow=self.flow,
+                    )
+                if binding.re_evaluate_policies and stage:
+                    LOGGER.debug(
+                        "f(plan): Stage has re-evaluate marker",
+                        stage=binding.stage,
+                        flow=self.flow,
+                    )
+                    marker = ReevaluateMarker(binding=binding, user=user)
+                if stage:
+                    plan.append(stage, marker)
             LOGGER.debug(
                 "f(plan): Finished building",
                 flow=self.flow,
@@ -86,7 +86,7 @@ class FlowExecutorView(View):
             return to_stage_response(self.request, self.handle_invalid_flow(exc))
         # We don't save the Plan after getting the next stage
         # as it hasn't been successfully passed yet
-        next_stage = self.plan.next()
+        next_stage = self.plan.next(self.request)
         if not next_stage:
             LOGGER.debug("f(exec): no more stages, flow is done.")
             return self._flow_done()
@@ -22,6 +22,7 @@ error_reporting:
   send_pii: false
 
 passbook:
+  avatars: gravatar  # gravatar or none
   branding:
     title: passbook
     title_show: true
@@ -1,4 +1,5 @@
 """passbook sentry integration"""
+from aioredis.errors import ReplyError, ConnectionClosedError
 from billiard.exceptions import WorkerLostError
 from botocore.client import ClientError
 from celery.exceptions import CeleryError
@@ -8,7 +9,7 @@ from django.db import InternalError, OperationalError, ProgrammingError
 from django_redis.exceptions import ConnectionInterrupted
 from ldap3.core.exceptions import LDAPException
 from redis.exceptions import ConnectionError as RedisConnectionError
-from redis.exceptions import RedisError
+from redis.exceptions import RedisError, ResponseError
 from rest_framework.exceptions import APIException
 from structlog import get_logger
 from websockets.exceptions import WebSocketException
@@ -23,26 +24,37 @@ class SentryIgnoredException(Exception):
 def before_send(event, hint):
     """Check if error is database error, and ignore if so"""
     ignored_classes = (
+        # Inbuilt types
+        KeyboardInterrupt,
+        ConnectionResetError,
+        OSError,
+        # Django DB Errors
         OperationalError,
         InternalError,
         ProgrammingError,
-        ConnectionInterrupted,
-        APIException,
-        ConnectionResetError,
-        RedisConnectionError,
-        WorkerLostError,
         DisallowedHost,
-        ConnectionResetError,
-        KeyboardInterrupt,
-        ClientError,
         ValidationError,
-        OSError,
+        # Redis errors
+        RedisConnectionError,
+        ConnectionInterrupted,
         RedisError,
-        SentryIgnoredException,
-        CeleryError,
-        LDAPException,
+        ResponseError,
+        ReplyError,
+        ConnectionClosedError,
+        # websocket errors
         ChannelFull,
         WebSocketException,
+        # rest_framework error
+        APIException,
+        # celery errors
+        WorkerLostError,
+        CeleryError,
+        # S3 errors
+        ClientError,
+        # custom baseclass
+        SentryIgnoredException,
+        # ldap errors
+        LDAPException,
     )
     if "exc_info" in hint:
         _, exc_value, _ = hint["exc_info"]
@@ -62,13 +62,17 @@ class TaskInfo:
         """Get TaskInfo Object by name"""
         return cache.get(f"task_{name}")

-    def save(self):
+    def delete(self):
+        """Delete task info from cache"""
+        return cache.delete(f"task_{self.task_name}")
+
+    def save(self, timeout_hours=6):
         """Save task into cache"""
         key = f"task_{self.task_name}"
         if self.result.uid:
             key += f"_{self.result.uid}"
             self.task_name += f"_{self.result.uid}"
-        cache.set(key, self, timeout=6 * 60 * 60)
+        cache.set(key, self, timeout=timeout_hours * 60 * 60)


 class MonitoredTask(Task):
@@ -79,10 +83,18 @@ class MonitoredTask(Task):

     _result: TaskResult

+    _uid: Optional[str]
+
     def __init__(self, *args, **kwargs) -> None:
         super().__init__(*args, **kwargs)
         self.save_on_success = True
+        self._uid = None
         self._result = TaskResult(status=TaskResultStatus.ERROR, messages=[])
+        self.result_timeout_hours = 6
+
+    def set_uid(self, uid: str):
+        """Set UID, so in the case of an unexpected error its saved correctly"""
+        self._uid = uid

     def set_status(self, result: TaskResult):
         """Set result for current run, will overwrite previous result."""
@@ -92,6 +104,8 @@ class MonitoredTask(Task):
     def after_return(
         self, status, retval, task_id, args: List[Any], kwargs: Dict[str, Any], einfo
     ):
+        if not self._result.uid:
+            self._result.uid = self._uid
         if self.save_on_success:
             TaskInfo(
                 task_name=self.__name__,
@@ -102,11 +116,13 @@ class MonitoredTask(Task):
                 task_call_func=self.__name__,
                 task_call_args=args,
                 task_call_kwargs=kwargs,
-            ).save()
+            ).save(self.result_timeout_hours)
         return super().after_return(status, retval, task_id, args, kwargs, einfo=einfo)

     # pylint: disable=too-many-arguments
     def on_failure(self, exc, task_id, args, kwargs, einfo):
+        if not self._result.uid:
+            self._result.uid = self._uid
         TaskInfo(
             task_name=self.__name__,
             task_description=self.__doc__,
@@ -116,7 +132,7 @@ class MonitoredTask(Task):
             task_call_func=self.__name__,
             task_call_args=args,
             task_call_kwargs=kwargs,
-        ).save()
+        ).save(self.result_timeout_hours)
         return super().on_failure(exc, task_id, args, kwargs, einfo=einfo)

     def run(self, *args, **kwargs):
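Note: the hunks above give MonitoredTask a stable UID and a configurable cache lifetime for its last result. A minimal sketch of how a task could use these hooks (the task itself and the CELERY_APP import path are assumptions; only set_uid, set_status and result_timeout_hours come from this diff):

from passbook.root.celery import CELERY_APP  # import path assumed, not part of this diff
from passbook.lib.tasks import MonitoredTask, TaskResult, TaskResultStatus


@CELERY_APP.task(bind=True, base=MonitoredTask)
def example_cleanup(self: MonitoredTask, uid: str):
    """Hypothetical task whose last result is cached per uid"""
    self.result_timeout_hours = 2  # keep the cached result for 2h instead of the default 6
    self.set_uid(uid)  # recorded even if the task later fails unexpectedly
    self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, ["nothing to do"]))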
@@ -6,15 +6,19 @@ from django import template
 from django.db.models import Model
 from django.http.request import HttpRequest
 from django.template import Context
+from django.templatetags.static import static
 from django.utils.html import escape, mark_safe
 from structlog import get_logger

+from passbook.core.models import User
 from passbook.lib.config import CONFIG
 from passbook.lib.utils.urls import is_url_absolute

 register = template.Library()
 LOGGER = get_logger()

+GRAVATAR_URL = "https://secure.gravatar.com"
+

 @register.simple_tag(takes_context=True)
 def back(context: Context) -> str:
@@ -54,37 +58,23 @@ def css_class(field, css):


 @register.simple_tag
-def gravatar(email, size=None, rating=None):
-    """
-    Generates a Gravatar URL for the given email address.
-
-    Syntax::
-
-        {% gravatar <email> [size] [rating] %}
-
-    Example::
-
-        {% gravatar someone@example.com 48 pg %}
-    """
-    # gravatar uses md5 for their URLs, so md5 can't be avoided
-    gravatar_url = "%savatar/%s" % (
-        "https://secure.gravatar.com/",
-        md5(email.encode("utf-8")).hexdigest(),  # nosec
-    )
-
-    parameters = [
-        p
-        for p in (
-            ("s", size or "158"),
-            ("r", rating or "g"),
-        )
-        if p[1]
-    ]
-    if parameters:
-        gravatar_url += "?" + urlencode(parameters, doseq=True)
-    return escape(gravatar_url)
+def avatar(user: User) -> str:
+    """Get avatar, depending on passbook.avatar setting"""
+    mode = CONFIG.raw.get("passbook").get("avatars")
+    if mode == "none":
+        return static("passbook/user-default.png")
+    if mode == "gravatar":
+        parameters = [
+            ("s", "158"),
+            ("r", "g"),
+        ]
+        # gravatar uses md5 for their URLs, so md5 can't be avoided
+        mail_hash = md5(user.email.encode("utf-8")).hexdigest()  # nosec
+        gravatar_url = (
+            f"{GRAVATAR_URL}/avatar/{mail_hash}?{urlencode(parameters, doseq=True)}"
+        )
        return escape(gravatar_url)
+    raise ValueError(f"Invalid avatar mode {mode}")


 @register.filter
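Note: the URL the new avatar tag builds in gravatar mode can be reproduced standalone. The e-mail below is an example value; only the md5-plus-urlencode construction is taken from the hunk above:

from hashlib import md5
from urllib.parse import urlencode

GRAVATAR_URL = "https://secure.gravatar.com"

email = "someone@example.com"  # example input
mail_hash = md5(email.encode("utf-8")).hexdigest()  # nosec - gravatar identifies addresses by md5
query = urlencode([("s", "158"), ("r", "g")], doseq=True)
print(f"{GRAVATAR_URL}/avatar/{mail_hash}?{query}")
# e.g. https://secure.gravatar.com/avatar/<32-char-hash>?s=158&r=g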
@@ -2,7 +2,11 @@
 from rest_framework.serializers import JSONField, ModelSerializer
 from rest_framework.viewsets import ModelViewSet

-from passbook.outposts.models import Outpost
+from passbook.outposts.models import (
+    DockerServiceConnection,
+    KubernetesServiceConnection,
+    Outpost,
+)


 class OutpostSerializer(ModelSerializer):
@@ -13,7 +17,7 @@ class OutpostSerializer(ModelSerializer):
     class Meta:

         model = Outpost
-        fields = ["pk", "name", "providers", "_config"]
+        fields = ["pk", "name", "providers", "service_connection", "_config"]


 class OutpostViewSet(ModelViewSet):
@@ -21,3 +25,35 @@ class OutpostViewSet(ModelViewSet):

     queryset = Outpost.objects.all()
     serializer_class = OutpostSerializer
+
+
+class DockerServiceConnectionSerializer(ModelSerializer):
+    """DockerServiceConnection Serializer"""
+
+    class Meta:
+
+        model = DockerServiceConnection
+        fields = ["pk", "name", "local", "url", "tls"]
+
+
+class DockerServiceConnectionViewSet(ModelViewSet):
+    """DockerServiceConnection Viewset"""
+
+    queryset = DockerServiceConnection.objects.all()
+    serializer_class = DockerServiceConnectionSerializer
+
+
+class KubernetesServiceConnectionSerializer(ModelSerializer):
+    """KubernetesServiceConnection Serializer"""
+
+    class Meta:
+
+        model = KubernetesServiceConnection
+        fields = ["pk", "name", "local", "kubeconfig"]
+
+
+class KubernetesServiceConnectionViewSet(ModelViewSet):
+    """KubernetesServiceConnection Viewset"""
+
+    queryset = KubernetesServiceConnection.objects.all()
+    serializer_class = KubernetesServiceConnectionSerializer
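Note: the new viewsets above still have to be registered with the project's API router, which is not part of this changeset. A hedged sketch of what that registration would typically look like (module path and route prefixes are assumptions, not taken from this diff):

from rest_framework.routers import DefaultRouter

from passbook.outposts.api import (  # module path assumed
    DockerServiceConnectionViewSet,
    KubernetesServiceConnectionViewSet,
    OutpostViewSet,
)

# Prefixes are illustrative only
router = DefaultRouter()
router.register("outposts/outposts", OutpostViewSet)
router.register("outposts/service_connections/docker", DockerServiceConnectionViewSet)
router.register("outposts/service_connections/kubernetes", KubernetesServiceConnectionViewSet)
urlpatterns = router.urls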
@@ -1,7 +1,20 @@
 """passbook outposts app config"""
 from importlib import import_module
+from os import R_OK, access
+from os.path import expanduser
+from pathlib import Path
+from socket import gethostname
+from urllib.parse import urlparse
+
+import yaml
 from django.apps import AppConfig
+from django.db import ProgrammingError
+from docker.constants import DEFAULT_UNIX_SOCKET
+from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME
+from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
+from structlog import get_logger
+
+LOGGER = get_logger()


 class PassbookOutpostConfig(AppConfig):
@@ -14,3 +27,48 @@ class PassbookOutpostConfig(AppConfig):

     def ready(self):
         import_module("passbook.outposts.signals")
+        try:
+            self.init_local_connection()
+        except ProgrammingError:
+            pass
+
+    def init_local_connection(self):
+        """Check if local kubernetes or docker connections should be created"""
+        from passbook.outposts.models import (
+            KubernetesServiceConnection,
+            DockerServiceConnection,
+        )
+
+        if Path(SERVICE_TOKEN_FILENAME).exists():
+            LOGGER.debug("Detected in-cluster Kubernetes Config")
+            if not KubernetesServiceConnection.objects.filter(local=True).exists():
+                LOGGER.debug("Created Service Connection for in-cluster")
+                KubernetesServiceConnection.objects.create(
+                    name="Local Kubernetes Cluster", local=True, kubeconfig={}
+                )
+        # For development, check for the existence of a kubeconfig file
+        kubeconfig_path = expanduser(KUBE_CONFIG_DEFAULT_LOCATION)
+        if Path(kubeconfig_path).exists():
+            LOGGER.debug("Detected kubeconfig")
+            kubeconfig_local_name = f"k8s-{gethostname()}"
+            if not KubernetesServiceConnection.objects.filter(
+                name=kubeconfig_local_name
+            ).exists():
+                LOGGER.debug("Creating kubeconfig Service Connection")
+                with open(kubeconfig_path, "r") as _kubeconfig:
+                    KubernetesServiceConnection.objects.create(
+                        name=kubeconfig_local_name,
+                        kubeconfig=yaml.safe_load(_kubeconfig),
+                    )
+        unix_socket_path = urlparse(DEFAULT_UNIX_SOCKET).path
+        socket = Path(unix_socket_path)
+        if socket.exists() and access(socket, R_OK):
+            LOGGER.debug("Detected local docker socket")
+            if not DockerServiceConnection.objects.filter(local=True).exists():
+                LOGGER.debug("Created Service Connection for docker")
+                DockerServiceConnection.objects.create(
+                    name="Local Docker connection",
+                    local=True,
+                    url=unix_socket_path,
+                    tls=True,
+                )
@@ -5,11 +5,11 @@ from structlog import get_logger
 from structlog.testing import capture_logs

 from passbook.lib.sentry import SentryIgnoredException
-from passbook.outposts.models import Outpost
+from passbook.outposts.models import Outpost, OutpostServiceConnection


 class ControllerException(SentryIgnoredException):
-    """Exception raise when anything fails during controller run"""
+    """Exception raised when anything fails during controller run"""


 class BaseController:
@@ -18,12 +18,12 @@ class BaseController:
     deployment_ports: Dict[str, int]

     outpost: Outpost
+    connection: OutpostServiceConnection

-    def __init__(self, outpost: Outpost):
+    def __init__(self, outpost: Outpost, connection: OutpostServiceConnection):
         self.outpost = outpost
-        self.logger = get_logger(
-            controller=self.__class__.__name__, outpost=self.outpost
-        )
+        self.connection = connection
+        self.logger = get_logger()
         self.deployment_ports = {}

     # pylint: disable=invalid-name
@@ -35,7 +35,7 @@ class BaseController:
         """Call .up() but capture all log output and return it."""
         with capture_logs() as logs:
             self.up()
-        return [f"{x['controller']}: {x['event']}" for x in logs]
+        return [x["event"] for x in logs]

     def down(self):
         """Handler to delete everything we've created"""
@@ -3,14 +3,18 @@ from time import sleep
 from typing import Dict, Tuple

 from django.conf import settings
-from docker import DockerClient, from_env
+from docker import DockerClient
 from docker.errors import DockerException, NotFound
 from docker.models.containers import Container
 from yaml import safe_dump

 from passbook import __version__
 from passbook.outposts.controllers.base import BaseController, ControllerException
-from passbook.outposts.models import Outpost
+from passbook.outposts.models import (
+    DockerServiceConnection,
+    Outpost,
+    ServiceConnectionInvalid,
+)


 class DockerController(BaseController):
@@ -19,12 +23,16 @@ class DockerController(BaseController):
     client: DockerClient

     container: Container
+    connection: DockerServiceConnection

     image_base = "beryju/passbook"

-    def __init__(self, outpost: Outpost) -> None:
-        super().__init__(outpost)
-        self.client = from_env()
+    def __init__(self, outpost: Outpost, connection: DockerServiceConnection) -> None:
+        super().__init__(outpost, connection)
+        try:
+            self.client = connection.client()
+        except ServiceConnectionInvalid as exc:
+            raise ControllerException from exc

     def _get_labels(self) -> Dict[str, str]:
         return {}
@@ -35,9 +35,7 @@ class KubernetesObjectReconciler(Generic[T]):
     def __init__(self, controller: "KubernetesController"):
         self.controller = controller
         self.namespace = controller.outpost.config.kubernetes_namespace
-        self.logger = get_logger(
-            controller=self.__class__.__name__, outpost=controller.outpost
-        )
+        self.logger = get_logger()

     @property
     def name(self) -> str:
@@ -1,5 +1,5 @@
 """Kubernetes Deployment Reconciler"""
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Dict

 from kubernetes.client import (
     AppsV1Api,
@@ -36,12 +36,12 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):

     def __init__(self, controller: "KubernetesController") -> None:
         super().__init__(controller)
-        self.api = AppsV1Api()
+        self.api = AppsV1Api(controller.client)
         self.outpost = self.controller.outpost

     @property
     def name(self) -> str:
-        return f"passbook-outpost-{self.outpost.name}"
+        return f"passbook-outpost-{self.controller.outpost.uuid.hex}"

     def reconcile(self, current: V1Deployment, reference: V1Deployment):
         if current.spec.replicas != reference.spec.replicas:
@@ -52,6 +52,14 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
         ):
             raise NeedsUpdate()

+    def get_pod_meta(self) -> Dict[str, str]:
+        """Get common object metadata"""
+        return {
+            "app.kubernetes.io/name": "passbook-outpost",
+            "app.kubernetes.io/managed-by": "passbook.beryju.org",
+            "passbook.beryju.org/outpost-uuid": self.controller.outpost.uuid.hex,
+        }
+
     def get_reference_object(self) -> V1Deployment:
         """Get deployment object for outpost"""
         # Generate V1ContainerPort objects
@@ -59,17 +67,18 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
         for port_name, port in self.controller.deployment_ports.items():
             container_ports.append(V1ContainerPort(container_port=port, name=port_name))
         meta = self.get_object_meta(name=self.name)
+        secret_name = f"passbook-outpost-{self.controller.outpost.uuid.hex}-api"
         return V1Deployment(
             metadata=meta,
             spec=V1DeploymentSpec(
                 replicas=self.outpost.config.kubernetes_replicas,
-                selector=V1LabelSelector(match_labels=meta.labels),
+                selector=V1LabelSelector(match_labels=self.get_pod_meta()),
                 template=V1PodTemplateSpec(
-                    metadata=V1ObjectMeta(labels=meta.labels),
+                    metadata=V1ObjectMeta(labels=self.get_pod_meta()),
                     spec=V1PodSpec(
                         containers=[
                             V1Container(
-                                name=self.outpost.type,
+                                name=str(self.outpost.type),
                                 image=f"{self.image_base}-{self.outpost.type}:{__version__}",
                                 ports=container_ports,
                                 env=[
@@ -77,7 +86,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
                                         name="PASSBOOK_HOST",
                                         value_from=V1EnvVarSource(
                                             secret_key_ref=V1SecretKeySelector(
-                                                name=f"passbook-outpost-{self.outpost.name}-api",
+                                                name=secret_name,
                                                 key="passbook_host",
                                             )
                                         ),
@@ -86,7 +95,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
                                         name="PASSBOOK_TOKEN",
                                         value_from=V1EnvVarSource(
                                             secret_key_ref=V1SecretKeySelector(
-                                                name=f"passbook-outpost-{self.outpost.name}-api",
+                                                name=secret_name,
                                                 key="token",
                                             )
                                         ),
@@ -95,7 +104,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
                                         name="PASSBOOK_INSECURE",
                                         value_from=V1EnvVarSource(
                                             secret_key_ref=V1SecretKeySelector(
-                                                name=f"passbook-outpost-{self.outpost.name}-api",
+                                                name=secret_name,
                                                 key="passbook_host_insecure",
                                             )
                                         ),
@@ -117,9 +126,7 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
         )

     def retrieve(self) -> V1Deployment:
-        return self.api.read_namespaced_deployment(
-            f"passbook-outpost-{self.outpost.name}", self.namespace
-        )
+        return self.api.read_namespaced_deployment(self.name, self.namespace)

     def update(self, current: V1Deployment, reference: V1Deployment):
         return self.api.patch_namespaced_deployment(
@@ -23,11 +23,11 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):

     def __init__(self, controller: "KubernetesController") -> None:
         super().__init__(controller)
-        self.api = CoreV1Api()
+        self.api = CoreV1Api(controller.client)

     @property
     def name(self) -> str:
-        return f"passbook-outpost-{self.controller.outpost.name}-api"
+        return f"passbook-outpost-{self.controller.outpost.uuid.hex}-api"

     def reconcile(self, current: V1Secret, reference: V1Secret):
         for key in reference.data.keys():
@@ -59,9 +59,7 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
         )

     def retrieve(self) -> V1Secret:
-        return self.api.read_namespaced_secret(
-            f"passbook-outpost-{self.controller.outpost.name}-api", self.namespace
-        )
+        return self.api.read_namespaced_secret(self.name, self.namespace)

     def update(self, current: V1Secret, reference: V1Secret):
         return self.api.patch_namespaced_secret(
@@ -7,6 +7,7 @@ from passbook.outposts.controllers.k8s.base import (
     KubernetesObjectReconciler,
     NeedsUpdate,
 )
+from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler

 if TYPE_CHECKING:
     from passbook.outposts.controllers.kubernetes import KubernetesController
@@ -17,11 +18,11 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):

     def __init__(self, controller: "KubernetesController") -> None:
         super().__init__(controller)
-        self.api = CoreV1Api()
+        self.api = CoreV1Api(controller.client)

     @property
     def name(self) -> str:
-        return f"passbook-outpost-{self.controller.outpost.name}"
+        return f"passbook-outpost-{self.controller.outpost.uuid.hex}"

     def reconcile(self, current: V1Service, reference: V1Service):
         if len(current.spec.ports) != len(reference.spec.ports):
@@ -36,9 +37,10 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
         ports = []
         for port_name, port in self.controller.deployment_ports.items():
             ports.append(V1ServicePort(name=port_name, port=port))
+        selector_labels = DeploymentReconciler(self.controller).get_pod_meta()
         return V1Service(
             metadata=meta,
-            spec=V1ServiceSpec(ports=ports, selector=meta.labels, type="ClusterIP"),
+            spec=V1ServiceSpec(ports=ports, selector=selector_labels, type="ClusterIP"),
         )

     def create(self, reference: V1Service):
@@ -50,9 +52,7 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
         )

     def retrieve(self) -> V1Service:
-        return self.api.read_namespaced_service(
-            f"passbook-outpost-{self.controller.outpost.name}", self.namespace
-        )
+        return self.api.read_namespaced_service(self.name, self.namespace)

     def update(self, current: V1Service, reference: V1Service):
         return self.api.patch_namespaced_service(
@@ -3,8 +3,8 @@ from io import StringIO
 from typing import Dict, List, Type

 from kubernetes.client import OpenApiException
-from kubernetes.config import load_incluster_config, load_kube_config
-from kubernetes.config.config_exception import ConfigException
+from kubernetes.client.api_client import ApiClient
+from structlog.testing import capture_logs
 from yaml import dump_all

 from passbook.outposts.controllers.base import BaseController, ControllerException
@@ -12,7 +12,7 @@ from passbook.outposts.controllers.k8s.base import KubernetesObjectReconciler
 from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
 from passbook.outposts.controllers.k8s.secret import SecretReconciler
 from passbook.outposts.controllers.k8s.service import ServiceReconciler
-from passbook.outposts.models import Outpost
+from passbook.outposts.models import KubernetesServiceConnection, Outpost


 class KubernetesController(BaseController):
@@ -21,12 +21,14 @@ class KubernetesController(BaseController):
     reconcilers: Dict[str, Type[KubernetesObjectReconciler]]
     reconcile_order: List[str]

-    def __init__(self, outpost: Outpost) -> None:
-        super().__init__(outpost)
-        try:
-            load_incluster_config()
-        except ConfigException:
-            load_kube_config()
+    client: ApiClient
+    connection: KubernetesServiceConnection
+
+    def __init__(
+        self, outpost: Outpost, connection: KubernetesServiceConnection
+    ) -> None:
+        super().__init__(outpost, connection)
+        self.client = connection.client()
         self.reconcilers = {
             "secret": SecretReconciler,
             "deployment": DeploymentReconciler,
@@ -43,6 +45,18 @@ class KubernetesController(BaseController):
         except OpenApiException as exc:
             raise ControllerException from exc

+    def up_with_logs(self) -> List[str]:
+        try:
+            all_logs = []
+            for reconcile_key in self.reconcile_order:
+                with capture_logs() as logs:
+                    reconciler = self.reconcilers[reconcile_key](self)
+                    reconciler.up()
+                all_logs += [f"{reconcile_key.title()}: {x['event']}" for x in logs]
+            return all_logs
+        except OpenApiException as exc:
+            raise ControllerException from exc
+
     def down(self):
         try:
             for reconcile_key in self.reconcile_order:
@@ -56,7 +70,6 @@ class KubernetesController(BaseController):
         documents = []
         for reconcile_key in self.reconcile_order:
             reconciler = self.reconcilers[reconcile_key](self)
-            reconciler.up()
             documents.append(reconciler.get_reference_object().to_dict())

         with StringIO() as _str:
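Note: up_with_logs above relies on structlog's capture_logs to turn reconciler log events into user-visible messages. A standalone sketch of that pattern, with made-up log events:

from structlog import get_logger
from structlog.testing import capture_logs

logger = get_logger()

with capture_logs() as logs:
    logger.info("creating deployment")    # example event
    logger.info("deployment up to date")  # example event

# capture_logs yields a dict per log call; the controller keeps each entry's "event" key
# and prefixes it with the reconciler name, roughly like this:
print([f"Deployment: {entry['event']}" for entry in logs])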
@@ -4,7 +4,12 @@ from django import forms
 from django.utils.translation import gettext_lazy as _

 from passbook.admin.fields import CodeMirrorWidget, YAMLField
-from passbook.outposts.models import Outpost
+from passbook.outposts.models import (
+    DockerServiceConnection,
+    KubernetesServiceConnection,
+    Outpost,
+    OutpostServiceConnection,
+)
 from passbook.providers.proxy.models import ProxyProvider


@@ -14,6 +19,9 @@ class OutpostForm(forms.ModelForm):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.fields["providers"].queryset = ProxyProvider.objects.all()
+        self.fields[
+            "service_connection"
+        ].queryset = OutpostServiceConnection.objects.select_subclasses()

     class Meta:

@@ -21,7 +29,7 @@ class OutpostForm(forms.ModelForm):
         fields = [
             "name",
             "type",
-            "deployment_type",
+            "service_connection",
             "providers",
             "_config",
         ]
@@ -33,3 +41,40 @@ class OutpostForm(forms.ModelForm):
             "_config": YAMLField,
         }
         labels = {"_config": _("Configuration")}
+
+
+class DockerServiceConnectionForm(forms.ModelForm):
+    """Docker service-connection form"""
+
+    class Meta:
+
+        model = DockerServiceConnection
+        fields = ["name", "local", "url", "tls"]
+        widgets = {
+            "name": forms.TextInput,
+            "url": forms.TextInput,
+        }
+        labels = {
+            "url": _("URL"),
+            "tls": _("TLS"),
+        }
+
+
+class KubernetesServiceConnectionForm(forms.ModelForm):
+    """Kubernetes service-connection form"""
+
+    class Meta:
+
+        model = KubernetesServiceConnection
+        fields = [
+            "name",
+            "local",
+            "kubeconfig",
+        ]
+        widgets = {
+            "name": forms.TextInput,
+            "kubeconfig": CodeMirrorWidget,
+        }
+        field_classes = {
+            "kubeconfig": YAMLField,
+        }
@@ -6,10 +6,17 @@ from django.db.backends.base.schema import BaseDatabaseSchemaEditor


 def fix_missing_token_identifier(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
+    User = apps.get_model("passbook_core", "User")
+    Token = apps.get_model("passbook_core", "Token")
     from passbook.outposts.models import Outpost

-    for outpost in Outpost.objects.using(schema_editor.connection.alias).all():
-        token = outpost.token
+    for outpost in (
+        Outpost.objects.using(schema_editor.connection.alias).all().only("pk")
+    ):
+        user_identifier = outpost.user_identifier
+        user = User.objects.get(username=user_identifier)
+        tokens = Token.objects.filter(user=user)
+        for token in tokens:
             if token.identifier != outpost.token_identifier:
                 token.identifier = outpost.token_identifier
                 token.save()
passbook/outposts/migrations/0010_service_connection.py (new file, 172 lines)
@@ -0,0 +1,172 @@
+# Generated by Django 3.1.3 on 2020-11-04 09:11
+
+import uuid
+
+import django.db.models.deletion
+from django.apps.registry import Apps
+from django.core.exceptions import FieldError
+from django.db import migrations, models
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+
+import passbook.lib.models
+
+
+def migrate_to_service_connection(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
+    db_alias = schema_editor.connection.alias
+    Outpost = apps.get_model("passbook_outposts", "Outpost")
+    DockerServiceConnection = apps.get_model(
+        "passbook_outposts", "DockerServiceConnection"
+    )
+    KubernetesServiceConnection = apps.get_model(
+        "passbook_outposts", "KubernetesServiceConnection"
+    )
+    from passbook.outposts.apps import PassbookOutpostConfig
+
+    # Ensure that local connection have been created
+    PassbookOutpostConfig.init_local_connection(None)
+
+    docker = DockerServiceConnection.objects.filter(local=True).first()
+    k8s = KubernetesServiceConnection.objects.filter(local=True).first()
+
+    try:
+        for outpost in (
+            Outpost.objects.using(db_alias).all().exclude(deployment_type="custom")
+        ):
+            if outpost.deployment_type == "kubernetes":
+                outpost.service_connection = k8s
+            elif outpost.deployment_type == "docker":
+                outpost.service_connection = docker
+            outpost.save()
+    except FieldError:
+        # This is triggered during e2e tests when this function is called on an already-upgraded
+        # schema
+        pass
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("passbook_outposts", "0009_fix_missing_token_identifier"),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name="OutpostServiceConnection",
+            fields=[
+                (
+                    "uuid",
+                    models.UUIDField(
+                        default=uuid.uuid4,
+                        editable=False,
+                        primary_key=True,
+                        serialize=False,
+                    ),
+                ),
+                ("name", models.TextField()),
+                (
+                    "local",
+                    models.BooleanField(
+                        default=False,
+                        help_text="If enabled, use the local connection. Required Docker socket/Kubernetes Integration",
+                        unique=True,
+                    ),
+                ),
+            ],
+        ),
+        migrations.CreateModel(
+            name="DockerServiceConnection",
+            fields=[
+                (
+                    "outpostserviceconnection_ptr",
+                    models.OneToOneField(
+                        auto_created=True,
+                        on_delete=django.db.models.deletion.CASCADE,
+                        parent_link=True,
+                        primary_key=True,
+                        serialize=False,
+                        to="passbook_outposts.outpostserviceconnection",
+                    ),
+                ),
+                ("url", models.TextField()),
+                ("tls", models.BooleanField()),
+            ],
+            bases=("passbook_outposts.outpostserviceconnection",),
+        ),
+        migrations.CreateModel(
+            name="KubernetesServiceConnection",
+            fields=[
+                (
+                    "outpostserviceconnection_ptr",
+                    models.OneToOneField(
+                        auto_created=True,
+                        on_delete=django.db.models.deletion.CASCADE,
+                        parent_link=True,
+                        primary_key=True,
+                        serialize=False,
+                        to="passbook_outposts.outpostserviceconnection",
+                    ),
+                ),
+                ("kubeconfig", models.JSONField()),
+            ],
+            bases=("passbook_outposts.outpostserviceconnection",),
+        ),
+        migrations.AddField(
+            model_name="outpost",
+            name="service_connection",
+            field=models.ForeignKey(
+                blank=True,
+                default=None,
+                help_text="Select Service-Connection passbook should use to manage this outpost. Leave empty if passbook should not handle the deployment.",
+                null=True,
+                on_delete=django.db.models.deletion.SET_DEFAULT,
+                to="passbook_outposts.outpostserviceconnection",
+            ),
+        ),
+        migrations.RunPython(migrate_to_service_connection),
+        migrations.RemoveField(
+            model_name="outpost",
+            name="deployment_type",
+        ),
+        migrations.AlterModelOptions(
+            name="dockerserviceconnection",
+            options={
+                "verbose_name": "Docker Service-Connection",
+                "verbose_name_plural": "Docker Service-Connections",
+            },
+        ),
+        migrations.AlterModelOptions(
+            name="kubernetesserviceconnection",
+            options={
+                "verbose_name": "Kubernetes Service-Connection",
+                "verbose_name_plural": "Kubernetes Service-Connections",
+            },
+        ),
+        migrations.AlterField(
+            model_name="outpost",
+            name="service_connection",
+            field=passbook.lib.models.InheritanceForeignKey(
+                blank=True,
+                default=None,
+                help_text="Select Service-Connection passbook should use to manage this outpost. Leave empty if passbook should not handle the deployment.",
+                null=True,
+                on_delete=django.db.models.deletion.SET_DEFAULT,
+                to="passbook_outposts.outpostserviceconnection",
+            ),
+        ),
+        migrations.AlterModelOptions(
+            name="outpostserviceconnection",
+            options={
+                "verbose_name": "Outpost Service-Connection",
+                "verbose_name_plural": "Outpost Service-Connections",
+            },
+        ),
+        migrations.AlterField(
+            model_name="kubernetesserviceconnection",
+            name="kubeconfig",
+            field=models.JSONField(
+                default=None,
+                help_text="Paste your kubeconfig here. passbook will automatically use the currently selected context.",
+            ),
+            preserve_default=False,
+        ),
+    ]
@@ -1,28 +1,46 @@
 """Outpost models"""
 from dataclasses import asdict, dataclass, field
 from datetime import datetime
-from typing import Dict, Iterable, List, Optional, Union
+from typing import Dict, Iterable, List, Optional, Type, Union
 from uuid import uuid4

 from dacite import from_dict
 from django.core.cache import cache
 from django.db import models, transaction
 from django.db.models.base import Model
+from django.forms.models import ModelForm
 from django.http import HttpRequest
 from django.utils.translation import gettext_lazy as _
+from docker.client import DockerClient
+from docker.errors import DockerException
 from guardian.models import UserObjectPermission
 from guardian.shortcuts import assign_perm
+from kubernetes.client import VersionApi, VersionInfo
+from kubernetes.client.api_client import ApiClient
+from kubernetes.client.configuration import Configuration
+from kubernetes.client.exceptions import OpenApiException
+from kubernetes.config.config_exception import ConfigException
+from kubernetes.config.incluster_config import load_incluster_config
+from kubernetes.config.kube_config import load_kube_config_from_dict
+from model_utils.managers import InheritanceManager
 from packaging.version import LegacyVersion, Version, parse
+from urllib3.exceptions import HTTPError

 from passbook import __version__
 from passbook.core.models import Provider, Token, TokenIntents, User
 from passbook.lib.config import CONFIG
+from passbook.lib.models import InheritanceForeignKey
+from passbook.lib.sentry import SentryIgnoredException
 from passbook.lib.utils.template import render_to_string

 OUR_VERSION = parse(__version__)
 OUTPOST_HELLO_INTERVAL = 10


+class ServiceConnectionInvalid(SentryIgnoredException):
+    """"Exception raised when a Service Connection has invalid parameters"""
+
+
 @dataclass
 class OutpostConfig:
     """Configuration an outpost uses to configure it self"""
@@ -60,19 +78,158 @@ class OutpostType(models.TextChoices):
     PROXY = "proxy"


-class OutpostDeploymentType(models.TextChoices):
-    """Deployment types that are managed through passbook"""
-
-    KUBERNETES = "kubernetes"
-    DOCKER = "docker"
-    CUSTOM = "custom"
-
-
 def default_outpost_config():
     """Get default outpost config"""
     return asdict(OutpostConfig(passbook_host=""))


+@dataclass
+class OutpostServiceConnectionState:
+    """State of an Outpost Service Connection"""
+
+    version: str
+    healthy: bool
+
+
+class OutpostServiceConnection(models.Model):
+    """Connection details for an Outpost Controller, like Docker or Kubernetes"""
+
+    uuid = models.UUIDField(default=uuid4, editable=False, primary_key=True)
+    name = models.TextField()
+
+    local = models.BooleanField(
+        default=False,
+        unique=True,
+        help_text=_(
+            (
+                "If enabled, use the local connection. Required Docker "
+                "socket/Kubernetes Integration"
+            )
+        ),
+    )
+
+    objects = InheritanceManager()
+
+    @property
+    def state(self) -> OutpostServiceConnectionState:
+        """Get state of service connection"""
+        state_key = f"outpost_service_connection_{self.pk.hex}"
+        state = cache.get(state_key, None)
+        if not state:
+            state = self._get_state()
+            cache.set(state_key, state, timeout=0)
+        return state
+
+    def _get_state(self) -> OutpostServiceConnectionState:
+        raise NotImplementedError
+
+    @property
+    def form(self) -> Type[ModelForm]:
+        """Return Form class used to edit this object"""
+        raise NotImplementedError
+
+    class Meta:
+
+        verbose_name = _("Outpost Service-Connection")
+        verbose_name_plural = _("Outpost Service-Connections")
+
+
+class DockerServiceConnection(OutpostServiceConnection):
+    """Service Connection to a Docker endpoint"""
+
+    url = models.TextField()
+    tls = models.BooleanField()
+
+    @property
+    def form(self) -> Type[ModelForm]:
+        from passbook.outposts.forms import DockerServiceConnectionForm
+
+        return DockerServiceConnectionForm
+
+    def __str__(self) -> str:
+        return f"Docker Service-Connection {self.name}"
+
+    def client(self) -> DockerClient:
+        """Get DockerClient"""
+        try:
+            client = None
+            if self.local:
+                client = DockerClient.from_env()
+            else:
+                client = DockerClient(
+                    base_url=self.url,
+                    tls=self.tls,
+                )
+            client.containers.list()
+        except DockerException as exc:
+            raise ServiceConnectionInvalid from exc
+        return client
+
+    def _get_state(self) -> OutpostServiceConnectionState:
+        try:
+            client = self.client()
+            return OutpostServiceConnectionState(
+                version=client.info()["ServerVersion"], healthy=True
+            )
+        except ServiceConnectionInvalid:
+            return OutpostServiceConnectionState(version="", healthy=False)
+
+    class Meta:
+
+        verbose_name = _("Docker Service-Connection")
+        verbose_name_plural = _("Docker Service-Connections")
+
+
+class KubernetesServiceConnection(OutpostServiceConnection):
+    """Service Connection to a Kubernetes cluster"""
+
+    kubeconfig = models.JSONField(
+        help_text=_(
+            (
+                "Paste your kubeconfig here. passbook will automatically use "
+                "the currently selected context."
+            )
+        )
+    )
+
+    @property
+    def form(self) -> Type[ModelForm]:
+        from passbook.outposts.forms import KubernetesServiceConnectionForm
+
+        return KubernetesServiceConnectionForm
+
+    def __str__(self) -> str:
+        return f"Kubernetes Service-Connection {self.name}"
+
+    def _get_state(self) -> OutpostServiceConnectionState:
+        try:
+            client = self.client()
+            api_instance = VersionApi(client)
+            version: VersionInfo = api_instance.get_code()
+            return OutpostServiceConnectionState(
+                version=version.git_version, healthy=True
+            )
+        except (OpenApiException, HTTPError):
+            return OutpostServiceConnectionState(version="", healthy=False)
+
+    def client(self) -> ApiClient:
+        """Get Kubernetes client configured from kubeconfig"""
+        config = Configuration()
+        try:
+            if self.local:
+                load_incluster_config(client_configuration=config)
+            else:
+                load_kube_config_from_dict(self.kubeconfig, client_configuration=config)
+            return ApiClient(config)
+        except ConfigException as exc:
+            raise ServiceConnectionInvalid from exc
+
+    class Meta:
+
+        verbose_name = _("Kubernetes Service-Connection")
+        verbose_name_plural = _("Kubernetes Service-Connections")
+
+
 class Outpost(models.Model):
     """Outpost instance which manages a service user and token"""

@@ -80,13 +237,20 @@ class Outpost(models.Model):
     name = models.TextField()

     type = models.TextField(choices=OutpostType.choices, default=OutpostType.PROXY)
-    deployment_type = models.TextField(
-        choices=OutpostDeploymentType.choices,
-        default=OutpostDeploymentType.CUSTOM,
+    service_connection = InheritanceForeignKey(
+        OutpostServiceConnection,
+        default=None,
+        null=True,
+        blank=True,
         help_text=_(
-            "Select between passbook-managed deployment types or a custom deployment."
-        ),
+            (
+                "Select Service-Connection passbook should use to manage this outpost. "
+                "Leave empty if passbook should not handle the deployment."
+            )
+        ),
+        on_delete=models.SET_DEFAULT,
     )

     _config = models.JSONField(default=default_outpost_config)

     providers = models.ManyToManyField(Provider)
@@ -111,12 +275,17 @@ class Outpost(models.Model):
         """Get outpost's health status"""
         return OutpostState.for_outpost(self)

+    @property
+    def user_identifier(self):
+        """Username for service user"""
+        return f"pb-outpost-{self.uuid.hex}"
+
     @property
     def user(self) -> User:
         """Get/create user with access to all required objects"""
-        users = User.objects.filter(username=f"pb-outpost-{self.uuid.hex}")
+        users = User.objects.filter(username=self.user_identifier)
         if not users.exists():
-            user: User = User.objects.create(username=f"pb-outpost-{self.uuid.hex}")
+            user: User = User.objects.create(username=self.user_identifier)
             user.set_unusable_password()
             user.save()
         else:
@@ -204,7 +373,11 @@ class OutpostState:
     def for_channel(outpost: Outpost, channel: str) -> "OutpostState":
         """Get state for a single channel"""
         key = f"{outpost.state_cache_prefix}_{channel}"
-        data = cache.get(key, {"uid": channel})
+        default_data = {"uid": channel}
+        data = cache.get(key, default_data)
+        if isinstance(data, str):
+            cache.delete(key)
+            data = default_data
         state = from_dict(OutpostState, data)
         state.uid = channel
         # pylint: disable=protected-access
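Note: a short sketch of how the service-connection models above are meant to be consumed, for example from a Django shell once the migration has run. The object lookups are examples; only client() and state come from this diff:

from passbook.outposts.models import DockerServiceConnection, KubernetesServiceConnection

docker = DockerServiceConnection.objects.filter(local=True).first()
if docker:
    docker_client = docker.client()  # raises ServiceConnectionInvalid if the endpoint is unreachable
    print(docker.state.healthy, docker.state.version)

k8s = KubernetesServiceConnection.objects.first()
if k8s:
    api_client = k8s.client()  # kubernetes ApiClient built from the stored kubeconfig (or in-cluster config)
    print(k8s.state.healthy, k8s.state.version)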
@@ -7,4 +7,9 @@ CELERY_BEAT_SCHEDULE = {
         "schedule": crontab(minute="*/5"),
         "options": {"queue": "passbook_scheduled"},
     },
+    "outposts_service_connection_check": {
+        "task": "passbook.outposts.tasks.outpost_service_connection_monitor",
+        "schedule": crontab(minute=0, hour="*"),
+        "options": {"queue": "passbook_scheduled"},
+    },
 }
@ -3,6 +3,7 @@ from typing import Any
|
|||||||
|
|
||||||
from asgiref.sync import async_to_sync
|
from asgiref.sync import async_to_sync
|
||||||
from channels.layers import get_channel_layer
|
from channels.layers import get_channel_layer
|
||||||
|
from django.core.cache import cache
|
||||||
from django.db.models.base import Model
|
from django.db.models.base import Model
|
||||||
from django.utils.text import slugify
|
from django.utils.text import slugify
|
||||||
from structlog import get_logger
|
from structlog import get_logger
|
||||||
@ -11,9 +12,11 @@ from passbook.lib.tasks import MonitoredTask, TaskResult, TaskResultStatus
|
|||||||
from passbook.lib.utils.reflection import path_to_class
|
from passbook.lib.utils.reflection import path_to_class
|
||||||
from passbook.outposts.controllers.base import ControllerException
|
from passbook.outposts.controllers.base import ControllerException
|
||||||
from passbook.outposts.models import (
|
from passbook.outposts.models import (
|
||||||
|
DockerServiceConnection,
|
||||||
|
KubernetesServiceConnection,
|
||||||
Outpost,
|
Outpost,
|
||||||
OutpostDeploymentType,
|
|
||||||
OutpostModel,
|
OutpostModel,
|
||||||
|
OutpostServiceConnection,
|
||||||
OutpostState,
|
OutpostState,
|
||||||
OutpostType,
|
OutpostType,
|
||||||
)
|
)
|
||||||
@ -27,33 +30,48 @@ LOGGER = get_logger()
|
|||||||
@CELERY_APP.task()
|
@CELERY_APP.task()
|
||||||
def outpost_controller_all():
|
def outpost_controller_all():
|
||||||
"""Launch Controller for all Outposts which support it"""
|
"""Launch Controller for all Outposts which support it"""
|
||||||
for outpost in Outpost.objects.exclude(
|
for outpost in Outpost.objects.exclude(service_connection=None):
|
||||||
deployment_type=OutpostDeploymentType.CUSTOM
|
|
||||||
):
|
|
||||||
outpost_controller.delay(outpost.pk.hex)
|
outpost_controller.delay(outpost.pk.hex)
|
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task()
|
||||||
|
def outpost_service_connection_state(state_pk: Any):
|
||||||
|
"""Update cached state of a service connection"""
|
||||||
|
connection: OutpostServiceConnection = (
|
||||||
|
OutpostServiceConnection.objects.filter(pk=state_pk).select_subclasses().first()
|
||||||
|
)
|
||||||
|
cache.delete(f"outpost_service_connection_{connection.pk.hex}")
|
||||||
|
_ = connection.state
|
||||||
|
|
||||||
|
|
||||||
|
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||||
|
def outpost_service_connection_monitor(self: MonitoredTask):
|
||||||
|
"""Regularly check the state of Outpost Service Connections"""
|
||||||
|
for connection in OutpostServiceConnection.objects.select_subclasses():
|
||||||
|
cache.delete(f"outpost_service_connection_{connection.pk.hex}")
|
||||||
|
_ = connection.state
|
||||||
|
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL))
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||||
def outpost_controller(self: MonitoredTask, outpost_pk: str):
|
def outpost_controller(self: MonitoredTask, outpost_pk: str):
|
||||||
"""Launch controller deployment of Outpost"""
|
"""Create/update/monitor the deployment of an Outpost"""
|
||||||
logs = []
|
logs = []
|
||||||
outpost: Outpost = Outpost.objects.get(pk=outpost_pk)
|
outpost: Outpost = Outpost.objects.get(pk=outpost_pk)
|
||||||
|
self.set_uid(slugify(outpost.name))
|
||||||
try:
|
try:
|
||||||
if outpost.type == OutpostType.PROXY:
|
if outpost.type == OutpostType.PROXY:
|
||||||
if outpost.deployment_type == OutpostDeploymentType.KUBERNETES:
|
service_connection = outpost.service_connection
|
||||||
logs = ProxyKubernetesController(outpost).up_with_logs()
|
if isinstance(service_connection, DockerServiceConnection):
|
||||||
if outpost.deployment_type == OutpostDeploymentType.DOCKER:
|
logs = ProxyDockerController(outpost, service_connection).up_with_logs()
|
||||||
logs = ProxyDockerController(outpost).up_with_logs()
|
if isinstance(service_connection, KubernetesServiceConnection):
|
||||||
|
logs = ProxyKubernetesController(
|
||||||
|
outpost, service_connection
|
||||||
|
).up_with_logs()
|
||||||
except ControllerException as exc:
|
except ControllerException as exc:
|
||||||
self.set_status(
|
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
|
||||||
TaskResult(TaskResultStatus.ERROR, uid=slugify(outpost.name)).with_error(
|
|
||||||
exc
|
|
||||||
)
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
self.set_status(
|
self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, logs))
|
||||||
TaskResult(TaskResultStatus.SUCCESSFUL, logs, uid=slugify(outpost.name))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@CELERY_APP.task()
|
@CELERY_APP.task()
|
||||||
@@ -61,10 +79,11 @@ def outpost_pre_delete(outpost_pk: str):
     """Delete outpost objects before deleting the DB Object"""
     outpost = Outpost.objects.get(pk=outpost_pk)
     if outpost.type == OutpostType.PROXY:
-        if outpost.deployment_type == OutpostDeploymentType.KUBERNETES:
-            ProxyKubernetesController(outpost).down()
-        if outpost.deployment_type == OutpostDeploymentType.DOCKER:
-            ProxyDockerController(outpost).down()
+        service_connection = outpost.service_connection
+        if isinstance(service_connection, DockerServiceConnection):
+            ProxyDockerController(outpost, service_connection).down()
+        if isinstance(service_connection, KubernetesServiceConnection):
+            ProxyKubernetesController(outpost, service_connection).down()
 
 
 @CELERY_APP.task()
@@ -94,6 +113,10 @@ def outpost_post_save(model_class: str, model_pk: Any):
         outpost_send_update(instance)
         return
 
+    if isinstance(instance, OutpostServiceConnection):
+        LOGGER.debug("triggering ServiceConnection state update", instance=instance)
+        outpost_service_connection_state.delay(instance.pk)
+
     for field in instance._meta.get_fields():
         # Each field is checked if it has a `related_model` attribute (when ForeginKeys or M2Ms)
         # are used, and if it has a value
@@ -128,6 +151,9 @@ def outpost_send_update(model_instace: Model):
 
 
 def _outpost_single_update(outpost: Outpost, layer=None):
     """Update outpost instances connected to a single outpost"""
+    # Ensure token again, because this function is called when anything related to an
+    # OutpostModel is saved, so we can be sure permissions are right
+    _ = outpost.token
     if not layer:  # pragma: no cover
         layer = get_channel_layer()
     for state in OutpostState.for_outpost(outpost):
@@ -24,6 +24,7 @@
                 <label class="pf-c-form__label" for="help-text-simple-form-name">
                     <span class="pf-c-form__label-text">PASSBOOK_TOKEN</span>
                 </label>
+                {# TODO: Only load key on modal open #}
                 <input class="pf-c-form-control" data-pb-fetch-key="key" data-pb-fetch-fill="{% url 'passbook_api:token-view-key' identifier=outpost.token_identifier %}" readonly type="text" value="" />
             </div>
             <h3>{% trans 'If your passbook Instance is using a self-signed certificate, set this value.' %}</h3>
@@ -1,10 +1,17 @@
 """outpost tests"""
+from os import environ
+from unittest.case import skipUnless
+from unittest.mock import patch
+
 from django.test import TestCase
 from guardian.models import UserObjectPermission
 
 from passbook.crypto.models import CertificateKeyPair
 from passbook.flows.models import Flow
-from passbook.outposts.models import Outpost, OutpostDeploymentType, OutpostType
+from passbook.outposts.controllers.k8s.base import NeedsUpdate
+from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
+from passbook.outposts.controllers.kubernetes import KubernetesController
+from passbook.outposts.models import KubernetesServiceConnection, Outpost, OutpostType
 from passbook.providers.proxy.models import ProxyProvider
 
 
@@ -22,7 +29,6 @@ class OutpostTests(TestCase):
         outpost: Outpost = Outpost.objects.create(
             name="test",
             type=OutpostType.PROXY,
-            deployment_type=OutpostDeploymentType.CUSTOM,
         )
 
         # Before we add a provider, the user should only have access to the outpost
@@ -58,3 +64,51 @@ class OutpostTests(TestCase):
         permissions = UserObjectPermission.objects.filter(user=outpost.user)
         self.assertEqual(len(permissions), 1)
         self.assertEqual(permissions[0].object_pk, str(outpost.pk))
+
+
+@skipUnless("PB_TEST_K8S" in environ, "Kubernetes test cluster required")
+class OutpostKubernetesTests(TestCase):
+    """Test Kubernetes Controllers"""
+
+    def setUp(self):
+        super().setUp()
+        self.provider: ProxyProvider = ProxyProvider.objects.create(
+            name="test",
+            internal_host="http://localhost",
+            external_host="http://localhost",
+            authorization_flow=Flow.objects.first(),
+        )
+        self.service_connection = KubernetesServiceConnection.objects.first()
+        self.outpost: Outpost = Outpost.objects.create(
+            name="test",
+            type=OutpostType.PROXY,
+            service_connection=self.service_connection,
+        )
+        self.outpost.providers.add(self.provider)
+        self.outpost.save()
+
+    def test_deployment_reconciler(self):
+        """test that deployment requires update"""
+        controller = KubernetesController(self.outpost, self.service_connection)
+        deployment_reconciler = DeploymentReconciler(controller)
+
+        self.assertIsNotNone(deployment_reconciler.retrieve())
+
+        config = self.outpost.config
+        config.kubernetes_replicas = 3
+        self.outpost.config = config
+
+        with self.assertRaises(NeedsUpdate):
+            deployment_reconciler.reconcile(
+                deployment_reconciler.retrieve(),
+                deployment_reconciler.get_reference_object(),
+            )
+
+        with patch.object(deployment_reconciler, "image_base", "test"):
+            with self.assertRaises(NeedsUpdate):
+                deployment_reconciler.reconcile(
+                    deployment_reconciler.retrieve(),
+                    deployment_reconciler.get_reference_object(),
+                )
+
+        deployment_reconciler.delete(deployment_reconciler.get_reference_object())
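The new `OutpostKubernetesTests` class is skipped unless a test cluster is reachable. A minimal sketch for opting in from a Python entry point follows; the settings module path is an assumption about the project layout, and only the presence of `PB_TEST_K8S` matters to the `@skipUnless` gate.

```python
import os

import django
from django.core.management import call_command

# Assumed settings module; adjust to the local checkout if it differs.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.root.settings")
# The skipUnless decorator only checks that the variable is present.
os.environ["PB_TEST_K8S"] = "true"

django.setup()
call_command("test", "passbook.outposts")
```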
@@ -12,7 +12,12 @@ from structlog import get_logger
 
 from passbook.core.models import User
 from passbook.outposts.controllers.docker import DockerController
-from passbook.outposts.models import Outpost, OutpostType
+from passbook.outposts.models import (
+    DockerServiceConnection,
+    KubernetesServiceConnection,
+    Outpost,
+    OutpostType,
+)
 from passbook.providers.proxy.controllers.kubernetes import ProxyKubernetesController
 
 LOGGER = get_logger()
@@ -35,7 +40,7 @@ class DockerComposeView(LoginRequiredMixin, View):
         )
         manifest = ""
         if outpost.type == OutpostType.PROXY:
-            controller = DockerController(outpost)
+            controller = DockerController(outpost, DockerServiceConnection())
             manifest = controller.get_static_deployment()
 
         return HttpResponse(manifest, content_type="text/vnd.yaml")
@@ -53,7 +58,9 @@ class KubernetesManifestView(LoginRequiredMixin, View):
         )
         manifest = ""
         if outpost.type == OutpostType.PROXY:
-            controller = ProxyKubernetesController(outpost)
+            controller = ProxyKubernetesController(
+                outpost, KubernetesServiceConnection()
+            )
             manifest = controller.get_static_deployment()
 
         return HttpResponse(manifest, content_type="text/vnd.yaml")
@@ -12,5 +12,4 @@ class PassbookPoliciesConfig(AppConfig):
     verbose_name = "passbook Policies"
 
     def ready(self):
-        """Load policy cache clearing signals"""
         import_module("passbook.policies.signals")
@@ -1,5 +1,5 @@
 """passbook expression policy evaluator"""
-from ipaddress import ip_address
+from ipaddress import ip_address, ip_network
 from typing import List
 
 from django.http import HttpRequest
@@ -22,6 +22,8 @@ class PolicyEvaluator(BaseEvaluator):
         super().__init__()
         self._messages = []
         self._context["pb_message"] = self.expr_func_message
+        self._context["ip_address"] = ip_address
+        self._context["ip_network"] = ip_network
         self._filename = policy_name or "PolicyEvaluator"
 
     def expr_func_message(self, message: str):
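With `ip_address` and `ip_network` now exposed in the expression-policy context, policies can do plain subnet membership checks. A self-contained illustration of the same call shape follows; the helper name and the 10.0.0.0/8 range are examples only.

```python
from ipaddress import ip_address, ip_network


def in_internal_network(client_ip: str) -> bool:
    """Return True when the client address falls inside a trusted range."""
    return ip_address(client_ip) in ip_network("10.0.0.0/8")


print(in_internal_network("10.1.2.3"))     # True
print(in_internal_network("203.0.113.9"))  # False
```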
@@ -112,7 +112,7 @@ class ProxyOutpostConfigSerializer(ModelSerializer):
         return ProviderInfoView(request=self.context["request"]._request).get_info(obj)
 
 
-class OutpostConfigViewSet(ModelViewSet):
+class ProxyOutpostConfigViewSet(ModelViewSet):
     """ProxyProvider Viewset"""
 
     queryset = ProxyProvider.objects.filter(application__isnull=False)
@@ -3,15 +3,15 @@ from typing import Dict
 from urllib.parse import urlparse
 
 from passbook.outposts.controllers.docker import DockerController
-from passbook.outposts.models import Outpost
+from passbook.outposts.models import DockerServiceConnection, Outpost
 from passbook.providers.proxy.models import ProxyProvider
 
 
 class ProxyDockerController(DockerController):
     """Proxy Provider Docker Contoller"""
 
-    def __init__(self, outpost: Outpost):
-        super().__init__(outpost)
+    def __init__(self, outpost: Outpost, connection: DockerServiceConnection):
+        super().__init__(outpost, connection)
         self.deployment_ports = {
             "http": 4180,
             "https": 4443,
@@ -1,5 +1,5 @@
 """Kubernetes Ingress Reconciler"""
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Dict
 from urllib.parse import urlparse
 
 from kubernetes.client import (
@@ -30,11 +30,11 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
 
     def __init__(self, controller: "KubernetesController") -> None:
         super().__init__(controller)
-        self.api = NetworkingV1beta1Api()
+        self.api = NetworkingV1beta1Api(controller.client)
 
     @property
     def name(self) -> str:
-        return f"passbook-outpost-{self.controller.outpost.name}"
+        return f"passbook-outpost-{self.controller.outpost.uuid.hex}"
 
     def reconcile(
         self, current: NetworkingV1beta1Ingress, reference: NetworkingV1beta1Ingress
@@ -56,7 +56,10 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
         have_hosts = [rule.host for rule in reference.spec.rules]
         have_hosts.sort()
 
-        have_hosts_tls = reference.spec.tls.hosts
+        have_hosts_tls = []
+        for tls_config in reference.spec.tls:
+            if tls_config:
+                have_hosts_tls += tls_config.hosts
         have_hosts_tls.sort()
 
         if have_hosts != expected_hosts:
@@ -64,11 +67,24 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
         if have_hosts_tls != expected_hosts_tls:
             raise NeedsUpdate()
 
+    def get_ingress_annotations(self) -> Dict[str, str]:
+        """Get ingress annotations"""
+        annotations = {
+            # Ensure that with multiple proxy replicas deployed, the same CSRF request
+            # goes to the same pod
+            "nginx.ingress.kubernetes.io/affinity": "cookie",
+            "traefik.ingress.kubernetes.io/affinity": "true",
+        }
+        annotations.update(
+            self.controller.outpost.config.kubernetes_ingress_annotations
+        )
+        return annotations
+
     def get_reference_object(self) -> NetworkingV1beta1Ingress:
         """Get deployment object for outpost"""
         meta = self.get_object_meta(
             name=self.name,
-            annotations=self.controller.outpost.config.kubernetes_ingress_annotations,
+            annotations=self.get_ingress_annotations(),
         )
         rules = []
         tls_hosts = []
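The new `get_ingress_annotations` helper layers per-outpost annotations from the outpost config over the sticky-session defaults. A sketch of how an operator-supplied annotation would flow in, following the config read/write pattern used in the tests earlier in this diff; the cert-manager annotation is an example value only.

```python
from passbook.outposts.models import Outpost, OutpostType

# Assumes an existing proxy outpost with a Kubernetes service connection.
outpost = Outpost.objects.filter(type=OutpostType.PROXY).first()
config = outpost.config
config.kubernetes_ingress_annotations = {
    "cert-manager.io/cluster-issuer": "letsencrypt",  # example value
}
outpost.config = config
outpost.save()
# On the next reconcile, IngressReconciler.get_ingress_annotations() merges this
# dict over the nginx/traefik affinity defaults before building the Ingress.
```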
@@ -102,7 +118,7 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
         )
         return NetworkingV1beta1Ingress(
             metadata=meta,
-            spec=NetworkingV1beta1IngressSpec(rules=rules, tls=tls_config),
+            spec=NetworkingV1beta1IngressSpec(rules=rules, tls=[tls_config]),
         )
 
     def create(self, reference: NetworkingV1beta1Ingress):
@@ -114,9 +130,7 @@ class IngressReconciler(KubernetesObjectReconciler[NetworkingV1beta1Ingress]):
         )
 
     def retrieve(self) -> NetworkingV1beta1Ingress:
-        return self.api.read_namespaced_ingress(
-            f"passbook-outpost-{self.controller.outpost.name}", self.namespace
-        )
+        return self.api.read_namespaced_ingress(self.name, self.namespace)
 
     def update(
         self, current: NetworkingV1beta1Ingress, reference: NetworkingV1beta1Ingress
@@ -1,14 +1,14 @@
 """Proxy Provider Kubernetes Contoller"""
 from passbook.outposts.controllers.kubernetes import KubernetesController
-from passbook.outposts.models import Outpost
+from passbook.outposts.models import KubernetesServiceConnection, Outpost
 from passbook.providers.proxy.controllers.k8s.ingress import IngressReconciler
 
 
 class ProxyKubernetesController(KubernetesController):
     """Proxy Provider Kubernetes Contoller"""
 
-    def __init__(self, outpost: Outpost):
-        super().__init__(outpost)
+    def __init__(self, outpost: Outpost, connection: KubernetesServiceConnection):
+        super().__init__(outpost, connection)
         self.deployment_ports = {
             "http": 4180,
             "https": 4443,
@@ -6,7 +6,7 @@ import yaml
 from django.test import TestCase
 
 from passbook.flows.models import Flow
-from passbook.outposts.models import Outpost, OutpostDeploymentType, OutpostType
+from passbook.outposts.models import KubernetesServiceConnection, Outpost, OutpostType
 from passbook.providers.proxy.controllers.kubernetes import ProxyKubernetesController
 from passbook.providers.proxy.models import ProxyProvider
 
@@ -23,17 +23,18 @@ class TestControllers(TestCase):
             external_host="http://localhost",
             authorization_flow=Flow.objects.first(),
         )
+        service_connection = KubernetesServiceConnection.objects.first()
         outpost: Outpost = Outpost.objects.create(
             name="test",
             type=OutpostType.PROXY,
-            deployment_type=OutpostDeploymentType.KUBERNETES,
+            service_connection=service_connection,
         )
         outpost.providers.add(provider)
         outpost.save()
 
-        controller = ProxyKubernetesController(outpost.pk)
+        controller = ProxyKubernetesController(outpost, service_connection)
         manifest = controller.get_static_deployment()
-        self.assertEqual(len(list(yaml.load_all(manifest, Loader=yaml.SafeLoader))), 3)
+        self.assertEqual(len(list(yaml.load_all(manifest, Loader=yaml.SafeLoader))), 4)
 
     def test_kubernetes_controller_deploy(self):
         """Test Kubernetes Controller"""
@@ -43,13 +44,15 @@ class TestControllers(TestCase):
             external_host="http://localhost",
             authorization_flow=Flow.objects.first(),
         )
+        service_connection = KubernetesServiceConnection.objects.first()
         outpost: Outpost = Outpost.objects.create(
             name="test",
             type=OutpostType.PROXY,
-            deployment_type=OutpostDeploymentType.KUBERNETES,
+            service_connection=service_connection,
         )
         outpost.providers.add(provider)
         outpost.save()
 
-        controller = ProxyKubernetesController(outpost.pk)
+        controller = ProxyKubernetesController(outpost, service_connection)
         controller.up()
+        controller.down()
@@ -25,6 +25,7 @@ class SAMLProviderSerializer(ModelSerializer):
             "signature_algorithm",
             "signing_kp",
             "require_signing",
+            "verification_kp",
         ]
 
 
@@ -7,6 +7,7 @@ from django.utils.translation import gettext as _
 
 from passbook.admin.fields import CodeMirrorWidget
 from passbook.core.expression import PropertyMappingEvaluator
+from passbook.crypto.models import CertificateKeyPair
 from passbook.flows.models import Flow, FlowDesignation
 from passbook.providers.saml.models import SAMLPropertyMapping, SAMLProvider
 
@@ -20,6 +21,9 @@ class SAMLProviderForm(forms.ModelForm):
             designation=FlowDesignation.AUTHORIZATION
         )
         self.fields["property_mappings"].queryset = SAMLPropertyMapping.objects.all()
+        self.fields["signing_kp"].queryset = CertificateKeyPair.objects.exclude(
+            key_data__iexact=""
+        )
 
     class Meta:
 
@@ -34,11 +38,12 @@ class SAMLProviderForm(forms.ModelForm):
             "assertion_valid_not_before",
             "assertion_valid_not_on_or_after",
             "session_valid_not_on_or_after",
-            "property_mappings",
             "digest_algorithm",
             "require_signing",
             "signature_algorithm",
             "signing_kp",
+            "verification_kp",
+            "property_mappings",
         ]
         widgets = {
             "name": forms.TextInput(),
@@ -0,0 +1,28 @@
+# Generated by Django 3.1.3 on 2020-11-08 21:22
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("passbook_crypto", "0002_create_self_signed_kp"),
+        ("passbook_providers_saml", "0006_remove_samlprovider_name"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="samlprovider",
+            name="verification_kp",
+            field=models.ForeignKey(
+                default=None,
+                help_text="If selected, incoming assertion's Signatures will be validated.",
+                null=True,
+                on_delete=django.db.models.deletion.SET_NULL,
+                related_name="+",
+                to="passbook_crypto.certificatekeypair",
+                verbose_name="Verification Keypair",
+            ),
+        ),
+    ]
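The migration adds an optional `verification_kp` foreign key to `SAMLProvider`. A short sketch of querying it once the migration is applied; the field name and null semantics are taken from the `AddField` arguments above.

```python
from passbook.providers.saml.models import SAMLProvider

# Providers with a verification keypair will validate incoming assertion signatures.
for provider in SAMLProvider.objects.exclude(verification_kp=None):
    print(provider.pk, provider.verification_kp)
```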
Some files were not shown because too many files have changed in this diff.