Compare commits
116 Commits
version/0. ... version/0.
| SHA1 | Author | Date | |
|---|---|---|---|
| a4cc653757 | |||
| db4ff20906 | |||
| 1f0fbd33b6 | |||
| 5de8d2721e | |||
| 0d65da9a9e | |||
| 4316ee4330 | |||
| 2ed9a1dbe3 | |||
| 8e03824d20 | |||
| 754dbdd0e5 | |||
| e13d348315 | |||
| 169f3ebe5b | |||
| f8ad604e85 | |||
| 774b9c8a61 | |||
| d8c522233e | |||
| 82d50f7eaa | |||
| 1c426c5136 | |||
| d6e14cc551 | |||
| c3917ebc2e | |||
| 7203bd37a3 | |||
| 597188c7ee | |||
| ac4c314042 | |||
| 05866d3544 | |||
| 6596bc6034 | |||
| c6661ef4d2 | |||
| 386e23dfac | |||
| 5d7220ca70 | |||
| 5de0d03acf | |||
| b0cc91f343 | |||
| 029a78f108 | |||
| 3f4a8dc4f6 | |||
| 32f6ba6302 | |||
| 8da0b14f29 | |||
| 83eb4aff02 | |||
| 927d02f591 | |||
| d04afcd6d0 | |||
| 89c6db66fd | |||
| e6ffa65a7e | |||
| 8a2f982a77 | |||
| 16cf6315e3 | |||
| 1d85874f41 | |||
| ff64182ae8 | |||
| a9ee67bf2d | |||
| e87d52a76b | |||
| 8b09cf55a2 | |||
| 0203d20759 | |||
| 7861e2e0bd | |||
| ad29d54bbf | |||
| c698ba37d9 | |||
| 6a53069653 | |||
| 152b2d863d | |||
| ee670d5e19 | |||
| 36e095671c | |||
| 1088b947a8 | |||
| c4a30c50ac | |||
| 2831df45a0 | |||
| ee5bac099f | |||
| 69f7b41044 | |||
| f9cede7b31 | |||
| 903cdeaa7f | |||
| e909e7fa8a | |||
| bee38551f3 | |||
| c0ec6388df | |||
| 8f08836885 | |||
| dd0d7e7481 | |||
| 25d0ac6534 | |||
| 971713d1aa | |||
| 5135d828b4 | |||
| b2c571bf1b | |||
| 6b1d30d230 | |||
| 3454760731 | |||
| 96846220c3 | |||
| a4f5678144 | |||
| a18baa3cb3 | |||
| dfedd4a7f1 | |||
| 897f64600a | |||
| c6eb015d18 | |||
| 54088239ab | |||
| aa9c7a6567 | |||
| 6c0c12c90a | |||
| c49b57ad1d | |||
| 2339e855bb | |||
| bdc019c7cf | |||
| 5e2fb6d56e | |||
| 3b9524cdfc | |||
| 7154f19668 | |||
| 8fedd9ec07 | |||
| 4ac87d8739 | |||
| e4f45eba0a | |||
| 4b3e0f0f96 | |||
| 482da81522 | |||
| c5226fd0e8 | |||
| 7806cff96f | |||
| fa504e4bf9 | |||
| 86cfb10b9b | |||
| f6b8171624 | |||
| 91ce7f7363 | |||
| 17060238f0 | |||
| c392c2a74b | |||
| 8cbaec8ba8 | |||
| 4750f8c653 | |||
| 69d2a1cf3b | |||
| 635f6c1ef2 | |||
| 18da7565c2 | |||
| 45699a1a69 | |||
| 5556e9f8e7 | |||
| 327bb09dd4 | |||
| 8ca23451c6 | |||
| b99e2b10fe | |||
| e966dff1a7 | |||
| 481fbedef2 | |||
| d104012eee | |||
| b03a508475 | |||
| 8ede4b6a13 | |||
| 41323afccc | |||
| 4a10b4999b | |||
| 20ee634cda |
@@ -1,5 +1,5 @@
  [bumpversion]
- current_version = 0.11.0-stable
+ current_version = 0.12.3-stable
  tag = True
  commit = True
  parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)
.github/dependabot.yml (4 changes, vendored)
@@ -6,12 +6,16 @@ updates:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu
- package-ecosystem: npm
directory: "/passbook/static/static"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu
- package-ecosystem: pip
directory: "/"
schedule:
.github/workflows/codeql-analysis.yml (54 changes, vendored, file deleted)
@@ -1,54 +0,0 @@
name: "CodeQL"

on:
push:
branches: [master, admin-more-info, ci-deploy-dev, gh-pages, provider-saml-v2]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
schedule:
- cron: '0 20 * * 2'

jobs:
analyse:
name: Analyse
runs-on: ubuntu-latest

steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
fetch-depth: 2

# If this run was triggered by a pull request event, then checkout
# the head of the pull request instead of the merge commit.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java

# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1

# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl

# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language

#- run: |
# make bootstrap
# make release

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
.github/workflows/release.yml (14 changes, vendored)
@@ -18,11 +18,11 @@ jobs:
  - name: Building Docker Image
  run: docker build
  --no-cache
- -t beryju/passbook:0.11.0-stable
+ -t beryju/passbook:0.12.3-stable
  -t beryju/passbook:latest
  -f Dockerfile .
  - name: Push Docker Container to Registry (versioned)
- run: docker push beryju/passbook:0.11.0-stable
+ run: docker push beryju/passbook:0.12.3-stable
  - name: Push Docker Container to Registry (latest)
  run: docker push beryju/passbook:latest
  build-proxy:
@@ -48,11 +48,11 @@ jobs:
  cd proxy
  docker build \
  --no-cache \
- -t beryju/passbook-proxy:0.11.0-stable \
+ -t beryju/passbook-proxy:0.12.3-stable \
  -t beryju/passbook-proxy:latest \
  -f Dockerfile .
  - name: Push Docker Container to Registry (versioned)
- run: docker push beryju/passbook-proxy:0.11.0-stable
+ run: docker push beryju/passbook-proxy:0.12.3-stable
  - name: Push Docker Container to Registry (latest)
  run: docker push beryju/passbook-proxy:latest
  build-static:
@@ -77,11 +77,11 @@ jobs:
  run: docker build
  --no-cache
  --network=$(docker network ls | grep github | awk '{print $1}')
- -t beryju/passbook-static:0.11.0-stable
+ -t beryju/passbook-static:0.12.3-stable
  -t beryju/passbook-static:latest
  -f static.Dockerfile .
  - name: Push Docker Container to Registry (versioned)
- run: docker push beryju/passbook-static:0.11.0-stable
+ run: docker push beryju/passbook-static:0.12.3-stable
  - name: Push Docker Container to Registry (latest)
  run: docker push beryju/passbook-static:latest
  test-release:
@@ -114,5 +114,5 @@ jobs:
  SENTRY_PROJECT: passbook
  SENTRY_URL: https://sentry.beryju.org
  with:
- tagName: 0.11.0-stable
+ tagName: 0.12.3-stable
  environment: beryjuorg-prod
Pipfile (4 changes)
@@ -17,10 +17,10 @@ django-otp = "*"
  django-prometheus = "*"
  django-recaptcha = "*"
  django-redis = "*"
- djangorestframework = "==3.11.1"
+ djangorestframework = "*"
  django-storages = "*"
  djangorestframework-guardian = "*"
- drf-yasg = "*"
+ drf_yasg2 = "*"
  facebook-sdk = "*"
  ldap3 = "*"
  lxml = "*"
Pipfile.lock (222 changes, generated)
@ -1,7 +1,7 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "77737b63b2469755fd2a3d06b23054ae42b07b3f24cf887472f05fb8ab165cc6"
|
||||
"sha256": "d1a9883d864e25f18e34b298b72b58db333a037571c7a20cefb7ba7a4037a434"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
@ -74,18 +74,18 @@
|
||||
},
|
||||
"boto3": {
|
||||
"hashes": [
|
||||
"sha256:1627f97e050be59cfef839481acc73eba4b29e475a067f374a493e6b7f25601e",
|
||||
"sha256:8aafa1ec72451cf70fe6d8c7e86b1a83d2e195d4dda95e5bf21e40132a38c309"
|
||||
"sha256:2e16f02c8b832d401d958d7ca0a14c5bc7da17827918e6b24e5bc43dce8f496e",
|
||||
"sha256:ab5353a968a4e664b9da2dd950169b755066525fcbfdfc90e7e49c8333d95c19"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.15.15"
|
||||
"version": "==1.16.0"
|
||||
},
|
||||
"botocore": {
|
||||
"hashes": [
|
||||
"sha256:3b9179edbba61c96f5d1eaa4328c9cda686bd461e102c5878c4880479c24e268",
|
||||
"sha256:f59437ff69d260faa876a2bb7d76debcbbb3b1a497e9ff49550a1a5501679720"
|
||||
"sha256:226effa72e3ddd0a802e812c0e204999393ca7982fee754cc0c770a7a1caef3a",
|
||||
"sha256:9bf8586b69f20cf0a8ed1e27338cd10ce847751d1a2fd98b92662565c8a2df24"
|
||||
],
|
||||
"version": "==1.18.15"
|
||||
"version": "==1.19.0"
|
||||
},
|
||||
"cachetools": {
|
||||
"hashes": [
|
||||
@ -96,11 +96,11 @@
|
||||
},
|
||||
"celery": {
|
||||
"hashes": [
|
||||
"sha256:313930fddde703d8e37029a304bf91429cd11aeef63c57de6daca9d958e1f255",
|
||||
"sha256:72138dc3887f68dc58e1a2397e477256f80f1894c69fa4337f8ed70be460375b"
|
||||
"sha256:7aa4ee46ed318bc177900ae7c01500354aee62d723255b0925db0754bcd4d390",
|
||||
"sha256:e3e8956d74af986b1e9770e0a294338b259618bf70283d6157416328e50c2bd6"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==5.0.0"
|
||||
"version": "==5.0.1"
|
||||
},
|
||||
"certifi": {
|
||||
"hashes": [
|
||||
@ -349,11 +349,11 @@
|
||||
},
|
||||
"djangorestframework": {
|
||||
"hashes": [
|
||||
"sha256:6dd02d5a4bd2516fb93f80360673bf540c3b6641fec8766b1da2870a5aa00b32",
|
||||
"sha256:8b1ac62c581dbc5799b03e535854b92fc4053ecfe74bad3f9c05782063d4196b"
|
||||
"sha256:5c5071fcbad6dce16f566d492015c829ddb0df42965d488b878594aabc3aed21",
|
||||
"sha256:d54452aedebb4b650254ca092f9f4f5df947cb1de6ab245d817b08b4f4156249"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.11.1"
|
||||
"version": "==3.12.1"
|
||||
},
|
||||
"djangorestframework-guardian": {
|
||||
"hashes": [
|
||||
@ -371,13 +371,13 @@
|
||||
"index": "pypi",
|
||||
"version": "==4.3.1"
|
||||
},
|
||||
"drf-yasg": {
|
||||
"drf-yasg2": {
|
||||
"hashes": [
|
||||
"sha256:5572e9d5baab9f6b49318169df9789f7399d0e3c7bdac8fdb8dfccf1d5d2b1ca",
|
||||
"sha256:7d7af27ad16e18507e9392b2afd6b218fbffc432ec8dbea053099a2241e184ff"
|
||||
"sha256:c4aa21d52f3964f99748eed68eb24be0fdad65e55bb56b99ae85c950718bac64",
|
||||
"sha256:e880b3fa298a614360f4d882e8bc1712b51e1b28696acbd2684ac0ab18275a62"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.17.1"
|
||||
"version": "==1.19.2"
|
||||
},
|
||||
"eight": {
|
||||
"hashes": [
|
||||
@ -555,11 +555,11 @@
|
||||
},
|
||||
"kubernetes": {
|
||||
"hashes": [
|
||||
"sha256:1a2472f8b01bc6aa87e3a34781f859bded5a5c8ff791a53d889a8bd6cc550430",
|
||||
"sha256:4af81201520977139a143f96123fb789fa351879df37f122916b9b6ed050bbaf"
|
||||
"sha256:72f095a1cd593401ff26b3b8d71749340394ca6d8413770ea28ce18efd5bcf4c",
|
||||
"sha256:9a339a32d6c79e6461cb6050c3662cb4e33058b508d8d34ee5d5206add395828"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==11.0.0"
|
||||
"version": "==12.0.0"
|
||||
},
|
||||
"ldap3": {
|
||||
"hashes": [
|
||||
@ -571,40 +571,45 @@
|
||||
},
|
||||
"lxml": {
|
||||
"hashes": [
|
||||
"sha256:05a444b207901a68a6526948c7cc8f9fe6d6f24c70781488e32fd74ff5996e3f",
|
||||
"sha256:08fc93257dcfe9542c0a6883a25ba4971d78297f63d7a5a26ffa34861ca78730",
|
||||
"sha256:107781b213cf7201ec3806555657ccda67b1fccc4261fb889ef7fc56976db81f",
|
||||
"sha256:121b665b04083a1e85ff1f5243d4a93aa1aaba281bc12ea334d5a187278ceaf1",
|
||||
"sha256:1fa21263c3aba2b76fd7c45713d4428dbcc7644d73dcf0650e9d344e433741b3",
|
||||
"sha256:2b30aa2bcff8e958cd85d907d5109820b01ac511eae5b460803430a7404e34d7",
|
||||
"sha256:4b4a111bcf4b9c948e020fd207f915c24a6de3f1adc7682a2d92660eb4e84f1a",
|
||||
"sha256:5591c4164755778e29e69b86e425880f852464a21c7bb53c7ea453bbe2633bbe",
|
||||
"sha256:59daa84aef650b11bccd18f99f64bfe44b9f14a08a28259959d33676554065a1",
|
||||
"sha256:5a9c8d11aa2c8f8b6043d845927a51eb9102eb558e3f936df494e96393f5fd3e",
|
||||
"sha256:5dd20538a60c4cc9a077d3b715bb42307239fcd25ef1ca7286775f95e9e9a46d",
|
||||
"sha256:74f48ec98430e06c1fa8949b49ebdd8d27ceb9df8d3d1c92e1fdc2773f003f20",
|
||||
"sha256:786aad2aa20de3dbff21aab86b2fb6a7be68064cbbc0219bde414d3a30aa47ae",
|
||||
"sha256:7ad7906e098ccd30d8f7068030a0b16668ab8aa5cda6fcd5146d8d20cbaa71b5",
|
||||
"sha256:80a38b188d20c0524fe8959c8ce770a8fdf0e617c6912d23fc97c68301bb9aba",
|
||||
"sha256:8f0ec6b9b3832e0bd1d57af41f9238ea7709bbd7271f639024f2fc9d3bb01293",
|
||||
"sha256:92282c83547a9add85ad658143c76a64a8d339028926d7dc1998ca029c88ea6a",
|
||||
"sha256:94150231f1e90c9595ccc80d7d2006c61f90a5995db82bccbca7944fd457f0f6",
|
||||
"sha256:9dc9006dcc47e00a8a6a029eb035c8f696ad38e40a27d073a003d7d1443f5d88",
|
||||
"sha256:a76979f728dd845655026ab991df25d26379a1a8fc1e9e68e25c7eda43004bed",
|
||||
"sha256:aa8eba3db3d8761db161003e2d0586608092e217151d7458206e243be5a43843",
|
||||
"sha256:bea760a63ce9bba566c23f726d72b3c0250e2fa2569909e2d83cda1534c79443",
|
||||
"sha256:c3f511a3c58676147c277eff0224c061dd5a6a8e1373572ac817ac6324f1b1e0",
|
||||
"sha256:c9d317efde4bafbc1561509bfa8a23c5cab66c44d49ab5b63ff690f5159b2304",
|
||||
"sha256:cc411ad324a4486b142c41d9b2b6a722c534096963688d879ea6fa8a35028258",
|
||||
"sha256:cdc13a1682b2a6241080745b1953719e7fe0850b40a5c71ca574f090a1391df6",
|
||||
"sha256:cfd7c5dd3c35c19cec59c63df9571c67c6d6e5c92e0fe63517920e97f61106d1",
|
||||
"sha256:e1cacf4796b20865789083252186ce9dc6cc59eca0c2e79cca332bdff24ac481",
|
||||
"sha256:e70d4e467e243455492f5de463b72151cc400710ac03a0678206a5f27e79ddef",
|
||||
"sha256:ecc930ae559ea8a43377e8b60ca6f8d61ac532fc57efb915d899de4a67928efd",
|
||||
"sha256:f161af26f596131b63b236372e4ce40f3167c1b5b5d459b29d2514bd8c9dc9ee"
|
||||
"sha256:0e89f5d422988c65e6936e4ec0fe54d6f73f3128c80eb7ecc3b87f595523607b",
|
||||
"sha256:189ad47203e846a7a4951c17694d845b6ade7917c47c64b29b86526eefc3adf5",
|
||||
"sha256:1d87936cb5801c557f3e981c9c193861264c01209cb3ad0964a16310ca1b3301",
|
||||
"sha256:211b3bcf5da70c2d4b84d09232534ad1d78320762e2c59dedc73bf01cb1fc45b",
|
||||
"sha256:2358809cc64394617f2719147a58ae26dac9e21bae772b45cfb80baa26bfca5d",
|
||||
"sha256:23c83112b4dada0b75789d73f949dbb4e8f29a0a3511647024a398ebd023347b",
|
||||
"sha256:24e811118aab6abe3ce23ff0d7d38932329c513f9cef849d3ee88b0f848f2aa9",
|
||||
"sha256:2d5896ddf5389560257bbe89317ca7bcb4e54a02b53a3e572e1ce4226512b51b",
|
||||
"sha256:2d6571c48328be4304aee031d2d5046cbc8aed5740c654575613c5a4f5a11311",
|
||||
"sha256:2e311a10f3e85250910a615fe194839a04a0f6bc4e8e5bb5cac221344e3a7891",
|
||||
"sha256:302160eb6e9764168e01d8c9ec6becddeb87776e81d3fcb0d97954dd51d48e0a",
|
||||
"sha256:3a7a380bfecc551cfd67d6e8ad9faa91289173bdf12e9cfafbd2bdec0d7b1ec1",
|
||||
"sha256:3d9b2b72eb0dbbdb0e276403873ecfae870599c83ba22cadff2db58541e72856",
|
||||
"sha256:475325e037fdf068e0c2140b818518cf6bc4aa72435c407a798b2db9f8e90810",
|
||||
"sha256:4b7572145054330c8e324a72d808c8c8fbe12be33368db28c39a255ad5f7fb51",
|
||||
"sha256:4fff34721b628cce9eb4538cf9a73d02e0f3da4f35a515773cce6f5fe413b360",
|
||||
"sha256:56eff8c6fb7bc4bcca395fdff494c52712b7a57486e4fbde34c31bb9da4c6cc4",
|
||||
"sha256:573b2f5496c7e9f4985de70b9bbb4719ffd293d5565513e04ac20e42e6e5583f",
|
||||
"sha256:7ecaef52fd9b9535ae5f01a1dd2651f6608e4ec9dc136fc4dfe7ebe3c3ddb230",
|
||||
"sha256:803a80d72d1f693aa448566be46ffd70882d1ad8fc689a2e22afe63035eb998a",
|
||||
"sha256:8862d1c2c020cb7a03b421a9a7b4fe046a208db30994fc8ff68c627a7915987f",
|
||||
"sha256:9b06690224258db5cd39a84e993882a6874676f5de582da57f3df3a82ead9174",
|
||||
"sha256:a71400b90b3599eb7bf241f947932e18a066907bf84617d80817998cee81e4bf",
|
||||
"sha256:bb252f802f91f59767dcc559744e91efa9df532240a502befd874b54571417bd",
|
||||
"sha256:be1ebf9cc25ab5399501c9046a7dcdaa9e911802ed0e12b7d620cd4bbf0518b3",
|
||||
"sha256:be7c65e34d1b50ab7093b90427cbc488260e4b3a38ef2435d65b62e9fa3d798a",
|
||||
"sha256:c0dac835c1a22621ffa5e5f999d57359c790c52bbd1c687fe514ae6924f65ef5",
|
||||
"sha256:c152b2e93b639d1f36ec5a8ca24cde4a8eefb2b6b83668fcd8e83a67badcb367",
|
||||
"sha256:d182eada8ea0de61a45a526aa0ae4bcd222f9673424e65315c35820291ff299c",
|
||||
"sha256:d18331ea905a41ae71596502bd4c9a2998902328bbabd29e3d0f5f8569fabad1",
|
||||
"sha256:d20d32cbb31d731def4b1502294ca2ee99f9249b63bc80e03e67e8f8e126dea8",
|
||||
"sha256:d4ad7fd3269281cb471ad6c7bafca372e69789540d16e3755dd717e9e5c9d82f",
|
||||
"sha256:d6f8c23f65a4bfe4300b85f1f40f6c32569822d08901db3b6454ab785d9117cc",
|
||||
"sha256:d84d741c6e35c9f3e7406cb7c4c2e08474c2a6441d59322a00dcae65aac6315d",
|
||||
"sha256:e65c221b2115a91035b55a593b6eb94aa1206fa3ab374f47c6dc10d364583ff9",
|
||||
"sha256:f98b6f256be6cec8dd308a8563976ddaff0bdc18b730720f6f4bee927ffe926f"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==4.5.2"
|
||||
"version": "==4.6.1"
|
||||
},
|
||||
"markupsafe": {
|
||||
"hashes": [
|
||||
@ -691,10 +696,10 @@
|
||||
},
|
||||
"prompt-toolkit": {
|
||||
"hashes": [
|
||||
"sha256:822f4605f28f7d2ba6b0b09a31e25e140871e96364d1d377667b547bb3bf4489",
|
||||
"sha256:83074ee28ad4ba6af190593d4d4c607ff525272a504eb159199b6dd9f950c950"
|
||||
"sha256:25c95d2ac813909f813c93fde734b6e44406d1477a9faef7c915ff37d39c0a8c",
|
||||
"sha256:7debb9a521e0b1ee7d2fe96ee4bd60ef03c6492784de0547337ca4433e46aa63"
|
||||
],
|
||||
"version": "==3.0.7"
|
||||
"version": "==3.0.8"
|
||||
},
|
||||
"psycopg2-binary": {
|
||||
"hashes": [
|
||||
@ -708,6 +713,7 @@
|
||||
"sha256:7312e931b90fe14f925729cde58022f5d034241918a5c4f9797cac62f6b3a9dd",
|
||||
"sha256:7d92a09b788cbb1aec325af5fcba9fed7203897bbd9269d5691bb1e3bce29550",
|
||||
"sha256:833709a5c66ca52f1d21d41865a637223b368c0ee76ea54ca5bad6f2526c7679",
|
||||
"sha256:89705f45ce07b2dfa806ee84439ec67c5d9a0ef20154e0e475e2b2ed392a5b83",
|
||||
"sha256:8cd0fb36c7412996859cb4606a35969dd01f4ea34d9812a141cd920c3b18be77",
|
||||
"sha256:950bc22bb56ee6ff142a2cb9ee980b571dd0912b0334aa3fe0fe3788d860bea2",
|
||||
"sha256:a0c50db33c32594305b0ef9abc0cb7db13de7621d2cadf8392a1d9b3c437ef77",
|
||||
@ -974,6 +980,8 @@
|
||||
"sha256:a873e4d4954f865dcb60bdc4914af7eaae48fb56b60ed6daa1d6251c72f5337c",
|
||||
"sha256:ab845f1f51f7eb750a78937be9f79baea4a42c7960f5a94dde34e69f3cce1988",
|
||||
"sha256:b1e981fe1aff1fd11627f531524826a4dcc1f26c726235a52fcb62ded27d150f",
|
||||
"sha256:b4b0d31f2052b3f9f9b5327024dc629a253a83d8649d4734ca7f35b60ec3e9e5",
|
||||
"sha256:c6ac7e45367b1317e56f1461719c853fd6825226f45b835df7436bb04031fd8a",
|
||||
"sha256:daf21aa33ee9b351f66deed30a3d450ab55c14242cfdfcd377798e2c0d25c9f1",
|
||||
"sha256:e9f7d1d8c26a6a12c23421061f9022bb62704e38211fe375c645485f38df34a2",
|
||||
"sha256:f6061a31880c1ed6b6ce341215336e2f3d0c1deccd84957b6fa8ca474b41e89f"
|
||||
@ -990,11 +998,11 @@
|
||||
},
|
||||
"sentry-sdk": {
|
||||
"hashes": [
|
||||
"sha256:1d91a0059d2d8bb980bec169578035c2f2d4b93cd8a4fb5b85c81904d33e221a",
|
||||
"sha256:6222cf623e404c3e62b8e0e81c6db866ac2d12a663b7c1f7963350e3f397522a"
|
||||
"sha256:0eea248408d36e8e7037c7b73827bea20b13a4375bf1719c406cae6fcbc094e3",
|
||||
"sha256:5cf36eb6b1dc62d55f3c64289792cbaebc8ffa5a9da14474f49b46d20caa7fc8"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.18.0"
|
||||
"version": "==0.19.1"
|
||||
},
|
||||
"service-identity": {
|
||||
"hashes": [
|
||||
@ -1092,23 +1100,23 @@
|
||||
"secure"
|
||||
],
|
||||
"hashes": [
|
||||
"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a",
|
||||
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"
|
||||
"sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2",
|
||||
"sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"
|
||||
],
|
||||
"index": "pypi",
|
||||
"markers": null,
|
||||
"version": "==1.25.10"
|
||||
"version": "==1.25.11"
|
||||
},
|
||||
"uvicorn": {
|
||||
"extras": [
|
||||
"standard"
|
||||
],
|
||||
"hashes": [
|
||||
"sha256:a461e76406088f448f36323f5ac774d50e5a552b6ccb54e4fca8d83ef614a7c2",
|
||||
"sha256:d06a25caa8dc680ad92eb3ec67363f5281c092059613a1cc0100acba37fc0f45"
|
||||
"sha256:8ff7495c74b8286a341526ff9efa3988ebab9a4b2f561c7438c3cb420992d7dd",
|
||||
"sha256:e5dbed4a8a44c7b04376021021d63798d6a7bcfae9c654a0b153577b93854fba"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.12.1"
|
||||
"version": "==0.12.2"
|
||||
},
|
||||
"uvloop": {
|
||||
"hashes": [
|
||||
@ -1300,11 +1308,11 @@
|
||||
},
|
||||
"colorama": {
|
||||
"hashes": [
|
||||
"sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff",
|
||||
"sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"
|
||||
"sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b",
|
||||
"sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.4.3"
|
||||
"version": "==0.4.4"
|
||||
},
|
||||
"coverage": {
|
||||
"hashes": [
|
||||
@ -1399,10 +1407,10 @@
|
||||
},
|
||||
"iniconfig": {
|
||||
"hashes": [
|
||||
"sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437",
|
||||
"sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69"
|
||||
"sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3",
|
||||
"sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"
|
||||
],
|
||||
"version": "==1.0.1"
|
||||
"version": "==1.1.1"
|
||||
},
|
||||
"isort": {
|
||||
"hashes": [
|
||||
@ -1468,10 +1476,10 @@
|
||||
},
|
||||
"pbr": {
|
||||
"hashes": [
|
||||
"sha256:14bfd98f51c78a3dd22a1ef45cf194ad79eee4a19e8e1a0d5c7f8e81ffe182ea",
|
||||
"sha256:5adc0f9fc64319d8df5ca1e4e06eea674c26b80e6f00c530b18ce6a6592ead15"
|
||||
"sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9",
|
||||
"sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00"
|
||||
],
|
||||
"version": "==5.5.0"
|
||||
"version": "==5.5.1"
|
||||
},
|
||||
"pep8-naming": {
|
||||
"hashes": [
|
||||
@ -1574,11 +1582,11 @@
|
||||
},
|
||||
"pytest-django": {
|
||||
"hashes": [
|
||||
"sha256:4de6dbd077ed8606616958f77655fed0d5e3ee45159475671c7fa67596c6dba6",
|
||||
"sha256:c33e3d3da14d8409b125d825d4e74da17bb252191bf6fc3da6856e27a8b73ea4"
|
||||
"sha256:0e91003fdd41ac0322c1978682be2ca180bc564203dd53c698f99242bf513614",
|
||||
"sha256:5f964ccda1f551e00589ab0679a7c45c36c509a44b5bfb5ad07954e0ae3f4bed"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.10.0"
|
||||
"version": "==4.0.0"
|
||||
},
|
||||
"pytz": {
|
||||
"hashes": [
|
||||
@ -1606,35 +1614,35 @@
|
||||
},
|
||||
"regex": {
|
||||
"hashes": [
|
||||
"sha256:088afc8c63e7bd187a3c70a94b9e50ab3f17e1d3f52a32750b5b77dbe99ef5ef",
|
||||
"sha256:1fe0a41437bbd06063aa184c34804efa886bcc128222e9916310c92cd54c3b4c",
|
||||
"sha256:3d20024a70b97b4f9546696cbf2fd30bae5f42229fbddf8661261b1eaff0deb7",
|
||||
"sha256:41bb65f54bba392643557e617316d0d899ed5b4946dccee1cb6696152b29844b",
|
||||
"sha256:4318d56bccfe7d43e5addb272406ade7a2274da4b70eb15922a071c58ab0108c",
|
||||
"sha256:4707f3695b34335afdfb09be3802c87fa0bc27030471dbc082f815f23688bc63",
|
||||
"sha256:49f23ebd5ac073765ecbcf046edc10d63dcab2f4ae2bce160982cb30df0c0302",
|
||||
"sha256:5533a959a1748a5c042a6da71fe9267a908e21eded7a4f373efd23a2cbdb0ecc",
|
||||
"sha256:5d892a4f1c999834eaa3c32bc9e8b976c5825116cde553928c4c8e7e48ebda67",
|
||||
"sha256:5f18875ac23d9aa2f060838e8b79093e8bb2313dbaaa9f54c6d8e52a5df097be",
|
||||
"sha256:60b0e9e6dc45683e569ec37c55ac20c582973841927a85f2d8a7d20ee80216ab",
|
||||
"sha256:816064fc915796ea1f26966163f6845de5af78923dfcecf6551e095f00983650",
|
||||
"sha256:84cada8effefe9a9f53f9b0d2ba9b7b6f5edf8d2155f9fdbe34616e06ececf81",
|
||||
"sha256:84e9407db1b2eb368b7ecc283121b5e592c9aaedbe8c78b1a2f1102eb2e21d19",
|
||||
"sha256:8d69cef61fa50c8133382e61fd97439de1ae623fe943578e477e76a9d9471637",
|
||||
"sha256:9a02d0ae31d35e1ec12a4ea4d4cca990800f66a917d0fb997b20fbc13f5321fc",
|
||||
"sha256:9bc13e0d20b97ffb07821aa3e113f9998e84994fe4d159ffa3d3a9d1b805043b",
|
||||
"sha256:a6f32aea4260dfe0e55dc9733ea162ea38f0ea86aa7d0f77b15beac5bf7b369d",
|
||||
"sha256:ae91972f8ac958039920ef6e8769277c084971a142ce2b660691793ae44aae6b",
|
||||
"sha256:c570f6fa14b9c4c8a4924aaad354652366577b4f98213cf76305067144f7b100",
|
||||
"sha256:c9443124c67b1515e4fe0bb0aa18df640965e1030f468a2a5dc2589b26d130ad",
|
||||
"sha256:d23a18037313714fb3bb5a94434d3151ee4300bae631894b1ac08111abeaa4a3",
|
||||
"sha256:eaf548d117b6737df379fdd53bdde4f08870e66d7ea653e230477f071f861121",
|
||||
"sha256:ebbe29186a3d9b0c591e71b7393f1ae08c83cb2d8e517d2a822b8f7ec99dfd8b",
|
||||
"sha256:eda4771e0ace7f67f58bc5b560e27fb20f32a148cbc993b0c3835970935c2707",
|
||||
"sha256:f1b3afc574a3db3b25c89161059d857bd4909a1269b0b3cb3c904677c8c4a3f7",
|
||||
"sha256:f2388013e68e750eaa16ccbea62d4130180c26abb1d8e5d584b9baf69672b30f"
|
||||
"sha256:02686a2f0b1a4be0facdd0d3ad4dc6c23acaa0f38fb5470d892ae88584ba705c",
|
||||
"sha256:137da580d1e6302484be3ef41d72cf5c3ad22a076070051b7449c0e13ab2c482",
|
||||
"sha256:20cdd7e1736f4f61a5161aa30d05ac108ab8efc3133df5eb70fe1e6a23ea1ca6",
|
||||
"sha256:25991861c6fef1e5fd0a01283cf5658c5e7f7aa644128e85243bc75304e91530",
|
||||
"sha256:26b85672275d8c7a9d4ff93dbc4954f5146efdb2ecec89ad1de49439984dea14",
|
||||
"sha256:2f60ba5c33f00ce9be29a140e6f812e39880df8ba9cb92ad333f0016dbc30306",
|
||||
"sha256:3dd952f3f8dc01b72c0cf05b3631e05c50ac65ddd2afdf26551638e97502107b",
|
||||
"sha256:578ac6379e65eb8e6a85299b306c966c852712c834dc7eef0ba78d07a828f67b",
|
||||
"sha256:5d4a3221f37520bb337b64a0632716e61b26c8ae6aaffceeeb7ad69c009c404b",
|
||||
"sha256:608d6c05452c0e6cc49d4d7407b4767963f19c4d2230fa70b7201732eedc84f2",
|
||||
"sha256:65b6b018b07e9b3b6a05c2c3bb7710ed66132b4df41926c243887c4f1ff303d5",
|
||||
"sha256:698f8a5a2815e1663d9895830a063098ae2f8f2655ae4fdc5dfa2b1f52b90087",
|
||||
"sha256:6c72adb85adecd4522a488a751e465842cdd2a5606b65464b9168bf029a54272",
|
||||
"sha256:6d4cdb6c20e752426b2e569128488c5046fb1b16b1beadaceea9815c36da0847",
|
||||
"sha256:6e9f72e0ee49f7d7be395bfa29e9533f0507a882e1e6bf302c0a204c65b742bf",
|
||||
"sha256:828618f3c3439c5e6ef8621e7c885ca561bbaaba90ddbb6a7dfd9e1ec8341103",
|
||||
"sha256:85b733a1ef2b2e7001aff0e204a842f50ad699c061856a214e48cfb16ace7d0c",
|
||||
"sha256:8958befc139ac4e3f16d44ec386c490ea2121ed8322f4956f83dd9cad8e9b922",
|
||||
"sha256:a51e51eecdac39a50ede4aeed86dbef4776e3b73347d31d6ad0bc9648ba36049",
|
||||
"sha256:aeac7c9397480450016bc4a840eefbfa8ca68afc1e90648aa6efbfe699e5d3bb",
|
||||
"sha256:aef23aed9d4017cc74d37f703d57ce254efb4c8a6a01905f40f539220348abf9",
|
||||
"sha256:af1f5e997dd1ee71fb6eb4a0fb6921bf7a778f4b62f1f7ef0d7445ecce9155d6",
|
||||
"sha256:b5eeaf4b5ef38fab225429478caf71f44d4a0b44d39a1aa4d4422cda23a9821b",
|
||||
"sha256:d25f5cca0f3af6d425c9496953445bf5b288bb5b71afc2b8308ad194b714c159",
|
||||
"sha256:d81be22d5d462b96a2aa5c512f741255ba182995efb0114e5a946fe254148df1",
|
||||
"sha256:e935a166a5f4c02afe3f7e4ce92ce5a786f75c6caa0c4ce09c922541d74b77e8",
|
||||
"sha256:ef3a55b16c6450574734db92e0a3aca283290889934a23f7498eaf417e3af9f0"
|
||||
],
|
||||
"version": "==2020.9.27"
|
||||
"version": "==2020.10.15"
|
||||
},
|
||||
"requirements-detector": {
|
||||
"hashes": [
|
||||
@ -1737,12 +1745,12 @@
|
||||
"secure"
|
||||
],
|
||||
"hashes": [
|
||||
"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a",
|
||||
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"
|
||||
"sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2",
|
||||
"sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"
|
||||
],
|
||||
"index": "pypi",
|
||||
"markers": null,
|
||||
"version": "==1.25.10"
|
||||
"version": "==1.25.11"
|
||||
},
|
||||
"wrapt": {
|
||||
"hashes": [
|
||||
|
||||
@@ -6,8 +6,9 @@ As passbook is currently in a pre-stable, only the latest "stable" version is su

  | Version | Supported |
  | -------- | ------------------ |
  | 0.9.x | :white_check_mark: |
  | 0.10.x | :white_check_mark: |
  | 0.11.x | :white_check_mark: |
+ | 0.12.x | :white_check_mark: |

  ## Reporting a Vulnerability
@@ -89,7 +89,7 @@ stages:
  versionSpec: '3.8'
  - task: CmdLine@2
  inputs:
- script: npm install -g pyright@1.1.75
+ script: npm install -g pyright@1.1.79
  - task: CmdLine@2
  inputs:
  script: |
@@ -169,6 +169,13 @@ stages:
  dockerComposeFile: 'scripts/ci.docker-compose.yml'
  action: 'Run services'
  buildImages: false
+ - task: CmdLine@2
+ displayName: Install K3d and prepare
+ inputs:
+ script: |
+ wget -q -O - https://raw.githubusercontent.com/rancher/k3d/main/install.sh | bash
+ k3d cluster create
+ k3d kubeconfig write -o ~/.kube/config --overwrite
  - task: CmdLine@2
  inputs:
  script: |
@@ -178,6 +185,7 @@ stages:
  displayName: Run full test suite
  inputs:
  script: |
+ export PB_TEST_K8S=true
  pipenv run coverage run ./manage.py test passbook -v 3
  - task: CmdLine@2
  inputs:
@@ -203,6 +211,13 @@ stages:
  dockerComposeFile: 'scripts/ci.docker-compose.yml'
  action: 'Run services'
  buildImages: false
+ - task: CmdLine@2
+ displayName: Install K3d and prepare
+ inputs:
+ script: |
+ wget -q -O - https://raw.githubusercontent.com/rancher/k3d/main/install.sh | bash
+ k3d cluster create
+ k3d kubeconfig write -o ~/.kube/config --overwrite
  - task: CmdLine@2
  inputs:
  script: |
@@ -225,6 +240,7 @@ stages:
  displayName: Run full test suite
  inputs:
  script: |
+ export PB_TEST_K8S=true
  pipenv run coverage run ./manage.py test e2e -v 3 --failfast
  - task: CmdLine@2
  condition: always()
@@ -232,6 +248,7 @@ stages:
  inputs:
  script: |
  docker stop $(docker ps -aq)
+ docker container prune -f
  - task: CmdLine@2
  displayName: Prepare unittests and coverage for upload
  inputs:
@@ -12,18 +12,14 @@ services:
  - POSTGRES_PASSWORD=${PG_PASS:-thisisnotagoodpassword}
  - POSTGRES_USER=passbook
  - POSTGRES_DB=passbook
- labels:
- - traefik.enable=false
  env_file:
  - .env
  redis:
  image: redis
  networks:
  - internal
- labels:
- - traefik.enable=false
  server:
- image: beryju/passbook:${PASSBOOK_TAG:-0.11.0-stable}
+ image: beryju/passbook:${PASSBOOK_TAG:-0.12.3-stable}
  command: server
  environment:
  PASSBOOK_REDIS__HOST: redis
@@ -34,18 +30,20 @@ services:
  networks:
  - internal
  labels:
- - traefik.port=8000
- - traefik.docker.network=internal
- - traefik.frontend.rule=PathPrefix:/
+ traefik.enable: 'true'
+ traefik.docker.network: internal
+ traefik.http.routers.app-router.rule: PathPrefix(`/`)
+ traefik.http.routers.app-router.service: app-service
+ traefik.http.routers.app-router.tls: 'true'
+ traefik.http.services.app-service.loadbalancer.healthcheck.hostname: passbook-healthcheck-host
+ traefik.http.services.app-service.loadbalancer.server.port: '8000'
  env_file:
  - .env
  worker:
- image: beryju/passbook:${PASSBOOK_TAG:-0.11.0-stable}
+ image: beryju/passbook:${PASSBOOK_TAG:-0.12.3-stable}
  command: worker
  networks:
  - internal
- labels:
- - traefik.enable=false
  environment:
  PASSBOOK_REDIS__HOST: redis
  PASSBOOK_POSTGRESQL__HOST: postgresql
@@ -56,16 +54,26 @@ services:
  env_file:
  - .env
  static:
- image: beryju/passbook-static:${PASSBOOK_TAG:-0.11.0-stable}
+ image: beryju/passbook-static:${PASSBOOK_TAG:-0.12.3-stable}
  networks:
  - internal
  labels:
- - traefik.frontend.rule=PathPrefix:/static, /robots.txt, /favicon.ico
- - traefik.port=80
- - traefik.docker.network=internal
+ traefik.enable: 'true'
+ traefik.docker.network: internal
+ traefik.http.routers.static-router.rule: PathPrefix(`/static`, `/robots.txt`, `/favicon.ico`)
+ traefik.http.routers.static-router.tls: 'true'
+ traefik.http.routers.static-router.service: static-service
+ traefik.http.services.static-service.loadbalancer.healthcheck.path: /
+ traefik.http.services.static-service.loadbalancer.server.port: '80'
  traefik:
- image: traefik:1.7
- command: --api --docker --defaultentrypoints=https --entryPoints='Name:http Address::80 Redirect.EntryPoint:https' --entryPoints='Name:https Address::443 TLS'
+ image: traefik:2.3
+ command:
+ - "--accesslog=true"
+ - "--api.insecure=true"
+ - "--providers.docker=true"
+ - "--providers.docker.exposedbydefault=false"
+ - "--entrypoints.http.address=:80"
+ - "--entrypoints.https.address=:443"
  volumes:
  - /var/run/docker.sock:/var/run/docker.sock:ro
  ports:
@@ -101,7 +101,7 @@
  {
  "identifiers": {
  "pk": "975d5502-1e22-4d10-b560-fbc5bd70ff4d",
- "name": "default-password-change-prompt"
+ "name": "Change your password"
  },
  "model": "passbook_stages_prompt.promptstage",
  "attrs": {
Binary file not shown (image changed; before: 342 KiB, after: 373 KiB).
@@ -4,8 +4,8 @@ This installation method is for test-setups and small-scale productive setups.

  ## Prerequisites

- - docker
- - docker-compose
+ - docker
+ - docker-compose

  ## Install

@@ -13,7 +13,7 @@ Download the latest `docker-compose.yml` from [here](https://raw.githubuserconte

  To optionally enable error-reporting, run `echo PASSBOOK_ERROR_REPORTING__ENABLED=true >> .env`

- To optionally deploy a different version run `echo PASSBOOK_TAG=0.11.0-stable >> .env`
+ To optionally deploy a different version run `echo PASSBOOK_TAG=0.12.3-stable >> .env`

  If this is a fresh passbook install run the following commands to generate a password:
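For orientation only, here is a sketch of those optional `.env` additions taken together; the `PG_PASS` line is an assumption based on the variable referenced in `docker-compose.yml`, and `pwgen` is just one way to produce a random value:

```
# Hypothetical .env setup combining the optional commands above.
# PG_PASS is the variable docker-compose.yml reads for the PostgreSQL password;
# the pwgen call is an assumption - any long random string works.
echo PASSBOOK_ERROR_REPORTING__ENABLED=true >> .env
echo PASSBOOK_TAG=0.12.3-stable >> .env
echo "PG_PASS=$(pwgen 40 1)" >> .env
```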
@@ -11,29 +11,32 @@ This installation automatically applies database migrations on startup. After th
  image:
  name: beryju/passbook
  name_static: beryju/passbook-static
- tag: 0.11.0-stable
+ tag: 0.12.3-stable

  nameOverride: ""

  serverReplicas: 1
  workerReplicas: 1

+ # Enable the Kubernetes integration which lets passbook deploy outposts into kubernetes
+ kubernetesIntegration: true
+
  config:
  # Optionally specify fixed secret_key, otherwise generated automatically
- # secret_key: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
+ # secretKey: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
  # Enable error reporting
- error_reporting:
+ errorReporting:
  enabled: false
  environment: customer
- send_pii: false
+ sendPii: false
  # Log level used by web and worker
  # Can be either debug, info, warning, error
- log_level: warning
+ logLevel: warning

  # Enable Database Backups to S3
  # backup:
- # access_key: access-key
- # secret_key: secret-key
+ # accessKey: access-key
+ # secretKey: secret-key
  # bucket: s3-bucket
  # region: eu-central-1
  # host: s3-host
@@ -16,6 +16,10 @@ From https://en.wikipedia.org/wiki/VCenter

  This requires VMware vCenter 7.0.0 or newer.

+ !!! note
+
+ It seems that the vCenter still needs to be joined to the Active Directory Domain, otherwise group membership does not work correctly. We're working on a fix for this, for the meantime your vCenter should be part of your Domain.
+
  ## Preparation

  The following placeholders will be used:
docs/integrations/sources/active-directory/01_user_create.png (new binary file, 26 KiB; not shown)
docs/integrations/sources/active-directory/02_delegate.png (new binary file, 31 KiB; not shown)
docs/integrations/sources/active-directory/03_pb_status.png (new binary file, 64 KiB; not shown)
docs/integrations/sources/active-directory/index.md (55 changes, new file)
@@ -0,0 +1,55 @@
# Active Directory Integration

## Preparation

The following placeholders will be used:

- `ad.company` is the Name of the Active Directory domain.
- `passbook.company` is the FQDN of the passbook install.

## Active Directory Setup

1. Open Active Directory Users and Computers

2. Create a user in Active Directory, matching your naming scheme

![](./01_user_create.png)

3. Give the User a password, generated using for example `pwgen 64 1`.

4. Open the Delegation of Control Wizard by right-clicking the domain.

5. Select the passbook service user you've just created.

6. Ensure the "Reset user password and force password change at next logon" Option is checked.

![](./02_delegate.png)

## passbook Setup

In passbook, create a new LDAP Source in Administration -> Sources.

Use these settings:

- Server URI: `ldap://ad.company`

For passbook to be able to write passwords back to Active Directory, make sure to use `ldaps://`.

- Bind CN: `<name of your service user>@ad.company`
- Bind Password: The password you've given the user above
- Base DN: The base DN which you want passbook to sync
- Property Mappings: Select all and click the right arrow

The other settings might need to be adjusted based on the setup of your domain.

- Addition User/Group DN: Additional DN which is *prepended* to your Base DN for user synchronization.
- Addition Group DN: Additional DN which is *prepended* to your Base DN for group synchronization.
- User object filter: Which objects should be considered users.
- Group object filter: Which objects should be considered groups.
- User group membership field: Which user field saves the group membership
- Object uniqueness field: A user field which contains a unique Identifier
- Sync parent group: If enabled, all synchronized groups will be given this group as a parent.

After you save the source, a synchronization will start in the background. When it's done, you can see the summary on the System Tasks page.

![](./03_pb_status.png)
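Outside the documented steps, a quick way to sanity-check the bind settings above before saving the source is a plain `ldapsearch` query; every value below is a placeholder derived from the `ad.company` placeholders, not something prescribed by passbook:

```
# Hypothetical check of the service user's credentials and Base DN with
# OpenLDAP's ldapsearch; replace all placeholder values with your own.
ldapsearch \
  -H ldaps://ad.company \
  -D 'svc-passbook@ad.company' \
  -w 'the-password-from-step-3' \
  -b 'DC=ad,DC=company' \
  '(objectClass=user)' sAMAccountName
```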
@@ -6,6 +6,10 @@

  ### Backup

+ !!! notice
+
+ Local backups are **enabled** by default, and will be run daily at 00:00
+
  Local backups can be created by running the following command in your passbook installation directory

  ```
@@ -14,15 +18,6 @@ docker-compose run --rm worker backup

  This will dump the current database into the `./backups` folder. By default, the last 10 Backups are kept.

- To schedule these backups, use the following snippet in a crontab
-
- ```
- 0 0 * * * bash -c "cd <passbook install location> && docker-compose run --rm worker backup" >/dev/null
- ```
-
- !!! notice
-
- passbook does support automatic backups on a schedule, however this is currently not recommended, as there is no way to monitor these scheduled tasks.

  ### Restore

@@ -42,11 +37,7 @@ After you've restored the backup, it is recommended to restart all services with

  ### S3 Configuration

- !!! notice
-
- To trigger backups with S3 enabled, use the same commands as above.
-
- #### S3 Preparation
+ #### Preparation

  passbook expects the bucket you select to already exist. The IAM User given to passbook should have the following permissions

@@ -101,11 +92,11 @@ Simply enable these options in your values.yaml file

  ```yaml
  # Enable Database Backups to S3
  backup:
- access_key: access-key
- secret_key: secret-key
+ accessKey: access-key
+ secretKey: secret-key
  bucket: s3-bucket
  region: eu-central-1
  host: s3-host
  ```

- Afterwards, run a `helm upgrade` to update the ConfigMap. Because passbook-scheduled backups are not recommended currently, a Kubernetes CronJob is created that runs the backup daily.
+ Afterwards, run a `helm upgrade` to update the ConfigMap. Backups are done automatically as above, at 00:00 every day.
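A minimal sketch of that `helm upgrade` step, assuming the release is called `passbook` and the chart is the in-repo `helm/` directory (both assumptions; adjust to your installation):

```
# Hypothetical invocation to re-render the ConfigMap with the new backup values.
# Release name "passbook" and chart path ./helm are assumptions.
helm upgrade passbook ./helm -f values.yaml
```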
docs/upgrading/to-0.11.md (20 changes, new file)
@@ -0,0 +1,20 @@
# Upgrading to 0.11

This update brings these headline features:

- Add Backup and Restore, currently only externally schedulable, documented [here](https://passbook.beryju.org/maintenance/backups/)
- New Admin Dashboard with more metrics and Charts

    Shows successful and failed logins from the last 24 hours, as well as the most used applications
- Add search to all table views
- Outpost now supports a Docker Controller, which installs the Outpost on the same host as passbook, updates and manages it
- Add Token Identifier

    Tokens now have an identifier which is used to reference them, so the Primary key is not shown in URLs
- `core/applications/list` API now shows applications the user has access to via policies

## Upgrading

This upgrade can be done as with minor upgrades; the only external change is the new docker-compose file, which enables the Docker Integration for Outposts. To use this feature, please download the latest docker-compose from [here](https://raw.githubusercontent.com/BeryJu/passbook/master/docker-compose.yml).

Afterwards, you can simply run `docker-compose up -d` and then the normal upgrade command of `docker-compose run --rm server migrate`.
docs/upgrading/to-0.12.md (63 changes, new file)
@@ -0,0 +1,63 @@
# Upgrading to 0.12

This update brings these headline features:

- Rewrite Outpost state Logic, which now supports multiple concurrent Outpost instances.
- Add Kubernetes Integration for Outposts, which deploys and maintains Outposts with High Availability in a Kubernetes Cluster
- Add System Task Overview to see all background tasks, their status, the log output, and retry them
- Alerts now disappear automatically
- Audit Logs are now searchable
- Users can now create their own Tokens to access the API
- docker-compose deployment now uses traefik 2.3

Fixes:

- Fix high CPU Usage of the proxy when Websocket connections fail

## Upgrading

### docker-compose

Docker-compose users should download the latest docker-compose file from [here](https://raw.githubusercontent.com/BeryJu/passbook/master/docker-compose.yml). This includes the new traefik 2.3.

Afterwards, you can simply run `docker-compose up -d` and then the normal upgrade command of `docker-compose run --rm server migrate`.

### Kubernetes

For Kubernetes users, there are some changes to the helm values.

The values change from

```yaml
config:
  # Optionally specify fixed secret_key, otherwise generated automatically
  # secret_key: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
  # Enable error reporting
  error_reporting:
    enabled: false
    environment: customer
    send_pii: false
  # Log level used by web and worker
  # Can be either debug, info, warning, error
  log_level: warning
```

to

```yaml
config:
  # Optionally specify fixed secret_key, otherwise generated automatically
  # secretKey: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
  # Enable error reporting
  errorReporting:
    enabled: false
    environment: customer
    sendPii: false
  # Log level used by web and worker
  # Can be either debug, info, warning, error
  logLevel: warning
```

in order to be consistent with the rest of the settings.

There is also a new setting called `kubernetesIntegration`, which controls the Kubernetes integration for passbook. When enabled (the default), a Service Account is created, which allows passbook to deploy and update Outposts.
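Pulled together from the steps above, the docker-compose upgrade amounts to the following sequence; the working directory is assumed to be your existing passbook install directory:

```
# Consolidated sketch of the docker-compose upgrade steps described above.
wget -O docker-compose.yml https://raw.githubusercontent.com/BeryJu/passbook/master/docker-compose.yml
docker-compose up -d
docker-compose run --rm server migrate
```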
@@ -1,4 +1,5 @@
  """Proxy and Outpost e2e tests"""
+ from dataclasses import asdict
  from sys import platform
  from time import sleep
  from typing import Any, Dict, Optional
@@ -14,7 +15,12 @@ from e2e.utils import USER, SeleniumTestCase
  from passbook import __version__
  from passbook.core.models import Application
  from passbook.flows.models import Flow
- from passbook.outposts.models import Outpost, OutpostDeploymentType, OutpostType
+ from passbook.outposts.models import (
+     Outpost,
+     OutpostConfig,
+     OutpostDeploymentType,
+     OutpostType,
+ )
  from passbook.providers.proxy.models import ProxyProvider

@@ -46,7 +52,7 @@ class TestProviderProxy(SeleniumTestCase):
      auto_remove=True,
      environment={
          "PASSBOOK_HOST": self.live_server_url,
-         "PASSBOOK_TOKEN": outpost.token.token_uuid.hex,
+         "PASSBOOK_TOKEN": outpost.token.key,
      },
  )
  return container
@@ -79,8 +85,10 @@ class TestProviderProxy(SeleniumTestCase):
  # Wait until outpost healthcheck succeeds
  healthcheck_retries = 0
  while healthcheck_retries < 50:
-     if outpost.deployment_health:
-         break
+     if len(outpost.state) > 0:
+         state = outpost.state[0]
+         if state.last_seen:
+             break
      healthcheck_retries += 1
      sleep(0.5)

@@ -102,27 +110,6 @@ class TestProviderProxy(SeleniumTestCase):
  class TestProviderProxyConnect(ChannelsLiveServerTestCase):
      """Test Proxy connectivity over websockets"""

-     proxy_container: Container
-
-     def tearDown(self) -> None:
-         self.proxy_container.kill()
-         super().tearDown()
-
-     def start_proxy(self, outpost: Outpost) -> Container:
-         """Start proxy container based on outpost created"""
-         client: DockerClient = from_env()
-         container = client.containers.run(
-             image=f"beryju/passbook-proxy:{__version__}",
-             detach=True,
-             network_mode="host",
-             auto_remove=True,
-             environment={
-                 "PASSBOOK_HOST": self.live_server_url,
-                 "PASSBOOK_TOKEN": outpost.token.token_uuid.hex,
-             },
-         )
-         return container
-
      def test_proxy_connectivity(self):
          """Test proxy connectivity over websocket"""
          SeleniumTestCase().apply_default_data()
@@ -142,20 +129,27 @@ class TestProviderProxyConnect(ChannelsLiveServerTestCase):
  outpost: Outpost = Outpost.objects.create(
      name="proxy_outpost",
      type=OutpostType.PROXY,
-     deployment_type=OutpostDeploymentType.CUSTOM,
+     deployment_type=OutpostDeploymentType.DOCKER,
+     _config=asdict(
+         OutpostConfig(passbook_host=self.live_server_url, log_level="debug")
+     ),
  )
  outpost.providers.add(proxy)
  outpost.save()

  self.proxy_container = self.start_proxy(outpost)

  # Wait until outpost healthcheck succeeds
  healthcheck_retries = 0
  while healthcheck_retries < 50:
-     if outpost.deployment_health:
-         break
+     if len(outpost.state) > 0:
+         state = outpost.state[0]
+         if state.last_seen and state.version:
+             break
      healthcheck_retries += 1
      sleep(0.5)

- self.assertIsNotNone(outpost.deployment_health)
- self.assertEqual(outpost.deployment_version.get("version"), __version__)
+ state = outpost.state
+ self.assertTrue(len(state), 1)
+ self.assertEqual(state[0].version, __version__)

  # Make sure to delete the outpost to remove the container
  outpost.delete()
@@ -317,6 +317,7 @@ class TestSourceOAuth1(SeleniumTestCase):
  self.driver.find_element(By.CSS_SELECTOR, "[name='confirm']").click()

  # Wait until we've loaded the user info page
+ sleep(2)
  self.wait.until(ec.presence_of_element_located((By.ID, "user-settings")))
  self.driver.get(self.url("passbook_core:user-settings"))
@@ -1,8 +1,8 @@
  apiVersion: v2
- appVersion: "0.11.0-stable"
+ appVersion: "0.12.3-stable"
  description: A Helm chart for passbook.
  name: passbook
- version: "0.11.0-stable"
+ version: "0.12.3-stable"
  icon: https://github.com/BeryJu/passbook/blob/master/docs/images/logo.svg
  dependencies:
  - name: postgresql
@@ -7,14 +7,14 @@ data:
  POSTGRESQL__NAME: "{{ .Values.postgresql.postgresqlDatabase }}"
  POSTGRESQL__USER: "{{ .Values.postgresql.postgresqlUsername }}"
  {{- if .Values.backup }}
- POSTGRESQL__S3_BACKUP__ACCESS_KEY: "{{ .Values.backup.access_key }}"
- POSTGRESQL__S3_BACKUP__SECRET_KEY: "{{ .Values.backup.secret_key }}"
+ POSTGRESQL__S3_BACKUP__ACCESS_KEY: "{{ .Values.backup.accessKey }}"
+ POSTGRESQL__S3_BACKUP__SECRET_KEY: "{{ .Values.backup.secretKey }}"
  POSTGRESQL__S3_BACKUP__BUCKET: "{{ .Values.backup.bucket }}"
  POSTGRESQL__S3_BACKUP__REGION: "{{ .Values.backup.region }}"
  POSTGRESQL__S3_BACKUP__HOST: "{{ .Values.backup.host }}"
  {{- end}}
  REDIS__HOST: "{{ .Release.Name }}-redis-master"
- ERROR_REPORTING__ENABLED: "{{ .Values.config.error_reporting.enabled }}"
- ERROR_REPORTING__ENVIRONMENT: "{{ .Values.config.error_reporting.environment }}"
- ERROR_REPORTING__SEND_PII: "{{ .Values.config.error_reporting.send_pii }}"
- LOG_LEVEL: "{{ .Values.config.log_level }}"
+ ERROR_REPORTING__ENABLED: "{{ .Values.config.errorReporting.enabled }}"
+ ERROR_REPORTING__ENVIRONMENT: "{{ .Values.config.errorReporting.environment }}"
+ ERROR_REPORTING__SEND_PII: "{{ .Values.config.errorReporting.sendPii }}"
+ LOG_LEVEL: "{{ .Values.config.logLevel }}"
(file deleted)
@@ -1,42 +0,0 @@
{{- if .Values.backup }}
apiVersion: batch/v1beta1
kind: CronJob
metadata:
name: {{ include "passbook.fullname" . }}-backup
labels:
app.kubernetes.io/name: {{ include "passbook.name" . }}
helm.sh/chart: {{ include "passbook.chart" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
spec:
schedule: "0 0 * * *"
jobTemplate:
spec:
template:
spec:
restartPolicy: Never
containers:
- name: {{ .Chart.Name }}
image: "{{ .Values.image.name }}:{{ .Values.image.tag }}"
args: [server]
envFrom:
- configMapRef:
name: {{ include "passbook.fullname" . }}-config
prefix: PASSBOOK_
env:
- name: PASSBOOK_SECRET_KEY
valueFrom:
secretKeyRef:
name: "{{ include "passbook.fullname" . }}-secret-key"
key: "secret_key"
- name: PASSBOOK_REDIS__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-redis"
key: "redis-password"
- name: PASSBOOK_POSTGRESQL__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-postgresql"
key: "postgresql-password"
{{- end}}
@@ -5,8 +5,8 @@ metadata:
  name: {{ include "passbook.fullname" . }}-secret-key
  data:
  monitoring_username: bW9uaXRvcg== # monitor in base64
- {{- if .Values.config.secret_key }}
- secret_key: {{ .Values.config.secret_key | b64enc | quote }}
+ {{- if .Values.config.secretKey }}
+ secret_key: {{ .Values.config.secretKey | b64enc | quote }}
  {{- else }}
  secret_key: {{ randAlphaNum 50 | b64enc | quote}}
  {{- end }}
helm/templates/service-account.yaml (64 changes, new file)
@@ -0,0 +1,64 @@
{{- if .Values.kubernetesIntegration }}
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: {{ include "passbook.fullname" . }}-sa-role
rules:
- apiGroups:
- ""
resources:
- secrets
- services
verbs:
- "get"
- "create"
- "delete"
- "read"
- "patch"
- apiGroups:
- "extensions"
- "apps"
resources:
- "deployments"
verbs:
- "get"
- "create"
- "delete"
- "read"
- "patch"
- apiGroups:
- "extensions"
- "networking.k8s.io"
resources:
- "ingresses"
verbs:
- "get"
- "create"
- "delete"
- "read"
- "patch"
- apiGroups:
- ""
resources:
- namespaces
verbs:
- list
---
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "passbook.fullname" . }}-sa
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: {{ include "passbook.fullname" . }}-sa-role-binding
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: {{ include "passbook.fullname" . }}-sa-role
subjects:
- kind: ServiceAccount
name: {{ include "passbook.fullname" . }}-sa
namespace: {{ .Release.Namespace }}
{{- end }}
@@ -100,14 +100,14 @@ spec:
  port: http
  httpHeaders:
  - name: Host
- value: kubernetes-healthcheck-host
+ value: passbook-healthcheck-host
  readinessProbe:
  httpGet:
  path: /
  port: http
  httpHeaders:
  - name: Host
- value: kubernetes-healthcheck-host
+ value: passbook-healthcheck-host
  resources:
  requests:
  cpu: 100m
@@ -22,6 +22,9 @@ spec:
  app.kubernetes.io/instance: {{ .Release.Name }}
  k8s.passbook.beryju.org/component: worker
  spec:
+ {{- if .Values.kubernetesIntegration }}
+ serviceAccountName: {{ include "passbook.fullname" . }}-sa
+ {{- end }}
  affinity:
  podAntiAffinity:
  preferredDuringSchedulingIgnoredDuringExecution:
helm/values.test.yaml (21 changes, new file)
@@ -0,0 +1,21 @@
image:
tag: gh-master

serverReplicas: 1
workerReplicas: 1

config:
# Log level used by web and worker
# Can be either debug, info, warning, error
logLevel: debug

ingress:
hosts:
- passbook.127.0.0.1.nip.io

# These values influence the bundled postgresql and redis charts, but are also used by passbook to connect
postgresql:
postgresqlPassword: EK-5jnKfjrGRm<77

redis:
password: password
@@ -4,29 +4,32 @@
  image:
  name: beryju/passbook
  name_static: beryju/passbook-static
- tag: 0.11.0-stable
+ tag: 0.12.3-stable

  nameOverride: ""

  serverReplicas: 1
  workerReplicas: 1

+ # Enable the Kubernetes integration which lets passbook deploy outposts into kubernetes
+ kubernetesIntegration: true
+
  config:
  # Optionally specify fixed secret_key, otherwise generated automatically
- # secret_key: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
+ # secretKey: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
  # Enable error reporting
- error_reporting:
+ errorReporting:
  enabled: false
  environment: customer
- send_pii: false
+ sendPii: false
  # Log level used by web and worker
  # Can be either debug, info, warning, error
- log_level: warning
+ logLevel: warning

  # Enable Database Backups to S3
  # backup:
- # access_key: access-key
- # secret_key: secret-key
+ # accessKey: access-key
+ # secretKey: secret-key
  # bucket: s3-bucket
  # region: eu-central-1
  # host: s3-host
@@ -4,7 +4,7 @@ printf '{"event": "Bootstrap completed", "level": "info", "logger": "bootstrap",
if [[ "$1" == "server" ]]; then
    gunicorn -c /lifecycle/gunicorn.conf.py passbook.root.asgi:application
elif [[ "$1" == "worker" ]]; then
    celery -A passbook.root.celery worker --autoscale 10,3 -E -B -s /tmp/celerybeat-schedule -Q passbook,passbook_scheduled
    celery -A passbook.root.celery worker --autoscale 3,1 -E -B -s /tmp/celerybeat-schedule -Q passbook,passbook_scheduled
elif [[ "$1" == "migrate" ]]; then
    # Run system migrations first, run normal migrations after
    python -m lifecycle.migrate
@@ -47,7 +47,9 @@ if __name__ == "__main__":
        # pyright: reportGeneralTypeIssues=false
        spec.loader.exec_module(mod)

        for _, sub in getmembers(mod, isclass):
        for name, sub in getmembers(mod, isclass):
            if name != "Migration":
                continue
            migration = sub(curr, conn)
            if migration.needs_migration():
                LOGGER.info("Migration needs to be applied", migration=sub)

@@ -25,7 +25,7 @@ delete from django_migrations where app = 'passbook_stages_password' and
name = '0002_passwordstage_change_flow';"""


class To010Migration(BaseMigration):
class Migration(BaseMigration):
    def needs_migration(self) -> bool:
        self.cur.execute(
            "select * from information_schema.tables where table_name='oidc_provider_client'"
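Taken together, the two hunks above mean lifecycle/migrate.py now discovers system migrations by class name: each module must expose a class literally called `Migration`, which is why the 0.10 migration is renamed. A rough sketch of such a module follows; the `BaseMigration` import path and the `run()` method are assumptions, since only `needs_migration()` is visible in this diff.

```python
# Hypothetical system-migration module. BaseMigration, self.cur and the
# constructor signature follow the pattern visible in the diff; the import
# path and run() are assumptions.
from lifecycle.migrate import BaseMigration


class Migration(BaseMigration):
    def needs_migration(self) -> bool:
        self.cur.execute(
            "select * from information_schema.tables where table_name='example_table'"
        )
        return bool(self.cur.rowcount)

    def run(self):
        # Assumed counterpart to needs_migration(); applies the change.
        self.cur.execute("drop table example_table;")
```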
||||
mkdocs.yml (24 changed lines)
@@ -47,22 +47,26 @@ nav:
      - Overview: policies/index.md
      - Expression: policies/expression.md
  - Integrations:
      - as Source:
          - Active Directory: integrations/sources/active-directory/index.md
      - as Provider:
          - Amazon Web Services: integrations/services/aws/index.md
          - GitLab: integrations/services/gitlab/index.md
          - Rancher: integrations/services/rancher/index.md
          - Harbor: integrations/services/harbor/index.md
          - Sentry: integrations/services/sentry/index.md
          - Ansible Tower/AWX: integrations/services/tower-awx/index.md
          - VMware vCenter: integrations/services/vmware-vcenter/index.md
          - Ubuntu Landscape: integrations/services/ubuntu-landscape/index.md
          - Sonarr: integrations/services/sonarr/index.md
          - Tautulli: integrations/services/tautulli/index.md
      - Amazon Web Services: integrations/services/aws/index.md
      - GitLab: integrations/services/gitlab/index.md
      - Rancher: integrations/services/rancher/index.md
      - Harbor: integrations/services/harbor/index.md
      - Sentry: integrations/services/sentry/index.md
      - Ansible Tower/AWX: integrations/services/tower-awx/index.md
      - VMware vCenter: integrations/services/vmware-vcenter/index.md
      - Ubuntu Landscape: integrations/services/ubuntu-landscape/index.md
      - Sonarr: integrations/services/sonarr/index.md
      - Tautulli: integrations/services/tautulli/index.md
  - Maintenance:
      - Backups: maintenance/backups/index.md
  - Upgrading:
      - to 0.9: upgrading/to-0.9.md
      - to 0.10: upgrading/to-0.10.md
      - to 0.11: upgrading/to-0.11.md
      - to 0.12: upgrading/to-0.12.md
  - Troubleshooting:
      - Access problems: troubleshooting/access.md

@@ -1,2 +1,2 @@
"""passbook"""
__version__ = "0.11.0-stable"
__version__ = "0.12.3-stable"

@@ -1,7 +1,7 @@
"""passbook administration overview"""
from django.core.cache import cache
from django.http import response
from drf_yasg.utils import swagger_auto_schema
from drf_yasg2.utils import swagger_auto_schema
from rest_framework.fields import SerializerMethodField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request

@@ -9,7 +9,7 @@ from django.db.models.fields import DurationField
from django.db.models.functions import ExtractHour
from django.http import response
from django.utils.timezone import now
from drf_yasg.utils import swagger_auto_schema
from drf_yasg2.utils import swagger_auto_schema
from rest_framework.fields import SerializerMethodField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
|
||||
64
passbook/admin/api/tasks.py (new file, 64 lines)
@@ -0,0 +1,64 @@
||||
"""Tasks API"""
|
||||
from importlib import import_module
|
||||
|
||||
from django.contrib import messages
|
||||
from django.http.response import Http404
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_yasg2.utils import swagger_auto_schema
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import CharField, DateTimeField, IntegerField, ListField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import Serializer
|
||||
from rest_framework.viewsets import ViewSet
|
||||
|
||||
from passbook.lib.tasks import TaskInfo
|
||||
|
||||
|
||||
class TaskSerializer(Serializer):
|
||||
"""Serialize TaskInfo and TaskResult"""
|
||||
|
||||
task_name = CharField()
|
||||
task_description = CharField()
|
||||
task_finish_timestamp = DateTimeField(source="finish_timestamp")
|
||||
|
||||
status = IntegerField(source="result.status.value")
|
||||
messages = ListField(source="result.messages")
|
||||
|
||||
def create(self, request: Request) -> Response:
|
||||
raise NotImplementedError
|
||||
|
||||
def update(self, request: Request) -> Response:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class TaskViewSet(ViewSet):
|
||||
"""Read-only view set that returns all background tasks"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
|
||||
@swagger_auto_schema(responses={200: TaskSerializer(many=True)})
|
||||
def list(self, request: Request) -> Response:
|
||||
"""List current messages and pass into Serializer"""
|
||||
return Response(TaskSerializer(TaskInfo.all().values(), many=True).data)
|
||||
|
||||
@action(detail=True, methods=["post"])
|
||||
# pylint: disable=invalid-name
|
||||
def retry(self, request: Request, pk=None) -> Response:
|
||||
"""Retry task"""
|
||||
task = TaskInfo.by_name(pk)
|
||||
if not task:
|
||||
raise Http404
|
||||
task_module = import_module(task.task_call_module)
|
||||
task_func = getattr(task_module, task.task_call_func)
|
||||
task_func.delay(*task.task_call_args, **task.task_call_kwargs)
|
||||
messages.success(
|
||||
self.request,
|
||||
_("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
|
||||
)
|
||||
return Response(
|
||||
{
|
||||
"successful": True,
|
||||
}
|
||||
)
|
||||
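The retry action above is exposed later in this changeset through the API router as `admin/system_tasks`, and the new auth code accepts an API token as the password of an HTTP Basic header. A hedged usage sketch; the host, API prefix and token value are placeholders, not taken from this diff.

```python
# Sketch: re-schedule a background task via the new retry endpoint.
import requests

PASSBOOK_URL = "https://passbook.example.com"   # placeholder
API_BASE = f"{PASSBOOK_URL}/api/v2beta1"        # API prefix is an assumption
API_TOKEN = "<key of an API token>"             # placeholder

resp = requests.post(
    f"{API_BASE}/admin/system_tasks/clean_expired_models/retry/",
    auth=("", API_TOKEN),  # empty username, token as password
    timeout=10,
)
print(resp.json())  # {"successful": true} on success
```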
@ -3,6 +3,7 @@ from django.core.cache import cache
|
||||
from requests import RequestException, get
|
||||
from structlog import get_logger
|
||||
|
||||
from passbook.lib.tasks import MonitoredTask, TaskResult, TaskResultStatus
|
||||
from passbook.root.celery import CELERY_APP
|
||||
|
||||
LOGGER = get_logger()
|
||||
@ -10,8 +11,8 @@ VERSION_CACHE_KEY = "passbook_latest_version"
|
||||
VERSION_CACHE_TIMEOUT = 2 * 60 * 60 # 2 hours
|
||||
|
||||
|
||||
@CELERY_APP.task()
|
||||
def update_latest_version():
|
||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||
def update_latest_version(self: MonitoredTask):
|
||||
"""Update latest version info"""
|
||||
try:
|
||||
data = get(
|
||||
@ -19,5 +20,11 @@ def update_latest_version():
|
||||
).json()
|
||||
tag_name = data.get("tag_name")
|
||||
cache.set(VERSION_CACHE_KEY, tag_name.split("/")[1], VERSION_CACHE_TIMEOUT)
|
||||
except (RequestException, IndexError):
|
||||
self.set_status(
|
||||
TaskResult(
|
||||
TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"]
|
||||
)
|
||||
)
|
||||
except (RequestException, IndexError) as exc:
|
||||
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
|
||||
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
|
||||
|
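update_latest_version (and clean_expired_models further down) now run as MonitoredTask and record a TaskResult through self.set_status, which is what feeds the new System Tasks page and the admin/system_tasks API. passbook's actual MonitoredTask lives in passbook.lib.tasks and is not part of this diff; the sketch below only illustrates the reporting pattern in plain Python.

```python
# Plain-Python sketch of the set_status pattern; not passbook's actual
# MonitoredTask implementation (that class is not shown in this diff).
from dataclasses import dataclass, field
from enum import Enum
from typing import List, Optional


class TaskResultStatus(Enum):
    SUCCESSFUL = 1
    WARNING = 2
    ERROR = 4


@dataclass
class TaskResult:
    status: TaskResultStatus
    messages: List[str] = field(default_factory=list)

    def with_error(self, exc: Exception) -> "TaskResult":
        self.messages.append(str(exc))
        return self


class MonitoredTaskSketch:
    """Remembers the last TaskResult so a UI or API can display it later."""

    def __init__(self):
        self.last_result: Optional[TaskResult] = None

    def set_status(self, result: TaskResult):
        self.last_result = result


task = MonitoredTaskSketch()
try:
    raise ConnectionError("github.com unreachable")
except ConnectionError as exc:
    task.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
print(task.last_result)
```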
||||
@ -146,6 +146,12 @@
|
||||
{% trans 'Groups' %}
|
||||
</a>
|
||||
</li>
|
||||
<li class="pf-c-nav__item">
|
||||
<a href="{% url 'passbook_admin:tasks' %}"
|
||||
class="pf-c-nav__link {% is_active 'passbook_admin:tasks' %}">
|
||||
{% trans 'System Tasks' %}
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
</div>
|
||||
|
||||
@ -48,28 +48,41 @@
|
||||
{{ outpost.providers.all.select_subclasses|join:", " }}
|
||||
</span>
|
||||
</td>
|
||||
<td role="cell">
|
||||
{% with health=outpost.deployment_health %}
|
||||
{% if health %}
|
||||
<i class="fas fa-check pf-m-success"></i> {{ health|naturaltime }}
|
||||
{% else %}
|
||||
<i class="fas fa-times pf-m-danger"></i> Unhealthy
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
</td>
|
||||
<td role="cell">
|
||||
<span>
|
||||
{% with ver=outpost.deployment_version %}
|
||||
{% if not ver.version %}
|
||||
{% with states=outpost.state %}
|
||||
{% if states|length > 0 %}
|
||||
<td role="cell">
|
||||
{% for state in states %}
|
||||
<div>
|
||||
{% if state.last_seen %}
|
||||
<i class="fas fa-check pf-m-success"></i> {{ state.last_seen|naturaltime }}
|
||||
{% else %}
|
||||
<i class="fas fa-times pf-m-danger"></i> {% trans 'Unhealthy' %}
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endfor %}
|
||||
</td>
|
||||
<td role="cell">
|
||||
{% for state in states %}
|
||||
<div>
|
||||
{% if not state.version %}
|
||||
<i class="fas fa-question-circle"></i>
|
||||
{% elif state.version_outdated %}
|
||||
<i class="fas fa-times pf-m-danger"></i> {% blocktrans with is=state.version should=state.version_should %}{{ is }}, should be {{ should }}{% endblocktrans %}
|
||||
{% else %}
|
||||
<i class="fas fa-check pf-m-success"></i> {{ state.version }}
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endfor %}
|
||||
</td>
|
||||
{% else %}
|
||||
<td role="cell">
|
||||
<i class="fas fa-question-circle"></i>
|
||||
{% elif ver.outdated %}
|
||||
<i class="fas fa-times pf-m-danger"></i> {% blocktrans with is=ver.version should=ver.should %}{{ is }}, should be {{ should }}{% endblocktrans %}
|
||||
{% else %}
|
||||
<i class="fas fa-check pf-m-success"></i> {{ ver.version }}
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
</span>
|
||||
</td>
|
||||
</td>
|
||||
<td role="cell">
|
||||
<i class="fas fa-question-circle"></i>
|
||||
</td>
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
<td>
|
||||
<a class="pf-c-button pf-m-secondary" href="{% url 'passbook_admin:outpost-update' pk=outpost.pk %}?back={{ request.get_full_path }}">{% trans 'Edit' %}</a>
|
||||
<a class="pf-c-button pf-m-danger" href="{% url 'passbook_admin:outpost-delete' pk=outpost.pk %}?back={{ request.get_full_path }}">{% trans 'Delete' %}</a>
|
||||
|
||||
@ -56,19 +56,22 @@
|
||||
</div>
|
||||
|
||||
<div class="pf-c-card pf-c-card-aggregate pf-l-gallery__item pf-m-compact">
|
||||
<div class="pf-c-card__header">
|
||||
<div class="pf-c-card__header pf-l-flex pf-m-justify-content-space-between">
|
||||
<div class="pf-c-card__header-main">
|
||||
<i class="pf-icon pf-icon-plugged"></i> {% trans 'Providers' %}
|
||||
</div>
|
||||
<a href="{% url 'passbook_admin:providers' %}">
|
||||
<i class="fa fa-external-link-alt"> </i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="pf-c-card__body">
|
||||
{% if providers_without_application.exists %}
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
<i class="fa fa-exclamation-triangle"></i> {{ provider_count }}
|
||||
</p>
|
||||
<p>{% trans 'Warning: At least one Provider has no application assigned.' %}</p>
|
||||
{% else %}
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
<i class="fa fa-check-circle"></i> {{ provider_count }}
|
||||
</p>
|
||||
{% endif %}
|
||||
@ -76,19 +79,22 @@
|
||||
</div>
|
||||
|
||||
<div class="pf-c-card pf-c-card-aggregate pf-l-gallery__item pf-m-compact">
|
||||
<div class="pf-c-card__header">
|
||||
<div class="pf-c-card__header pf-l-flex pf-m-justify-content-space-between">
|
||||
<div class="pf-c-card__header-main">
|
||||
<i class="pf-icon pf-icon-infrastructure"></i> {% trans 'Policies' %}
|
||||
</div>
|
||||
<a href="{% url 'passbook_admin:policies' %}">
|
||||
<i class="fa fa-external-link-alt"> </i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="pf-c-card__body">
|
||||
{% if policies_without_binding %}
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
<i class="fa fa-exclamation-triangle"></i> {{ policy_count }}
|
||||
</p>
|
||||
<p>{% trans 'Policies without binding exist.' %}</p>
|
||||
{% else %}
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
<i class="fa fa-check-circle"></i> {{ policy_count }}
|
||||
</p>
|
||||
{% endif %}
|
||||
@ -96,26 +102,32 @@
|
||||
</div>
|
||||
|
||||
<div class="pf-c-card pf-c-card-aggregate pf-l-gallery__item pf-m-compact">
|
||||
<div class="pf-c-card__header">
|
||||
<div class="pf-c-card__header pf-l-flex pf-m-justify-content-space-between">
|
||||
<div class="pf-c-card__header-main">
|
||||
<i class="pf-icon pf-icon-user"></i> {% trans 'Users' %}
|
||||
</div>
|
||||
<a href="{% url 'passbook_admin:users' %}">
|
||||
<i class="fa fa-external-link-alt"> </i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="pf-c-card__body">
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
<i class="fa fa-check-circle"></i> {{ user_count }}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="pf-c-card pf-c-card-aggregate pf-l-gallery__item pf-m-compact">
|
||||
<div class="pf-c-card__header">
|
||||
<div class="pf-c-card__header pf-l-flex pf-m-justify-content-space-between">
|
||||
<div class="pf-c-card__header-main">
|
||||
<i class="pf-icon pf-icon-bundle"></i> {% trans 'Version' %}
|
||||
</div>
|
||||
<a href="https://github.com/BeryJu/passbook/releases" target="_blank">
|
||||
<i class="fa fa-external-link-alt"> </i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="pf-c-card__body">
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
{% if version >= version_latest %}
|
||||
<i class="fa fa-check-circle"></i> {{ version }}
|
||||
{% else %}
|
||||
@ -142,13 +154,13 @@
|
||||
</div>
|
||||
<fetch-fill-slot class="pf-c-card__body" url="{% url 'passbook_api:admin_overview-list' %}" key="worker_count">
|
||||
<div slot="value < 1">
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
<i class="fa fa-exclamation-triangle"></i> <span data-value></span>
|
||||
</p>
|
||||
<p>{% trans 'No workers connected.' %}</p>
|
||||
</div>
|
||||
<div slot="value >= 1">
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
<i class="fa fa-check-circle"></i> <span data-value></span>
|
||||
</p>
|
||||
</div>
|
||||
@ -162,74 +174,104 @@
|
||||
</fetch-fill-slot>
|
||||
</div>
|
||||
|
||||
<a class="pf-c-card pf-c-card-aggregate pf-l-gallery__item pf-m-hoverable pf-m-compact" data-target="modal" data-modal="clearCacheModalRoot">
|
||||
<div class="pf-c-card__header">
|
||||
<div class="pf-c-card pf-c-card-aggregate pf-l-gallery__item pf-m-compact">
|
||||
<div class="pf-c-card__header pf-l-flex pf-m-justify-content-space-between">
|
||||
<div class="pf-c-card__header-main">
|
||||
<i class="pf-icon pf-icon-server"></i> {% trans 'Cached Policies' %}
|
||||
</div>
|
||||
<a data-target="modal" data-modal="clearPolicyCache">
|
||||
<i class="fa fa-trash"> </i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="pf-c-card__body">
|
||||
{% if cached_policies < 1 %}
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
<i class="fa fa-exclamation-triangle"></i> {{ cached_policies }}
|
||||
</p>
|
||||
<p>{% trans 'No policies cached. Users may experience slow response times.' %}</p>
|
||||
{% else %}
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
<i class="fa fa-check-circle"></i> {{ cached_policies }}
|
||||
</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div class="pf-c-card pf-c-card-aggregate pf-l-gallery__item pf-m-compact">
|
||||
<div class="pf-c-card__header">
|
||||
<div class="pf-c-card__header pf-l-flex pf-m-justify-content-space-between">
|
||||
<div class="pf-c-card__header-main">
|
||||
<i class="pf-icon pf-icon-server"></i> {% trans 'Cached Flows' %}
|
||||
</div>
|
||||
<a data-target="modal" data-modal="clearFlowCache">
|
||||
<i class="fa fa-trash"> </i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="pf-c-card__body">
|
||||
{% if cached_flows < 1 %}
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
<span class="fa fa-exclamation-triangle"></span> {{ cached_flows }}
|
||||
</p>
|
||||
<p>{% trans 'No flows cached.' %}</p>
|
||||
{% else %}
|
||||
<p class="aggregate-status">
|
||||
<p class="pb-aggregate-card">
|
||||
<i class="fa fa-check-circle"></i> {{ cached_flows }}
|
||||
</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
<div class="pf-c-backdrop" id="clearCacheModalRoot" hidden>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<div class="pf-c-backdrop" id="clearPolicyCache" hidden>
|
||||
<div class="pf-l-bullseye">
|
||||
<div class="pf-c-modal-box pf-m-sm" role="dialog">
|
||||
<button data-modal-close class="pf-c-button pf-m-plain" type="button" aria-label="Close dialog">
|
||||
<i class="fas fa-times" aria-hidden="true"></i>
|
||||
</button>
|
||||
<div class="pf-c-modal-box__header">
|
||||
<h1 class="pf-c-title pf-m-2xl" id="modal-title">{% trans 'Clear Cache' %}?</h1>
|
||||
<h1 class="pf-c-title pf-m-2xl" id="modal-title">{% trans 'Clear Policy Cache' %}?</h1>
|
||||
</div>
|
||||
<div class="pf-c-modal-box__body" id="modal-description">
|
||||
<form method="post" id="clearForm">
|
||||
<form method="post" id="clear_policies">
|
||||
{% csrf_token %}
|
||||
<input type="hidden" name="clear">
|
||||
<input type="hidden" name="clear_policies">
|
||||
<p>
|
||||
{% blocktrans %}
|
||||
Are you sure you want to clear the cache? This includes all user sessions and all cached Policy results.
|
||||
Are you sure you want to clear the policy cache? This will cause all policies to be re-evaluated on their next usage.
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
<h3>
|
||||
{% blocktrans %}
|
||||
This will also log you out.
|
||||
{% endblocktrans %}
|
||||
</h3>
|
||||
</form>
|
||||
</div>
|
||||
<footer class="pf-c-modal-box__footer pf-m-align-left">
|
||||
<button form="clearForm" class="pf-c-button pf-m-primary" type="submit">{% trans 'Clear' %}</button>
|
||||
<button form="clear_policies" class="pf-c-button pf-m-primary" type="submit">{% trans 'Clear' %}</button>
|
||||
<button data-modal-close class="pf-c-button pf-m-link" type="button">{% trans 'Cancel' %}</button>
|
||||
</footer>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="pf-c-backdrop" id="clearFlowCache" hidden>
|
||||
<div class="pf-l-bullseye">
|
||||
<div class="pf-c-modal-box pf-m-sm" role="dialog">
|
||||
<button data-modal-close class="pf-c-button pf-m-plain" type="button" aria-label="Close dialog">
|
||||
<i class="fas fa-times" aria-hidden="true"></i>
|
||||
</button>
|
||||
<div class="pf-c-modal-box__header">
|
||||
<h1 class="pf-c-title pf-m-2xl" id="modal-title">{% trans 'Clear Flow Cache' %}?</h1>
|
||||
</div>
|
||||
<div class="pf-c-modal-box__body" id="modal-description">
|
||||
<form method="post" id="clear_flows">
|
||||
{% csrf_token %}
|
||||
<input type="hidden" name="clear_flows">
|
||||
<p>
|
||||
{% blocktrans %}
|
||||
Are you sure you want to clear the flow cache? This will cause all flows to be re-evaluated on their next usage.
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
</form>
|
||||
</div>
|
||||
<footer class="pf-c-modal-box__footer pf-m-align-left">
|
||||
<button form="clear_flows" class="pf-c-button pf-m-primary" type="submit">{% trans 'Clear' %}</button>
|
||||
<button data-modal-close class="pf-c-button pf-m-link" type="button">{% trans 'Cancel' %}</button>
|
||||
</footer>
|
||||
</div>
|
||||
@ -274,7 +316,6 @@ fetch("{% url 'passbook_api:admin_metrics-list' %}").then(r => r.json()).then(r
|
||||
const date = new Date();
|
||||
const delta = (date - values[index].value);
|
||||
const ago = Math.round(delta / 1000 / 3600);
|
||||
console.log(ago);
|
||||
return `${ago} Hours ago`;
|
||||
},
|
||||
autoSkip: true,
|
||||
@ -282,9 +323,6 @@ fetch("{% url 'passbook_api:admin_metrics-list' %}").then(r => r.json()).then(r
|
||||
}
|
||||
}],
|
||||
yAxes: [{
|
||||
ticks: {
|
||||
stepSize: 1
|
||||
},
|
||||
stacked: true,
|
||||
gridLines: {
|
||||
color: "rgba(0, 0, 0, 0)",
|
||||
|
||||
passbook/admin/templates/administration/task/list.html (new file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
{% extends "administration/base.html" %}
|
||||
|
||||
{% load i18n %}
|
||||
{% load humanize %}
|
||||
{% load passbook_utils %}
|
||||
|
||||
{% block content %}
|
||||
<section class="pf-c-page__main-section pf-m-light">
|
||||
<div class="pf-c-content">
|
||||
<h1>
|
||||
<i class="pf-icon pf-icon-automation"></i>
|
||||
{% trans 'System Tasks' %}
|
||||
</h1>
|
||||
<p>{% trans "Long-running operations which passbook executes in the background." %}</p>
|
||||
</div>
|
||||
</section>
|
||||
<section class="pf-c-page__main-section pf-m-no-padding-mobile">
|
||||
<div class="pf-c-card">
|
||||
<table class="pf-c-table pf-m-compact pf-m-grid-xl" role="grid">
|
||||
<thead>
|
||||
<tr role="row">
|
||||
<th role="columnheader" scope="col">{% trans 'Identifier' %}</th>
|
||||
<th role="columnheader" scope="col">{% trans 'Description' %}</th>
|
||||
<th role="columnheader" scope="col">{% trans 'Last Status' %}</th>
|
||||
<th role="columnheader" scope="col">{% trans 'Status' %}</th>
|
||||
<th role="columnheader" scope="col">{% trans 'Messages' %}</th>
|
||||
<th role="cell"></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody role="rowgroup">
|
||||
{% for task in object_list %}
|
||||
<tr role="row">
|
||||
<th role="columnheader">
|
||||
<pre>{{ task.task_name }}</pre>
|
||||
</th>
|
||||
<td role="cell">
|
||||
<span>
|
||||
{{ task.task_description }}
|
||||
</span>
|
||||
</td>
|
||||
<td role="cell">
|
||||
<span>
|
||||
{{ task.finish_timestamp|naturaltime }}
|
||||
</span>
|
||||
</td>
|
||||
<td role="cell">
|
||||
<span>
|
||||
{% if task.result.status == task_successful %}
|
||||
<i class="fas fa-check pf-m-success"></i> {% trans 'Successful' %}
|
||||
{% elif task.result.status == task_warning %}
|
||||
<i class="fas fa-exclamation-triangle pf-m-warning"></i> {% trans 'Warning' %}
|
||||
{% elif task.result.status == task_error %}
|
||||
<i class="fas fa-times pf-m-danger"></i> {% trans 'Error' %}
|
||||
{% else %}
|
||||
<i class="fas fa-question-circle"></i> {% trans 'Unknown' %}
|
||||
{% endif %}
|
||||
</span>
|
||||
</td>
|
||||
<td>
|
||||
{% for message in task.result.messages %}
|
||||
<div>
|
||||
{{ message }}
|
||||
</div>
|
||||
{% endfor %}
|
||||
</td>
|
||||
<td>
|
||||
<button is="action-button" class="pf-c-button pf-m-primary" url="{% url 'passbook_api:admin_system_tasks-retry' pk=task.task_name %}">
|
||||
{% trans 'Retry Task' %}
|
||||
</button>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</section>
|
||||
{% endblock %}
|
||||
passbook/admin/templates/administration/user/disable.html (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
{% extends "administration/base.html" %}
|
||||
|
||||
{% load i18n %}
|
||||
{% load passbook_utils %}
|
||||
|
||||
{% block content %}
|
||||
<section class="pf-c-page__main-section pf-m-light">
|
||||
<div class="pf-c-content">
|
||||
{% block above_form %}
|
||||
<h1>
|
||||
{% blocktrans with object_type=object|verbose_name %}
|
||||
Disable {{ object_type }}
|
||||
{% endblocktrans %}
|
||||
</h1>
|
||||
{% endblock %}
|
||||
</div>
|
||||
</section>
|
||||
<section class="pf-c-page__main-section">
|
||||
<div class="pf-l-stack">
|
||||
<div class="pf-l-stack__item">
|
||||
<div class="pf-c-card">
|
||||
<div class="pf-c-card__body">
|
||||
<form action="" method="post" class="pf-c-form">
|
||||
{% csrf_token %}
|
||||
<p>
|
||||
{% blocktrans with object_type=object|verbose_name name=object %}
|
||||
Are you sure you want to disable {{ object_type }} "{{ object }}"?
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
<div class="pf-c-form__group pf-m-action">
|
||||
<div class="pf-c-form__actions">
|
||||
<input class="pf-c-button pf-m-danger" type="submit" value="{% trans 'Disable' %}" />
|
||||
<a class="pf-c-button pf-m-secondary" href="{% back %}">{% trans "Back" %}</a>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
{% endblock %}
|
||||
@ -54,7 +54,11 @@
|
||||
</td>
|
||||
<td>
|
||||
<a class="pf-c-button pf-m-secondary" href="{% url 'passbook_admin:user-update' pk=user.pk %}?back={{ request.get_full_path }}">{% trans 'Edit' %}</a>
|
||||
<a class="pf-c-button pf-m-danger" href="{% url 'passbook_admin:user-delete' pk=user.pk %}?back={{ request.get_full_path }}">{% trans 'Delete' %}</a>
|
||||
{% if user.is_active %}
|
||||
<a class="pf-c-button pf-m-warning" href="{% url 'passbook_admin:user-disable' pk=user.pk %}?back={{ request.get_full_path }}">{% trans 'Disable' %}</a>
|
||||
{% else %}
|
||||
<a class="pf-c-button pf-m-primary" href="{% url 'passbook_admin:user-enable' pk=user.pk %}?back={{ request.get_full_path }}">{% trans 'Enable' %}</a>
|
||||
{% endif %}
|
||||
<a class="pf-c-button pf-m-tertiary" href="{% url 'passbook_admin:user-password-reset' pk=user.pk %}?back={{ request.get_full_path }}">{% trans 'Reset Password' %}</a>
|
||||
<a class="pf-c-button pf-m-tertiary" href="{% url 'passbook_core:impersonate-init' user_id=user.pk %}">{% trans 'Impersonate' %}</a>
|
||||
</td>
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
{% extends "administration/base.html" %}
|
||||
{% extends container_template|default:"administration/base.html" %}
|
||||
|
||||
{% load i18n %}
|
||||
{% load passbook_utils %}
|
||||
|
||||
@ -17,6 +17,7 @@ from passbook.admin.views import (
|
||||
stages_bindings,
|
||||
stages_invitations,
|
||||
stages_prompts,
|
||||
tasks,
|
||||
tokens,
|
||||
users,
|
||||
)
|
||||
@ -247,6 +248,10 @@ urlpatterns = [
|
||||
path("users/create/", users.UserCreateView.as_view(), name="user-create"),
|
||||
path("users/<int:pk>/update/", users.UserUpdateView.as_view(), name="user-update"),
|
||||
path("users/<int:pk>/delete/", users.UserDeleteView.as_view(), name="user-delete"),
|
||||
path(
|
||||
"users/<int:pk>/disable/", users.UserDisableView.as_view(), name="user-disable"
|
||||
),
|
||||
path("users/<int:pk>/enable/", users.UserEnableView.as_view(), name="user-enable"),
|
||||
path(
|
||||
"users/<int:pk>/reset/",
|
||||
users.UserPasswordResetView.as_view(),
|
||||
@ -307,4 +312,10 @@ urlpatterns = [
|
||||
outposts.OutpostDeleteView.as_view(),
|
||||
name="outpost-delete",
|
||||
),
|
||||
# Tasks
|
||||
path(
|
||||
"tasks/",
|
||||
tasks.TaskListView.as_view(),
|
||||
name="tasks",
|
||||
),
|
||||
]
|
||||
|
||||
@ -5,9 +5,9 @@ from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.db.models import Count
|
||||
from django.db.models.fields.json import KeyTextTransform
|
||||
from django.shortcuts import redirect, reverse
|
||||
from django.views.generic import TemplateView
|
||||
from packaging.version import LegacyVersion, Version, parse
|
||||
from structlog import get_logger
|
||||
|
||||
from passbook import __version__
|
||||
from passbook.admin.mixins import AdminRequiredMixin
|
||||
@ -16,6 +16,8 @@ from passbook.audit.models import Event, EventAction
|
||||
from passbook.core.models import Provider, User
|
||||
from passbook.policies.models import Policy
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class AdministrationOverviewView(AdminRequiredMixin, TemplateView):
|
||||
"""Overview View"""
|
||||
@ -24,9 +26,14 @@ class AdministrationOverviewView(AdminRequiredMixin, TemplateView):
|
||||
|
||||
def post(self, *args, **kwargs):
|
||||
"""Handle post (clear cache from modal)"""
|
||||
if "clear" in self.request.POST:
|
||||
cache.clear()
|
||||
return redirect(reverse("passbook_flows:default-authentication"))
|
||||
if "clear_policies" in self.request.POST:
|
||||
keys = cache.keys("policy_*")
|
||||
cache.delete_many(keys)
|
||||
LOGGER.debug("Cleared Policy cache", keys=len(keys))
|
||||
if "clear_flows" in self.request.POST:
|
||||
keys = cache.keys("flow_*")
|
||||
cache.delete_many(keys)
|
||||
LOGGER.debug("Cleared flow cache", keys=len(keys))
|
||||
return self.get(*args, **kwargs)
|
||||
|
||||
def get_latest_version(self) -> Union[LegacyVersion, Version]:
|
||||
|
||||
23
passbook/admin/views/tasks.py
Normal file
23
passbook/admin/views/tasks.py
Normal file
@ -0,0 +1,23 @@
|
||||
"""passbook Tasks List"""
|
||||
from typing import Any, Dict
|
||||
|
||||
from django.views.generic.base import TemplateView
|
||||
|
||||
from passbook.admin.mixins import AdminRequiredMixin
|
||||
from passbook.lib.tasks import TaskInfo, TaskResultStatus
|
||||
|
||||
|
||||
class TaskListView(AdminRequiredMixin, TemplateView):
|
||||
"""Show list of all background tasks"""
|
||||
|
||||
template_name = "administration/task/list.html"
|
||||
|
||||
def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
|
||||
kwargs = super().get_context_data(**kwargs)
|
||||
kwargs["object_list"] = sorted(
|
||||
TaskInfo.all().values(), key=lambda x: x.task_name
|
||||
)
|
||||
kwargs["task_successful"] = TaskResultStatus.SUCCESSFUL
|
||||
kwargs["task_warning"] = TaskResultStatus.WARNING
|
||||
kwargs["task_error"] = TaskResultStatus.ERROR
|
||||
return kwargs
|
||||
@ -6,6 +6,7 @@ from django.contrib.auth.mixins import (
|
||||
)
|
||||
from django.contrib.messages.views import SuccessMessageMixin
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.http.response import HttpResponseRedirect
|
||||
from django.shortcuts import redirect
|
||||
from django.urls import reverse, reverse_lazy
|
||||
from django.utils.http import urlencode
|
||||
@ -98,6 +99,53 @@ class UserDeleteView(LoginRequiredMixin, PermissionRequiredMixin, DeleteMessageV
|
||||
success_message = _("Successfully deleted User")
|
||||
|
||||
|
||||
class UserDisableView(
|
||||
LoginRequiredMixin, PermissionRequiredMixin, BackSuccessUrlMixin, DeleteMessageView
|
||||
):
|
||||
"""Disable user"""
|
||||
|
||||
object: User
|
||||
|
||||
model = User
|
||||
permission_required = "passbook_core.update_user"
|
||||
|
||||
# By default the object's name is user which is used by other checks
|
||||
context_object_name = "object"
|
||||
template_name = "administration/user/disable.html"
|
||||
success_url = reverse_lazy("passbook_admin:users")
|
||||
success_message = _("Successfully disabled User")
|
||||
|
||||
def delete(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||
self.object: User = self.get_object()
|
||||
success_url = self.get_success_url()
|
||||
self.object.is_active = False
|
||||
self.object.save()
|
||||
return HttpResponseRedirect(success_url)
|
||||
|
||||
|
||||
class UserEnableView(
|
||||
LoginRequiredMixin, PermissionRequiredMixin, BackSuccessUrlMixin, DetailView
|
||||
):
|
||||
"""Enable user"""
|
||||
|
||||
object: User
|
||||
|
||||
model = User
|
||||
permission_required = "passbook_core.update_user"
|
||||
|
||||
# By default the object's name is user which is used by other checks
|
||||
context_object_name = "object"
|
||||
success_url = reverse_lazy("passbook_admin:users")
|
||||
success_message = _("Successfully enabled User")
|
||||
|
||||
def get(self, request: HttpRequest, *args, **kwargs):
|
||||
self.object: User = self.get_object()
|
||||
success_url = self.get_success_url()
|
||||
self.object.is_active = True
|
||||
self.object.save()
|
||||
return HttpResponseRedirect(success_url)
|
||||
|
||||
|
||||
class UserPasswordResetView(LoginRequiredMixin, PermissionRequiredMixin, DetailView):
|
||||
"""Get Password reset link for user"""
|
||||
|
||||
@ -110,7 +158,7 @@ class UserPasswordResetView(LoginRequiredMixin, PermissionRequiredMixin, DetailV
|
||||
token, _ = Token.objects.get_or_create(
|
||||
identifier="password-reset-temp", user=self.object
|
||||
)
|
||||
querystring = urlencode({"token": token.token_uuid})
|
||||
querystring = urlencode({"token": token.key})
|
||||
link = request.build_absolute_uri(
|
||||
reverse("passbook_flows:default-recovery") + f"?{querystring}"
|
||||
)
|
||||
|
||||
@@ -1,43 +1,60 @@
"""API Authentication"""
from base64 import b64decode
from typing import Any, Tuple, Union
from typing import Any, Optional, Tuple, Union

from django.utils.translation import gettext as _
from rest_framework import HTTP_HEADER_ENCODING, exceptions
from rest_framework.authentication import BaseAuthentication, get_authorization_header
from rest_framework.request import Request
from structlog import get_logger

from passbook.core.models import Token, TokenIntents, User

LOGGER = get_logger()


def token_from_header(raw_header: bytes) -> Optional[Token]:
    """raw_header in the Format of `Basic dGVzdDp0ZXN0`"""
    auth_credentials = raw_header.decode()
    # Accept headers with Type format and without
    if " " in auth_credentials:
        auth_type, auth_credentials = auth_credentials.split()
        if auth_type.lower() != "basic":
            LOGGER.debug(
                "Unsupported authentication type, denying", type=auth_type.lower()
            )
            return None
    try:
        auth_credentials = b64decode(auth_credentials.encode()).decode()
    except UnicodeDecodeError:
        # TODO: Remove this workaround
        # temporary fallback for 0.11 to 0.12 upgrade
        # 0.11 and below proxy sends authorization header not base64 encoded
        pass
    # Accept credentials with username and without
    if ":" in auth_credentials:
        _, password = auth_credentials.split(":")
    else:
        password = auth_credentials
    if password == "":
        return None
    tokens = Token.filter_not_expired(key=password, intent=TokenIntents.INTENT_API)
    if not tokens.exists():
        LOGGER.debug("Token not found")
        return None
    return tokens.first()


class PassbookTokenAuthentication(BaseAuthentication):
    """Token-based authentication using HTTP Basic authentication"""

    def authenticate(self, request: Request) -> Union[Tuple[User, Any], None]:
        """Token-based authentication using HTTP Basic authentication"""
        auth = get_authorization_header(request).split()
        auth = get_authorization_header(request)

        if not auth or auth[0].lower() != b"basic":
        token = token_from_header(auth)
        if not token:
            return None

        if len(auth) == 1:
            msg = _("Invalid basic header. No credentials provided.")
            raise exceptions.AuthenticationFailed(msg)
        if len(auth) > 2:
            msg = _(
                "Invalid basic header. Credentials string should not contain spaces."
            )
            raise exceptions.AuthenticationFailed(msg)

        header_data = b64decode(auth[1]).decode(HTTP_HEADER_ENCODING).partition(":")

        tokens = Token.filter_not_expired(
            token_uuid=header_data[2], intent=TokenIntents.INTENT_API
        )
        if not tokens.exists():
            raise exceptions.AuthenticationFailed(_("Invalid token."))

        return (tokens.first().user, None)
        return (token.user, None)

    def authenticate_header(self, request: Request) -> str:
        return 'Basic realm="passbook"'
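token_from_header accepts both a full `Basic <base64>` header and a bare value, and both `username:token` and token-only credentials, plus a temporary non-base64 fallback for 0.11-era proxies. The same parsing stripped of the Django/DRF parts, as a reference sketch with the database lookup left out:

```python
# Sketch of the header parsing done by token_from_header(), without the ORM lookup.
from base64 import b64decode
from typing import Optional


def password_from_basic_header(raw_header: bytes) -> Optional[str]:
    credentials = raw_header.decode()
    # "Basic dGVzdDp0ZXN0" and a bare "dGVzdDp0ZXN0" are both accepted
    if " " in credentials:
        auth_type, credentials = credentials.split()
        if auth_type.lower() != "basic":
            return None
    try:
        credentials = b64decode(credentials.encode()).decode()
    except UnicodeDecodeError:
        pass  # 0.11-era proxies send the value un-encoded
    # "user:token" and a bare "token" are both accepted
    if ":" in credentials:
        _, credentials = credentials.split(":")
    return credentials or None


assert password_from_basic_header(b"Basic dGVzdDp0ZXN0") == "test"
assert password_from_basic_header(b"dGVzdDp0ZXN0") == "test"
```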
|
||||
@ -1,6 +1,6 @@
|
||||
"""core messages API"""
|
||||
from django.contrib.messages import get_messages
|
||||
from drf_yasg.utils import swagger_auto_schema
|
||||
from drf_yasg2.utils import swagger_auto_schema
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
|
||||
@ -1,12 +1,13 @@
|
||||
"""api v2 urls"""
|
||||
from django.urls import path, re_path
|
||||
from drf_yasg import openapi
|
||||
from drf_yasg.views import get_schema_view
|
||||
from drf_yasg2 import openapi
|
||||
from drf_yasg2.views import get_schema_view
|
||||
from rest_framework import routers
|
||||
from rest_framework.permissions import AllowAny
|
||||
|
||||
from passbook.admin.api.overview import AdministrationOverviewViewSet
|
||||
from passbook.admin.api.overview_metrics import AdministrationMetricsViewSet
|
||||
from passbook.admin.api.tasks import TaskViewSet
|
||||
from passbook.api.v2.messages import MessagesViewSet
|
||||
from passbook.audit.api import EventViewSet
|
||||
from passbook.core.api.applications import ApplicationViewSet
|
||||
@ -57,6 +58,7 @@ router.register(
|
||||
"admin/overview", AdministrationOverviewViewSet, basename="admin_overview"
|
||||
)
|
||||
router.register("admin/metrics", AdministrationMetricsViewSet, basename="admin_metrics")
|
||||
router.register("admin/system_tasks", TaskViewSet, basename="admin_system_tasks")
|
||||
|
||||
router.register("core/applications", ApplicationViewSet)
|
||||
router.register("core/groups", GroupViewSet)
|
||||
|
||||
@ -15,7 +15,6 @@ class EventSerializer(ModelSerializer):
|
||||
"pk",
|
||||
"user",
|
||||
"action",
|
||||
"date",
|
||||
"app",
|
||||
"context",
|
||||
"client_ip",
|
||||
|
||||
passbook/audit/migrations/0006_auto_20201017_2024.py (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
# Generated by Django 3.1.2 on 2020-10-17 20:24
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("passbook_audit", "0005_auto_20201005_2139"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="event",
|
||||
name="date",
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("token_view", "Token View"),
|
||||
("invitation_created", "Invite Created"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
]
|
||||
@ -100,6 +100,8 @@ class EventAction(models.TextChoices):
|
||||
SUSPICIOUS_REQUEST = "suspicious_request"
|
||||
PASSWORD_SET = "password_set" # noqa # nosec
|
||||
|
||||
TOKEN_VIEW = "token_view"
|
||||
|
||||
INVITE_CREATED = "invitation_created"
|
||||
INVITE_USED = "invitation_used"
|
||||
|
||||
@ -122,7 +124,6 @@ class Event(models.Model):
|
||||
event_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
|
||||
user = models.JSONField(default=dict)
|
||||
action = models.TextField(choices=EventAction.choices)
|
||||
date = models.DateTimeField(auto_now_add=True)
|
||||
app = models.TextField()
|
||||
context = models.JSONField(default=dict, blank=True)
|
||||
client_ip = models.GenericIPAddressField(null=True)
|
||||
|
||||
@ -17,6 +17,7 @@
|
||||
<div class="pf-c-card">
|
||||
<div class="pf-c-toolbar">
|
||||
<div class="pf-c-toolbar__content">
|
||||
{% include 'partials/toolbar_search.html' %}
|
||||
{% include 'partials/pagination.html' %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@ -3,12 +3,16 @@ from django.contrib.auth.mixins import LoginRequiredMixin
|
||||
from django.views.generic import ListView
|
||||
from guardian.mixins import PermissionListMixin
|
||||
|
||||
from passbook.admin.views.utils import UserPaginateListMixin
|
||||
from passbook.admin.views.utils import SearchListMixin, UserPaginateListMixin
|
||||
from passbook.audit.models import Event
|
||||
|
||||
|
||||
class EventListView(
|
||||
PermissionListMixin, LoginRequiredMixin, UserPaginateListMixin, ListView
|
||||
PermissionListMixin,
|
||||
LoginRequiredMixin,
|
||||
SearchListMixin,
|
||||
UserPaginateListMixin,
|
||||
ListView,
|
||||
):
|
||||
"""Show list of all invitations"""
|
||||
|
||||
@ -16,3 +20,11 @@ class EventListView(
|
||||
template_name = "audit/list.html"
|
||||
permission_required = "passbook_audit.view_event"
|
||||
ordering = "-created"
|
||||
|
||||
search_fields = [
|
||||
"user",
|
||||
"action",
|
||||
"app",
|
||||
"context",
|
||||
"client_ip",
|
||||
]
|
||||
|
||||
@ -1,7 +1,12 @@
|
||||
"""Tokens API Viewset"""
|
||||
from django.http.response import Http404
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from passbook.audit.models import Event, EventAction
|
||||
from passbook.core.models import Token
|
||||
|
||||
|
||||
@ -17,6 +22,16 @@ class TokenSerializer(ModelSerializer):
|
||||
class TokenViewSet(ModelViewSet):
|
||||
"""Token Viewset"""
|
||||
|
||||
queryset = Token.objects.all()
|
||||
lookup_field = "identifier"
|
||||
queryset = Token.filter_not_expired()
|
||||
serializer_class = TokenSerializer
|
||||
|
||||
@action(detail=True)
|
||||
def view_key(self, request: Request, identifier: str) -> Response:
|
||||
"""Return token key and log access"""
|
||||
tokens = Token.filter_not_expired(identifier=identifier)
|
||||
if not tokens.exists():
|
||||
raise Http404
|
||||
token = tokens.first()
|
||||
Event.new(EventAction.TOKEN_VIEW, token=token).from_http(request)
|
||||
return Response({"key": token.key})
|
||||
|
||||
@@ -1,9 +1,9 @@
"""Channels base classes"""
from channels.generic.websocket import JsonWebsocketConsumer
from django.core.exceptions import ValidationError
from structlog import get_logger

from passbook.core.models import Token, TokenIntents, User
from passbook.api.auth import token_from_header
from passbook.core.models import User

LOGGER = get_logger()

@@ -18,20 +18,15 @@ class AuthJsonConsumer(JsonWebsocketConsumer):
        if b"authorization" not in headers:
            LOGGER.warning("WS Request without authorization header")
            self.close()
            return False

        token = headers[b"authorization"]
        try:
            token_uuid = token.decode("utf-8")
            tokens = Token.filter_not_expired(
                token_uuid=token_uuid, intent=TokenIntents.INTENT_API
            )
            if not tokens.exists():
                LOGGER.warning("WS Request with invalid token")
                self.close()
                return False
        except ValidationError:
            LOGGER.warning("WS Invalid UUID")
        raw_header = headers[b"authorization"]

        token = token_from_header(raw_header)
        if not token:
            LOGGER.warning("Failed to authenticate")
            self.close()
            return False
        self.user = tokens.first().user

        self.user = token.user
        return True
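With the consumer delegating to token_from_header, an outpost (or any websocket client) authenticates by sending the same password-style credential in the Authorization header. Building that value is just base64 over `:<token>`; the empty username is one of the two forms the helper accepts. A sketch:

```python
# Sketch: building the Authorization header value a websocket client would send.
from base64 import b64encode


def build_auth_header(token: str) -> str:
    # Empty username, token as password; matches what token_from_header accepts
    credentials = b64encode(f":{token}".encode()).decode()
    return f"Basic {credentials}"


print(build_auth_header("my-api-token"))  # "Basic Om15LWFwaS10b2tlbg=="
```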
||||
passbook/core/forms/token.py (new file, 22 lines)
@@ -0,0 +1,22 @@
"""Core user token form"""
from django import forms

from passbook.core.models import Token


class UserTokenForm(forms.ModelForm):
    """Token form, for tokens created by endusers"""

    class Meta:

        model = Token
        fields = [
            "identifier",
            "expires",
            "expiring",
            "description",
        ]
        widgets = {
            "identifier": forms.TextInput(),
            "description": forms.TextInput(),
        }
||||
passbook/core/migrations/0014_auto_20201018_1158.py (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
# Generated by Django 3.1.2 on 2020-10-18 11:58
|
||||
from django.apps.registry import Apps
|
||||
from django.db import migrations, models
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
|
||||
import passbook.core.models
|
||||
|
||||
|
||||
def set_default_token_key(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
Token = apps.get_model("passbook_core", "Token")
|
||||
|
||||
for token in Token.objects.using(db_alias).all():
|
||||
token.key = token.pk.hex
|
||||
token.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("passbook_core", "0013_auto_20201003_2132"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="token",
|
||||
name="key",
|
||||
field=models.TextField(default=passbook.core.models.default_token_key),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="token",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="token",
|
||||
name="identifier",
|
||||
field=models.SlugField(max_length=255),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="token",
|
||||
index=models.Index(fields=["key"], name="passbook_co_key_e45007_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="token",
|
||||
index=models.Index(
|
||||
fields=["identifier"], name="passbook_co_identif_1a34a8_idx"
|
||||
),
|
||||
),
|
||||
migrations.RunPython(set_default_token_key),
|
||||
]
|
||||
@@ -32,6 +32,11 @@ def default_token_duration():
    return now() + timedelta(minutes=30)


def default_token_key():
    """Default token key"""
    return uuid4().hex


class Group(models.Model):
    """Custom Group model which supports a basic hierarchy"""

@@ -274,10 +279,8 @@ class ExpiringModel(models.Model):
    def filter_not_expired(cls, **kwargs) -> QuerySet:
        """Filer for tokens which are not expired yet or are not expiring,
        and match filters in `kwargs`"""
        query = Q(**kwargs)
        query_not_expired_yet = Q(expires__lt=now(), expiring=True)
        query_not_expiring = Q(expiring=False)
        return cls.objects.filter(query & (query_not_expired_yet | query_not_expiring))
        expired = Q(expires__lt=now(), expiring=True)
        return cls.objects.exclude(expired).filter(**kwargs)

    @property
    def is_expired(self) -> bool:
@@ -298,6 +301,7 @@ class TokenIntents(models.TextChoices):
    # Allow access to API
    INTENT_API = "api"

    # Recovery use for the recovery app
    INTENT_RECOVERY = "recovery"


@@ -305,7 +309,8 @@ class Token(ExpiringModel):
    """Token used to authenticate the User for API Access or confirm another Stage like Email."""

    token_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
    identifier = models.TextField()
    identifier = models.SlugField(max_length=255)
    key = models.TextField(default=default_token_key)
    intent = models.TextField(
        choices=TokenIntents.choices, default=TokenIntents.INTENT_VERIFICATION
    )
@@ -313,13 +318,19 @@ class Token(ExpiringModel):
    description = models.TextField(default="", blank=True)

    def __str__(self):
        return f"Token {self.identifier} (expires={self.expires})"
        description = f"{self.identifier}"
        if self.expiring:
            description += f" (expires={self.expires})"
        return description

    class Meta:

        verbose_name = _("Token")
        verbose_name_plural = _("Tokens")
        unique_together = (("identifier", "user"),)
        indexes = [
            models.Index(fields=["identifier"]),
            models.Index(fields=["key"]),
        ]


class PropertyMapping(models.Model):
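The rewritten filter_not_expired reads more naturally as a predicate: a row is expired only when it is marked expiring and its expires timestamp is in the past; everything else passes, and the extra kwargs are applied as a normal filter. The same logic outside the ORM, as a sketch:

```python
# Sketch of the filter_not_expired() predicate outside the ORM.
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone


@dataclass
class ExpiringThing:
    expires: datetime
    expiring: bool = True


def is_expired(obj: ExpiringThing, now: datetime) -> bool:
    # Mirrors Q(expires__lt=now(), expiring=True): non-expiring objects never expire
    return obj.expiring and obj.expires < now


now = datetime.now(timezone.utc)
objs = [
    ExpiringThing(expires=now - timedelta(minutes=5)),               # expired
    ExpiringThing(expires=now + timedelta(minutes=5)),               # still valid
    ExpiringThing(expires=now - timedelta(days=1), expiring=False),  # never expires
]
print([not is_expired(o, now) for o in objs])  # [False, True, True]
```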
||||
@@ -3,14 +3,16 @@ from django.utils.timezone import now
from structlog import get_logger

from passbook.core.models import ExpiringModel
from passbook.lib.tasks import MonitoredTask, TaskResult, TaskResultStatus
from passbook.root.celery import CELERY_APP

LOGGER = get_logger()


@CELERY_APP.task()
def clean_expired_models():
@CELERY_APP.task(bind=True, base=MonitoredTask)
def clean_expired_models(self: MonitoredTask):
    """Remove expired objects"""
    messages = []
    for cls in ExpiringModel.__subclasses__():
        cls: ExpiringModel
        amount, _ = (
@@ -20,3 +22,5 @@ def clean_expired_models():
            .delete()
        )
        LOGGER.debug("Deleted expired models", model=cls, amount=amount)
        messages.append(f"Deleted {amount} expired {cls._meta.verbose_name_plural}")
    self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, messages))
|
||||
@ -7,7 +7,7 @@
|
||||
{% load passbook_utils %}
|
||||
|
||||
{% block body %}
|
||||
{% include 'partials/messages.html' %}
|
||||
<pb-messages url="{% url 'passbook_api:messages-list' %}"></pb-messages>
|
||||
<div class="pf-c-page" id="page-default-nav-example">
|
||||
<a class="pf-c-skip-to-content pf-c-button pf-m-primary" href="#main-content">{% trans 'Skip to content' %}</a>
|
||||
<header role="banner" class="pf-c-page__header ws-page-header">
|
||||
@ -21,7 +21,7 @@
|
||||
</div>
|
||||
<a href="{% url 'passbook_core:overview' %}" class="pf-c-page__header-brand-link">
|
||||
<div class="pf-c-brand pb-brand">
|
||||
<img src="{{ config.passbook.branding.logo }}" alt="passbook icon">
|
||||
<img src="{{ config.passbook.branding.logo }}" style="width: 100px;" alt="passbook icon">
|
||||
{% if config.passbook.branding.title_show %}
|
||||
<small><small>{{ config.passbook.branding.title }}</small></small>
|
||||
{% endif %}
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
{% extends "administration/base.html" %}
|
||||
{% extends container_template|default:"administration/base.html" %}
|
||||
|
||||
{% load i18n %}
|
||||
{% load passbook_utils %}
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
{% load static %}
|
||||
{% load i18n %}
|
||||
|
||||
{% include 'partials/messages.html' %}
|
||||
<pb-messages url="{% url 'passbook_api:messages-list' %}"></pb-messages>
|
||||
|
||||
<header class="pf-c-login__main-header">
|
||||
<h1 class="pf-c-title pf-m-3xl">
|
||||
|
||||
@ -18,7 +18,7 @@
|
||||
</filter>
|
||||
</svg>
|
||||
</div>
|
||||
{% include 'partials/messages.html' %}
|
||||
<pb-messages url="{% url 'passbook_api:messages-list' %}"></pb-messages>
|
||||
<div class="pf-c-login">
|
||||
<div class="pf-c-login__container">
|
||||
<header class="pf-c-login__header">
|
||||
|
||||
@ -1,22 +0,0 @@
|
||||
<ul class="pf-c-alert-group pf-m-toast">
|
||||
{% for msg in messages %}
|
||||
<li class="pf-c-alert-group__item">
|
||||
<div class="pf-c-alert pf-m-{{ msg.level_tag }} {% if msg.level_tag == 'error' %}pf-m-danger{% endif %}">
|
||||
<div class="pf-c-alert__icon">
|
||||
{% if msg.level_tag == 'error' %}
|
||||
<i class="fas fa-exclamation-circle"></i>
|
||||
{% elif msg.level_tag == 'warning' %}
|
||||
<i class="fas fa-exclamation-triangle"></i>
|
||||
{% elif msg.level_tag == 'success' %}
|
||||
<i class="fas fa-check-circle"></i>
|
||||
{% elif msg.level_tag == 'info' %}
|
||||
<i class="fas fa-info"></i>
|
||||
{% endif %}
|
||||
</div>
|
||||
<h4 class="pf-c-alert__title">
|
||||
{{ msg.message|safe }}
|
||||
</h4>
|
||||
</div>
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
@ -16,6 +16,10 @@
|
||||
<a href="{% url 'passbook_core:user-settings' %}"
|
||||
class="pf-c-nav__link {% is_active 'passbook_core:user-settings' %}">{% trans 'User Details' %}</a>
|
||||
</li>
|
||||
<li class="pf-c-nav__item">
|
||||
<a href="{% url 'passbook_core:user-tokens' %}"
|
||||
class="pf-c-nav__link {% is_active 'passbook_core:user-tokens' 'passbook_core:user-tokens-create' 'passbook_core:user-tokens-update' 'passbook_core:user-tokens-delete' %}">{% trans 'Tokens' %}</a>
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
{% user_stages as user_stages_loc %}
|
||||
@ -53,6 +57,7 @@
|
||||
</div>
|
||||
</div>
|
||||
<main role="main" class="pf-c-page__main" tabindex="-1" id="main-content">
|
||||
{% block content %}
|
||||
<section class="pf-c-page__main-section">
|
||||
<div class="pf-u-display-flex pf-u-justify-content-center">
|
||||
<div class="pf-u-w-75">
|
||||
@ -61,5 +66,6 @@
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
{% endblock %}
|
||||
</main>
|
||||
{% endblock %}
|
||||
|
||||
passbook/core/templates/user/token_list.html (new file, 91 lines)
@@ -0,0 +1,91 @@
|
||||
{% extends "user/base.html" %}
|
||||
|
||||
{% load i18n %}
|
||||
{% load passbook_utils %}
|
||||
|
||||
{% block content %}
|
||||
<section class="pf-c-page__main-section pf-m-light">
|
||||
<div class="pf-c-content">
|
||||
<h1>
|
||||
<i class="pf-icon pf-icon-users"></i>
|
||||
{% trans 'Tokens' %}
|
||||
</h1>
|
||||
<p>{% trans "Tokens can be used to access passbook's API." %}
|
||||
</p>
|
||||
</div>
|
||||
</section>
|
||||
<section class="pf-c-page__main-section pf-m-no-padding-mobile">
|
||||
<div class="pf-c-card">
|
||||
{% if object_list %}
|
||||
<div class="pf-c-toolbar">
|
||||
<div class="pf-c-toolbar__content">
|
||||
{% include 'partials/toolbar_search.html' %}
|
||||
<div class="pf-c-toolbar__bulk-select">
|
||||
<a href="{% url 'passbook_core:user-tokens-create' %}?back={{ request.get_full_path }}" class="pf-c-button pf-m-primary" type="button">{% trans 'Create' %}</a>
|
||||
</div>
|
||||
{% include 'partials/pagination.html' %}
|
||||
</div>
|
||||
</div>
|
||||
<table class="pf-c-table pf-m-compact pf-m-grid-xl" role="grid">
|
||||
<thead>
|
||||
<tr role="row">
|
||||
<th role="columnheader" scope="col">{% trans 'Identifier' %}</th>
|
||||
<th role="columnheader" scope="col">{% trans 'Expires?' %}</th>
|
||||
<th role="columnheader" scope="col">{% trans 'Expiry Date' %}</th>
|
||||
<th role="columnheader" scope="col">{% trans 'Description' %}</th>
|
||||
<th role="cell"></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody role="rowgroup">
|
||||
{% for token in object_list %}
|
||||
<tr role="row">
|
||||
<th role="columnheader">
|
||||
<div>{{ token.identifier }}</div>
|
||||
</th>
|
||||
<td role="cell">
|
||||
<span>
|
||||
{{ token.expiring|yesno:"Yes,No" }}
|
||||
</span>
|
||||
</td>
|
||||
<td role="cell">
|
||||
<span>
|
||||
{% if not token.expiring %}
|
||||
-
|
||||
{% else %}
|
||||
{{ token.expires }}
|
||||
{% endif %}
|
||||
</span>
|
||||
</td>
|
||||
<td role="cell">
|
||||
<span>
|
||||
{{ token.description }}
|
||||
</span>
|
||||
</td>
|
||||
<td>
|
||||
<a class="pf-c-button pf-m-secondary" href="{% url 'passbook_core:user-tokens-update' identifier=token.identifier %}?back={{ request.get_full_path }}">{% trans 'Edit' %}</a>
|
||||
<a class="pf-c-button pf-m-danger" href="{% url 'passbook_core:user-tokens-delete' identifier=token.identifier %}?back={{ request.get_full_path }}">{% trans 'Delete' %}</a>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="pf-c-pagination pf-m-bottom">
|
||||
{% include 'partials/pagination.html' %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="pf-c-empty-state">
|
||||
<div class="pf-c-empty-state__content">
|
||||
<i class="fas fa-cubes pf-c-empty-state__icon" aria-hidden="true"></i>
|
||||
<h1 class="pf-c-title pf-m-lg">
|
||||
{% trans 'No Tokens.' %}
|
||||
</h1>
|
||||
<div class="pf-c-empty-state__body">
|
||||
{% trans 'Currently no tokens exist. Click the button below to create one.' %}
|
||||
</div>
|
||||
<a href="{% url 'passbook_core:user-tokens-create' %}?back={{ request.get_full_path }}" class="pf-c-button pf-m-primary" type="button">{% trans 'Create' %}</a>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</section>
|
||||
{% endblock %}
|
||||
@@ -14,5 +14,5 @@ class TestTasks(TestCase):
        """Test Token cleanup task"""
        Token.objects.create(expires=now(), user=get_anonymous_user())
        self.assertEqual(Token.objects.all().count(), 1)
        clean_expired_models()
        clean_expired_models.delay().get()
        self.assertEqual(Token.objects.all().count(), 0)
|
||||
@@ -6,6 +6,22 @@ from passbook.core.views import impersonate, overview, user

urlpatterns = [
    # User views
    path("-/user/", user.UserSettingsView.as_view(), name="user-settings"),
    path("-/user/tokens/", user.TokenListView.as_view(), name="user-tokens"),
    path(
        "-/user/tokens/create/",
        user.TokenCreateView.as_view(),
        name="user-tokens-create",
    ),
    path(
        "-/user/tokens/<slug:identifier>/update/",
        user.TokenUpdateView.as_view(),
        name="user-tokens-update",
    ),
    path(
        "-/user/tokens/<slug:identifier>/delete/",
        user.TokenDeleteView.as_view(),
        name="user-tokens-delete",
    ),
    # Overview
    path("", overview.OverviewView.as_view(), name="overview"),
    # Impersonation
@@ -2,13 +2,28 @@
from typing import Any, Dict

from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.mixins import (
    PermissionRequiredMixin as DjangoPermissionRequiredMixin,
)
from django.contrib.messages.views import SuccessMessageMixin
from django.db.models.query import QuerySet
from django.http.response import HttpResponse
from django.urls import reverse_lazy
from django.utils.translation import gettext as _
from django.views.generic import UpdateView
from django.views.generic import ListView, UpdateView
from guardian.mixins import PermissionListMixin, PermissionRequiredMixin
from guardian.shortcuts import get_objects_for_user

from passbook.admin.views.utils import (
    DeleteMessageView,
    SearchListMixin,
    UserPaginateListMixin,
)
from passbook.core.forms.token import UserTokenForm
from passbook.core.forms.users import UserDetailForm
from passbook.core.models import Token, TokenIntents
from passbook.flows.models import Flow, FlowDesignation
from passbook.lib.views import CreateAssignPermView


class UserSettingsView(SuccessMessageMixin, LoginRequiredMixin, UpdateView):
@@ -30,3 +45,93 @@ class UserSettingsView(SuccessMessageMixin, LoginRequiredMixin, UpdateView):
        )
        kwargs["unenrollment_enabled"] = bool(unenrollment_flow)
        return kwargs


class TokenListView(
    LoginRequiredMixin,
    PermissionListMixin,
    UserPaginateListMixin,
    SearchListMixin,
    ListView,
):
    """Show list of all tokens"""

    model = Token
    ordering = "expires"
    permission_required = "passbook_core.view_token"

    template_name = "user/token_list.html"
    search_fields = [
        "identifier",
        "intent",
        "description",
    ]

    def get_queryset(self) -> QuerySet:
        return super().get_queryset().filter(intent=TokenIntents.INTENT_API)


class TokenCreateView(
    SuccessMessageMixin,
    LoginRequiredMixin,
    DjangoPermissionRequiredMixin,
    CreateAssignPermView,
):
    """Create new Token"""

    model = Token
    form_class = UserTokenForm
    permission_required = "passbook_core.add_token"

    template_name = "generic/create.html"
    success_url = reverse_lazy("passbook_core:user-tokens")
    success_message = _("Successfully created Token")

    def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
        kwargs = super().get_context_data(**kwargs)
        kwargs["container_template"] = "user/base.html"
        return kwargs

    def form_valid(self, form: UserTokenForm) -> HttpResponse:
        form.instance.user = self.request.user
        form.instance.intent = TokenIntents.INTENT_API
        return super().form_valid(form)


class TokenUpdateView(
    SuccessMessageMixin, LoginRequiredMixin, PermissionRequiredMixin, UpdateView
):
    """Update token"""

    model = Token
    form_class = UserTokenForm
    permission_required = "passbook_core.update_token"
    template_name = "generic/update.html"
    success_url = reverse_lazy("passbook_core:user-tokens")
    success_message = _("Successfully updated Token")

    def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
        kwargs = super().get_context_data(**kwargs)
        kwargs["container_template"] = "user/base.html"
        return kwargs

    def get_object(self) -> Token:
        identifier = self.kwargs.get("identifier")
        return get_objects_for_user(
            self.request.user, "passbook_core.update_token", self.model
        ).filter(intent=TokenIntents.INTENT_API, identifier=identifier)


class TokenDeleteView(LoginRequiredMixin, PermissionRequiredMixin, DeleteMessageView):
    """Delete token"""

    model = Token
    permission_required = "passbook_core.delete_token"
    template_name = "generic/delete.html"
    success_url = reverse_lazy("passbook_core:user-tokens")
    success_message = _("Successfully deleted Token")

    def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
        kwargs = super().get_context_data(**kwargs)
        kwargs["container_template"] = "user/base.html"
        return kwargs
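`TokenCreateView.form_valid` above pins the new token to the requesting user and forces the `INTENT_API` intent. A minimal sketch of the same assignment done directly through the ORM (the `description` value is illustrative; the field itself is implied by the list view's `search_fields`):

```python
# Sketch: create an API-intent token via the ORM, mirroring what
# TokenCreateView.form_valid does for form submissions.
from passbook.core.models import Token, TokenIntents, User

user = User.objects.get(username="example")  # placeholder lookup
token = Token.objects.create(
    user=user,
    intent=TokenIntents.INTENT_API,
    description="CI access token",  # illustrative value
)
print(token.identifier)
```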
@@ -91,8 +91,8 @@ class Command(BaseCommand):  # pragma: no cover

    def output_overview(self, values):
        """Output results human readable"""
        total_max = max([max(inner) for inner in values])
        total_min = min([min(inner) for inner in values])
        total_max: int = max([max(inner) for inner in values])
        total_min: int = min([min(inner) for inner in values])
        total_avg = sum([sum(inner) for inner in values]) / sum(
            [len(inner) for inner in values]
        )
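The hunk above only adds type annotations; the aggregation itself is unchanged. For reference, the overall average divides the sum of all samples by the total sample count:

```python
# Worked example of the aggregation in output_overview
values = [[1, 2, 3], [4, 5]]

total_max = max(max(inner) for inner in values)  # 5
total_min = min(min(inner) for inner in values)  # 1
total_avg = sum(sum(inner) for inner in values) / sum(
    len(inner) for inner in values
)  # 15 / 5 = 3.0

assert (total_max, total_min, total_avg) == (5, 1, 3.0)
```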
@ -21,135 +21,8 @@
|
||||
{% endblock %}
|
||||
|
||||
{% block main_container %}
|
||||
<main class="pf-c-login__main" id="flow-body">
|
||||
<div class="pf-c-login__main-body pb-loading">
|
||||
<span class="pf-c-spinner" role="progressbar" aria-valuetext="Loading...">
|
||||
<span class="pf-c-spinner__clipper"></span>
|
||||
<span class="pf-c-spinner__lead-ball"></span>
|
||||
<span class="pf-c-spinner__tail-ball"></span>
|
||||
</span>
|
||||
</div>
|
||||
</main>
|
||||
<script>
|
||||
const flowBodyUrl = "{{ exec_url }}";
|
||||
const messagesUrl = "{{ msg_url }}";
|
||||
const flowBody = document.querySelector("#flow-body");
|
||||
const spinner = document.querySelector(".pb-loading");
|
||||
|
||||
const updateMessages = () => {
|
||||
let messageContainer = document.querySelector(".pf-c-alert-group");
|
||||
fetch(messagesUrl).then(response => {
|
||||
messageContainer.innerHTML = "";
|
||||
response.json().then(data => {
|
||||
data.forEach(msg => {
|
||||
let icon = "";
|
||||
switch (msg.level_tag) {
|
||||
case 'error':
|
||||
icon = 'fas fa-exclamation-circle'
|
||||
break;
|
||||
case 'warning':
|
||||
icon = 'fas fa-exclamation-triangle'
|
||||
break;
|
||||
case 'success':
|
||||
icon = 'fas fa-check-circle'
|
||||
break;
|
||||
case 'info':
|
||||
icon = 'fas fa-info'
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if (msg.level_tag === "error") {
|
||||
msg.extra_tags = "pf-m-danger";
|
||||
}
|
||||
let item = `<li class="pf-c-alert-group__item">
|
||||
<div class="pf-c-alert pf-m-${msg.level_tag} ${msg.extra_tags}">
|
||||
<div class="pf-c-alert__icon">
|
||||
<i class="${icon}"></i>
|
||||
</div>
|
||||
<h4 class="pf-c-alert__title">
|
||||
${msg.message}
|
||||
</h4>
|
||||
</div>
|
||||
</li>`;
|
||||
var template = document.createElement('template');
|
||||
template.innerHTML = item;
|
||||
messageContainer.appendChild(template.content.firstChild);
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
const updateCard = (data) => {
|
||||
switch (data.type) {
|
||||
case "redirect":
|
||||
window.location = data.to
|
||||
break;
|
||||
case "template":
|
||||
flowBody.innerHTML = data.body;
|
||||
checkAutofocus();
|
||||
updateMessages();
|
||||
loadFormCode();
|
||||
setFormSubmitHandlers();
|
||||
default:
|
||||
break;
|
||||
}
|
||||
};
|
||||
const showSpinner = () => {
|
||||
flowBody.innerHTML = "";
|
||||
flowBody.appendChild(spinner);
|
||||
};
|
||||
const loadFormCode = () => {
|
||||
document.querySelectorAll("#flow-body script").forEach(script => {
|
||||
let newScript = document.createElement("script");
|
||||
newScript.src = script.src;
|
||||
document.head.appendChild(newScript);
|
||||
});
|
||||
};
|
||||
const checkAutofocus = () => {
|
||||
const autofocusElement = document.querySelector("#flow-body [autofocus]");
|
||||
if (autofocusElement !== null) {
|
||||
autofocusElement.focus();
|
||||
}
|
||||
};
|
||||
const updateFormAction = (form) => {
|
||||
for (let index = 0; index < form.elements.length; index++) {
|
||||
const element = form.elements[index];
|
||||
if (element.value === form.action) {
|
||||
console.log("pb-flow: Found Form action URL in form elements, not changing form action.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
form.action = flowBodyUrl;
|
||||
console.log(`pb-flow: updated form.action ${flowBodyUrl}`);
|
||||
return true;
|
||||
};
|
||||
const checkAutosubmit = (form) => {
|
||||
if ("autosubmit" in form.attributes) {
|
||||
return form.submit();
|
||||
}
|
||||
};
|
||||
const setFormSubmitHandlers = () => {
|
||||
document.querySelectorAll("#flow-body form").forEach(form => {
|
||||
console.log(`pb-flow: Checking for autosubmit attribute ${form}`);
|
||||
checkAutosubmit(form);
|
||||
console.log(`pb-flow: Setting action for form ${form}`);
|
||||
updateFormAction(form);
|
||||
console.log(`pb-flow: Adding handler for form ${form}`);
|
||||
form.addEventListener('submit', (e) => {
|
||||
e.preventDefault();
|
||||
let formData = new FormData(form);
|
||||
showSpinner();
|
||||
fetch(flowBodyUrl, {
|
||||
method: 'post',
|
||||
body: formData,
|
||||
}).then(response => response.json()).then(data => {
|
||||
updateCard(data);
|
||||
});
|
||||
});
|
||||
form.classList.add("pb-flow-wrapped");
|
||||
});
|
||||
};
|
||||
|
||||
fetch(flowBodyUrl).then(response => response.json()).then(data => updateCard(data));
|
||||
</script>
|
||||
<flow-shell-card
|
||||
class="pf-c-login__main"
|
||||
flowBodyUrl="{{ exec_url }}">
|
||||
</flow-shell-card>
|
||||
{% endblock %}
|
||||
|
||||
@@ -236,7 +236,6 @@ class FlowExecutorShellView(TemplateView):

    def get_context_data(self, **kwargs) -> Dict[str, Any]:
        kwargs["exec_url"] = reverse("passbook_flows:flow-executor", kwargs=self.kwargs)
        kwargs["msg_url"] = reverse("passbook_api:messages-list")
        self.request.session[SESSION_KEY_GET] = self.request.GET
        return kwargs
passbook/lib/tasks/__init__.py (new file, 134 lines)
@@ -0,0 +1,134 @@
|
||||
"""Monitored tasks"""
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from traceback import format_tb
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from celery import Task
|
||||
from django.core.cache import cache
|
||||
|
||||
|
||||
class TaskResultStatus(Enum):
|
||||
"""Possible states of tasks"""
|
||||
|
||||
SUCCESSFUL = 1
|
||||
WARNING = 2
|
||||
ERROR = 4
|
||||
|
||||
|
||||
@dataclass
|
||||
class TaskResult:
|
||||
"""Result of a task run, this class is created by the task itself
|
||||
and used by self.set_status"""
|
||||
|
||||
status: TaskResultStatus
|
||||
|
||||
messages: List[str] = field(default_factory=list)
|
||||
|
||||
# Optional UID used in cache for tasks that run in different instances
|
||||
uid: Optional[str] = field(default=None)
|
||||
|
||||
def with_error(self, exc: Exception) -> "TaskResult":
|
||||
"""Since errors might not always be pickle-able, set the traceback"""
|
||||
self.messages.extend(format_tb(exc.__traceback__))
|
||||
self.messages.append(str(exc))
|
||||
return self
|
||||
|
||||
|
||||
@dataclass
|
||||
class TaskInfo:
|
||||
"""Info about a task run"""
|
||||
|
||||
task_name: str
|
||||
finish_timestamp: datetime
|
||||
|
||||
result: TaskResult
|
||||
|
||||
task_call_module: str
|
||||
task_call_func: str
|
||||
task_call_args: List[Any] = field(default_factory=list)
|
||||
task_call_kwargs: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
task_description: Optional[str] = field(default=None)
|
||||
|
||||
@staticmethod
|
||||
def all() -> Dict[str, "TaskInfo"]:
|
||||
"""Get all TaskInfo objects"""
|
||||
return cache.get_many(cache.keys("task_*"))
|
||||
|
||||
@staticmethod
|
||||
def by_name(name: str) -> Optional["TaskInfo"]:
|
||||
"""Get TaskInfo Object by name"""
|
||||
return cache.get(f"task_{name}")
|
||||
|
||||
def save(self):
|
||||
"""Save task into cache"""
|
||||
key = f"task_{self.task_name}"
|
||||
if self.result.uid:
|
||||
key += f"_{self.result.uid}"
|
||||
self.task_name += f"_{self.result.uid}"
|
||||
cache.set(key, self, timeout=6 * 60 * 60)
|
||||
|
||||
|
||||
class MonitoredTask(Task):
|
||||
"""Task which can save its state to the cache"""
|
||||
|
||||
# For tasks that should only be listed if they failed, set this to False
|
||||
save_on_success: bool
|
||||
|
||||
_result: TaskResult
|
||||
|
||||
_uid: Optional[str]
|
||||
|
||||
def __init__(self, *args, **kwargs) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.save_on_success = True
|
||||
self._uid = None
|
||||
self._result = TaskResult(status=TaskResultStatus.ERROR, messages=[])
|
||||
|
||||
def set_uid(self, uid: str):
|
||||
"""Set UID, so in the case of an unexpected error its saved correctly"""
|
||||
self._uid = uid
|
||||
|
||||
def set_status(self, result: TaskResult):
|
||||
"""Set result for current run, will overwrite previous result."""
|
||||
self._result = result
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
def after_return(
|
||||
self, status, retval, task_id, args: List[Any], kwargs: Dict[str, Any], einfo
|
||||
):
|
||||
if not self._result.uid:
|
||||
self._result.uid = self._uid
|
||||
if self.save_on_success:
|
||||
TaskInfo(
|
||||
task_name=self.__name__,
|
||||
task_description=self.__doc__,
|
||||
finish_timestamp=datetime.now(),
|
||||
result=self._result,
|
||||
task_call_module=self.__module__,
|
||||
task_call_func=self.__name__,
|
||||
task_call_args=args,
|
||||
task_call_kwargs=kwargs,
|
||||
).save()
|
||||
return super().after_return(status, retval, task_id, args, kwargs, einfo=einfo)
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
def on_failure(self, exc, task_id, args, kwargs, einfo):
|
||||
if not self._result.uid:
|
||||
self._result.uid = self._uid
|
||||
TaskInfo(
|
||||
task_name=self.__name__,
|
||||
task_description=self.__doc__,
|
||||
finish_timestamp=datetime.now(),
|
||||
result=self._result,
|
||||
task_call_module=self.__module__,
|
||||
task_call_func=self.__name__,
|
||||
task_call_args=args,
|
||||
task_call_kwargs=kwargs,
|
||||
).save()
|
||||
return super().on_failure(exc, task_id, args, kwargs, einfo=einfo)
|
||||
|
||||
def run(self, *args, **kwargs):
|
||||
raise NotImplementedError
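A short sketch of how the cached state of a `MonitoredTask` can be read back elsewhere, for example from an admin view; the task name `backup_database` matches the task added in the next file, everything else is illustrative:

```python
# Sketch: inspect the cached state of monitored tasks.
from passbook.lib.tasks import TaskInfo, TaskResultStatus

# All task states currently in the cache (saved with a 6 hour timeout).
for key, info in TaskInfo.all().items():
    print(key, info.result.status, info.finish_timestamp)

# A single task by name; None if it never ran or the entry expired.
info = TaskInfo.by_name("backup_database")
if info and info.result.status == TaskResultStatus.ERROR:
    print("\n".join(info.result.messages))
```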
passbook/lib/tasks/backup.py (new file, 34 lines)
@@ -0,0 +1,34 @@
"""Database backup task"""
from datetime import datetime
from io import StringIO

from botocore.exceptions import BotoCoreError, ClientError
from django.contrib.humanize.templatetags.humanize import naturaltime
from django.core import management
from structlog import get_logger

from passbook.lib.tasks import MonitoredTask, TaskResult, TaskResultStatus
from passbook.root.celery import CELERY_APP

LOGGER = get_logger()


@CELERY_APP.task(bind=True, base=MonitoredTask)
def backup_database(self: MonitoredTask):  # pragma: no cover
    """Database backup"""
    try:
        start = datetime.now()
        out = StringIO()
        management.call_command("dbbackup", quiet=True, stdout=out)
        self.set_status(
            TaskResult(
                TaskResultStatus.SUCCESSFUL,
                [
                    f"Successfully finished database backup {naturaltime(start)}",
                    out.getvalue(),
                ],
            )
        )
        LOGGER.info("Successfully backed up database.")
    except (IOError, BotoCoreError, ClientError) as exc:
        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
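How `backup_database` gets scheduled is not part of this hunk. Assuming it is wired up through Celery beat like the outpost controller schedule later in this changeset, an entry could look roughly like this (the crontab timing and queue name are assumptions):

```python
# Hypothetical beat entry for the backup task; timing and queue are assumed.
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "database_backup": {
        "task": "passbook.lib.tasks.backup.backup_database",
        "schedule": crontab(minute=0, hour=0),  # nightly, assumed
        "options": {"queue": "passbook_scheduled"},
    },
}
```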
@ -1,17 +1,16 @@
|
||||
"""Outpost websocket handler"""
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from datetime import datetime
|
||||
from enum import IntEnum
|
||||
from time import time
|
||||
from typing import Any, Dict
|
||||
|
||||
from dacite import from_dict
|
||||
from dacite.data import Data
|
||||
from django.core.cache import cache
|
||||
from guardian.shortcuts import get_objects_for_user
|
||||
from structlog import get_logger
|
||||
|
||||
from passbook.core.channels import AuthJsonConsumer
|
||||
from passbook.outposts.models import Outpost
|
||||
from passbook.outposts.models import OUTPOST_HELLO_INTERVAL, Outpost, OutpostState
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
@ -54,33 +53,35 @@ class OutpostConsumer(AuthJsonConsumer):
|
||||
return
|
||||
self.accept()
|
||||
self.outpost = outpost.first()
|
||||
self.outpost.channels.append(self.channel_name)
|
||||
LOGGER.debug("added channel to outpost", channel_name=self.channel_name)
|
||||
self.outpost.save()
|
||||
OutpostState(
|
||||
uid=self.channel_name, last_seen=datetime.now(), _outpost=self.outpost
|
||||
).save(timeout=OUTPOST_HELLO_INTERVAL * 1.5)
|
||||
LOGGER.debug("added channel to cache", channel_name=self.channel_name)
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def disconnect(self, close_code):
|
||||
self.outpost.channels.remove(self.channel_name)
|
||||
self.outpost.save()
|
||||
LOGGER.debug("removed channel from outpost", channel_name=self.channel_name)
|
||||
OutpostState.for_channel(self.outpost, self.channel_name).delete()
|
||||
LOGGER.debug("removed channel from cache", channel_name=self.channel_name)
|
||||
|
||||
def receive_json(self, content: Data):
|
||||
msg = from_dict(WebsocketMessage, content)
|
||||
state = OutpostState(
|
||||
uid=self.channel_name,
|
||||
last_seen=datetime.now(),
|
||||
_outpost=self.outpost,
|
||||
)
|
||||
if msg.instruction == WebsocketMessageInstruction.HELLO:
|
||||
cache.set(self.outpost.state_cache_prefix("health"), time(), timeout=60)
|
||||
if "version" in msg.args:
|
||||
cache.set(
|
||||
self.outpost.state_cache_prefix("version"), msg.args["version"]
|
||||
)
|
||||
state.version = msg.args.get("version", None)
|
||||
elif msg.instruction == WebsocketMessageInstruction.ACK:
|
||||
return
|
||||
state.save(timeout=OUTPOST_HELLO_INTERVAL * 1.5)
|
||||
|
||||
response = WebsocketMessage(instruction=WebsocketMessageInstruction.ACK)
|
||||
self.send_json(asdict(response))
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def event_update(self, event):
|
||||
"""Event handler which is called by post_save signals"""
|
||||
"""Event handler which is called by post_save signals, Send update instruction"""
|
||||
self.send_json(
|
||||
asdict(
|
||||
WebsocketMessage(instruction=WebsocketMessageInstruction.TRIGGER_UPDATE)
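The consumer above deserialises incoming frames with `dacite.from_dict` and answers with `asdict`. A standalone sketch of that round-trip; the real `WebsocketMessage`/`WebsocketMessageInstruction` definitions are outside this excerpt, so the field layout and enum values below are reconstructed from usage and should be treated as assumptions:

```python
# Sketch of the dict <-> dataclass round-trip used by the outpost consumer.
from dataclasses import asdict, dataclass, field
from enum import IntEnum
from typing import Any, Dict

from dacite import Config, from_dict


class WebsocketMessageInstruction(IntEnum):
    ACK = 0  # numbering assumed
    HELLO = 1
    TRIGGER_UPDATE = 2


@dataclass
class WebsocketMessage:
    instruction: WebsocketMessageInstruction
    args: Dict[str, Any] = field(default_factory=dict)


raw = {"instruction": 1, "args": {"version": "0.12.3"}}  # incoming frame
msg = from_dict(
    WebsocketMessage, raw, config=Config(cast=[WebsocketMessageInstruction])
)
reply = asdict(WebsocketMessage(instruction=WebsocketMessageInstruction.ACK))
print(msg.instruction, reply)
```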
@@ -1,11 +1,17 @@
"""Base Controller"""
from typing import Dict
from typing import Dict, List

from structlog import get_logger
from structlog.testing import capture_logs

from passbook.lib.sentry import SentryIgnoredException
from passbook.outposts.models import Outpost


class ControllerException(SentryIgnoredException):
    """Exception raise when anything fails during controller run"""


class BaseController:
    """Base Outpost deployment controller"""

@@ -13,17 +19,26 @@ class BaseController:

    outpost: Outpost

    def __init__(self, outpost_pk: str):
        self.outpost = Outpost.objects.get(pk=outpost_pk)
        self.logger = get_logger(
            controller=self.__class__.__name__, outpost=self.outpost
        )
    def __init__(self, outpost: Outpost):
        self.outpost = outpost
        self.logger = get_logger()
        self.deployment_ports = {}

    def run(self):
    # pylint: disable=invalid-name
    def up(self):
        """Called by scheduled task to reconcile deployment/service/etc"""
        raise NotImplementedError

    def up_with_logs(self) -> List[str]:
        """Call .up() but capture all log output and return it."""
        with capture_logs() as logs:
            self.up()
        return [x["event"] for x in logs]

    def down(self):
        """Handler to delete everything we've created"""
        raise NotImplementedError

    def get_static_deployment(self) -> str:
        """Return a static deployment configuration"""
        raise NotImplementedError
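The new `up_with_logs` helper relies on structlog's `capture_logs` context manager; each captured entry is a dict whose `"event"` key holds the message. A minimal standalone illustration (the logged events are made up):

```python
# Illustration of the capture pattern behind BaseController.up_with_logs.
from structlog import get_logger
from structlog.testing import capture_logs

logger = get_logger()

with capture_logs() as logs:
    logger.info("creating deployment")      # stand-in reconciler output
    logger.info("deployment up to date")

messages = [entry["event"] for entry in logs]
print(messages)  # ['creating deployment', 'deployment up to date']
```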
@ -1,13 +1,16 @@
|
||||
"""Docker controller"""
|
||||
from time import sleep
|
||||
from typing import Dict, Tuple
|
||||
|
||||
from django.conf import settings
|
||||
from docker import DockerClient, from_env
|
||||
from docker.errors import NotFound
|
||||
from docker.errors import DockerException, NotFound
|
||||
from docker.models.containers import Container
|
||||
from yaml import safe_dump
|
||||
|
||||
from passbook import __version__
|
||||
from passbook.outposts.controllers.base import BaseController
|
||||
from passbook.outposts.controllers.base import BaseController, ControllerException
|
||||
from passbook.outposts.models import Outpost
|
||||
|
||||
|
||||
class DockerController(BaseController):
|
||||
@ -19,15 +22,18 @@ class DockerController(BaseController):
|
||||
|
||||
image_base = "beryju/passbook"
|
||||
|
||||
def __init__(self, outpost_pk: str) -> None:
|
||||
super().__init__(outpost_pk)
|
||||
def __init__(self, outpost: Outpost) -> None:
|
||||
super().__init__(outpost)
|
||||
self.client = from_env()
|
||||
|
||||
def _get_labels(self) -> Dict[str, str]:
|
||||
return {}
|
||||
|
||||
def _get_env(self) -> Dict[str, str]:
|
||||
return {
|
||||
"PASSBOOK_HOST": self.outpost.config.passbook_host,
|
||||
"PASSBOOK_INSECURE": str(self.outpost.config.passbook_host_insecure),
|
||||
"PASSBOOK_TOKEN": self.outpost.token.token_uuid.hex,
|
||||
"PASSBOOK_TOKEN": self.outpost.token.key,
|
||||
}
|
||||
|
||||
def _comp_env(self, container: Container) -> bool:
|
||||
@ -57,48 +63,68 @@ class DockerController(BaseController):
|
||||
detach=True,
|
||||
ports={x: x for _, x in self.deployment_ports.items()},
|
||||
environment=self._get_env(),
|
||||
network_mode="host" if settings.TEST else "bridge",
|
||||
labels=self._get_labels(),
|
||||
),
|
||||
True,
|
||||
)
|
||||
|
||||
def run(self):
|
||||
container, has_been_created = self._get_container()
|
||||
if has_been_created:
|
||||
return None
|
||||
# Check if the container is out of date, delete it and retry
|
||||
if len(container.image.tags) > 0:
|
||||
tag: str = container.image.tags[0]
|
||||
_, _, version = tag.partition(":")
|
||||
if version != __version__:
|
||||
self.logger.info(
|
||||
"Container has mismatched version, re-creating...",
|
||||
has=version,
|
||||
should=__version__,
|
||||
)
|
||||
def up(self):
|
||||
try:
|
||||
container, has_been_created = self._get_container()
|
||||
# Check if the container is out of date, delete it and retry
|
||||
if len(container.image.tags) > 0:
|
||||
tag: str = container.image.tags[0]
|
||||
_, _, version = tag.partition(":")
|
||||
if version != __version__:
|
||||
self.logger.info(
|
||||
"Container has mismatched version, re-creating...",
|
||||
has=version,
|
||||
should=__version__,
|
||||
)
|
||||
container.kill()
|
||||
container.remove(force=True)
|
||||
return self.up()
|
||||
# Check that container values match our values
|
||||
if self._comp_env(container):
|
||||
self.logger.info("Container has outdated config, re-creating...")
|
||||
container.kill()
|
||||
container.remove(force=True)
|
||||
return self.run()
|
||||
# Check that container values match our values
|
||||
if self._comp_env(container):
|
||||
self.logger.info("Container has outdated config, re-creating...")
|
||||
return self.up()
|
||||
# Check that container is healthy
|
||||
if (
|
||||
container.status == "running"
|
||||
and container.attrs.get("State", {}).get("Health", {}).get("Status", "")
|
||||
!= "healthy"
|
||||
):
|
||||
# At this point we know the config is correct, but the container isn't healthy,
|
||||
# so we just restart it with the same config
|
||||
if has_been_created:
|
||||
# Since we've just created the container, give it some time to start.
|
||||
# If its still not up by then, restart it
|
||||
self.logger.info(
|
||||
"Container is unhealthy and new, giving it time to boot."
|
||||
)
|
||||
sleep(60)
|
||||
self.logger.info("Container is unhealthy, restarting...")
|
||||
container.restart()
|
||||
return None
|
||||
# Check that container is running
|
||||
if container.status != "running":
|
||||
self.logger.info("Container is not running, restarting...")
|
||||
container.start()
|
||||
return None
|
||||
return None
|
||||
except DockerException as exc:
|
||||
raise ControllerException from exc
|
||||
|
||||
def down(self):
|
||||
try:
|
||||
container, _ = self._get_container()
|
||||
container.kill()
|
||||
container.remove(force=True)
|
||||
return self.run()
|
||||
# Check that container is healthy
|
||||
if (
|
||||
container.status == "running"
|
||||
and container.attrs.get("State", {}).get("Health", {}).get("Status", "")
|
||||
!= "healthy"
|
||||
):
|
||||
# At this point we know the config is correct, but the container isn't healthy,
|
||||
# so we just restart it with the same config
|
||||
self.logger.info("Container is unhealthy, restarting...")
|
||||
container.restart()
|
||||
# Check that container is running
|
||||
if container.status != "running":
|
||||
self.logger.info("Container is not running, restarting...")
|
||||
container.start()
|
||||
return None
|
||||
container.remove()
|
||||
except DockerException as exc:
|
||||
raise ControllerException from exc
|
||||
|
||||
def get_static_deployment(self) -> str:
|
||||
"""Generate docker-compose yaml for proxy, version 3.5"""
|
||||
@ -114,7 +140,7 @@ class DockerController(BaseController):
|
||||
"PASSBOOK_INSECURE": str(
|
||||
self.outpost.config.passbook_host_insecure
|
||||
),
|
||||
"PASSBOOK_TOKEN": self.outpost.token.token_uuid.hex,
|
||||
"PASSBOOK_TOKEN": self.outpost.token.key,
|
||||
},
|
||||
}
|
||||
},
|
||||
passbook/outposts/controllers/k8s/__init__.py (new file, empty)
passbook/outposts/controllers/k8s/base.py (new file, 126 lines)
@@ -0,0 +1,126 @@
|
||||
"""Base Kubernetes Reconciler"""
|
||||
from typing import TYPE_CHECKING, Generic, TypeVar
|
||||
|
||||
from kubernetes.client import V1ObjectMeta
|
||||
from kubernetes.client.rest import ApiException
|
||||
from structlog import get_logger
|
||||
|
||||
from passbook import __version__
|
||||
from passbook.lib.sentry import SentryIgnoredException
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from passbook.outposts.controllers.kubernetes import KubernetesController
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class ReconcileTrigger(SentryIgnoredException):
|
||||
"""Base trigger raised by child classes to notify us"""
|
||||
|
||||
|
||||
class NeedsRecreate(ReconcileTrigger):
|
||||
"""Exception to trigger a complete recreate of the Kubernetes Object"""
|
||||
|
||||
|
||||
class NeedsUpdate(ReconcileTrigger):
|
||||
"""Exception to trigger an update to the Kubernetes Object"""
|
||||
|
||||
|
||||
class KubernetesObjectReconciler(Generic[T]):
|
||||
"""Base Kubernetes Reconciler, handles the basic logic."""
|
||||
|
||||
controller: "KubernetesController"
|
||||
|
||||
def __init__(self, controller: "KubernetesController"):
|
||||
self.controller = controller
|
||||
self.namespace = controller.outpost.config.kubernetes_namespace
|
||||
self.logger = get_logger()
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Get the name of the object this reconciler manages"""
|
||||
raise NotImplementedError
|
||||
|
||||
def up(self):
|
||||
"""Create object if it doesn't exist, update if needed or recreate if needed."""
|
||||
current = None
|
||||
reference = self.get_reference_object()
|
||||
try:
|
||||
try:
|
||||
current = self.retrieve()
|
||||
except ApiException as exc:
|
||||
if exc.status == 404:
|
||||
self.logger.debug("Failed to get current, triggering recreate")
|
||||
raise NeedsRecreate from exc
|
||||
self.logger.debug("Other unhandled error", exc=exc)
|
||||
raise exc
|
||||
else:
|
||||
self.logger.debug("Got current, running reconcile")
|
||||
self.reconcile(current, reference)
|
||||
except NeedsRecreate:
|
||||
self.logger.debug("Recreate requested")
|
||||
if current:
|
||||
self.logger.debug("Deleted old")
|
||||
self.delete(current)
|
||||
else:
|
||||
self.logger.debug("No old found, creating")
|
||||
self.logger.debug("Created")
|
||||
self.create(reference)
|
||||
except NeedsUpdate:
|
||||
self.logger.debug("Updating")
|
||||
self.update(current, reference)
|
||||
else:
|
||||
self.logger.debug("Nothing to do...")
|
||||
|
||||
def down(self):
|
||||
"""Delete object if found"""
|
||||
try:
|
||||
current = self.retrieve()
|
||||
self.delete(current)
|
||||
self.logger.debug("Removing")
|
||||
except ApiException as exc:
|
||||
if exc.status == 404:
|
||||
self.logger.debug("Failed to get current, assuming non-existant")
|
||||
return
|
||||
self.logger.debug("Other unhandled error", exc=exc)
|
||||
raise exc
|
||||
|
||||
def get_reference_object(self) -> T:
|
||||
"""Return object as it should be"""
|
||||
raise NotImplementedError
|
||||
|
||||
def reconcile(self, current: T, reference: T):
|
||||
"""Check what operations should be done, should be raised as
|
||||
ReconcileTrigger"""
|
||||
raise NotImplementedError
|
||||
|
||||
def create(self, reference: T):
|
||||
"""API Wrapper to create object"""
|
||||
raise NotImplementedError
|
||||
|
||||
def retrieve(self) -> T:
|
||||
"""API Wrapper to retrive object"""
|
||||
raise NotImplementedError
|
||||
|
||||
def delete(self, reference: T):
|
||||
"""API Wrapper to delete object"""
|
||||
raise NotImplementedError
|
||||
|
||||
def update(self, current: T, reference: T):
|
||||
"""API Wrapper to update object"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_object_meta(self, **kwargs) -> V1ObjectMeta:
|
||||
"""Get common object metadata"""
|
||||
return V1ObjectMeta(
|
||||
namespace=self.namespace,
|
||||
labels={
|
||||
"app.kubernetes.io/name": f"passbook-{self.controller.outpost.type.lower()}",
|
||||
"app.kubernetes.io/instance": self.controller.outpost.name,
|
||||
"app.kubernetes.io/version": __version__,
|
||||
"app.kubernetes.io/managed-by": "passbook.beryju.org",
|
||||
"passbook.beryju.org/outpost-uuid": self.controller.outpost.uuid.hex,
|
||||
},
|
||||
**kwargs,
|
||||
)
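To make the reconciler contract above concrete, here is a hypothetical minimal subclass. It follows the same shape as the Secret/Service reconcilers in the following files, but this exact class is not part of the changeset:

```python
# Hypothetical example of a KubernetesObjectReconciler subclass; illustrative
# only, modelled on the reconcilers added below.
from typing import TYPE_CHECKING

from kubernetes.client import CoreV1Api, V1ConfigMap

from passbook.outposts.controllers.k8s.base import (
    KubernetesObjectReconciler,
    NeedsUpdate,
)

if TYPE_CHECKING:
    from passbook.outposts.controllers.kubernetes import KubernetesController


class ConfigMapReconciler(KubernetesObjectReconciler[V1ConfigMap]):
    """Illustrative ConfigMap reconciler"""

    def __init__(self, controller: "KubernetesController") -> None:
        super().__init__(controller)
        self.api = CoreV1Api()

    @property
    def name(self) -> str:
        return f"passbook-outpost-{self.controller.outpost.uuid.hex}-config"

    def get_reference_object(self) -> V1ConfigMap:
        return V1ConfigMap(
            metadata=self.get_object_meta(name=self.name),
            data={"log_level": "info"},  # illustrative payload
        )

    def reconcile(self, current: V1ConfigMap, reference: V1ConfigMap):
        if current.data != reference.data:
            raise NeedsUpdate()

    def create(self, reference: V1ConfigMap):
        return self.api.create_namespaced_config_map(self.namespace, reference)

    def retrieve(self) -> V1ConfigMap:
        return self.api.read_namespaced_config_map(self.name, self.namespace)

    def delete(self, reference: V1ConfigMap):
        return self.api.delete_namespaced_config_map(
            reference.metadata.name, self.namespace
        )

    def update(self, current: V1ConfigMap, reference: V1ConfigMap):
        return self.api.patch_namespaced_config_map(
            current.metadata.name, self.namespace, reference
        )
```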
passbook/outposts/controllers/k8s/deployment.py (new file, 134 lines)
@@ -0,0 +1,134 @@
|
||||
"""Kubernetes Deployment Reconciler"""
|
||||
from typing import TYPE_CHECKING, Dict
|
||||
|
||||
from kubernetes.client import (
|
||||
AppsV1Api,
|
||||
V1Container,
|
||||
V1ContainerPort,
|
||||
V1Deployment,
|
||||
V1DeploymentSpec,
|
||||
V1EnvVar,
|
||||
V1EnvVarSource,
|
||||
V1LabelSelector,
|
||||
V1ObjectMeta,
|
||||
V1PodSpec,
|
||||
V1PodTemplateSpec,
|
||||
V1SecretKeySelector,
|
||||
)
|
||||
|
||||
from passbook import __version__
|
||||
from passbook.outposts.controllers.k8s.base import (
|
||||
KubernetesObjectReconciler,
|
||||
NeedsUpdate,
|
||||
)
|
||||
from passbook.outposts.models import Outpost
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from passbook.outposts.controllers.kubernetes import KubernetesController
|
||||
|
||||
|
||||
class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
|
||||
"""Kubernetes Deployment Reconciler"""
|
||||
|
||||
image_base = "beryju/passbook"
|
||||
|
||||
outpost: Outpost
|
||||
|
||||
def __init__(self, controller: "KubernetesController") -> None:
|
||||
super().__init__(controller)
|
||||
self.api = AppsV1Api()
|
||||
self.outpost = self.controller.outpost
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return f"passbook-outpost-{self.controller.outpost.uuid.hex}"
|
||||
|
||||
def reconcile(self, current: V1Deployment, reference: V1Deployment):
|
||||
if current.spec.replicas != reference.spec.replicas:
|
||||
raise NeedsUpdate()
|
||||
if (
|
||||
current.spec.template.spec.containers[0].image
|
||||
!= reference.spec.template.spec.containers[0].image
|
||||
):
|
||||
raise NeedsUpdate()
|
||||
|
||||
def get_pod_meta(self) -> Dict[str, str]:
|
||||
"""Get common object metadata"""
|
||||
return {
|
||||
"app.kubernetes.io/name": "passbook-outpost",
|
||||
"app.kubernetes.io/managed-by": "passbook.beryju.org",
|
||||
"passbook.beryju.org/outpost-uuid": self.controller.outpost.uuid.hex,
|
||||
}
|
||||
|
||||
def get_reference_object(self) -> V1Deployment:
|
||||
"""Get deployment object for outpost"""
|
||||
# Generate V1ContainerPort objects
|
||||
container_ports = []
|
||||
for port_name, port in self.controller.deployment_ports.items():
|
||||
container_ports.append(V1ContainerPort(container_port=port, name=port_name))
|
||||
meta = self.get_object_meta(name=self.name)
|
||||
secret_name = f"passbook-outpost-{self.controller.outpost.uuid.hex}-api"
|
||||
return V1Deployment(
|
||||
metadata=meta,
|
||||
spec=V1DeploymentSpec(
|
||||
replicas=self.outpost.config.kubernetes_replicas,
|
||||
selector=V1LabelSelector(match_labels=self.get_pod_meta()),
|
||||
template=V1PodTemplateSpec(
|
||||
metadata=V1ObjectMeta(labels=self.get_pod_meta()),
|
||||
spec=V1PodSpec(
|
||||
containers=[
|
||||
V1Container(
|
||||
name=str(self.outpost.type),
|
||||
image=f"{self.image_base}-{self.outpost.type}:{__version__}",
|
||||
ports=container_ports,
|
||||
env=[
|
||||
V1EnvVar(
|
||||
name="PASSBOOK_HOST",
|
||||
value_from=V1EnvVarSource(
|
||||
secret_key_ref=V1SecretKeySelector(
|
||||
name=secret_name,
|
||||
key="passbook_host",
|
||||
)
|
||||
),
|
||||
),
|
||||
V1EnvVar(
|
||||
name="PASSBOOK_TOKEN",
|
||||
value_from=V1EnvVarSource(
|
||||
secret_key_ref=V1SecretKeySelector(
|
||||
name=secret_name,
|
||||
key="token",
|
||||
)
|
||||
),
|
||||
),
|
||||
V1EnvVar(
|
||||
name="PASSBOOK_INSECURE",
|
||||
value_from=V1EnvVarSource(
|
||||
secret_key_ref=V1SecretKeySelector(
|
||||
name=secret_name,
|
||||
key="passbook_host_insecure",
|
||||
)
|
||||
),
|
||||
),
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
def create(self, reference: V1Deployment):
|
||||
return self.api.create_namespaced_deployment(self.namespace, reference)
|
||||
|
||||
def delete(self, reference: V1Deployment):
|
||||
return self.api.delete_namespaced_deployment(
|
||||
reference.metadata.name, self.namespace
|
||||
)
|
||||
|
||||
def retrieve(self) -> V1Deployment:
|
||||
return self.api.read_namespaced_deployment(self.name, self.namespace)
|
||||
|
||||
def update(self, current: V1Deployment, reference: V1Deployment):
|
||||
return self.api.patch_namespaced_deployment(
|
||||
current.metadata.name, self.namespace, reference
|
||||
)
|
||||
passbook/outposts/controllers/k8s/secret.py (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
"""Kubernetes Secret Reconciler"""
|
||||
from base64 import b64encode
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from kubernetes.client import CoreV1Api, V1Secret
|
||||
|
||||
from passbook.outposts.controllers.k8s.base import (
|
||||
KubernetesObjectReconciler,
|
||||
NeedsUpdate,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from passbook.outposts.controllers.kubernetes import KubernetesController
|
||||
|
||||
|
||||
def b64string(source: str) -> str:
|
||||
"""Base64 Encode string"""
|
||||
return b64encode(source.encode()).decode("utf-8")
|
||||
|
||||
|
||||
class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
|
||||
"""Kubernetes Secret Reconciler"""
|
||||
|
||||
def __init__(self, controller: "KubernetesController") -> None:
|
||||
super().__init__(controller)
|
||||
self.api = CoreV1Api()
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return f"passbook-outpost-{self.controller.outpost.uuid.hex}-api"
|
||||
|
||||
def reconcile(self, current: V1Secret, reference: V1Secret):
|
||||
for key in reference.data.keys():
|
||||
if current.data[key] != reference.data[key]:
|
||||
raise NeedsUpdate()
|
||||
|
||||
def get_reference_object(self) -> V1Secret:
|
||||
"""Get deployment object for outpost"""
|
||||
meta = self.get_object_meta(name=self.name)
|
||||
return V1Secret(
|
||||
metadata=meta,
|
||||
data={
|
||||
"passbook_host": b64string(
|
||||
self.controller.outpost.config.passbook_host
|
||||
),
|
||||
"passbook_host_insecure": b64string(
|
||||
str(self.controller.outpost.config.passbook_host_insecure)
|
||||
),
|
||||
"token": b64string(self.controller.outpost.token.token_uuid.hex),
|
||||
},
|
||||
)
|
||||
|
||||
def create(self, reference: V1Secret):
|
||||
return self.api.create_namespaced_secret(self.namespace, reference)
|
||||
|
||||
def delete(self, reference: V1Secret):
|
||||
return self.api.delete_namespaced_secret(
|
||||
reference.metadata.name, self.namespace
|
||||
)
|
||||
|
||||
def retrieve(self) -> V1Secret:
|
||||
return self.api.read_namespaced_secret(self.name, self.namespace)
|
||||
|
||||
def update(self, current: V1Secret, reference: V1Secret):
|
||||
return self.api.patch_namespaced_secret(
|
||||
current.metadata.name, self.namespace, reference
|
||||
)
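The `b64string` helper exists because the Kubernetes API expects `V1Secret.data` values to be supplied as base64-encoded strings. A quick round-trip for reference:

```python
# Round-trip of the b64string helper used by SecretReconciler.
from base64 import b64decode, b64encode


def b64string(source: str) -> str:
    """Base64 encode a string (as in secret.py above)"""
    return b64encode(source.encode()).decode("utf-8")


encoded = b64string("https://passbook.example.com")
assert b64decode(encoded).decode() == "https://passbook.example.com"
```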
passbook/outposts/controllers/k8s/service.py (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
"""Kubernetes Service Reconciler"""
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from kubernetes.client import CoreV1Api, V1Service, V1ServicePort, V1ServiceSpec
|
||||
|
||||
from passbook.outposts.controllers.k8s.base import (
|
||||
KubernetesObjectReconciler,
|
||||
NeedsUpdate,
|
||||
)
|
||||
from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from passbook.outposts.controllers.kubernetes import KubernetesController
|
||||
|
||||
|
||||
class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
|
||||
"""Kubernetes Service Reconciler"""
|
||||
|
||||
def __init__(self, controller: "KubernetesController") -> None:
|
||||
super().__init__(controller)
|
||||
self.api = CoreV1Api()
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return f"passbook-outpost-{self.controller.outpost.uuid.hex}"
|
||||
|
||||
def reconcile(self, current: V1Service, reference: V1Service):
|
||||
if len(current.spec.ports) != len(reference.spec.ports):
|
||||
raise NeedsUpdate()
|
||||
for port in reference.spec.ports:
|
||||
if port not in current.spec.ports:
|
||||
raise NeedsUpdate()
|
||||
|
||||
def get_reference_object(self) -> V1Service:
|
||||
"""Get deployment object for outpost"""
|
||||
meta = self.get_object_meta(name=self.name)
|
||||
ports = []
|
||||
for port_name, port in self.controller.deployment_ports.items():
|
||||
ports.append(V1ServicePort(name=port_name, port=port))
|
||||
selector_labels = DeploymentReconciler(self.controller).get_pod_meta()
|
||||
return V1Service(
|
||||
metadata=meta,
|
||||
spec=V1ServiceSpec(ports=ports, selector=selector_labels, type="ClusterIP"),
|
||||
)
|
||||
|
||||
def create(self, reference: V1Service):
|
||||
return self.api.create_namespaced_service(self.namespace, reference)
|
||||
|
||||
def delete(self, reference: V1Service):
|
||||
return self.api.delete_namespaced_service(
|
||||
reference.metadata.name, self.namespace
|
||||
)
|
||||
|
||||
def retrieve(self) -> V1Service:
|
||||
return self.api.read_namespaced_service(self.name, self.namespace)
|
||||
|
||||
def update(self, current: V1Service, reference: V1Service):
|
||||
return self.api.patch_namespaced_service(
|
||||
current.metadata.name, self.namespace, reference
|
||||
)
|
||||
@ -1,156 +1,80 @@
|
||||
"""Kubernetes deployment controller"""
|
||||
from base64 import b64encode
|
||||
from io import StringIO
|
||||
from typing import Dict, List, Type
|
||||
|
||||
from kubernetes.client import (
|
||||
V1Container,
|
||||
V1ContainerPort,
|
||||
V1Deployment,
|
||||
V1DeploymentSpec,
|
||||
V1EnvVar,
|
||||
V1EnvVarSource,
|
||||
V1LabelSelector,
|
||||
V1ObjectMeta,
|
||||
V1PodSpec,
|
||||
V1PodTemplateSpec,
|
||||
V1Secret,
|
||||
V1SecretKeySelector,
|
||||
V1Service,
|
||||
V1ServicePort,
|
||||
V1ServiceSpec,
|
||||
)
|
||||
from kubernetes.client import OpenApiException
|
||||
from kubernetes.config import load_incluster_config, load_kube_config
|
||||
from kubernetes.config.config_exception import ConfigException
|
||||
from structlog.testing import capture_logs
|
||||
from yaml import dump_all
|
||||
|
||||
from passbook import __version__
|
||||
from passbook.outposts.controllers.base import BaseController
|
||||
|
||||
|
||||
def b64encode_str(input_string: str) -> str:
|
||||
"""base64 encode string"""
|
||||
return b64encode(input_string.encode()).decode()
|
||||
from passbook.outposts.controllers.base import BaseController, ControllerException
|
||||
from passbook.outposts.controllers.k8s.base import KubernetesObjectReconciler
|
||||
from passbook.outposts.controllers.k8s.deployment import DeploymentReconciler
|
||||
from passbook.outposts.controllers.k8s.secret import SecretReconciler
|
||||
from passbook.outposts.controllers.k8s.service import ServiceReconciler
|
||||
from passbook.outposts.models import Outpost
|
||||
|
||||
|
||||
class KubernetesController(BaseController):
|
||||
"""Manage deployment of outpost in kubernetes"""
|
||||
|
||||
image_base = "beryju/passbook"
|
||||
reconcilers: Dict[str, Type[KubernetesObjectReconciler]]
|
||||
reconcile_order: List[str]
|
||||
|
||||
def run(self):
|
||||
"""Called by scheduled task to reconcile deployment/service/etc"""
|
||||
# TODO
|
||||
def __init__(self, outpost: Outpost) -> None:
|
||||
super().__init__(outpost)
|
||||
try:
|
||||
load_incluster_config()
|
||||
except ConfigException:
|
||||
load_kube_config()
|
||||
self.reconcilers = {
|
||||
"secret": SecretReconciler,
|
||||
"deployment": DeploymentReconciler,
|
||||
"service": ServiceReconciler,
|
||||
}
|
||||
self.reconcile_order = ["secret", "deployment", "service"]
|
||||
|
||||
def up(self):
|
||||
try:
|
||||
for reconcile_key in self.reconcile_order:
|
||||
reconciler = self.reconcilers[reconcile_key](self)
|
||||
reconciler.up()
|
||||
|
||||
except OpenApiException as exc:
|
||||
raise ControllerException from exc
|
||||
|
||||
def up_with_logs(self) -> List[str]:
|
||||
try:
|
||||
all_logs = []
|
||||
for reconcile_key in self.reconcile_order:
|
||||
with capture_logs() as logs:
|
||||
reconciler = self.reconcilers[reconcile_key](self)
|
||||
reconciler.up()
|
||||
all_logs += [f"{reconcile_key.title()}: {x['event']}" for x in logs]
|
||||
return all_logs
|
||||
except OpenApiException as exc:
|
||||
raise ControllerException from exc
|
||||
|
||||
def down(self):
|
||||
try:
|
||||
for reconcile_key in self.reconcile_order:
|
||||
reconciler = self.reconcilers[reconcile_key](self)
|
||||
reconciler.down()
|
||||
|
||||
except OpenApiException as exc:
|
||||
raise ControllerException from exc
|
||||
|
||||
def get_static_deployment(self) -> str:
|
||||
documents = []
|
||||
for reconcile_key in self.reconcile_order:
|
||||
reconciler = self.reconcilers[reconcile_key](self)
|
||||
documents.append(reconciler.get_reference_object().to_dict())
|
||||
|
||||
with StringIO() as _str:
|
||||
dump_all(
|
||||
[
|
||||
self.get_deployment_secret().to_dict(),
|
||||
self.get_deployment().to_dict(),
|
||||
self.get_service().to_dict(),
|
||||
],
|
||||
documents,
|
||||
stream=_str,
|
||||
default_flow_style=False,
|
||||
)
|
||||
return _str.getvalue()
|
||||
|
||||
def get_object_meta(self, **kwargs) -> V1ObjectMeta:
|
||||
"""Get common object metadata"""
|
||||
return V1ObjectMeta(
|
||||
namespace="self.instance.namespace",
|
||||
labels={
|
||||
"app.kubernetes.io/name": f"passbook-{self.outpost.type.lower()}",
|
||||
"app.kubernetes.io/instance": self.outpost.name,
|
||||
"app.kubernetes.io/version": __version__,
|
||||
"app.kubernetes.io/managed-by": "passbook.beryju.org",
|
||||
"passbook.beryju.org/outpost/uuid": self.outpost.uuid.hex,
|
||||
},
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def get_deployment_secret(self) -> V1Secret:
|
||||
"""Get secret with token and passbook host"""
|
||||
return V1Secret(
|
||||
api_version="v1",
|
||||
kind="secret",
|
||||
type="Opaque",
|
||||
metadata=self.get_object_meta(
|
||||
name=f"passbook-outpost-{self.outpost.name}-api"
|
||||
),
|
||||
data={
|
||||
"passbook_host": b64encode_str(self.outpost.config.passbook_host),
|
||||
"passbook_host_insecure": b64encode_str(
|
||||
str(self.outpost.config.passbook_host_insecure)
|
||||
),
|
||||
"token": b64encode_str(self.outpost.token.token_uuid.hex),
|
||||
},
|
||||
)
|
||||
|
||||
def get_service(self) -> V1Service:
|
||||
"""Get service object for outpost based on ports defined"""
|
||||
meta = self.get_object_meta(name=f"passbook-outpost-{self.outpost.name}")
|
||||
ports = []
|
||||
for port_name, port in self.deployment_ports.items():
|
||||
ports.append(V1ServicePort(name=port_name, port=port))
|
||||
return V1Service(
|
||||
api_version="v1",
|
||||
kind="service",
|
||||
metadata=meta,
|
||||
spec=V1ServiceSpec(ports=ports, selector=meta.labels, type="ClusterIP"),
|
||||
)
|
||||
|
||||
def get_deployment(self) -> V1Deployment:
|
||||
"""Get deployment object for outpost"""
|
||||
# Generate V1ContainerPort objects
|
||||
container_ports = []
|
||||
for port_name, port in self.deployment_ports.items():
|
||||
container_ports.append(V1ContainerPort(container_port=port, name=port_name))
|
||||
meta = self.get_object_meta(name=f"passbook-outpost-{self.outpost.name}")
|
||||
return V1Deployment(
|
||||
api_version="apps/v1",
|
||||
kind="deployment",
|
||||
metadata=meta,
|
||||
spec=V1DeploymentSpec(
|
||||
replicas=1,
|
||||
selector=V1LabelSelector(match_labels=meta.labels),
|
||||
template=V1PodTemplateSpec(
|
||||
metadata=V1ObjectMeta(labels=meta.labels),
|
||||
spec=V1PodSpec(
|
||||
containers=[
|
||||
V1Container(
|
||||
name=self.outpost.type,
|
||||
image=f"{self.image_base}-{self.outpost.type}:{__version__}",
|
||||
ports=container_ports,
|
||||
env=[
|
||||
V1EnvVar(
|
||||
name="PASSBOOK_HOST",
|
||||
value_from=V1EnvVarSource(
|
||||
secret_key_ref=V1SecretKeySelector(
|
||||
name=f"passbook-outpost-{self.outpost.name}-api",
|
||||
key="passbook_host",
|
||||
)
|
||||
),
|
||||
),
|
||||
V1EnvVar(
|
||||
name="PASSBOOK_TOKEN",
|
||||
value_from=V1EnvVarSource(
|
||||
secret_key_ref=V1SecretKeySelector(
|
||||
name=f"passbook-outpost-{self.outpost.name}-api",
|
||||
key="token",
|
||||
)
|
||||
),
|
||||
),
|
||||
V1EnvVar(
|
||||
name="PASSBOOK_INSECURE",
|
||||
value_from=V1EnvVarSource(
|
||||
secret_key_ref=V1SecretKeySelector(
|
||||
name=f"passbook-outpost-{self.outpost.name}-api",
|
||||
key="passbook_host_insecure",
|
||||
)
|
||||
),
|
||||
),
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
passbook/outposts/migrations/0007_remove_outpost_channels.py (new file, 17 lines)
@@ -0,0 +1,17 @@
# Generated by Django 3.1.2 on 2020-10-14 08:32

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("passbook_outposts", "0006_auto_20201003_2239"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="outpost",
            name="channels",
        ),
    ]
passbook/outposts/migrations/0008_auto_20201014_1547.py (new file, 26 lines)
@@ -0,0 +1,26 @@
# Generated by Django 3.1.2 on 2020-10-14 15:47

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("passbook_outposts", "0007_remove_outpost_channels"),
    ]

    operations = [
        migrations.AlterField(
            model_name="outpost",
            name="deployment_type",
            field=models.TextField(
                choices=[
                    ("kubernetes", "Kubernetes"),
                    ("docker", "Docker"),
                    ("custom", "Custom"),
                ],
                default="custom",
                help_text="Select between passbook-managed deployment types or a custom deployment.",
            ),
        ),
    ]
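The migration above makes `kubernetes` a selectable deployment type again. The lookup from deployment type to controller class is not shown in this excerpt; a plausible dispatch sketch (the import path of `DockerController` and the mapping itself are assumptions):

```python
# Assumed dispatch from deployment type to controller; illustrative only.
from typing import Optional

from passbook.outposts.controllers.base import BaseController
from passbook.outposts.controllers.docker import DockerController  # path assumed
from passbook.outposts.controllers.kubernetes import KubernetesController
from passbook.outposts.models import Outpost, OutpostDeploymentType

CONTROLLERS = {
    OutpostDeploymentType.KUBERNETES: KubernetesController,
    OutpostDeploymentType.DOCKER: DockerController,
    # OutpostDeploymentType.CUSTOM is deployed manually, no controller.
}


def controller_for(outpost: Outpost) -> Optional[BaseController]:
    """Return an instantiated controller for managed deployment types."""
    controller_class = CONTROLLERS.get(OutpostDeploymentType(outpost.deployment_type))
    return controller_class(outpost) if controller_class else None
```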
@@ -0,0 +1,27 @@
# Generated by Django 3.1.2 on 2020-10-17 14:26

from django.apps.registry import Apps
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor


def fix_missing_token_identifier(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    from passbook.outposts.models import Outpost

    for outpost in Outpost.objects.using(schema_editor.connection.alias).all():
        token = outpost.token
        if token.identifier != outpost.token_identifier:
            token.identifier = outpost.token_identifier
            token.save()


class Migration(migrations.Migration):

    dependencies = [
        ("passbook_core", "0014_auto_20201018_1158"),
        ("passbook_outposts", "0008_auto_20201014_1547"),
    ]

    operations = [
        migrations.RunPython(fix_missing_token_identifier),
    ]
@ -1,20 +1,18 @@
|
||||
"""Outpost models"""
|
||||
from dataclasses import asdict, dataclass
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, Iterable, Optional
|
||||
from typing import Dict, Iterable, List, Optional, Union
|
||||
from uuid import uuid4
|
||||
|
||||
from dacite import from_dict
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.cache import cache
|
||||
from django.db import models, transaction
|
||||
from django.db.models.base import Model
|
||||
from django.http import HttpRequest
|
||||
from django.utils import version
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from guardian.models import UserObjectPermission
|
||||
from guardian.shortcuts import assign_perm
|
||||
from packaging.version import InvalidVersion, parse
|
||||
from packaging.version import LegacyVersion, Version, parse
|
||||
|
||||
from passbook import __version__
|
||||
from passbook.core.models import Provider, Token, TokenIntents, User
|
||||
@ -22,6 +20,7 @@ from passbook.lib.config import CONFIG
|
||||
from passbook.lib.utils.template import render_to_string
|
||||
|
||||
OUR_VERSION = parse(__version__)
|
||||
OUTPOST_HELLO_INTERVAL = 10
|
||||
|
||||
|
||||
@dataclass
|
||||
@ -37,6 +36,11 @@ class OutpostConfig:
|
||||
"error_reporting.environment", "customer"
|
||||
)
|
||||
|
||||
kubernetes_replicas: int = field(default=1)
|
||||
kubernetes_namespace: str = field(default="default")
|
||||
kubernetes_ingress_annotations: Dict[str, str] = field(default_factory=dict)
|
||||
kubernetes_ingress_secret_name: str = field(default="passbook-outpost")
|
||||
|
||||
|
||||
class OutpostModel(Model):
|
||||
"""Base model for providers that need more objects than just themselves"""
|
||||
@ -59,7 +63,7 @@ class OutpostType(models.TextChoices):
|
||||
class OutpostDeploymentType(models.TextChoices):
|
||||
"""Deployment types that are managed through passbook"""
|
||||
|
||||
# KUBERNETES = "kubernetes"
|
||||
KUBERNETES = "kubernetes"
|
||||
DOCKER = "docker"
|
||||
CUSTOM = "custom"
|
||||
|
||||
@ -87,8 +91,6 @@ class Outpost(models.Model):
|
||||
|
||||
providers = models.ManyToManyField(Provider)
|
||||
|
||||
channels = ArrayField(models.TextField(), default=list)
|
||||
|
||||
@property
|
||||
def config(self) -> OutpostConfig:
|
||||
"""Load config as OutpostConfig object"""
|
||||
@ -99,36 +101,15 @@ class Outpost(models.Model):
|
||||
"""Dump config into json"""
|
||||
self._config = asdict(value)
|
||||
|
||||
def state_cache_prefix(self, suffix: str) -> str:
|
||||
@property
|
||||
def state_cache_prefix(self) -> str:
|
||||
"""Key by which the outposts status is saved"""
|
||||
return f"outpost_{self.uuid.hex}_state_{suffix}"
|
||||
return f"outpost_{self.uuid.hex}_state"
|
||||
|
||||
@property
|
||||
def deployment_health(self) -> Optional[datetime]:
|
||||
def state(self) -> List["OutpostState"]:
|
||||
"""Get outpost's health status"""
|
||||
key = self.state_cache_prefix("health")
|
||||
value = cache.get(key, None)
|
||||
if value:
|
||||
return datetime.fromtimestamp(value)
|
||||
return None
|
||||
|
||||
@property
|
||||
def deployment_version(self) -> Dict[str, Any]:
|
||||
"""Get deployed outposts version, and if the version is behind ours.
|
||||
Returns a dict with keys version and outdated."""
|
||||
key = self.state_cache_prefix("version")
|
||||
value = cache.get(key, None)
|
||||
if not value:
|
||||
return {"version": None, "outdated": False, "should": OUR_VERSION}
|
||||
try:
|
||||
outpost_version = parse(value)
|
||||
return {
|
||||
"version": value,
|
||||
"outdated": outpost_version < OUR_VERSION,
|
||||
"should": OUR_VERSION,
|
||||
}
|
||||
except InvalidVersion:
|
||||
return {"version": version, "outdated": False, "should": OUR_VERSION}
|
||||
return OutpostState.for_outpost(self)
|
||||
|
||||
@property
|
||||
def user(self) -> User:
|
||||
@ -189,3 +170,57 @@ class Outpost(models.Model):
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Outpost {self.name}"
|
||||
|
||||
|
||||
@dataclass
|
||||
class OutpostState:
|
||||
"""Outpost instance state, last_seen and version"""
|
||||
|
||||
uid: str
|
||||
last_seen: Optional[datetime] = field(default=None)
|
||||
version: Optional[str] = field(default=None)
|
||||
version_should: Union[Version, LegacyVersion] = field(default=OUR_VERSION)
|
||||
|
||||
_outpost: Optional[Outpost] = field(default=None)
|
||||
|
||||
@property
|
||||
def version_outdated(self) -> bool:
|
||||
"""Check if outpost version matches our version"""
|
||||
if not self.version:
|
||||
return False
|
||||
return parse(self.version) < OUR_VERSION
|
||||
|
||||
@staticmethod
|
||||
def for_outpost(outpost: Outpost) -> List["OutpostState"]:
|
||||
"""Get all states for an outpost"""
|
||||
keys = cache.keys(f"{outpost.state_cache_prefix}_*")
|
||||
states = []
|
||||
for key in keys:
|
||||
channel = key.replace(f"{outpost.state_cache_prefix}_", "")
|
||||
states.append(OutpostState.for_channel(outpost, channel))
|
||||
return states
|
||||
|
||||
@staticmethod
|
||||
def for_channel(outpost: Outpost, channel: str) -> "OutpostState":
|
||||
"""Get state for a single channel"""
|
||||
key = f"{outpost.state_cache_prefix}_{channel}"
|
||||
default_data = {"uid": channel}
|
||||
data = cache.get(key, default_data)
|
||||
if isinstance(data, str):
|
||||
cache.delete(key)
|
||||
data = default_data
|
||||
state = from_dict(OutpostState, data)
|
||||
state.uid = channel
|
||||
# pylint: disable=protected-access
|
||||
state._outpost = outpost
|
||||
return state
|
||||
|
||||
def save(self, timeout=OUTPOST_HELLO_INTERVAL):
|
||||
"""Save current state to cache"""
|
||||
full_key = f"{self._outpost.state_cache_prefix}_{self.uid}"
|
||||
return cache.set(full_key, asdict(self), timeout=timeout)
|
||||
|
||||
def delete(self):
|
||||
"""Manually delete from cache, used on channel disconnect"""
|
||||
full_key = f"{self._outpost.state_cache_prefix}_{self.uid}"
|
||||
cache.delete(full_key)
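With per-channel `OutpostState` replacing the old health/version cache keys, callers can iterate `outpost.state`; a short sketch:

```python
# Sketch: summarise the health of every connected outpost instance.
from passbook.outposts.models import Outpost

for outpost in Outpost.objects.all():
    for state in outpost.state:  # one OutpostState per websocket channel
        print(
            outpost.name,
            state.uid,
            state.last_seen,         # None if the instance never said hello
            state.version_outdated,  # True when its version is behind ours
        )
```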
@@ -2,9 +2,9 @@
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "outposts_k8s": {
        "task": "passbook.outposts.tasks.outpost_controller",
        "schedule": crontab(minute="*/5"),  # Run every 5 minutes
    "outposts_controller": {
        "task": "passbook.outposts.tasks.outpost_controller_all",
        "schedule": crontab(minute="*/5"),
        "options": {"queue": "passbook_scheduled"},
    }
    },
}

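The renamed beat entry follows the standard Celery shape: a dotted task path, a crontab schedule, and routing options. A generic equivalent with placeholder task and queue names would look like:

    from celery.schedules import crontab

    CELERY_BEAT_SCHEDULE = {
        "example_periodic_job": {                  # arbitrary entry name
            "task": "myapp.tasks.do_work",         # placeholder dotted task path
            "schedule": crontab(minute="*/5"),     # every five minutes
            "options": {"queue": "scheduled"},     # route to a dedicated worker queue
        },
    }
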
@@ -1,49 +1,36 @@
"""passbook outpost signals"""
from django.db.models import Model
from django.db.models.signals import post_save
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from structlog import get_logger

from passbook.lib.utils.reflection import class_to_path
from passbook.outposts.models import Outpost, OutpostModel
from passbook.outposts.tasks import outpost_send_update
from passbook.outposts.models import Outpost
from passbook.outposts.tasks import outpost_post_save, outpost_pre_delete

LOGGER = get_logger()


@receiver(post_save, sender=Outpost)
# pylint: disable=unused-argument
def ensure_user_and_token(sender, instance: Model, **_):
    """Ensure that token is created/updated on save"""
    _ = instance.token


@receiver(post_save)
# pylint: disable=unused-argument
def post_save_update(sender, instance: Model, **_):
    """If an OutpostModel, or a model that is somehow connected to an OutpostModel is saved,
    """If an Outpost is saved, ensure that token is created/updated

    If an OutpostModel, or a model that is somehow connected to an OutpostModel is saved,
    we send a message down the relevant OutpostModels WS connection to trigger an update"""
    if isinstance(instance, OutpostModel):
        LOGGER.debug("triggering outpost update from outpostmodel", instance=instance)
        outpost_send_update.delay(class_to_path(instance.__class__), instance.pk)
    if instance.__module__ == "django.db.migrations.recorder":
        return
    if instance.__module__ == "__fake__":
        return
    outpost_post_save.delay(class_to_path(instance.__class__), instance.pk)

    for field in instance._meta.get_fields():
        # Each field is checked if it has a `related_model` attribute (when ForeignKeys or M2Ms)
        # are used, and if it has a value
        if not hasattr(field, "related_model"):
            continue
        if not field.related_model:
            continue
        if not issubclass(field.related_model, OutpostModel):
            continue

        field_name = f"{field.name}_set"
        if not hasattr(instance, field_name):
            continue

        LOGGER.debug("triggering outpost update from field", field=field.name)
        # Because the Outpost Model has an M2M to Provider,
        # we have to iterate over the entire QS
        for reverse in getattr(instance, field_name).all():
            outpost_send_update(class_to_path(reverse.__class__), reverse.pk)
@receiver(pre_delete, sender=Outpost)
# pylint: disable=unused-argument
def pre_delete_cleanup(sender, instance: Outpost, **_):
    """Ensure that Outpost's user is deleted (which will delete the token through cascade)"""
    instance.user.delete()
    # To ensure that deployment is cleaned up *consistently* we call the controller, and wait
    # for it to finish. We don't want to call it in this thread, as we don't have the K8s
    # credentials here
    outpost_pre_delete.delay(instance.pk.hex).get()

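Note that the handlers hand models to Celery as a dotted import path plus primary key, since task arguments have to be serialisable. A rough sketch of that round trip, with hypothetical helpers standing in for passbook.lib.utils.reflection:

    from importlib import import_module

    def class_to_path(cls: type) -> str:
        """Serialise a class as its dotted import path (illustrative helper)."""
        return f"{cls.__module__}.{cls.__name__}"

    def path_to_class(path: str) -> type:
        """Resolve a dotted import path back into the class object."""
        module_name, _, class_name = path.rpartition(".")
        return getattr(import_module(module_name), class_name)

    # a handler queues (class_to_path(type(instance)), instance.pk); the task then
    # re-fetches the row with path_to_class(path).objects.get(pk=pk)
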
@@ -3,13 +3,18 @@ from typing import Any

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from django.db.models.base import Model
from django.utils.text import slugify
from structlog import get_logger

from passbook.lib.tasks import MonitoredTask, TaskResult, TaskResultStatus
from passbook.lib.utils.reflection import path_to_class
from passbook.outposts.controllers.base import ControllerException
from passbook.outposts.models import (
    Outpost,
    OutpostDeploymentType,
    OutpostModel,
    OutpostState,
    OutpostType,
)
from passbook.providers.proxy.controllers.docker import ProxyDockerController
@@ -19,39 +24,107 @@ from passbook.root.celery import CELERY_APP
LOGGER = get_logger()


@CELERY_APP.task(bind=True)
# pylint: disable=unused-argument
def outpost_controller(self):
@CELERY_APP.task()
def outpost_controller_all():
    """Launch Controller for all Outposts which support it"""
    for outpost in Outpost.objects.exclude(
        deployment_type=OutpostDeploymentType.CUSTOM
    ):
        outpost_controller_single.delay(
            outpost.pk.hex, outpost.deployment_type, outpost.type
        )
        outpost_controller.delay(outpost.pk.hex)


@CELERY_APP.task(bind=True)
# pylint: disable=unused-argument
def outpost_controller_single(
    self, outpost: str, deployment_type: str, outpost_type: str
):
    """Launch controller and reconcile deployment/service/etc"""
    if outpost_type == OutpostType.PROXY:
        if deployment_type == OutpostDeploymentType.KUBERNETES:
            ProxyKubernetesController(outpost).run()
        if deployment_type == OutpostDeploymentType.DOCKER:
            ProxyDockerController(outpost).run()
@CELERY_APP.task(bind=True, base=MonitoredTask)
def outpost_controller(self: MonitoredTask, outpost_pk: str):
    """Create/update/monitor the deployment of an Outpost"""
    logs = []
    outpost: Outpost = Outpost.objects.get(pk=outpost_pk)
    self.set_uid(slugify(outpost.name))
    try:
        if outpost.type == OutpostType.PROXY:
            if outpost.deployment_type == OutpostDeploymentType.KUBERNETES:
                logs = ProxyKubernetesController(outpost).up_with_logs()
            if outpost.deployment_type == OutpostDeploymentType.DOCKER:
                logs = ProxyDockerController(outpost).up_with_logs()
    except ControllerException as exc:
        self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
    else:
        self.set_status(TaskResult(TaskResultStatus.SUCCESSFUL, logs))


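Stripped of passbook specifics, the scheduling shape above is a fan-out: one periodic task enumerates objects and queues a per-object worker task. A bare sketch with placeholder names (the real tasks use CELERY_APP.task and the MonitoredTask base for status reporting):

    from celery import shared_task

    @shared_task
    def reconcile_all():
        """Periodic entry point: queue one reconcile per object (placeholder ids)."""
        for pk in ("alpha", "beta"):
            reconcile_one.delay(pk)

    @shared_task(bind=True)
    def reconcile_one(self, pk: str):
        """Per-object worker; bind=True exposes the task instance, which the
        MonitoredTask base in the diff uses for set_uid()/set_status()."""
        return f"reconciled {pk}"
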
@CELERY_APP.task()
def outpost_send_update(model_class: str, model_pk: Any):
def outpost_pre_delete(outpost_pk: str):
    """Delete outpost objects before deleting the DB Object"""
    outpost = Outpost.objects.get(pk=outpost_pk)
    if outpost.type == OutpostType.PROXY:
        if outpost.deployment_type == OutpostDeploymentType.KUBERNETES:
            ProxyKubernetesController(outpost).down()
        if outpost.deployment_type == OutpostDeploymentType.DOCKER:
            ProxyDockerController(outpost).down()


@CELERY_APP.task()
def outpost_post_save(model_class: str, model_pk: Any):
    """If an Outpost is saved, ensure that token is created/updated

    If an OutpostModel, or a model that is somehow connected to an OutpostModel is saved,
    we send a message down the relevant OutpostModels WS connection to trigger an update"""
    model: Model = path_to_class(model_class)
    try:
        instance = model.objects.get(pk=model_pk)
    except model.DoesNotExist:
        LOGGER.warning("Model does not exist", model=model, pk=model_pk)
        return

    if isinstance(instance, Outpost):
        LOGGER.debug("Ensuring token for outpost", instance=instance)
        _ = instance.token
        LOGGER.debug("Trigger reconcile for outpost")
        outpost_controller.delay(instance.pk)
        return

    if isinstance(instance, (OutpostModel, Outpost)):
        LOGGER.debug(
            "triggering outpost update from outpostmodel/outpost", instance=instance
        )
        outpost_send_update(instance)
        return

    for field in instance._meta.get_fields():
        # Each field is checked if it has a `related_model` attribute (when ForeignKeys or M2Ms)
        # are used, and if it has a value
        if not hasattr(field, "related_model"):
            continue
        if not field.related_model:
            continue
        if not issubclass(field.related_model, OutpostModel):
            continue

        field_name = f"{field.name}_set"
        if not hasattr(instance, field_name):
            continue

        LOGGER.debug("triggering outpost update from field", field=field.name)
        # Because the Outpost Model has an M2M to Provider,
        # we have to iterate over the entire QS
        for reverse in getattr(instance, field_name).all():
            outpost_send_update(reverse)


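The relation walk above is generic Django model introspection; reduced to a self-contained sketch with a hypothetical marker class in place of OutpostModel, it looks like this:

    class Notifiable:
        """Hypothetical marker mixin standing in for OutpostModel."""

    def related_notifiables(instance):
        """Yield related objects whose model subclasses the Notifiable marker."""
        for rel in instance._meta.get_fields():
            related = getattr(rel, "related_model", None)
            if not related or not issubclass(related, Notifiable):
                continue
            accessor = f"{rel.name}_set"  # same accessor naming as the diff
            if hasattr(instance, accessor):
                # reverse FK / M2M accessors are managers, so iterate the queryset
                yield from getattr(instance, accessor).all()
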
def outpost_send_update(model_instance: Model):
    """Send outpost update to all registered outposts, regardless of which passbook
    instance they are connected to"""
    model = path_to_class(model_class)
    outpost_model: OutpostModel = model.objects.get(pk=model_pk)
    for outpost in outpost_model.outpost_set.all():
        channel_layer = get_channel_layer()
        for channel in outpost.channels:
            LOGGER.debug("sending update", channel=channel)
            async_to_sync(channel_layer.send)(channel, {"type": "event.update"})
    channel_layer = get_channel_layer()
    if isinstance(model_instance, OutpostModel):
        for outpost in model_instance.outpost_set.all():
            _outpost_single_update(outpost, channel_layer)
    elif isinstance(model_instance, Outpost):
        _outpost_single_update(model_instance, channel_layer)


def _outpost_single_update(outpost: Outpost, layer=None):
    """Update outpost instances connected to a single outpost"""
    if not layer:  # pragma: no cover
        layer = get_channel_layer()
    for state in OutpostState.for_outpost(outpost):
        LOGGER.debug("sending update", channel=state.uid, outpost=outpost)
        async_to_sync(layer.send)(state.uid, {"type": "event.update"})

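The actual notification is a plain channel-layer send; on the receiving side, Channels routes a message of type "event.update" to a handler named event_update. A minimal sketch of both halves (the consumer class here is illustrative, not the passbook one):

    from asgiref.sync import async_to_sync
    from channels.generic.websocket import JsonWebsocketConsumer
    from channels.layers import get_channel_layer

    class ExampleOutpostConsumer(JsonWebsocketConsumer):
        def event_update(self, event):
            # invoked for messages sent with {"type": "event.update"}
            self.send_json({"instruction": "update"})

    def notify(channel_name: str) -> None:
        """Send an update event to one connected websocket (channel_name = state.uid)."""
        layer = get_channel_layer()
        async_to_sync(layer.send)(channel_name, {"type": "event.update"})
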
@@ -24,7 +24,7 @@
<label class="pf-c-form__label" for="help-text-simple-form-name">
    <span class="pf-c-form__label-text">PASSBOOK_TOKEN</span>
</label>
<input class="pf-c-form-control" data-pb-fetch-key="pk" data-pb-fetch-fill="{% url 'passbook_api:token-detail' identifier=outpost.token_identifier %}" readonly type="text" value="" />
<input class="pf-c-form-control" data-pb-fetch-key="key" data-pb-fetch-fill="{% url 'passbook_api:token-view-key' identifier=outpost.token_identifier %}" readonly type="text" value="" />
</div>
<h3>{% trans 'If your passbook Instance is using a self-signed certificate, set this value.' %}</h3>
<div class="pf-c-form__group">

@@ -1,68 +0,0 @@
{% load i18n %}
{% load static %}
<div class="pf-c-dropdown">
    <button class="pf-c-button pf-m-tertiary pf-c-dropdown__toggle" type="button">
        <span class="pf-c-dropdown__toggle-text">{% trans 'Setup with...' %}</span>
        <i class="fas fa-caret-down pf-c-dropdown__toggle-icon" aria-hidden="true"></i>
    </button>
    <ul class="pf-c-dropdown__menu" hidden>
        <li>
            <button class="pf-c-dropdown__menu-item" data-target="modal" data-modal="docker-compose-{{ provider.pk }}">{% trans 'docker-compose' %}</button>
        </li>
        <li>
            <button class="pf-c-dropdown__menu-item" data-target="modal" data-modal="k8s-{{ provider.pk }}">{% trans 'Kubernetes' %}</button>
        </li>
    </ul>
</div>

<div class="pf-c-backdrop" id="docker-compose-{{ provider.pk }}" hidden>
    <div class="pf-l-bullseye">
        <div class="pf-c-modal-box pf-m-lg" role="dialog">
            <button data-modal-close class="pf-c-button pf-m-plain" type="button" aria-label="Close dialog">
                <i class="fas fa-times" aria-hidden="true"></i>
            </button>
            <div class="pf-c-modal-box__header">
                <h1 class="pf-c-title pf-m-2xl">{% trans 'Setup with docker-compose' %}</h1>
            </div>
            <div class="pf-c-modal-box__body">
                {% trans 'Add the following snippet to your docker-compose file.' %}
                <textarea class="codemirror" readonly data-cm-mode="yaml">{{ docker_compose }}</textarea>
            </div>
            <footer class="pf-c-modal-box__footer pf-m-align-left">
                <button data-modal-close class="pf-c-button pf-m-primary" type="button">{% trans 'Close' %}</button>
            </footer>
        </div>
    </div>
</div>

<div class="pf-c-backdrop" id="k8s-{{ provider.pk }}" hidden>
    <div class="pf-l-bullseye">
        <div class="pf-c-modal-box pf-m-lg" role="dialog">
            <button data-modal-close class="pf-c-button pf-m-plain" type="button" aria-label="Close dialog">
                <i class="fas fa-times" aria-hidden="true"></i>
            </button>
            <div class="pf-c-modal-box__header">
                <h1 class="pf-c-title pf-m-2xl">{% trans 'Setup with Kubernetes' %}</h1>
            </div>
            <div class="pf-c-modal-box__body">
                <p>{% trans 'Download the manifest to create the Proxy deployment and service:' %}</p>
                <a href="{% url 'passbook_providers_proxy:k8s-manifest' provider=provider.pk %}">{% trans 'Here' %}</a>
                <p>{% trans 'Afterwards, add the following annotations to the Ingress you want to secure:' %}</p>
                <textarea class="codemirror" readonly data-cm-mode="yaml">
nginx.ingress.kubernetes.io/auth-signin: https://$host/oauth2/start?rd=$escaped_request_uri
nginx.ingress.kubernetes.io/auth-url: https://$host/oauth2/auth
nginx.ingress.kubernetes.io/configuration-snippet: |
    auth_request_set $user_id $upstream_http_x_auth_request_user;
    auth_request_set $email $upstream_http_x_auth_request_email;
    auth_request_set $user_name $upstream_http_x_auth_request_preferred_username;
    proxy_set_header X-User-Id $user_id;
    proxy_set_header X-User $user_name;
    proxy_set_header X-Email $email;
                </textarea>
            </div>
            <footer class="pf-c-modal-box__footer pf-m-align-left">
                <button data-modal-close class="pf-c-button pf-m-primary" type="button">{% trans 'Close' %}</button>
            </footer>
        </div>
    </div>
</div>
@@ -1,59 +0,0 @@
{% extends "administration/base.html" %}

{% load i18n %}
{% load humanize %}
{% load passbook_utils %}

{% block head %}
{{ block.super }}
<style>
    .pf-m-success {
        color: var(--pf-global--success-color--100);
    }
    .pf-m-danger {
        color: var(--pf-global--danger-color--100);
    }
</style>
{% endblock %}

{% block content %}
<section class="pf-c-page__main-section pf-m-light">
    <div class="pf-c-content">
        <h1>
            <i class="fas fa-map-marker"></i>
            {% trans 'Outpost Setup' %}
        </h1>
        <p>{% trans "Outposts are deployments of passbook components to support different environments and protocols, like reverse proxies." %}</p>
    </div>
</section>
<div class="pf-c-tabs pf-m-fill" id="filled-example">
    <button class="pf-c-tabs__scroll-button" disabled aria-hidden="true" aria-label="Scroll left">
        <i class="fas fa-angle-left" aria-hidden="true"></i>
    </button>
    <ul class="pf-c-tabs__list">
        <li class="pf-c-tabs__item">
            <button class="pf-c-tabs__link" id="filled-example-users-link">
                <span class="pf-c-tabs__item-text">Users</span>
            </button>
        </li>
        <li class="pf-c-tabs__item pf-m-current">
            <button class="pf-c-tabs__link" id="filled-example-containers-link">
                <span class="pf-c-tabs__item-text">Containers</span>
            </button>
        </li>
        <li class="pf-c-tabs__item">
            <button class="pf-c-tabs__link" id="filled-example-database-link">
                <span class="pf-c-tabs__item-text">Database</span>
            </button>
        </li>
    </ul>
    <button class="pf-c-tabs__scroll-button" disabled aria-hidden="true" aria-label="Scroll right">
        <i class="fas fa-angle-right" aria-hidden="true"></i>
    </button>
</div>
<section class="pf-c-page__main-section pf-m-no-padding-mobile">
    <div class="pf-c-card">

    </div>
</section>
{% endblock %}