Compare commits
431 Commits: version/0.10.0-rc4 ... version/0.12.2-stable
.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.10.0-rc4
current_version = 0.12.2-stable
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)
@@ -15,10 +15,10 @@ values =
beta
stable

[bumpversion:file:README.md]

[bumpversion:file:docs/installation/docker-compose.md]

[bumpversion:file:docs/installation/kubernetes.md]

[bumpversion:file:docker-compose.yml]

[bumpversion:file:helm/values.yaml]
@@ -28,3 +28,5 @@ values =
[bumpversion:file:.github/workflows/release.yml]

[bumpversion:file:passbook/__init__.py]

[bumpversion:file:proxy/pkg/version.go]
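As a quick illustration (not part of the repository), the `parse` pattern from the hunk above can be checked against the new version string like this:

```python
import re

# The parse pattern from the .bumpversion.cfg hunk above.
PARSE = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)"

# Splits the version into the named parts bumpversion works with.
print(re.match(PARSE, "0.12.2-stable").groupdict())
# -> {'major': '0', 'minor': '12', 'patch': '2', 'release': 'stable'}
```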
.coveragerc
@@ -1,5 +1,6 @@
[run]
source = passbook
relative_files = true
omit =
*/asgi.py
manage.py
34 .github/ISSUE_TEMPLATE/bug_report.md (vendored, new file)
@@ -0,0 +1,34 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Logs**
Output of docker-compose logs or kubectl logs respectively

**Version and Deployment (please complete the following information):**
- passbook version: [e.g. 0.10.0-stable]
- Deployment: [e.g. docker-compose, helm]

**Additional context**
Add any other context about the problem here.
20 .github/ISSUE_TEMPLATE/feature_request.md (vendored, new file)
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
26 .github/dependabot.yml (vendored, new file)
@@ -0,0 +1,26 @@
version: 2
updates:
- package-ecosystem: gomod
directory: "/proxy"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu
- package-ecosystem: npm
directory: "/passbook/static/static"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
assignees:
- BeryJu
54 .github/workflows/codeql-analysis.yml (vendored, deleted)
@@ -1,54 +0,0 @@
name: "CodeQL"

on:
push:
branches: [master, admin-more-info, ci-deploy-dev, gh-pages, provider-saml-v2]
pull_request:
# The branches below must be a subset of the branches above
branches: [master]
schedule:
- cron: '0 20 * * 2'

jobs:
analyse:
name: Analyse
runs-on: ubuntu-latest

steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
fetch-depth: 2

# If this run was triggered by a pull request event, then checkout
# the head of the pull request instead of the merge commit.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java

# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1

# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl

# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language

#- run: |
# make bootstrap
# make release

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
34 .github/workflows/release.yml (vendored)
@@ -1,6 +1,8 @@
name: passbook-release
name: passbook-on-release

on:
release
release:
types: [published, created]

jobs:
# Build
@@ -16,17 +18,26 @@ jobs:
- name: Building Docker Image
run: docker build
--no-cache
-t beryju/passbook:0.10.0-rc4
-t beryju/passbook:0.12.2-stable
-t beryju/passbook:latest
-f Dockerfile .
- name: Push Docker Container to Registry (versioned)
run: docker push beryju/passbook:0.10.0-rc4
run: docker push beryju/passbook:0.12.2-stable
- name: Push Docker Container to Registry (latest)
run: docker push beryju/passbook:latest
build-proxy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions/setup-go@v2
with:
go-version: "^1.15"
- name: prepare go api client
run: |
cd proxy
go get -u github.com/go-swagger/go-swagger/cmd/swagger
swagger generate client -f ../swagger.yaml -A passbook -t pkg/
go build -v .
- name: Docker Login Registry
env:
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
@@ -37,11 +48,11 @@ jobs:
cd proxy
docker build \
--no-cache \
-t beryju/passbook-proxy:0.10.0-rc4 \
-t beryju/passbook-proxy:0.12.2-stable \
-t beryju/passbook-proxy:latest \
-f Dockerfile .
- name: Push Docker Container to Registry (versioned)
run: docker push beryju/passbook-proxy:0.10.0-rc4
run: docker push beryju/passbook-proxy:0.12.2-stable
- name: Push Docker Container to Registry (latest)
run: docker push beryju/passbook-proxy:latest
build-static:
@@ -66,11 +77,11 @@ jobs:
run: docker build
--no-cache
--network=$(docker network ls | grep github | awk '{print $1}')
-t beryju/passbook-static:0.10.0-rc4
-t beryju/passbook-static:0.12.2-stable
-t beryju/passbook-static:latest
-f static.Dockerfile .
- name: Push Docker Container to Registry (versioned)
run: docker push beryju/passbook-static:0.10.0-rc4
run: docker push beryju/passbook-static:0.12.2-stable
- name: Push Docker Container to Registry (latest)
run: docker push beryju/passbook-static:latest
test-release:
@@ -82,10 +93,13 @@ jobs:
- uses: actions/checkout@v1
- name: Run test suite in final docker images
run: |
sudo apt-get install -y pwgen
echo "PG_PASS=$(pwgen 40 1)" >> .env
echo "PASSBOOK_SECRET_KEY=$(pwgen 50 1)" >> .env
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server bash -c "pip install --no-cache -r requirements-dev.txt && ./manage.py test"
docker-compose run -u root --entrypoint /bin/bash server -c "pip install --no-cache -r requirements-dev.txt && ./manage.py test passbook"
sentry-release:
needs:
- test-release
@@ -100,5 +114,5 @@ jobs:
SENTRY_PROJECT: passbook
SENTRY_URL: https://sentry.beryju.org
with:
tagName: 0.10.0-rc4
tagName: 0.12.2-stable
environment: beryjuorg-prod
15 .github/workflows/tag.yml (vendored)
@@ -1,10 +1,10 @@
name: passbook-on-tag

on:
push:
tags:
- 'version/*'

name: passbook-version-tag

jobs:
build:
name: Create Release from Tag
@@ -13,7 +13,10 @@ jobs:
- uses: actions/checkout@master
- name: Pre-release test
run: |
export PASSBOOK_TAG=latest
sudo apt-get install -y pwgen
echo "PASSBOOK_TAG=latest" >> .env
echo "PG_PASS=$(pwgen 40 1)" >> .env
echo "PASSBOOK_SECRET_KEY=$(pwgen 50 1)" >> .env
docker-compose pull -q
docker build \
--no-cache \
@@ -21,7 +24,7 @@ jobs:
-f Dockerfile .
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root --entrypoint /bin/bash server -c "pip install --no-cache -r requirements-dev.txt && ./manage.py test"
docker-compose run -u root --entrypoint /bin/bash server -c "pip install --no-cache -r requirements-dev.txt && ./manage.py test passbook"
- name: Install Helm
run: |
apt update && apt install -y curl
@@ -31,7 +34,7 @@ jobs:
helm dependency update helm/
helm package helm/
mv passbook-*.tgz passbook-chart.tgz
- name: Extract verison number
- name: Extract version number
id: get_version
uses: actions/github-script@0.2.0
with:
@@ -46,7 +49,7 @@ jobs:
with:
tag_name: ${{ github.ref }}
release_name: Release ${{ steps.get_version.outputs.result }}
draft: false
draft: true
prerelease: false
- name: Upload packaged Helm Chart
id: upload-release-asset

.pylintrc
@@ -1,9 +1,16 @@
[MASTER]

disable=arguments-differ,no-self-use,fixme,locally-disabled,too-many-ancestors,too-few-public-methods,import-outside-toplevel,bad-continuation,signature-differs,similarities,cyclic-import

load-plugins=pylint_django,pylint.extensions.bad_builtin

extension-pkg-whitelist=lxml

# Allow constants to be shorter than normal (and lowercase, for settings.py)
const-rgx=[a-zA-Z0-9_]{1,40}$

ignored-modules=django-otp
jobs=12
ignore=migrations
max-attributes=12

jobs=12
12 Dockerfile
@@ -16,11 +16,15 @@ COPY --from=locker /app/requirements.txt /
COPY --from=locker /app/requirements-dev.txt /

RUN apt-get update && \
apt-get install -y --no-install-recommends postgresql-client-11 build-essential && \
rm -rf /var/lib/apt/ && \
pip install -r /requirements.txt --no-cache-dir && \
apt-get install -y --no-install-recommends curl ca-certificates gnupg && \
curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
echo "deb http://apt.postgresql.org/pub/repos/apt buster-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
apt-get update && \
apt-get install -y --no-install-recommends postgresql-client-12 postgresql-client-11 build-essential && \
apt-get clean && \
pip install -r /requirements.txt --no-cache-dir && \
apt-get remove --purge -y build-essential && \
apt-get autoremove --purge && \
apt-get autoremove --purge -y && \
adduser --system --no-create-home --uid 1000 --group --home /passbook passbook

COPY ./passbook/ /passbook
16 Makefile
@@ -1,20 +1,26 @@
all: lint-fix lint coverage gen

coverage:
coverage run --concurrency=multiprocessing manage.py test passbook --failfast
coverage run --concurrency=multiprocessing manage.py test --failfast -v 3
coverage combine
coverage html
coverage report

lint-fix:
isort -rc .
black .
black passbook e2e lifecycle

lint:
pyright
bandit -r .
pylint passbook
pyright passbook e2e lifecycle
bandit -r passbook e2e lifecycle
pylint passbook e2e lifecycle
prospector

gen: coverage
./manage.py generate_swagger -o swagger.yaml -f yaml

local-stack:
export PASSBOOK_TAG=testing
docker build -t beryju/passbook:testng .
docker-compose up -d
docker-compose run --rm server migrate
13 Pipfile
@@ -17,10 +17,10 @@ django-otp = "*"
django-prometheus = "*"
django-recaptcha = "*"
django-redis = "*"
django-rest-framework = "*"
djangorestframework = "*"
django-storages = "*"
djangorestframework-guardian = "*"
drf-yasg = "*"
drf_yasg2 = "*"
facebook-sdk = "*"
ldap3 = "*"
lxml = "*"
@@ -28,7 +28,7 @@ packaging = "*"
psycopg2-binary = "*"
pycryptodome = "*"
pyjwkest = "*"
uvicorn = "*"
uvicorn = {extras = ["standard"],version = "*"}
gunicorn = "*"
pyyaml = "*"
qrcode = "*"
@@ -43,6 +43,7 @@ dacite = "*"
channels = "*"
channels-redis = "*"
kubernetes = "*"
docker = "*"

[requires]
python_version = "3.8"
@@ -50,14 +51,14 @@ python_version = "3.8"
[dev-packages]
autopep8 = "*"
bandit = "*"
black = "==19.10b0"
black = "==20.8b1"
bumpversion = "*"
colorama = "*"
coverage = "*"
django-debug-toolbar = "*"
docker = "*"
pylint = "*"
pylint-django = "*"
selenium = "*"
unittest-xml-reporting = "*"
prospector = "*"
pytest = "*"
pytest-django = "*"
695 Pipfile.lock (generated)
17 README.md
@@ -1,4 +1,4 @@
<img src="passbook/static/static/passbook/logo.svg" height="50" alt="passbook logo"><img src="passbook/static/static/passbook/brand_inverted.svg" height="50" alt="passbook">
<img src="docs/images/logo.svg" height="50" alt="passbook logo"><img src="docs/images/brand_inverted.svg" height="50" alt="passbook">

[](https://dev.azure.com/beryjuorg/passbook/_build?definitionId=1)

@@ -13,20 +13,7 @@ passbook is an open-source Identity Provider focused on flexibility and versatil

## Installation

For small/test setups it is recommended to use docker-compose.

```
wget https://raw.githubusercontent.com/BeryJu/passbook/master/docker-compose.yml
# Optionally enable Error-reporting
# export PASSBOOK_ERROR_REPORTING=true
# Optionally deploy a different version
# export PASSBOOK_TAG=0.10.0-rc4
# If this is a productive installation, set a different PostgreSQL Password
# export PG_PASS=$(pwgen 40 1)
docker-compose pull
docker-compose up -d
docker-compose run --rm server migrate
```
For small/test setups it is recommended to use docker-compose, see the [documentation](https://passbook.beryju.org/installation/docker-compose/)

For bigger setups, there is a Helm Chart in the `helm/` directory. This is documented [here](https://passbook.beryju.org//installation/kubernetes/)

SECURITY.md
@@ -6,7 +6,9 @@ As passbook is currently in a pre-stable, only the latest "stable" version is su

| Version | Supported |
| -------- | ------------------ |
| 0.8.15 | :white_check_mark: |
| 0.10.x | :white_check_mark: |
| 0.11.x | :white_check_mark: |
| 0.12.x | :white_check_mark: |

## Reporting a Vulnerability

azure-pipelines.yml
@@ -8,6 +8,10 @@ variables:
POSTGRES_DB: passbook
POSTGRES_USER: passbook
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
${{ if startsWith(variables['Build.SourceBranch'], 'refs/heads/') }}:
branchName: ${{ replace(variables['Build.SourceBranchName'], 'refs/heads/', '') }}
${{ if startsWith(variables['Build.SourceBranch'], 'refs/pull/') }}:
branchName: ${{ replace(variables['System.PullRequest.SourceBranch'], 'refs/heads/', '') }}

stages:
- stage: Lint
@@ -26,7 +30,7 @@ stages:
pipenv install --dev
- task: CmdLine@2
inputs:
script: pipenv run pylint passbook
script: pipenv run pylint passbook e2e lifecycle
- job: black
pool:
vmImage: 'ubuntu-latest'
@@ -41,7 +45,7 @@ stages:
pipenv install --dev
- task: CmdLine@2
inputs:
script: pipenv run black --check passbook
script: pipenv run black --check passbook e2e lifecycle
- job: prospector
pool:
vmImage: 'ubuntu-latest'
@@ -57,7 +61,7 @@ stages:
pipenv install --dev prospector --skip-lock
- task: CmdLine@2
inputs:
script: pipenv run prospector passbook
script: pipenv run prospector
- job: bandit
pool:
vmImage: 'ubuntu-latest'
@@ -72,7 +76,7 @@ stages:
pipenv install --dev
- task: CmdLine@2
inputs:
script: pipenv run bandit -r passbook
script: pipenv run bandit -r passbook e2e lifecycle
- job: pyright
pool:
vmImage: ubuntu-latest
@@ -85,7 +89,7 @@ stages:
versionSpec: '3.8'
- task: CmdLine@2
inputs:
script: npm install -g pyright
script: npm install -g pyright@1.1.79
- task: CmdLine@2
inputs:
script: |
@@ -93,7 +97,7 @@ stages:
pipenv install --dev
- task: CmdLine@2
inputs:
script: pipenv run pyright
script: pipenv run pyright e2e lifecycle
- stage: Test
jobs:
- job: migrations
@@ -117,6 +121,41 @@ stages:
- task: CmdLine@2
inputs:
script: pipenv run ./manage.py migrate
- job: migrations_from_previous_release
pool:
vmImage: 'ubuntu-latest'
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '3.8'
- task: DockerCompose@0
displayName: Run services
inputs:
dockerComposeFile: 'scripts/ci.docker-compose.yml'
action: 'Run services'
buildImages: false
- task: CmdLine@2
displayName: Prepare Last tagged release
inputs:
script: |
git checkout $(git describe --abbrev=0 --match 'version/*')
sudo pip install -U wheel pipenv
pipenv install --dev
- task: CmdLine@2
displayName: Migrate to last tagged release
inputs:
script: pipenv run ./manage.py migrate
- task: CmdLine@2
displayName: Install current branch
inputs:
script: |
set -x
git checkout ${{ variables.branchName }}
pipenv sync --dev
- task: CmdLine@2
displayName: Migrate to current branch
inputs:
script: pipenv run ./manage.py migrate
- job: coverage_unittest
pool:
vmImage: 'ubuntu-latest'
@@ -130,6 +169,13 @@ stages:
dockerComposeFile: 'scripts/ci.docker-compose.yml'
action: 'Run services'
buildImages: false
- task: CmdLine@2
displayName: Install K3d and prepare
inputs:
script: |
wget -q -O - https://raw.githubusercontent.com/rancher/k3d/main/install.sh | bash
k3d cluster create
k3d kubeconfig write -o ~/.kube/config --overwrite
- task: CmdLine@2
inputs:
script: |
@@ -139,7 +185,11 @@ stages:
displayName: Run full test suite
inputs:
script: |
pipenv run coverage run ./manage.py test passbook
export PB_TEST_K8S=true
pipenv run coverage run ./manage.py test passbook -v 3
- task: CmdLine@2
inputs:
script: |
mkdir output-unittest
mv unittest.xml output-unittest/unittest.xml
mv .coverage output-unittest/coverage
@@ -150,7 +200,7 @@ stages:
publishLocation: 'pipeline'
- job: coverage_e2e
pool:
vmImage: 'ubuntu-latest'
name: coventry
steps:
- task: UsePythonVersion@0
inputs:
@@ -161,6 +211,13 @@ stages:
dockerComposeFile: 'scripts/ci.docker-compose.yml'
action: 'Run services'
buildImages: false
- task: CmdLine@2
displayName: Install K3d and prepare
inputs:
script: |
wget -q -O - https://raw.githubusercontent.com/rancher/k3d/main/install.sh | bash
k3d cluster create
k3d kubeconfig write -o ~/.kube/config --overwrite
- task: CmdLine@2
inputs:
script: |
@@ -177,11 +234,21 @@ stages:
inputs:
script: |
cd passbook/static/static
yarn
npm i
npm run build
- task: CmdLine@2
displayName: Run full test suite
inputs:
script: pipenv run coverage run ./manage.py test e2e
script: |
export PB_TEST_K8S=true
pipenv run coverage run ./manage.py test e2e -v 3 --failfast
- task: CmdLine@2
condition: always()
displayName: Cleanup
inputs:
script: |
docker stop $(docker ps -aq)
docker container prune -f
- task: CmdLine@2
displayName: Prepare unittests and coverage for upload
inputs:
@@ -225,11 +292,9 @@ stages:
script: |
sudo pip install -U wheel pipenv
pipenv install --dev
find .
pipenv run coverage combine coverage-e2e/coverage coverage-unittest/coverage
pipenv run coverage xml
pipenv run coverage html
find .
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: 'Cobertura'
@@ -260,7 +325,7 @@ stages:
repository: 'beryju/passbook'
command: 'buildAndPush'
Dockerfile: 'Dockerfile'
tags: 'gh-$(Build.SourceBranchName)'
tags: "gh-${{ variables.branchName }}"
- job: build_static
pool:
vmImage: 'ubuntu-latest'
@@ -277,27 +342,11 @@ stages:
repository: 'beryju/passbook-static'
command: 'build'
Dockerfile: 'static.Dockerfile'
tags: 'gh-$(Build.SourceBranchName)'
tags: "gh-${{ variables.branchName }}"
arguments: "--network=beryjupassbook_default"
- task: Docker@2
inputs:
containerRegistry: 'dockerhub'
repository: 'beryju/passbook-static'
command: 'push'
tags: 'gh-$(Build.SourceBranchName)'
- stage: Deploy
jobs:
- job: deploy_dev
pool:
vmImage: 'ubuntu-latest'
steps:
- task: HelmDeploy@0
inputs:
connectionType: 'Kubernetes Service Connection'
kubernetesServiceConnection: 'k8s-beryjuorg-prd'
namespace: 'passbook-dev'
command: 'upgrade'
chartType: 'FilePath'
chartPath: 'helm/'
releaseName: 'passbook-dev'
recreate: true
tags: "gh-${{ variables.branchName }}"

docker-compose.yml
@@ -3,7 +3,7 @@ version: '3.2'

services:
postgresql:
image: postgres
image: postgres:12
volumes:
- database:/var/lib/postgresql/data
networks:
@@ -12,55 +12,68 @@ services:
- POSTGRES_PASSWORD=${PG_PASS:-thisisnotagoodpassword}
- POSTGRES_USER=passbook
- POSTGRES_DB=passbook
labels:
- traefik.enable=false
env_file:
- .env
redis:
image: redis
networks:
- internal
labels:
- traefik.enable=false
server:
image: beryju/passbook:${PASSBOOK_TAG:-0.10.0-rc4}
image: beryju/passbook:${PASSBOOK_TAG:-0.12.2-stable}
command: server
environment:
PASSBOOK_REDIS__HOST: redis
PASSBOOK_ERROR_REPORTING: ${PASSBOOK_ERROR_REPORTING:-false}
PASSBOOK_POSTGRESQL__HOST: postgresql
PASSBOOK_POSTGRESQL__PASSWORD: ${PG_PASS:-thisisnotagoodpassword}
PASSBOOK_LOG_LEVEL: debug
PASSBOOK_POSTGRESQL__PASSWORD: ${PG_PASS}
ports:
- 8000
networks:
- internal
labels:
- traefik.port=8000
- traefik.docker.network=internal
- traefik.frontend.rule=PathPrefix:/
traefik.enable: 'true'
traefik.docker.network: internal
traefik.http.routers.app-router.rule: PathPrefix(`/`)
traefik.http.routers.app-router.service: app-service
traefik.http.routers.app-router.tls: 'true'
traefik.http.services.app-service.loadbalancer.healthcheck.hostname: passbook-healthcheck-host
traefik.http.services.app-service.loadbalancer.server.port: '8000'
env_file:
- .env
worker:
image: beryju/passbook:${PASSBOOK_TAG:-0.10.0-rc4}
image: beryju/passbook:${PASSBOOK_TAG:-0.12.2-stable}
command: worker
networks:
- internal
labels:
- traefik.enable=false
environment:
PASSBOOK_REDIS__HOST: redis
PASSBOOK_ERROR_REPORTING: ${PASSBOOK_ERROR_REPORTING:-false}
PASSBOOK_POSTGRESQL__HOST: postgresql
PASSBOOK_POSTGRESQL__PASSWORD: ${PG_PASS:-thisisnotagoodpassword}
PASSBOOK_LOG_LEVEL: debug
PASSBOOK_POSTGRESQL__PASSWORD: ${PG_PASS}
volumes:
- ./backups:/backups
- /var/run/docker.socket:/var/run/docker.socket
env_file:
- .env
static:
image: beryju/passbook-static:${PASSBOOK_TAG:-0.10.0-rc4}
image: beryju/passbook-static:${PASSBOOK_TAG:-0.12.2-stable}
networks:
- internal
labels:
- traefik.frontend.rule=PathPrefix:/static, /robots.txt, /favicon.ico
- traefik.port=80
- traefik.docker.network=internal
traefik.enable: 'true'
traefik.docker.network: internal
traefik.http.routers.static-router.rule: PathPrefix(`/static`, `/robots.txt`, `/favicon.ico`)
traefik.http.routers.static-router.tls: 'true'
traefik.http.routers.static-router.service: static-service
traefik.http.services.static-service.loadbalancer.healthcheck.path: /
traefik.http.services.static-service.loadbalancer.server.port: '80'
traefik:
image: traefik:1.7
command: --api --docker --defaultentrypoints=https --entryPoints='Name:http Address::80 Redirect.EntryPoint:https' --entryPoints='Name:https Address::443 TLS'
image: traefik:2.3
command:
- "--accesslog=true"
- "--api.insecure=true"
- "--providers.docker=true"
- "--providers.docker.exposedbydefault=false"
- "--entrypoints.http.address=:80"
- "--entrypoints.https.address=:443"
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
ports:

@@ -2,20 +2,25 @@

The User object has the following attributes:

- `username`: User's username.
- `email` User's email.
- `name` User's display mame.
- `is_staff` Boolean field if user is staff.
- `is_active` Boolean field if user is active.
- `date_joined` Date user joined/was created.
- `password_change_date` Date password was last changed.
- `attributes` Dynamic attributes.
- `username`: User's username.
- `email` User's email.
- `name` User's display name.
- `is_staff` Boolean field if user is staff.
- `is_active` Boolean field if user is active.
- `date_joined` Date user joined/was created.
- `password_change_date` Date password was last changed.
- `attributes` Dynamic attributes.
- `pb_groups` This is a queryset of all the user's groups.

You can do additional filtering like `user.pb_groups.filter(name__startswith='test')`, see [here](https://docs.djangoproject.com/en/3.1/ref/models/querysets/#id4)

To get the name of all groups, you can do `[group.name for group in user.pb_groups.all()]`

## Examples

List all the User's group names:

```python
for group in user.groups.all():
for group in user.pb_groups.all():
yield group.name
```

@@ -84,15 +84,6 @@
}
},
{
"identifiers": {
"pk": "9922212c-47a2-475a-9905-abeb5e621652"
},
"model": "passbook_policies_expression.expressionpolicy",
"attrs": {
"name": "policy-enrollment-password-equals",
"expression": "# Verifies that the passwords are equal\r\nreturn request.context['password'] == request.context['password_repeat']"
}
},{
"identifiers": {
"pk": "096e6282-6b30-4695-bd03-3b143eab5580",
"name": "default-enrollment-email-verficiation"
@@ -135,9 +126,6 @@
"cb954fd4-65a5-4ad9-b1ee-180ee9559cf4",
"7db91ee8-4290-4e08-8d39-63f132402515",
"d30b5eb4-7787-4072-b1ba-65b46e928920"
],
"validation_policies": [
"9922212c-47a2-475a-9905-abeb5e621652"
]
}
},

@@ -55,16 +55,6 @@
"order": 1
}
},
{
"identifiers": {
"pk": "cd042fc6-cc92-4b98-b7e6-f4729df798d8"
},
"model": "passbook_policies_expression.expressionpolicy",
"attrs": {
"name": "default-password-change-password-equal",
"expression": "# Check that both passwords are equal.\nreturn request.context['password'] == request.context['password_repeat']"
}
},
{
"identifiers": {
"pk": "e54045a7-6ecb-4ad9-ad37-28e72d8e565e",
@@ -111,16 +101,13 @@
{
"identifiers": {
"pk": "975d5502-1e22-4d10-b560-fbc5bd70ff4d",
"name": "default-password-change-prompt"
"name": "Change your password"
},
"model": "passbook_stages_prompt.promptstage",
"attrs": {
"fields": [
"7db91ee8-4290-4e08-8d39-63f132402515",
"d30b5eb4-7787-4072-b1ba-65b46e928920"
],
"validation_policies": [
"cd042fc6-cc92-4b98-b7e6-f4729df798d8"
]
}
},

@@ -39,7 +39,6 @@ This designates a flow for unenrollment. This flow can contain any amount of ver
This designates a flow for recovery. This flow normally contains an [**identification**](stages/identification/index.md) stage to find the user. It can also contain any amount of verification stages, such as [**email**](stages/email/index.md) or [**captcha**](stages/captcha/index.md).
Afterwards, use the [**prompt**](stages/prompt/index.md) stage to ask the user for a new password and the [**user_write**](stages/user_write.md) stage to update the password.

### Change Password
### Setup

This designates a flow for password changes. This flow can contain any amount of verification stages, such as [**email**](stages/email/index.md) or [**captcha**](stages/captcha/index.md).
Afterwards, use the [**prompt**](stages/prompt/index.md) stage to ask the user for a new password and the [**user_write**](stages/user_write.md) stage to update the password.
This designates a flow for general setup. This designation doesn't have any constraints in what you can do. For example, by default this designation is used to configure Factors, like change a password and setup TOTP.
docs/installation/docker-compose.md
@@ -4,21 +4,28 @@ This installation method is for test-setups and small-scale productive setups.

## Prerequisites

- docker
- docker-compose
- docker
- docker-compose

## Install

Download the latest `docker-compose.yml` from [here](https://raw.githubusercontent.com/BeryJu/passbook/master/docker-compose.yml). Place it in a directory of your choice.

To optionally enable error-reporting, run `echo PASSBOOK_ERROR_REPORTING__ENABLED=true >> .env`

To optionally deploy a different version run `echo PASSBOOK_TAG=0.12.2-stable >> .env`

If this is a fresh passbook install run the following commands to generate a password:

```
sudo apt-get install -y pwgen
echo "PG_PASS=$(pwgen 40 1)" >> .env
echo "PASSBOOK_SECRET_KEY=$(pwgen 50 1)" >> .env
```

Afterwards, run these commands to finish

```
wget https://raw.githubusercontent.com/BeryJu/passbook/master/docker-compose.yml
# Optionally enable Error-reporting
# export PASSBOOK_ERROR_REPORTING=true
# Optionally deploy a different version
# export PASSBOOK_TAG=0.10.0-rc4
# If this is a productive installation, set a different PostgreSQL Password
# export PG_PASS=$(pwgen 40 1)
docker-compose pull
docker-compose up -d
docker-compose run --rm server migrate
@@ -32,4 +39,6 @@ Now you can pull the Docker images needed by running `docker-compose pull`. Afte

passbook will then be reachable via HTTP on port 80, and HTTPS on port 443. You can optionally configure the packaged traefik to use Let's Encrypt certificates for TLS Encryption.

If you plan to access passbook via a reverse proxy which does SSL Termination, make sure you use the HTTPS port, so passbook is aware of the SSL connection.

The initial setup process also creates a default admin user, the username and password for which is `pbadmin`. It is highly recommended to change this password as soon as you log in.

docs/installation/kubernetes.md
@@ -4,39 +4,55 @@ For a mid to high-load installation, Kubernetes is recommended. passbook is inst

This installation automatically applies database migrations on startup. After the installation is done, you can use `pbadmin` as username and password.

```
```yaml
###################################
# Values directly affecting passbook
###################################
image:
name: beryju/passbook
name_static: beryju/passbook-static
tag: 0.9.0-stable
tag: 0.12.2-stable

nameOverride: ""

serverReplicas: 1
workerReplicas: 1

# Enable the Kubernetes integration which lets passbook deploy outposts into kubernetes
kubernetesIntegration: true

config:
# Optionally specify fixed secret_key, otherwise generated automatically
# secret_key: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
# secretKey: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
# Enable error reporting
error_reporting:
errorReporting:
enabled: false
environment: customer
send_pii: false
sendPii: false
# Log level used by web and worker
# Can be either debug, info, warning, error
log_level: warning
logLevel: warning

# Enable Database Backups to S3
# backup:
# access_key: access-key
# secret_key: secret-key
# accessKey: access-key
# secretKey: secret-key
# bucket: s3-bucket
# region: eu-central-1
# host: s3-host

ingress:
annotations: {}
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
path: /
hosts:
- passbook.k8s.local
tls: []
# - secretName: chart-example-tls
# hosts:
# - passbook.k8s.local

###################################
# Values controlling dependencies
###################################
@@ -57,16 +73,4 @@ redis:
enabled: false
# https://stackoverflow.com/a/59189742
disableCommands: []

ingress:
annotations: {}
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
path: /
hosts:
- passbook.k8s.local
tls: []
# - secretName: chart-example-tls
# hosts:
# - passbook.k8s.local
```
42 docs/installation/reverse-proxy.md (new file)
@@ -0,0 +1,42 @@
# passbook behind a reverse-proxy

If you want to access passbook behind a reverse-proxy, use a config like this. It is important that Websocket is enabled, so that Outposts can connect.

```
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}

server {
# Server config
listen 80;
server_name sso.domain.tld;

# 301 to SSL
location / {
return 301 https://$host$request_uri;
}
}
server {
# Server config
listen 443 ssl http2;
server_name sso.domain.tld;

# SSL Certs
ssl_certificate /etc/letsencrypt/live/domain.tld/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/domain.tld/privkey.pem;

# Proxy site
location / {
proxy_pass https://<hostname of your passbook server>;
proxy_http_version 1.1;
proxy_set_header X-Forwarded-Proto https;
proxy_set_header X-Forwarded-Port 443;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $http_host;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
}
}
```
@@ -9,14 +9,14 @@

The following placeholders will be used:

- `passbook.company` is the FQDN of the passbook install.
- `passbook.company` is the FQDN of the passbook install.

Create an application in passbook and note the slug, as this will be used later. Create a SAML provider with the following parameters:

- ACS URL: `https://signin.aws.amazon.com/saml`
- Audience: `urn:amazon:webservices`
- Issuer: `passbook`
- Binding: `Post`
- ACS URL: `https://signin.aws.amazon.com/saml`
- Audience: `urn:amazon:webservices`
- Issuer: `passbook`
- Binding: `Post`

You can of course use a custom signing certificate, and adjust durations.

@@ -24,10 +24,49 @@ You can of course use a custom signing certificate, and adjust durations.

Create a role with the permissions you desire, and note the ARN.

AWS requires two custom PropertyMappings; `Role` and `RoleSessionName`. Create them as following:
After you've created the Property Mappings below, add them to the Provider.

Create an application, assign policies, and assign this provider.

Export the metadata from passbook, and create an Identity Provider [here](https://console.aws.amazon.com/iam/home#/providers).

Afterwards export the metadata from passbook, and create an Identity Provider [here](https://console.aws.amazon.com/iam/home#/providers).
#### Role Mapping

The Role mapping specifies the AWS ARN(s) of the identity provider, and the role the user should assume ([see](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_saml_assertions.html#saml_role-attribute)).

This Mapping needs to have the SAML Name field set to "https://aws.amazon.com/SAML/Attributes/Role"

As expression, you can return a static ARN like so

```python
return "arn:aws:iam::123412341234:role/saml_role,arn:aws:iam::123412341234:saml-provider/passbook"
```

Or, if you want to assign AWS Roles based on Group membership, you can add a custom attribute to the Groups, for example "aws_role", and use this snippet below. Groups are sorted by name and later groups overwrite earlier groups' attributes.

```python
role_name = user.group_attributes().get("aws_role", "")
return f"arn:aws:iam::123412341234:role/{role_name},arn:aws:iam::123412341234:saml-provider/passbook"
```

If you want to allow a user to choose from multiple roles, use this snippet

```python
return [
"arn:aws:iam::123412341234:role/role_a,arn:aws:iam::123412341234:saml-provider/passbook",
"arn:aws:iam::123412341234:role/role_b,arn:aws:iam::123412341234:saml-provider/passbook",
"arn:aws:iam::123412341234:role/role_c,arn:aws:iam::123412341234:saml-provider/passbook",
]
```

### RoleSessionName Mapping

The RoleSessionMapping specifies what identifier will be shown at the top of the Management Console ([see](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_saml_assertions.html#saml_role-session-attribute)).

This mapping needs to have the SAML Name field set to "https://aws.amazon.com/SAML/Attributes/RoleSessionName".

To use the user's username, use this snippet

```python
return user.username
```
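As a rough, hedged sketch (not from the docs above): the multi-role and group-based approaches can be combined by emitting one Role entry per group that sets an `aws_role` attribute. The account and provider ARNs are the same placeholder values used in the examples above, and the expression assumes each Group exposes its attributes as a dict named `attributes`; adjust to your passbook version if that differs.

```python
# Hedged sketch: one Role entry per group that defines an "aws_role" attribute.
provider_arn = "arn:aws:iam::123412341234:saml-provider/passbook"
roles = []
for group in user.pb_groups.all():
    # `group.attributes` is assumed to behave like a dict here.
    role_name = group.attributes.get("aws_role", "")
    if role_name:
        roles.append(f"arn:aws:iam::123412341234:role/{role_name},{provider_arn}")
return roles
```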
@@ -11,14 +11,15 @@ From https://about.gitlab.com/what-is-gitlab/

The following placeholders will be used:

- `gitlab.company` is the FQDN of the GitLab Install
- `passbook.company` is the FQDN of the passbook Install
- `gitlab.company` is the FQDN of the GitLab Install
- `passbook.company` is the FQDN of the passbook Install

Create an application in passbook and note the slug, as this will be used later. Create a SAML provider with the following parameters:

- ACS URL: `https://gitlab.company/users/auth/saml/callback`
- Audience: `https://gitlab.company`
- Issuer: `https://gitlab.company`
- ACS URL: `https://gitlab.company/users/auth/saml/callback`
- Audience: `https://gitlab.company`
- Issuer: `https://gitlab.company`
- Binding: `Post`

You can of course use a custom signing certificate, and adjust durations. To get the value for `idp_cert_fingerprint`, you can use a tool like [this](https://www.samltool.com/fingerprint.php).

@@ -41,7 +42,7 @@ gitlab_rails['omniauth_providers'] = [
args: {
assertion_consumer_service_url: 'https://gitlab.company/users/auth/saml/callback',
idp_cert_fingerprint: '4E:1E:CD:67:4A:67:5A:E9:6A:D0:3C:E6:DD:7A:F2:44:2E:76:00:6A',
idp_sso_target_url: 'https://passbook.company/application/saml/<passbook application slug>/login/',
idp_sso_target_url: 'https://passbook.company/application/saml/<passbook application slug>/sso/binding/post/',
issuer: 'https://gitlab.company',
name_identifier_format: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress',
attribute_statements: {
BIN docs/integrations/services/sentry/auth.png (new file, 316 KiB)
@@ -15,27 +15,31 @@ From https://sentry.io

The following placeholders will be used:

- `sentry.company` is the FQDN of the Sentry install.
- `passbook.company` is the FQDN of the passbook install.
- `sentry.company` is the FQDN of the Sentry install.
- `passbook.company` is the FQDN of the passbook install.

Create an application in passbook. Create an OpenID provider with the following parameters:
Create an application in passbook. Create a SAML Provider with the following values

- Client Type: `Confidential`
- Response types: `code (Authorization Code Flow)`
- JWT Algorithm: `RS256`
- Redirect URIs: `https://sentry.company/auth/sso/`
- Scopes: `openid email`
- ACS URL: `https://sentry.company/saml/acs/<sentry organisation name>/`
- Audience: `https://sentry.company/saml/metadata/<sentry organisation name>/`
- Issuer: `passbook`
- Service Provider Binding: `Post`
- Property Mapping: Select all Autogenerated Mappings

## Sentry

**This guide assumes you've installed Sentry using [getsentry/onpremise](https://github.com/getsentry/onpremise)**

- Add `sentry-auth-oidc` to `onpremise/sentry/requirements.txt` (Create the file if it doesn't exist yet)
- Add the following block to your `onpremise/sentry/sentry.conf.py`:
```
OIDC_ISSUER = "passbook"
OIDC_CLIENT_ID = "<Client ID from passbook>"
OIDC_CLIENT_SECRET = "<Client Secret from passbook>"
OIDC_SCOPE = "openid email"
OIDC_DOMAIN = "https://passbook.company/application/oidc/"
```
Navigate to Settings -> Auth, and click on Configure next to SAML2

In passbook, get the Metadata URL by right-clicking `Download Metadata` and selecting Copy Link Address, and paste that URL into Sentry.

On the next screen, input these Values

IdP User ID: `urn:oid:0.9.2342.19200300.100.1.1`
User Email: `urn:oid:0.9.2342.19200300.100.1.3`
First Name: `urn:oid:2.5.4.3`

After confirming, Sentry will authenticate with passbook, and you should be redirected back to a page confirming your settings.
37 docs/integrations/services/sonarr/index.md (new file)
@@ -0,0 +1,37 @@
# Sonarr Integration

!!! note
These instructions apply to all projects in the *arr Family. If you use multiple of these projects, you can assign them to the same Outpost.

## What is Sonarr

From https://github.com/Sonarr/Sonarr

!!! note ""
Sonarr is a PVR for Usenet and BitTorrent users. It can monitor multiple RSS feeds for new episodes of your favorite shows and will grab, sort and rename them. It can also be configured to automatically upgrade the quality of files already downloaded when a better quality format becomes available.

## Preparation

The following placeholders will be used:

- `sonarr.company` is the FQDN of the Sonarr install.
- `passbook.company` is the FQDN of the passbook install.

Create an application in passbook. Create a Proxy Provider with the following values

- Internal host

If Sonarr is running in docker, and you're deploying the passbook proxy on the same host, set the value to `http://sonarr:8989`, where sonarr is the name of your container.

If Sonarr is running on a different server than where you are deploying the passbook proxy, set the value to `http://sonarr.company:8989`.

- External host

Set this to the external URL you will be accessing Sonarr from.

## Deployment

Create an outpost deployment for the provider you've created above, as described [here](../../../outposts/outposts.md). Deploy this Outpost either on the same host or a different host that can access Sonarr.

The outpost will connect to passbook and configure itself.
50
docs/integrations/services/tautulli/index.md
Normal file
@ -0,0 +1,50 @@
|
||||
# Tautulli Integration
|
||||
|
||||
## What is Tautulli
|
||||
|
||||
From https://tautulli.com/
|
||||
|
||||
!!! note
|
||||
Tautulli is a 3rd party application that you can run alongside your Plex Media Server to monitor activity and track various statistics. Most importantly, these statistics include what has been watched, who watched it, when and where they watched it, and how it was watched. The only thing missing is "why they watched it", but who am I to question your 42 plays of Frozen. All statistics are presented in a nice and clean interface with many tables and graphs, which makes it easy to brag about your server to everyone else.
|
||||
|
||||
## Preparation
|
||||
|
||||
The following placeholders will be used:
|
||||
|
||||
- `tautulli.company` is the FQDN of the Tautulli install.
|
||||
- `passbook.company` is the FQDN of the passbook install.
|
||||
|
||||
## passbook Setup
|
||||
|
||||
Because Tautulli requires valid HTTP Basic credentials, you must save your HTTP Basic Credentials in passbook. The recommended way to do this is to create a Group. Name the group "Tautulli Users", for example. For this group, add the following attributes:
|
||||
|
||||
```yaml
|
||||
tautulli_user: username
|
||||
tautulli_password: password
|
||||
```
|
||||
|
||||
Add all Tautulli users to the Group. You should also create a Group Membership Policy to limit access to the application.
|
||||
|
||||
Create an application in passbook. Create a Proxy provider with the following parameters:
|
||||
|
||||
- Internal host
|
||||
|
||||
If Tautulli is running in docker, and you're deploying the passbook proxy on the same host, set the value to `http://tautulli:3579`, where tautulli is the name of your container.
|
||||
|
||||
If Tautulli is running on a different server than where you are deploying the passbook proxy, set the value to `http://tautulli.company:3579`.
|
||||
|
||||
- External host
|
||||
|
||||
Set this to the external URL you will be accessing Tautulli from.
|
||||
|
||||
Enable the `Set HTTP-Basic Authentication` option. Set `HTTP-Basic Username` and `HTTP-Basic Password` to `tautulli_user` and `tautulli_password` respectively. These values can be chosen freely; `tautulli_` is just used as a prefix for clarity.
|
||||
|
||||
## Tautulli Setup
|
||||
|
||||
In Tautulli, navigate to Settings and enable the "Show Advanced" option. Navigate to "Web Interface" in the sidebar, and ensure the `Use Basic Authentication` option is checked.
|
||||
|
||||

|
||||
|
||||
Save the settings, and restart Tautulli if prompted.
|
||||
|
||||
Afterwards, you need to deploy an Outpost in front of Tautulli, as described [here](../sonarr/index.md).
|
BIN
docs/integrations/services/tautulli/tautulli.png
Normal file
After Width: | Height: | Size: 90 KiB |
@ -16,14 +16,15 @@ From https://docs.ansible.com/ansible/2.5/reference_appendices/tower.html
|
||||
|
||||
The following placeholders will be used:
|
||||
|
||||
- `awx.company` is the FQDN of the AWX/Tower install.
|
||||
- `passbook.company` is the FQDN of the passbook install.
|
||||
- `awx.company` is the FQDN of the AWX/Tower install.
|
||||
- `passbook.company` is the FQDN of the passbook install.
|
||||
|
||||
Create an application in passbook and note the slug, as this will be used later. Create a SAML provider with the following parameters:
|
||||
|
||||
- ACS URL: `https://awx.company/sso/complete/saml/`
|
||||
- Audience: `awx`
|
||||
- Issuer: `https://awx.company/sso/metadata/saml/`
|
||||
- ACS URL: `https://awx.company/sso/complete/saml/`
|
||||
- Audience: `awx`
|
||||
- Service Provider Binding: Post
|
||||
- Issuer: `https://awx.company/sso/metadata/saml/`
|
||||
|
||||
You can of course use a custom signing certificate, and adjust durations.
|
||||
|
||||
|
58
docs/integrations/services/ubuntu-landscape/index.md
Normal file
@ -0,0 +1,58 @@
|
||||
# Ubuntu Landscape Integration
|
||||
|
||||
## What is Ubuntu Landscape
|
||||
|
||||
From https://en.wikipedia.org/wiki/Landscape_(software)
|
||||
|
||||
!!! note ""
|
||||
|
||||
Landscape is a systems management tool developed by Canonical. It can be run on-premises or in the cloud depending on the needs of the user. It is primarily designed for use with Ubuntu derivatives such as Desktop, Server, and Core.
|
||||
|
||||
!!! warning
|
||||
|
||||
This requires passbook 0.10.3 or newer.
|
||||
|
||||
## Preparation
|
||||
|
||||
The following placeholders will be used:
|
||||
|
||||
- `landscape.company` is the FQDN of the Landscape server.
|
||||
- `passbook.company` is the FQDN of the passbook install.
|
||||
|
||||
Landscape uses the OpenID-Connect protocol for single sign-on.
|
||||
|
||||
## passbook Setup
|
||||
|
||||
Create an OAuth2/OpenID-Connect Provider with the default settings. Set the Redirect URIs to `https://landscape.company/login/handle-openid`. Select all Autogenerated Scopes.
|
||||
|
||||
Keep note of the Client ID and the Client Secret.
|
||||
|
||||
Create an application and assign access policies to the application. Set the application's provider to the provider you've just created.
|
||||
|
||||
## Landscape Setup
|
||||
|
||||
On the Landscape Server, edit the file `/etc/landscape/service.conf` and add the following snippet under the `[landscape]` section:
|
||||
|
||||
```
|
||||
oidc-issuer = https://passbook.company/application/o/<slug of the application you've created>/
|
||||
oidc-client-id = <client ID of the provider you've created>
|
||||
oidc-client-secret = <client Secret of the provider you've created>
|
||||
```
|
||||
|
||||
Afterwards, run `sudo lsctl restart` to restart the Landscape services.
|
||||
|
||||
## Appendix
|
||||
|
||||
To make an OpenID-Connect User admin, you have to insert some rows into the database.
|
||||
|
||||
First log in with your passbook user, and make sure the user is created successfully.
|
||||
|
||||
Run `sudo -u postgres psql landscape-standalone-main` on the Landscape server to open a PostgreSQL Prompt.
|
||||
Then run `select * from person;` to get a list of all users. Take note of the ID given to your new user.
|
||||
|
||||
Run the following commands to make this user an administrator:
|
||||
|
||||
```sql
|
||||
INSERT INTO person_account VALUES (<user id>, 1);
|
||||
INSERT INTO person_access VALUES (<user id>, 1, 1);
|
||||
```
|
83
docs/integrations/services/vmware-vcenter/index.md
Normal file
@ -0,0 +1,83 @@
|
||||
# VMware vCenter Integration
|
||||
|
||||
## What is vCenter
|
||||
|
||||
From https://en.wikipedia.org/wiki/VCenter
|
||||
|
||||
!!! note ""
|
||||
|
||||
vCenter Server is the centralized management utility for VMware, and is used to manage virtual machines, multiple ESXi hosts, and all dependent components from a single centralized location. VMware vMotion and svMotion require the use of vCenter and ESXi hosts.
|
||||
|
||||
!!! warning
|
||||
|
||||
This requires passbook 0.10.3 or newer.
|
||||
|
||||
!!! warning
|
||||
|
||||
This requires VMware vCenter 7.0.0 or newer.
|
||||
|
||||
!!! note
|
||||
|
||||
    It seems that vCenter still needs to be joined to the Active Directory domain; otherwise, group membership does not work correctly. We're working on a fix for this; in the meantime, your vCenter should be part of your domain.
|
||||
|
||||
## Preparation
|
||||
|
||||
The following placeholders will be used:
|
||||
|
||||
- `vcenter.company` is the FQDN of the vCenter server.
|
||||
- `passbook.company` is the FQDN of the passbook install.
|
||||
|
||||
Since vCenter only allows OpenID-Connect in combination with Active Directory, it is recommended to have passbook sync with the same Active Directory.
|
||||
|
||||
### Step 1
|
||||
|
||||
Under *Property Mappings*, create a *Scope Mapping*. Give it a name like "OIDC-Scope-VMware-vCenter". Set the scope name to `openid` and the expression to the following:
|
||||
|
||||
```python
|
||||
return {
|
||||
"domain": "<your active directory domain>",
|
||||
}
|
||||
```
|
||||
|
||||
### Step 2
|
||||
|
||||
!!! note
|
||||
If your Active Directory Schema is the same as your Email address schema, skip to Step 3.
|
||||
|
||||
Under *Sources*, click *Edit* and ensure that "Autogenerated Active Directory Mapping: userPrincipalName -> attributes.upn" has been added to your source.
|
||||
|
||||
### Step 3
|
||||
|
||||
Under *Providers*, create an OAuth2/OpenID Provider with these settings:
|
||||
|
||||
- Client Type: Confidential
|
||||
- Response Type: code (ADFS Compatibility Mode, sends id_token as access_token)
|
||||
- JWT Algorithm: RS256
|
||||
- Redirect URI: `https://vcenter.company/ui/login/oauth2/authcode`
|
||||
- Post Logout Redirect URIs: `https://vcenter.company/ui/login`
|
||||
- Sub Mode: If your Email address Schema matches your UPN, select "Based on the User's Email...", otherwise select "Based on the User's UPN...".
|
||||
- Scopes: Select the Scope Mapping you've created in Step 1
|
||||
|
||||

|
||||
|
||||
### Step 4
|
||||
|
||||
Create an application which uses this provider. Optionally apply access restrictions to the application.
|
||||
|
||||
Set the Launch URL to `https://vcenter.company/ui/login/oauth2`. This will skip vCenter's User Prompt and directly log you in.
|
||||
|
||||
## vCenter Setup
|
||||
|
||||
Log in with the local Administrator account (it most likely ends with vsphere.local). Using the menu in the navigation bar, navigate to *Administration -> Single Sign-On -> Configuration*.
|
||||
|
||||
Click on *Change Identity Provider* in the top-right corner.
|
||||
|
||||
In the wizard, select "Microsoft ADFS" and click Next.
|
||||
|
||||
Fill in the Client Identifier and Shared Secret from the Provider in passbook. For the OpenID Address, click on *View Setup URLs* in passbook, and copy the OpenID Configuration URL.
|
||||
|
||||
On the next page, fill in your Active Directory Connection Details. These should be similar to what you have set in passbook.
|
||||
|
||||

|
||||
|
||||
If your vCenter was already set up with LDAP beforehand, your role assignments will continue to work.
|
BIN
docs/integrations/services/vmware-vcenter/passbook_setup.png
Normal file
After Width: | Height: | Size: 173 KiB |
BIN
docs/integrations/services/vmware-vcenter/vcenter_post_setup.png
Normal file
After Width: | Height: | Size: 89 KiB |
BIN
docs/integrations/sources/active-directory/01_user_create.png
Normal file
After Width: | Height: | Size: 26 KiB |
BIN
docs/integrations/sources/active-directory/02_delegate.png
Normal file
After Width: | Height: | Size: 31 KiB |
BIN
docs/integrations/sources/active-directory/03_pb_status.png
Normal file
After Width: | Height: | Size: 64 KiB |
55
docs/integrations/sources/active-directory/index.md
Normal file
@ -0,0 +1,55 @@
|
||||
# Active Directory Integration
|
||||
|
||||
## Preparation
|
||||
|
||||
The following placeholders will be used:
|
||||
|
||||
- `ad.company` is the Name of the Active Directory domain.
|
||||
- `passbook.company` is the FQDN of the passbook install.
|
||||
|
||||
## Active Directory Setup
|
||||
|
||||
1. Open Active Directory Users and Computers
|
||||
|
||||
2. Create a user in Active Directory, matching your naming scheme
|
||||
|
||||

|
||||
|
||||
3. Give the user a password, generated for example with `pwgen 64 1`.
|
||||
|
||||
4. Open the Delegation of Control Wizard by right-clicking the domain.
|
||||
|
||||
5. Select the passbook service user you've just created.
|
||||
|
||||
6. Ensure the "Reset user password and force password change at next logon" option is checked.
|
||||
|
||||

|
||||
|
||||
## passbook Setup
|
||||
|
||||
In passbook, create a new LDAP Source in Administration -> Sources.
|
||||
|
||||
Use these settings:
|
||||
|
||||
- Server URI: `ldap://ad.company`
|
||||
|
||||
    For passbook to be able to write passwords back to Active Directory, make sure to use `ldaps://`.
|
||||
|
||||
- Bind CN: `<name of your service user>@ad.company`
|
||||
- Bind Password: The password you've given the user above
|
||||
- Base DN: The base DN which you want passbook to sync
|
||||
- Property Mappings: Select all and click the right arrow
|
||||
|
||||
The other settings might need to be adjusted based on the setup of your domain; typical Active Directory values for them are sketched after the list below.
|
||||
|
||||
- Addition User/Group DN: Additional DN which is *prepended* to your Base DN for user synchronization.
|
||||
- Addition Group DN: Additional DN which is *prepended* to your Base DN for group synchronization.
|
||||
- User object filter: Which objects should be considered users.
|
||||
- Group object filter: Which objects should be considered groups.
|
||||
- User group membership field: Which user field saves the group membership.
|
||||
- Object uniqueness field: A user field which contains a unique identifier.
|
||||
- Sync parent group: If enabled, all synchronized groups will be given this group as a parent.
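For a typical Active Directory domain, these values often look roughly like the sketch below. This is not a passbook configuration file, just a summary of commonly used values; verify them against your own schema.

```yaml
# Commonly used Active Directory values (assumptions; adjust to your domain)
user_object_filter: "(objectClass=user)"
group_object_filter: "(objectClass=group)"
user_group_membership_field: memberOf
object_uniqueness_field: objectSid
```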
|
||||
|
||||
After you save the source, a synchronization will start in the background. When it's done, you can see the summary on the System Tasks page.
|
||||
|
||||

|
111
docs/maintenance/backups/index.md
Normal file
@ -0,0 +1,111 @@
|
||||
# Backup and restore
|
||||
|
||||
!!! warning
|
||||
|
||||
    Local backups are only supported for docker-compose installs. If you want to back up a Kubernetes instance locally, use an S3-compatible server such as [minio](https://min.io/).
|
||||
|
||||
### Backup
|
||||
|
||||
Local backups can be created by running the following command in your passbook installation directory:
|
||||
|
||||
```
|
||||
docker-compose run --rm worker backup
|
||||
```
|
||||
|
||||
This will dump the current database into the `./backups` folder. By default, the last 10 backups are kept.
|
||||
|
||||
To schedule these backups, use the following snippet in a crontab:
|
||||
|
||||
```
|
||||
0 0 * * * bash -c "cd <passbook install location> && docker-compose run --rm worker backup" >/dev/null
|
||||
```
|
||||
|
||||
!!! notice
|
||||
|
||||
    passbook does support automatic backups on a schedule; however, this is currently not recommended, as there is no way to monitor these scheduled tasks.
|
||||
|
||||
### Restore
|
||||
|
||||
Run this command in your passbook installation directory:
|
||||
|
||||
```
|
||||
docker-compose run --rm worker restore
|
||||
```
|
||||
|
||||
This will prompt you to restore from your last backup. If you want to restore from a specific file, use the `-i` flag with the filename:
|
||||
|
||||
```
|
||||
docker-compose run --rm worker restore -i default-2020-10-03-115557.psql
|
||||
```
|
||||
|
||||
After you've restored the backup, it is recommended to restart all services with `docker-compose restart`.
|
||||
|
||||
### S3 Configuration
|
||||
|
||||
!!! notice
|
||||
|
||||
To trigger backups with S3 enabled, use the same commands as above.
|
||||
|
||||
#### S3 Preparation
|
||||
|
||||
passbook expects the bucket you select to already exist. The IAM user used by passbook should have the following permissions, expressed here as a bucket policy granting that user:
|
||||
|
||||
```json
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Sid": "VisualEditor0",
|
||||
"Effect": "Allow",
|
||||
"Action": [
|
||||
"s3:PutObject",
|
||||
"s3:GetObjectAcl",
|
||||
"s3:GetObject",
|
||||
"s3:ListBucket",
|
||||
"s3:DeleteObject",
|
||||
"s3:PutObjectAcl"
|
||||
],
|
||||
"Principal": {
|
||||
"AWS": "arn:aws:iam::example-AWS-account-ID:user/example-user-name"
|
||||
},
|
||||
"Resource": [
|
||||
"arn:aws:s3:::example-bucket-name/*",
|
||||
"arn:aws:s3:::example-bucket-name"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### docker-compose
|
||||
|
||||
Set the following values in your `.env` file.
|
||||
|
||||
```
|
||||
PASSBOOK_POSTGRESQL__S3_BACKUP__ACCESS_KEY=
|
||||
PASSBOOK_POSTGRESQL__S3_BACKUP__SECRET_KEY=
|
||||
PASSBOOK_POSTGRESQL__S3_BACKUP__BUCKET=
|
||||
PASSBOOK_POSTGRESQL__S3_BACKUP__REGION=
|
||||
```
|
||||
|
||||
If you want to back up to an S3-compatible server, like [minio](https://min.io/), use this setting:
|
||||
|
||||
```
|
||||
PASSBOOK_POSTGRESQL__S3_BACKUP__HOST=http://play.min.io
|
||||
```
|
||||
|
||||
#### Kubernetes
|
||||
|
||||
Simply enable these options in your `values.yaml` file:
|
||||
|
||||
```yaml
|
||||
# Enable Database Backups to S3
|
||||
backup:
|
||||
access_key: access-key
|
||||
secret_key: secret-key
|
||||
bucket: s3-bucket
|
||||
region: eu-central-1
|
||||
host: s3-host
|
||||
```
|
||||
|
||||
Afterwards, run a `helm upgrade` to update the ConfigMap. Because scheduling backups from within passbook is currently not recommended, a Kubernetes CronJob is created instead, which runs the backup daily.
|
20
docs/outposts/deploy-docker-compose.md
Normal file
@ -0,0 +1,20 @@
|
||||
# Outpost deployment in docker-compose
|
||||
|
||||
To deploy an outpost with docker-compose, use this snippet in your docker-compose file.
|
||||
|
||||
You can also run the outpost in a separate docker-compose project; you just have to ensure that the outpost container can reach your application container (one approach is sketched after the snippet below).
|
||||
|
||||
```yaml
|
||||
version: '3.5'
|
||||
|
||||
services:
|
||||
passbook_proxy:
|
||||
image: beryju/passbook-proxy:0.10.0-stable
|
||||
ports:
|
||||
- 4180:4180
|
||||
- 4443:4443
|
||||
environment:
|
||||
PASSBOOK_HOST: https://your-passbook.tld
|
||||
PASSBOOK_INSECURE: 'false'
|
||||
PASSBOOK_TOKEN: token-generated-by-passbook
|
||||
```
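If the outpost runs in a separate docker-compose project, one way to make the application reachable is a shared external docker network. A sketch, assuming a network named `app-net` that your application project is also attached to:

```yaml
# Sketch: attach the proxy to a pre-existing external network shared with the application.
# Create the network once with: docker network create app-net
version: '3.5'

services:
  passbook_proxy:
    image: beryju/passbook-proxy:0.10.0-stable
    ports:
      - 4180:4180
      - 4443:4443
    environment:
      PASSBOOK_HOST: https://your-passbook.tld
      PASSBOOK_INSECURE: 'false'
      PASSBOOK_TOKEN: token-generated-by-passbook
    networks:
      - app-net

networks:
  app-net:
    external: true
```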
|
99
docs/outposts/deploy-kubernetes.md
Normal file
@ -0,0 +1,99 @@
|
||||
# Outpost deployment on Kubernetes
|
||||
|
||||
Use the following manifest, replacing all values surrounded with `__`.
|
||||
|
||||
Afterwards, configure the proxy provider to connect to `<service name>.<namespace>.svc.cluster.local`, and update your Ingress to send traffic to the `passbook-outpost` service (an example Ingress is sketched after the manifest).
|
||||
|
||||
```yaml
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/instance: test
|
||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
||||
app.kubernetes.io/name: passbook-proxy
|
||||
app.kubernetes.io/version: 0.10.0
|
||||
name: passbook-outpost-api
|
||||
stringData:
|
||||
passbook_host: '__PASSBOOK_URL__'
|
||||
passbook_host_insecure: 'true'
|
||||
token: '__PASSBOOK_TOKEN__'
|
||||
type: Opaque
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/instance: test
|
||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
||||
app.kubernetes.io/name: passbook-proxy
|
||||
app.kubernetes.io/version: 0.10.0
|
||||
name: passbook-outpost
|
||||
spec:
|
||||
ports:
|
||||
- name: http
|
||||
port: 4180
|
||||
protocol: TCP
|
||||
targetPort: http
|
||||
- name: https
|
||||
port: 4443
|
||||
protocol: TCP
|
||||
targetPort: https
|
||||
selector:
|
||||
app.kubernetes.io/instance: test
|
||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
||||
app.kubernetes.io/name: passbook-proxy
|
||||
app.kubernetes.io/version: 0.10.0
|
||||
type: ClusterIP
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/instance: test
|
||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
||||
app.kubernetes.io/name: passbook-proxy
|
||||
app.kubernetes.io/version: 0.10.0
|
||||
name: passbook-outpost
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/instance: test
|
||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
||||
app.kubernetes.io/name: passbook-proxy
|
||||
app.kubernetes.io/version: 0.10.0
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/instance: test
|
||||
app.kubernetes.io/managed-by: passbook.beryju.org
|
||||
app.kubernetes.io/name: passbook-proxy
|
||||
app.kubernetes.io/version: 0.10.0
|
||||
spec:
|
||||
containers:
|
||||
- env:
|
||||
- name: PASSBOOK_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
key: passbook_host
|
||||
name: passbook-outpost-api
|
||||
- name: PASSBOOK_TOKEN
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
key: token
|
||||
name: passbook-outpost-api
|
||||
- name: PASSBOOK_INSECURE
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
key: passbook_host_insecure
|
||||
name: passbook-outpost-api
|
||||
image: beryju/passbook-proxy:0.10.0-stable
|
||||
name: proxy
|
||||
ports:
|
||||
- containerPort: 4180
|
||||
name: http
|
||||
protocol: TCP
|
||||
- containerPort: 4443
|
||||
name: https
|
||||
protocol: TCP
|
||||
```
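As mentioned above, your existing Ingress has to send traffic to the `passbook-outpost` Service. A minimal sketch, assuming the `networking.k8s.io/v1beta1` API and `app.company` as the application's hostname (both assumptions; adjust to your cluster):

```yaml
# Sketch: route all traffic for the application through the outpost Service defined above.
apiVersion: networking.k8s.io/v1beta1
kind: Ingress
metadata:
  name: passbook-outpost
spec:
  rules:
    - host: app.company
      http:
        paths:
          - path: /
            backend:
              serviceName: passbook-outpost
              servicePort: http
```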
|
@ -6,21 +6,9 @@ An outpost is a single deployment of a passbook component, which can be deployed
|
||||
|
||||
Upon creation, a service account and a token are generated. The service account only has permissions to read the outpost and provider configuration. This token is used by the Outpost to connect to passbook.
|
||||
|
||||
To deploy an outpost, you can for example use this docker-compose snippet:
|
||||
To deploy an outpost, see: <a name="deploy">
|
||||
|
||||
```yaml
|
||||
version: 3.5
|
||||
- [Kubernetes](deploy-kubernetes.md)
|
||||
- [docker-compose](deploy-docker-compose.md)
|
||||
|
||||
services:
|
||||
passbook_proxy:
|
||||
image: beryju/passbook-proxy:0.10.0-stable
|
||||
ports:
|
||||
- 4180:4180
|
||||
- 4443:4443
|
||||
environment:
|
||||
PASSBOOK_HOST: https://your-passbook.tld
|
||||
PASSBOOK_INSECURE: 'true'
|
||||
PASSBOOK_TOKEN: token-generated-by-passbook
|
||||
```
|
||||
|
||||
In future versions, this snippet will be automatically generated. You will also be able to deploy an outpost directly into a kubernetes cluster.w
|
||||
In future versions, this snippet will be automatically generated. You will also be able to deploy an outpost directly into a kubernetes cluster.
|
||||
|
9
docs/outposts/upgrading.md
Normal file
@ -0,0 +1,9 @@
|
||||
# Upgrading an Outpost
|
||||
|
||||
In the Outpost Overview list, you'll see if any deployed outposts are out of date.
|
||||
|
||||

|
||||
|
||||
To upgrade the Outpost to the latest version, simply adjust the docker tag of the outpost to the new version.
|
||||
|
||||
Since the configuration is managed by passbook, that's all you have to do.
|
BIN
docs/outposts/upgrading_outdated.png
Normal file
After Width: | Height: | Size: 37 KiB |
@ -27,4 +27,11 @@ return False
|
||||
- `request.context`: A dictionary with dynamic data. This depends on the origin of the execution.
|
||||
- `pb_is_sso_flow`: Boolean which is true if request was initiated by authenticating through an external provider.
|
||||
- `pb_client_ip`: Client's IP Address or '255.255.255.255' if no IP Address could be extracted. Can be [compared](../expressions/index.md#comparing-ip-addresses)
|
||||
- `pb_flow_plan`: Current Plan if Policy is called from the Flow Planner.
|
||||
|
||||
Additionally, when the policy is executed from a flow, every variable from the flow's current context is accessible under the `context` object.
|
||||
|
||||
This includes the following:
|
||||
|
||||
- `prompt_data`: Data which has been saved from a prompt stage or an external source.
|
||||
- `application`: The application the user is in the process of authorizing.
|
||||
- `pending_user`: The currently pending user.
|
||||
|
11
docs/troubleshooting/access.md
Normal file
@ -0,0 +1,11 @@
|
||||
# Troubleshooting access problems
|
||||
|
||||
## I get an access denied error when trying to access an application.
|
||||
|
||||
Make sure your user is a superuser, or has the attribute `passbook_user_debug` set to `true`:
|
||||
|
||||

|
||||
|
||||
Afterwards, try to access the application again. You will now see a message explaining which policy denied you access:
|
||||
|
||||

|
BIN
docs/troubleshooting/access_denied_message.png
Normal file
After Width: | Height: | Size: 98 KiB |
BIN
docs/troubleshooting/passbook_user_debug.png
Normal file
After Width: | Height: | Size: 13 KiB |
20
docs/upgrading/to-0.11.md
Normal file
@ -0,0 +1,20 @@
|
||||
# Upgrading to 0.11
|
||||
|
||||
This update brings these headline features:
|
||||
|
||||
- Add Backup and Restore, currently only externally schedulable, documented [here](https://passbook.beryju.org/maintenance/backups/)
|
||||
- New Admin Dashboard with more metrics and Charts
|
||||
|
||||
Shows successful and failed logins from the last 24 hours, as well as the most used applications
|
||||
- Add search to all table views
|
||||
- Outpost now supports a Docker Controller, which installs the Outpost on the same host as passbook, updates and manages it
|
||||
- Add Token Identifier
|
||||
|
||||
    Tokens now have an identifier which is used to reference them, so the primary key is not shown in URLs.
|
||||
- `core/applications/list` API now shows applications the user has access to via policies
|
||||
|
||||
## Upgrading
|
||||
|
||||
This upgrade can be done like any minor upgrade; the only external change is the new docker-compose file, which enables the Docker integration for Outposts. To use this feature, please download the latest docker-compose file from [here](https://raw.githubusercontent.com/BeryJu/passbook/master/docker-compose.yml).
|
||||
|
||||
Afterwards, you can simply run `docker-compose up -d` and then the normal upgrade command of `docker-compose run --rm server migrate`.
|
63
docs/upgrading/to-0.12.md
Normal file
@ -0,0 +1,63 @@
|
||||
# Upgrading to 0.12
|
||||
|
||||
This update brings these headline features:
|
||||
|
||||
- Rewrite Outpost state Logic, which now supports multiple concurrent Outpost instances.
|
||||
- Add Kubernetes Integration for Outposts, which deploys and maintains Outposts with High Availability in a Kubernetes Cluster
|
||||
- Add System Task Overview to see all background tasks, their status, the log output, and retry them
|
||||
- Alerts now disappear automatically
|
||||
- Audit Logs are now searchable
|
||||
- Users can now create their own Tokens to access the API
|
||||
- docker-compose deployment now uses traefik 2.3
|
||||
|
||||
Fixes:
|
||||
|
||||
- Fix high CPU Usage of the proxy when Websocket connections fail
|
||||
|
||||
## Upgrading
|
||||
|
||||
### docker-compose
|
||||
|
||||
Docker-compose users should download the latest docker-compose file from [here](https://raw.githubusercontent.com/BeryJu/passbook/master/docker-compose.yml). This includes the new traefik 2.3.
|
||||
|
||||
Afterwards, you can simply run `docker-compose up -d` and then the normal upgrade command of `docker-compose run --rm server migrate`.
|
||||
|
||||
### Kubernetes
|
||||
|
||||
For Kubernetes users, there are some changes to the helm values.
|
||||
|
||||
The values change from
|
||||
|
||||
```yaml
|
||||
config:
|
||||
# Optionally specify fixed secret_key, otherwise generated automatically
|
||||
# secret_key: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
|
||||
# Enable error reporting
|
||||
error_reporting:
|
||||
enabled: false
|
||||
environment: customer
|
||||
send_pii: false
|
||||
# Log level used by web and worker
|
||||
# Can be either debug, info, warning, error
|
||||
log_level: warning
|
||||
```
|
||||
|
||||
to
|
||||
|
||||
```yaml
|
||||
config:
|
||||
# Optionally specify fixed secret_key, otherwise generated automatically
|
||||
# secretKey: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
|
||||
# Enable error reporting
|
||||
errorReporting:
|
||||
enabled: false
|
||||
environment: customer
|
||||
sendPii: false
|
||||
# Log level used by web and worker
|
||||
# Can be either debug, info, warning, error
|
||||
logLevel: warning
|
||||
```
|
||||
|
||||
in order to be consistent with the rest of the settings.
|
||||
|
||||
There is also a new setting called `kubernetesIntegration`, which controls the Kubernetes integration for passbook. When enabled (the default), a Service Account is created, which allows passbook to deploy and update Outposts.
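If you do not want passbook to manage Outposts in your cluster, the integration can be disabled. A sketch (the exact placement of this key in your values.yaml is an assumption; check the chart's default values):

```yaml
# Disable the Kubernetes integration; no Service Account is created and
# passbook will not deploy or update Outposts in this cluster.
kubernetesIntegration: false
```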
|
@ -2,7 +2,7 @@ version: '3.7'
|
||||
|
||||
services:
|
||||
chrome:
|
||||
image: selenium/standalone-chrome:3.141.59-20200525
|
||||
image: selenium/standalone-chrome:3.141
|
||||
volumes:
|
||||
- /dev/shm:/dev/shm
|
||||
network_mode: host
|
||||
|
@ -2,7 +2,7 @@ version: '3.7'
|
||||
|
||||
services:
|
||||
chrome:
|
||||
image: selenium/standalone-chrome-debug:3.141.59-20200525
|
||||
image: selenium/standalone-chrome-debug:3.141
|
||||
volumes:
|
||||
- /dev/shm:/dev/shm
|
||||
network_mode: host
|
||||
|
@ -1,58 +1,38 @@
|
||||
"""Test Enroll flow"""
|
||||
from time import sleep
|
||||
from sys import platform
|
||||
from typing import Any, Dict, Optional
|
||||
from unittest.case import skipUnless
|
||||
|
||||
from django.test import override_settings
|
||||
from docker import DockerClient, from_env
|
||||
from docker.models.containers import Container
|
||||
from docker.types import Healthcheck
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.support import expected_conditions as ec
|
||||
from structlog import get_logger
|
||||
|
||||
from e2e.utils import USER, SeleniumTestCase
|
||||
from passbook.flows.models import Flow, FlowDesignation, FlowStageBinding
|
||||
from passbook.policies.expression.models import ExpressionPolicy
|
||||
from passbook.stages.email.models import EmailStage, EmailTemplates
|
||||
from passbook.stages.identification.models import IdentificationStage
|
||||
from passbook.stages.prompt.models import FieldTypes, Prompt, PromptStage
|
||||
from passbook.stages.user_login.models import UserLoginStage
|
||||
from passbook.stages.user_write.models import UserWriteStage
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestFlowsEnroll(SeleniumTestCase):
|
||||
"""Test Enroll flow"""
|
||||
|
||||
def setUp(self):
|
||||
self.container = self.setup_client()
|
||||
super().setUp()
|
||||
|
||||
def setup_client(self) -> Container:
|
||||
"""Setup test IdP container"""
|
||||
client: DockerClient = from_env()
|
||||
container = client.containers.run(
|
||||
image="mailhog/mailhog:v1.0.1",
|
||||
detach=True,
|
||||
network_mode="host",
|
||||
auto_remove=True,
|
||||
healthcheck=Healthcheck(
|
||||
def get_container_specs(self) -> Optional[Dict[str, Any]]:
|
||||
return {
|
||||
"image": "mailhog/mailhog:v1.0.1",
|
||||
"detach": True,
|
||||
"network_mode": "host",
|
||||
"auto_remove": True,
|
||||
"healthcheck": Healthcheck(
|
||||
test=["CMD", "wget", "--spider", "http://localhost:8025"],
|
||||
interval=5 * 100 * 1000000,
|
||||
start_period=1 * 100 * 1000000,
|
||||
),
|
||||
)
|
||||
while True:
|
||||
container.reload()
|
||||
status = container.attrs.get("State", {}).get("Health", {}).get("Status")
|
||||
if status == "healthy":
|
||||
return container
|
||||
LOGGER.info("Container failed healthcheck")
|
||||
sleep(1)
|
||||
|
||||
def tearDown(self):
|
||||
self.container.kill()
|
||||
super().tearDown()
|
||||
}
|
||||
|
||||
def test_enroll_2_step(self):
|
||||
"""Test 2-step enroll flow"""
|
||||
@ -78,16 +58,9 @@ class TestFlowsEnroll(SeleniumTestCase):
|
||||
field_key="email", label="E-Mail", order=1, type=FieldTypes.EMAIL
|
||||
)
|
||||
|
||||
# Password checking policy
|
||||
password_policy = ExpressionPolicy.objects.create(
|
||||
name="policy-enrollment-password-equals",
|
||||
expression="return request.context['password'] == request.context['password_repeat']",
|
||||
)
|
||||
|
||||
# Stages
|
||||
first_stage = PromptStage.objects.create(name="prompt-stage-first")
|
||||
first_stage.fields.set([username_prompt, password, password_repeat])
|
||||
first_stage.validation_policies.set([password_policy])
|
||||
first_stage.save()
|
||||
second_stage = PromptStage.objects.create(name="prompt-stage-second")
|
||||
second_stage.fields.set([name_field, email])
|
||||
@ -131,7 +104,7 @@ class TestFlowsEnroll(SeleniumTestCase):
|
||||
|
||||
self.wait_for_url(self.url("passbook_core:user-settings"))
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/-/user/')]").text,
|
||||
self.driver.find_element(By.ID, "user-settings").text,
|
||||
"foo",
|
||||
)
|
||||
self.assertEqual(
|
||||
@ -171,16 +144,9 @@ class TestFlowsEnroll(SeleniumTestCase):
|
||||
field_key="email", label="E-Mail", order=1, type=FieldTypes.EMAIL
|
||||
)
|
||||
|
||||
# Password checking policy
|
||||
password_policy = ExpressionPolicy.objects.create(
|
||||
name="policy-enrollment-password-equals",
|
||||
expression="return request.context['password'] == request.context['password_repeat']",
|
||||
)
|
||||
|
||||
# Stages
|
||||
first_stage = PromptStage.objects.create(name="prompt-stage-first")
|
||||
first_stage.fields.set([username_prompt, password, password_repeat])
|
||||
first_stage.validation_policies.set([password_policy])
|
||||
first_stage.save()
|
||||
second_stage = PromptStage.objects.create(name="prompt-stage-second")
|
||||
second_stage.fields.set([name_field, email])
|
||||
@ -220,30 +186,30 @@ class TestFlowsEnroll(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "id_name").send_keys("some name")
|
||||
self.driver.find_element(By.ID, "id_email").send_keys("foo@bar.baz")
|
||||
self.driver.find_element(By.CSS_SELECTOR, ".pf-c-button").click()
|
||||
sleep(3)
|
||||
# Wait for the success message so we know the email is sent
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, ".pf-c-form > p"))
|
||||
)
|
||||
|
||||
# Open Mailhog
|
||||
self.driver.get("http://localhost:8025")
|
||||
|
||||
# Click on first message
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CLASS_NAME, "msglist-message"))
|
||||
)
|
||||
self.driver.find_element(By.CLASS_NAME, "msglist-message").click()
|
||||
sleep(3)
|
||||
self.driver.switch_to.frame(self.driver.find_element(By.CLASS_NAME, "tab-pane"))
|
||||
self.driver.find_element(By.ID, "confirm").click()
|
||||
self.driver.close()
|
||||
self.driver.switch_to.window(self.driver.window_handles[0])
|
||||
|
||||
# We're now logged in
|
||||
sleep(3)
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located(
|
||||
(By.XPATH, "//a[contains(@href, '/-/user/')]")
|
||||
)
|
||||
)
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/-/user/')]").click()
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "user-settings")))
|
||||
self.driver.find_element(By.ID, "user-settings").click()
|
||||
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/-/user/')]").text,
|
||||
self.driver.find_element(By.ID, "user-settings").text,
|
||||
"foo",
|
||||
)
|
||||
self.assertEqual(
|
||||
|
@ -1,10 +1,14 @@
|
||||
"""test default login flow"""
|
||||
from sys import platform
|
||||
from unittest.case import skipUnless
|
||||
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.common.keys import Keys
|
||||
|
||||
from e2e.utils import USER, SeleniumTestCase
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestFlowsLogin(SeleniumTestCase):
|
||||
"""test default login flow"""
|
||||
|
||||
@ -17,6 +21,6 @@ class TestFlowsLogin(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/-/user/')]").text,
|
||||
self.driver.find_element(By.ID, "user-settings").text,
|
||||
USER().username,
|
||||
)
|
||||
|
141
e2e/test_flows_otp.py
Normal file
@ -0,0 +1,141 @@
|
||||
"""test flow with otp stages"""
|
||||
from base64 import b32decode
|
||||
from sys import platform
|
||||
from time import sleep
|
||||
from unittest.case import skipUnless
|
||||
from urllib.parse import parse_qs, urlparse
|
||||
|
||||
from django_otp.oath import TOTP
|
||||
from django_otp.plugins.otp_static.models import StaticDevice, StaticToken
|
||||
from django_otp.plugins.otp_totp.models import TOTPDevice
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.common.keys import Keys
|
||||
from selenium.webdriver.support import expected_conditions as ec
|
||||
|
||||
from e2e.utils import USER, SeleniumTestCase
|
||||
from passbook.flows.models import Flow, FlowStageBinding
|
||||
from passbook.stages.otp_validate.models import OTPValidateStage
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestFlowsOTP(SeleniumTestCase):
|
||||
"""test flow with otp stages"""
|
||||
|
||||
def test_otp_validate(self):
|
||||
"""test flow with otp stages"""
|
||||
sleep(1)
|
||||
# Setup TOTP Device
|
||||
user = USER()
|
||||
device = TOTPDevice.objects.create(user=user, confirmed=True, digits=6)
|
||||
|
||||
flow: Flow = Flow.objects.get(slug="default-authentication-flow")
|
||||
# Move the user_login stage to order 3
|
||||
FlowStageBinding.objects.filter(target=flow, order=2).update(order=3)
|
||||
FlowStageBinding.objects.create(
|
||||
target=flow, order=2, stage=OTPValidateStage.objects.create()
|
||||
)
|
||||
|
||||
self.driver.get(f"{self.live_server_url}/flows/{flow.slug}/")
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
|
||||
# Get expected token
|
||||
totp = TOTP(device.bin_key, device.step, device.t0, device.digits, device.drift)
|
||||
self.driver.find_element(By.ID, "id_code").send_keys(totp.token())
|
||||
self.driver.find_element(By.ID, "id_code").send_keys(Keys.ENTER)
|
||||
self.wait_for_url(self.url("passbook_core:overview"))
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "user-settings").text,
|
||||
USER().username,
|
||||
)
|
||||
|
||||
def test_otp_totp_setup(self):
|
||||
"""test TOTP Setup stage"""
|
||||
flow: Flow = Flow.objects.get(slug="default-authentication-flow")
|
||||
|
||||
self.driver.get(f"{self.live_server_url}/flows/{flow.slug}/")
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "user-settings").text,
|
||||
USER().username,
|
||||
)
|
||||
|
||||
self.driver.find_element(By.CSS_SELECTOR, ".pf-c-page__header").click()
|
||||
self.driver.get(self.url("passbook_core:user-settings"))
|
||||
|
||||
self.driver.find_element(By.LINK_TEXT, "Time-based OTP").click()
|
||||
|
||||
# Remember the current URL as we should end up back here
|
||||
destination_url = self.driver.current_url
|
||||
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, ".pf-c-card__body a.pf-c-button"
|
||||
).click()
|
||||
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "qr")))
|
||||
otp_uri = self.driver.find_element(By.ID, "qr").get_attribute("data-otpuri")
|
||||
|
||||
# Parse the OTP URI, extract the secret and get the next token
|
||||
otp_args = urlparse(otp_uri)
|
||||
self.assertEqual(otp_args.scheme, "otpauth")
|
||||
otp_qs = parse_qs(otp_args.query)
|
||||
secret_key = b32decode(otp_qs["secret"][0])
|
||||
|
||||
totp = TOTP(secret_key)
|
||||
|
||||
self.driver.find_element(By.ID, "id_code").send_keys(totp.token())
|
||||
self.driver.find_element(By.ID, "id_code").send_keys(Keys.ENTER)
|
||||
|
||||
self.wait_for_url(destination_url)
|
||||
sleep(1)
|
||||
|
||||
self.assertTrue(TOTPDevice.objects.filter(user=USER(), confirmed=True).exists())
|
||||
|
||||
def test_otp_static_setup(self):
|
||||
"""test Static OTP Setup stage"""
|
||||
flow: Flow = Flow.objects.get(slug="default-authentication-flow")
|
||||
|
||||
self.driver.get(f"{self.live_server_url}/flows/{flow.slug}/")
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "user-settings").text,
|
||||
USER().username,
|
||||
)
|
||||
|
||||
self.driver.find_element(By.CSS_SELECTOR, ".pf-c-page__header").click()
|
||||
self.driver.find_element(By.ID, "user-settings").click()
|
||||
self.wait_for_url(self.url("passbook_core:user-settings"))
|
||||
|
||||
self.driver.find_element(By.LINK_TEXT, "Static OTP").click()
|
||||
|
||||
# Remember the current URL as we should end up back here
|
||||
destination_url = self.driver.current_url
|
||||
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, ".pf-c-card__body a.pf-c-button"
|
||||
).click()
|
||||
token = self.driver.find_element(
|
||||
By.CSS_SELECTOR, ".pb-otp-tokens li:nth-child(1)"
|
||||
).text
|
||||
|
||||
self.driver.find_element(By.CSS_SELECTOR, "button[type=submit]").click()
|
||||
|
||||
self.wait_for_url(destination_url)
|
||||
sleep(1)
|
||||
|
||||
self.assertTrue(
|
||||
StaticDevice.objects.filter(user=USER(), confirmed=True).exists()
|
||||
)
|
||||
device = StaticDevice.objects.filter(user=USER(), confirmed=True).first()
|
||||
self.assertTrue(StaticToken.objects.filter(token=token, device=device).exists())
|
@ -1,7 +1,6 @@
|
||||
"""test stage setup flows (password change)"""
|
||||
import string
|
||||
from random import SystemRandom
|
||||
from time import sleep
|
||||
from sys import platform
|
||||
from unittest.case import skipUnless
|
||||
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.common.keys import Keys
|
||||
@ -9,9 +8,11 @@ from selenium.webdriver.common.keys import Keys
|
||||
from e2e.utils import USER, SeleniumTestCase
|
||||
from passbook.core.models import User
|
||||
from passbook.flows.models import Flow, FlowDesignation
|
||||
from passbook.providers.oauth2.generators import generate_client_secret
|
||||
from passbook.stages.password.models import PasswordStage
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestFlowsStageSetup(SeleniumTestCase):
|
||||
"""test stage setup flows"""
|
||||
|
||||
@ -19,18 +20,15 @@ class TestFlowsStageSetup(SeleniumTestCase):
|
||||
"""test password change flow"""
|
||||
# Ensure that password stage has change_flow set
|
||||
flow = Flow.objects.get(
|
||||
slug="default-password-change", designation=FlowDesignation.STAGE_SETUP,
|
||||
slug="default-password-change",
|
||||
designation=FlowDesignation.STAGE_CONFIGURATION,
|
||||
)
|
||||
|
||||
stages = PasswordStage.objects.filter(name="default-authentication-password")
|
||||
stage = stages.first()
|
||||
stage.change_flow = flow
|
||||
stage = PasswordStage.objects.get(name="default-authentication-password")
|
||||
stage.configure_flow = flow
|
||||
stage.save()
|
||||
|
||||
new_password = "".join(
|
||||
SystemRandom().choice(string.ascii_uppercase + string.digits)
|
||||
for _ in range(8)
|
||||
)
|
||||
new_password = generate_client_secret()
|
||||
|
||||
self.driver.get(
|
||||
f"{self.live_server_url}/flows/default-authentication-flow/?next=%2F"
|
||||
@ -40,7 +38,7 @@ class TestFlowsStageSetup(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.CSS_SELECTOR, ".pf-c-page__header").click()
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/-/user/')]").click()
|
||||
self.driver.find_element(By.ID, "user-settings").click()
|
||||
self.wait_for_url(self.url("passbook_core:user-settings"))
|
||||
self.driver.find_element(By.LINK_TEXT, "Change password").click()
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(new_password)
|
||||
@ -48,7 +46,7 @@ class TestFlowsStageSetup(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "id_password_repeat").send_keys(new_password)
|
||||
self.driver.find_element(By.CSS_SELECTOR, ".pf-c-button").click()
|
||||
|
||||
sleep(2)
|
||||
self.wait_for_url(self.url("passbook_core:user-settings"))
|
||||
# Because USER() is cached, we need to get the user manually here
|
||||
user = User.objects.get(username=USER().username)
|
||||
self.assertTrue(user.check_password(new_password))
|
||||
|
@ -1,12 +1,13 @@
|
||||
"""test OAuth Provider flow"""
|
||||
from sys import platform
|
||||
from time import sleep
|
||||
from typing import Any, Dict, Optional
|
||||
from unittest.case import skipUnless
|
||||
|
||||
from docker import DockerClient, from_env
|
||||
from docker.models.containers import Container
|
||||
from docker.types import Healthcheck
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.common.keys import Keys
|
||||
from structlog import get_logger
|
||||
from selenium.webdriver.support import expected_conditions as ec
|
||||
|
||||
from e2e.utils import USER, SeleniumTestCase
|
||||
from passbook.core.models import Application
|
||||
@ -19,32 +20,29 @@ from passbook.providers.oauth2.generators import (
|
||||
)
|
||||
from passbook.providers.oauth2.models import ClientTypes, OAuth2Provider, ResponseTypes
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestProviderOAuth2Github(SeleniumTestCase):
|
||||
"""test OAuth Provider flow"""
|
||||
|
||||
def setUp(self):
|
||||
self.client_id = generate_client_id()
|
||||
self.client_secret = generate_client_secret()
|
||||
self.container = self.setup_client()
|
||||
super().setUp()
|
||||
|
||||
def setup_client(self) -> Container:
|
||||
def get_container_specs(self) -> Optional[Dict[str, Any]]:
|
||||
"""Setup client grafana container which we test OAuth against"""
|
||||
client: DockerClient = from_env()
|
||||
container = client.containers.run(
|
||||
image="grafana/grafana:7.1.0",
|
||||
detach=True,
|
||||
network_mode="host",
|
||||
auto_remove=True,
|
||||
healthcheck=Healthcheck(
|
||||
return {
|
||||
"image": "grafana/grafana:7.1.0",
|
||||
"detach": True,
|
||||
"network_mode": "host",
|
||||
"auto_remove": True,
|
||||
"healthcheck": Healthcheck(
|
||||
test=["CMD", "wget", "--spider", "http://localhost:3000"],
|
||||
interval=5 * 100 * 1000000,
|
||||
start_period=1 * 100 * 1000000,
|
||||
),
|
||||
environment={
|
||||
"environment": {
|
||||
"GF_AUTH_GITHUB_ENABLED": "true",
|
||||
"GF_AUTH_GITHUB_ALLOW_SIGN_UP": "true",
|
||||
"GF_AUTH_GITHUB_CLIENT_ID": self.client_id,
|
||||
@ -61,22 +59,10 @@ class TestProviderOAuth2Github(SeleniumTestCase):
|
||||
),
|
||||
"GF_LOG_LEVEL": "debug",
|
||||
},
|
||||
)
|
||||
while True:
|
||||
container.reload()
|
||||
status = container.attrs.get("State", {}).get("Health", {}).get("Status")
|
||||
if status == "healthy":
|
||||
return container
|
||||
LOGGER.info("Container failed healthcheck")
|
||||
sleep(1)
|
||||
|
||||
def tearDown(self):
|
||||
self.container.kill()
|
||||
super().tearDown()
|
||||
}
|
||||
|
||||
def test_authorization_consent_implied(self):
|
||||
"""test OAuth Provider flow (default authorization flow with implied consent)"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authorization_flow = Flow.objects.get(
|
||||
slug="default-provider-authorization-implicit-consent"
|
||||
@ -91,7 +77,9 @@ class TestProviderOAuth2Github(SeleniumTestCase):
|
||||
authorization_flow=authorization_flow,
|
||||
)
|
||||
Application.objects.create(
|
||||
name="Grafana", slug="grafana", provider=provider,
|
||||
name="Grafana",
|
||||
slug="grafana",
|
||||
provider=provider,
|
||||
)
|
||||
|
||||
self.driver.get("http://localhost:3000")
|
||||
@ -103,7 +91,7 @@ class TestProviderOAuth2Github(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
|
||||
self.wait_for_url("http://localhost:3000/?orgId=1")
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/profile')]").click()
|
||||
self.driver.get("http://localhost:3000/profile")
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CLASS_NAME, "page-header__title").text,
|
||||
USER().username,
|
||||
@ -129,7 +117,6 @@ class TestProviderOAuth2Github(SeleniumTestCase):
|
||||
|
||||
def test_authorization_consent_explicit(self):
|
||||
"""test OAuth Provider flow (default authorization flow with explicit consent)"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authorization_flow = Flow.objects.get(
|
||||
slug="default-provider-authorization-explicit-consent"
|
||||
@ -144,7 +131,9 @@ class TestProviderOAuth2Github(SeleniumTestCase):
|
||||
authorization_flow=authorization_flow,
|
||||
)
|
||||
app = Application.objects.create(
|
||||
name="Grafana", slug="grafana", provider=provider,
|
||||
name="Grafana",
|
||||
slug="grafana",
|
||||
provider=provider,
|
||||
)
|
||||
|
||||
self.driver.get("http://localhost:3000")
|
||||
@ -155,23 +144,23 @@ class TestProviderOAuth2Github(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
|
||||
self.assertIn(
|
||||
sleep(1)
|
||||
|
||||
self.assertEqual(
|
||||
app.name,
|
||||
self.driver.find_element(
|
||||
By.XPATH, "/html/body/div[2]/div/main/div/form/div[2]/p[1]"
|
||||
).text,
|
||||
self.driver.find_element(By.ID, "application-name").text,
|
||||
)
|
||||
self.assertEqual(
|
||||
"GitHub Compatibility: Access you Email addresses",
|
||||
self.driver.find_element(
|
||||
By.XPATH, "/html/body/div[2]/div/main/div/form/div[2]/ul/li[1]"
|
||||
).text,
|
||||
self.driver.find_element(By.ID, "scope-user:email").text,
|
||||
)
|
||||
sleep(1)
|
||||
self.driver.find_element(By.CSS_SELECTOR, "[type=submit]").click()
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR,
|
||||
("[type=submit]"),
|
||||
).click()
|
||||
|
||||
self.wait_for_url("http://localhost:3000/?orgId=1")
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/profile')]").click()
|
||||
self.driver.get("http://localhost:3000/profile")
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CLASS_NAME, "page-header__title").text,
|
||||
USER().username,
|
||||
@ -197,7 +186,6 @@ class TestProviderOAuth2Github(SeleniumTestCase):
|
||||
|
||||
def test_denied(self):
|
||||
"""test OAuth Provider flow (default authorization flow, denied)"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authorization_flow = Flow.objects.get(
|
||||
slug="default-provider-authorization-explicit-consent"
|
||||
@ -212,7 +200,9 @@ class TestProviderOAuth2Github(SeleniumTestCase):
|
||||
authorization_flow=authorization_flow,
|
||||
)
|
||||
app = Application.objects.create(
|
||||
name="Grafana", slug="grafana", provider=provider,
|
||||
name="Grafana",
|
||||
slug="grafana",
|
||||
provider=provider,
|
||||
)
|
||||
|
||||
negative_policy = ExpressionPolicy.objects.create(
|
||||
@ -227,7 +217,10 @@ class TestProviderOAuth2Github(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.wait_for_url(self.url("passbook_flows:denied"))
|
||||
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "header > h1"))
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CSS_SELECTOR, "header > h1").text,
|
||||
"Permission denied",
|
||||
|
375
e2e/test_provider_oauth2_grafana.py
Normal file
@ -0,0 +1,375 @@
|
||||
"""test OAuth2 OpenID Provider flow"""
|
||||
from sys import platform
|
||||
from time import sleep
|
||||
from typing import Any, Dict, Optional
|
||||
from unittest.case import skipUnless
|
||||
|
||||
from docker.types import Healthcheck
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.common.keys import Keys
|
||||
from selenium.webdriver.support import expected_conditions as ec
|
||||
from structlog import get_logger
|
||||
|
||||
from e2e.utils import USER, SeleniumTestCase
|
||||
from passbook.core.models import Application
|
||||
from passbook.crypto.models import CertificateKeyPair
|
||||
from passbook.flows.models import Flow
|
||||
from passbook.policies.expression.models import ExpressionPolicy
|
||||
from passbook.policies.models import PolicyBinding
|
||||
from passbook.providers.oauth2.constants import (
|
||||
SCOPE_OPENID,
|
||||
SCOPE_OPENID_EMAIL,
|
||||
SCOPE_OPENID_PROFILE,
|
||||
)
|
||||
from passbook.providers.oauth2.generators import (
|
||||
generate_client_id,
|
||||
generate_client_secret,
|
||||
)
|
||||
from passbook.providers.oauth2.models import (
|
||||
ClientTypes,
|
||||
OAuth2Provider,
|
||||
ResponseTypes,
|
||||
ScopeMapping,
|
||||
)
|
||||
|
||||
LOGGER = get_logger()
|
||||
APPLICATION_SLUG = "grafana"
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestProviderOAuth2OAuth(SeleniumTestCase):
|
||||
"""test OAuth with OAuth Provider flow"""
|
||||
|
||||
def setUp(self):
|
||||
self.client_id = generate_client_id()
|
||||
self.client_secret = generate_client_secret()
|
||||
super().setUp()
|
||||
|
||||
def get_container_specs(self) -> Optional[Dict[str, Any]]:
|
||||
return {
|
||||
"image": "grafana/grafana:7.1.0",
|
||||
"detach": True,
|
||||
"network_mode": "host",
|
||||
"auto_remove": True,
|
||||
"healthcheck": Healthcheck(
|
||||
test=["CMD", "wget", "--spider", "http://localhost:3000"],
|
||||
interval=5 * 100 * 1000000,
|
||||
start_period=1 * 100 * 1000000,
|
||||
),
|
||||
"environment": {
|
||||
"GF_AUTH_GENERIC_OAUTH_ENABLED": "true",
|
||||
"GF_AUTH_GENERIC_OAUTH_CLIENT_ID": self.client_id,
|
||||
"GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET": self.client_secret,
|
||||
"GF_AUTH_GENERIC_OAUTH_SCOPES": "openid email profile",
|
||||
"GF_AUTH_GENERIC_OAUTH_AUTH_URL": (
|
||||
self.url("passbook_providers_oauth2:authorize")
|
||||
),
|
||||
"GF_AUTH_GENERIC_OAUTH_TOKEN_URL": (
|
||||
self.url("passbook_providers_oauth2:token")
|
||||
),
|
||||
"GF_AUTH_GENERIC_OAUTH_API_URL": (
|
||||
self.url("passbook_providers_oauth2:userinfo")
|
||||
),
|
||||
"GF_AUTH_SIGNOUT_REDIRECT_URL": (
|
||||
self.url(
|
||||
"passbook_providers_oauth2:end-session",
|
||||
application_slug=APPLICATION_SLUG,
|
||||
)
|
||||
),
|
||||
"GF_LOG_LEVEL": "debug",
|
||||
},
|
||||
}
|
||||
|
||||
def test_redirect_uri_error(self):
|
||||
"""test OpenID Provider flow (invalid redirect URI, check error message)"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authorization_flow = Flow.objects.get(
|
||||
slug="default-provider-authorization-implicit-consent"
|
||||
)
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name="grafana",
|
||||
client_type=ClientTypes.CONFIDENTIAL,
|
||||
client_id=self.client_id,
|
||||
client_secret=self.client_secret,
|
||||
rsa_key=CertificateKeyPair.objects.first(),
|
||||
redirect_uris="http://localhost:3000/",
|
||||
authorization_flow=authorization_flow,
|
||||
response_type=ResponseTypes.CODE,
|
||||
)
|
||||
provider.property_mappings.set(
|
||||
ScopeMapping.objects.filter(
|
||||
scope_name__in=[SCOPE_OPENID, SCOPE_OPENID_EMAIL, SCOPE_OPENID_PROFILE]
|
||||
)
|
||||
)
|
||||
provider.save()
|
||||
Application.objects.create(
|
||||
name="Grafana",
|
||||
slug=APPLICATION_SLUG,
|
||||
provider=provider,
|
||||
)
|
||||
|
||||
self.driver.get("http://localhost:3000")
|
||||
self.driver.find_element(By.CLASS_NAME, "btn-service--oauth").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
sleep(2)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CLASS_NAME, "pf-c-title").text,
|
||||
"Redirect URI Error",
|
||||
)
|
||||
|
||||
def test_authorization_consent_implied(self):
|
||||
"""test OpenID Provider flow (default authorization flow with implied consent)"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authorization_flow = Flow.objects.get(
|
||||
slug="default-provider-authorization-implicit-consent"
|
||||
)
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name="grafana",
|
||||
client_type=ClientTypes.CONFIDENTIAL,
|
||||
client_id=self.client_id,
|
||||
client_secret=self.client_secret,
|
||||
rsa_key=CertificateKeyPair.objects.first(),
|
||||
redirect_uris="http://localhost:3000/login/generic_oauth",
|
||||
authorization_flow=authorization_flow,
|
||||
response_type=ResponseTypes.CODE,
|
||||
)
|
||||
provider.property_mappings.set(
|
||||
ScopeMapping.objects.filter(
|
||||
scope_name__in=[SCOPE_OPENID, SCOPE_OPENID_EMAIL, SCOPE_OPENID_PROFILE]
|
||||
)
|
||||
)
|
||||
provider.save()
|
||||
Application.objects.create(
|
||||
name="Grafana",
|
||||
slug=APPLICATION_SLUG,
|
||||
provider=provider,
|
||||
)
|
||||
|
||||
self.driver.get("http://localhost:3000")
|
||||
self.driver.find_element(By.CLASS_NAME, "btn-service--oauth").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.wait_for_url("http://localhost:3000/?orgId=1")
|
||||
self.driver.get("http://localhost:3000/profile")
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CLASS_NAME, "page-header__title").text,
|
||||
USER().name,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CSS_SELECTOR, "input[name=name]").get_attribute(
|
||||
"value"
|
||||
),
|
||||
USER().name,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, "input[name=email]"
|
||||
).get_attribute("value"),
|
||||
USER().email,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, "input[name=login]"
|
||||
).get_attribute("value"),
|
||||
USER().email,
|
||||
)
|
||||
|
||||
def test_authorization_logout(self):
|
||||
"""test OpenID Provider flow with logout"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authorization_flow = Flow.objects.get(
|
||||
slug="default-provider-authorization-implicit-consent"
|
||||
)
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name="grafana",
|
||||
client_type=ClientTypes.CONFIDENTIAL,
|
||||
client_id=self.client_id,
|
||||
client_secret=self.client_secret,
|
||||
rsa_key=CertificateKeyPair.objects.first(),
|
||||
redirect_uris="http://localhost:3000/login/generic_oauth",
|
||||
authorization_flow=authorization_flow,
|
||||
response_type=ResponseTypes.CODE,
|
||||
)
|
||||
provider.property_mappings.set(
|
||||
ScopeMapping.objects.filter(
|
||||
scope_name__in=[SCOPE_OPENID, SCOPE_OPENID_EMAIL, SCOPE_OPENID_PROFILE]
|
||||
)
|
||||
)
|
||||
provider.save()
|
||||
Application.objects.create(
|
||||
name="Grafana",
|
||||
slug=APPLICATION_SLUG,
|
||||
provider=provider,
|
||||
)
|
||||
|
||||
self.driver.get("http://localhost:3000")
|
||||
self.driver.find_element(By.CLASS_NAME, "btn-service--oauth").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.wait_for_url("http://localhost:3000/?orgId=1")
|
||||
self.driver.get("http://localhost:3000/profile")
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CLASS_NAME, "page-header__title").text,
|
||||
USER().name,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CSS_SELECTOR, "input[name=name]").get_attribute(
|
||||
"value"
|
||||
),
|
||||
USER().name,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, "input[name=email]"
|
||||
).get_attribute("value"),
|
||||
USER().email,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, "input[name=login]"
|
||||
).get_attribute("value"),
|
||||
USER().email,
|
||||
)
|
||||
self.driver.get("http://localhost:3000/logout")
|
||||
self.wait_for_url(
|
||||
self.url(
|
||||
"passbook_providers_oauth2:end-session",
|
||||
application_slug=APPLICATION_SLUG,
|
||||
)
|
||||
)
|
||||
self.driver.find_element(By.ID, "logout").click()
|
||||
|
||||
def test_authorization_consent_explicit(self):
|
||||
"""test OpenID Provider flow (default authorization flow with explicit consent)"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authorization_flow = Flow.objects.get(
|
||||
slug="default-provider-authorization-explicit-consent"
|
||||
)
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name="grafana",
|
||||
authorization_flow=authorization_flow,
|
||||
response_type=ResponseTypes.CODE,
|
||||
client_type=ClientTypes.CONFIDENTIAL,
|
||||
client_id=self.client_id,
|
||||
client_secret=self.client_secret,
|
||||
rsa_key=CertificateKeyPair.objects.first(),
|
||||
redirect_uris="http://localhost:3000/login/generic_oauth",
|
||||
)
|
||||
provider.property_mappings.set(
|
||||
ScopeMapping.objects.filter(
|
||||
scope_name__in=[SCOPE_OPENID, SCOPE_OPENID_EMAIL, SCOPE_OPENID_PROFILE]
|
||||
)
|
||||
)
|
||||
provider.save()
|
||||
app = Application.objects.create(
|
||||
name="Grafana",
|
||||
slug=APPLICATION_SLUG,
|
||||
provider=provider,
|
||||
)
|
||||
|
||||
self.driver.get("http://localhost:3000")
|
||||
self.driver.find_element(By.CLASS_NAME, "btn-service--oauth").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
|
||||
self.assertEqual(
|
||||
app.name,
|
||||
self.driver.find_element(By.ID, "application-name").text,
|
||||
)
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "[type=submit]"))
|
||||
)
|
||||
sleep(1)
|
||||
self.driver.find_element(By.CSS_SELECTOR, "[type=submit]").click()
|
||||
|
||||
self.wait_for_url("http://localhost:3000/?orgId=1")
|
||||
self.driver.get("http://localhost:3000/profile")
|
||||
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CLASS_NAME, "page-header__title").text,
|
||||
USER().name,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CSS_SELECTOR, "input[name=name]").get_attribute(
|
||||
"value"
|
||||
),
|
||||
USER().name,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, "input[name=email]"
|
||||
).get_attribute("value"),
|
||||
USER().email,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, "input[name=login]"
|
||||
).get_attribute("value"),
|
||||
USER().email,
|
||||
)
|
||||
|
||||
def test_authorization_denied(self):
|
||||
"""test OpenID Provider flow (default authorization with access deny)"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authorization_flow = Flow.objects.get(
|
||||
slug="default-provider-authorization-explicit-consent"
|
||||
)
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name="grafana",
|
||||
authorization_flow=authorization_flow,
|
||||
response_type=ResponseTypes.CODE,
|
||||
client_type=ClientTypes.CONFIDENTIAL,
|
||||
client_id=self.client_id,
|
||||
client_secret=self.client_secret,
|
||||
rsa_key=CertificateKeyPair.objects.first(),
|
||||
redirect_uris="http://localhost:3000/login/generic_oauth",
|
||||
)
|
||||
provider.property_mappings.set(
|
||||
ScopeMapping.objects.filter(
|
||||
scope_name__in=[SCOPE_OPENID, SCOPE_OPENID_EMAIL, SCOPE_OPENID_PROFILE]
|
||||
)
|
||||
)
|
||||
provider.save()
|
||||
app = Application.objects.create(
|
||||
name="Grafana",
|
||||
slug=APPLICATION_SLUG,
|
||||
provider=provider,
|
||||
)
|
||||
|
||||
negative_policy = ExpressionPolicy.objects.create(
|
||||
name="negative-static", expression="return False"
|
||||
)
|
||||
PolicyBinding.objects.create(target=app, policy=negative_policy, order=0)
|
||||
self.driver.get("http://localhost:3000")
|
||||
self.driver.find_element(By.CLASS_NAME, "btn-service--oauth").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "header > h1"))
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CSS_SELECTOR, "header > h1").text,
|
||||
"Permission denied",
|
||||
)
|
@@ -1,5 +1,8 @@
|
||||
"""test OAuth2 OpenID Provider flow"""
|
||||
from json import loads
|
||||
from sys import platform
|
||||
from time import sleep
|
||||
from unittest.case import skipUnless
|
||||
|
||||
from docker import DockerClient, from_env
|
||||
from docker.models.containers import Container
|
||||
@ -34,43 +37,35 @@ from passbook.providers.oauth2.models import (
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestProviderOAuth2OIDC(SeleniumTestCase):
|
||||
"""test OAuth with OpenID Provider flow"""
|
||||
|
||||
def setUp(self):
|
||||
self.client_id = generate_client_id()
|
||||
self.client_secret = generate_client_secret()
|
||||
self.container = self.setup_client()
|
||||
self.application_slug = "test"
|
||||
super().setUp()
|
||||
|
||||
def setup_client(self) -> Container:
|
||||
"""Setup client grafana container which we test OIDC against"""
|
||||
"""Setup client saml-sp container which we test SAML against"""
|
||||
sleep(1)
|
||||
client: DockerClient = from_env()
|
||||
client.images.pull("beryju/oidc-test-client")
|
||||
container = client.containers.run(
|
||||
image="grafana/grafana:7.1.0",
|
||||
image="beryju/oidc-test-client",
|
||||
detach=True,
|
||||
network_mode="host",
|
||||
auto_remove=True,
|
||||
healthcheck=Healthcheck(
|
||||
test=["CMD", "wget", "--spider", "http://localhost:3000"],
|
||||
test=["CMD", "wget", "--spider", "http://localhost:9009/health"],
|
||||
interval=5 * 100 * 1000000,
|
||||
start_period=1 * 100 * 1000000,
|
||||
),
|
||||
environment={
|
||||
"GF_AUTH_GENERIC_OAUTH_ENABLED": "true",
|
||||
"GF_AUTH_GENERIC_OAUTH_CLIENT_ID": self.client_id,
|
||||
"GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET": self.client_secret,
|
||||
"GF_AUTH_GENERIC_OAUTH_SCOPES": "openid email profile",
|
||||
"GF_AUTH_GENERIC_OAUTH_AUTH_URL": (
|
||||
self.url("passbook_providers_oauth2:authorize")
|
||||
),
|
||||
"GF_AUTH_GENERIC_OAUTH_TOKEN_URL": (
|
||||
self.url("passbook_providers_oauth2:token")
|
||||
),
|
||||
"GF_AUTH_GENERIC_OAUTH_API_URL": (
|
||||
self.url("passbook_providers_oauth2:userinfo")
|
||||
),
|
||||
"GF_LOG_LEVEL": "debug",
|
||||
"OIDC_CLIENT_ID": self.client_id,
|
||||
"OIDC_CLIENT_SECRET": self.client_secret,
|
||||
"OIDC_PROVIDER": f"{self.live_server_url}/application/o/{self.application_slug}/",
|
||||
},
|
||||
)
|
||||
while True:
|
||||
@ -81,10 +76,6 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
|
||||
LOGGER.info("Container failed healthcheck")
|
||||
sleep(1)
|
||||
|
||||
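The Healthcheck durations above are nanosecond values, which is what the docker SDK expects: 5 * 100 * 1000000 is 0.5 s for the interval and 1 * 100 * 1000000 is 0.1 s for the start period. A small sketch that spells the conversion out; the helper name seconds_to_ns is hypothetical:

    def seconds_to_ns(seconds: float) -> int:
        # docker.types.Healthcheck takes interval/start_period in nanoseconds.
        return int(seconds * 1_000_000_000)

    # e.g. Healthcheck(test=[...], interval=seconds_to_ns(0.5), start_period=seconds_to_ns(0.1))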
def tearDown(self):
|
||||
self.container.kill()
|
||||
super().tearDown()
|
||||
|
||||
def test_redirect_uri_error(self):
|
||||
"""test OpenID Provider flow (invalid redirect URI, check error message)"""
|
||||
sleep(1)
|
||||
@ -93,12 +84,12 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
|
||||
slug="default-provider-authorization-implicit-consent"
|
||||
)
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name="grafana",
|
||||
name=self.application_slug,
|
||||
client_type=ClientTypes.CONFIDENTIAL,
|
||||
client_id=self.client_id,
|
||||
client_secret=self.client_secret,
|
||||
rsa_key=CertificateKeyPair.objects.first(),
|
||||
redirect_uris="http://localhost:3000/",
|
||||
redirect_uris="http://localhost:9009/",
|
||||
authorization_flow=authorization_flow,
|
||||
response_type=ResponseTypes.CODE,
|
||||
)
|
||||
@ -109,11 +100,14 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
|
||||
)
|
||||
provider.save()
|
||||
Application.objects.create(
|
||||
name="Grafana", slug="grafana", provider=provider,
|
||||
name=self.application_slug,
|
||||
slug=self.application_slug,
|
||||
provider=provider,
|
||||
)
|
||||
self.container = self.setup_client()
|
||||
|
||||
self.driver.get("http://localhost:9009")
|
||||
|
||||
self.driver.get("http://localhost:3000")
|
||||
self.driver.find_element(By.CLASS_NAME, "btn-service--oauth").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
@ -133,12 +127,12 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
|
||||
slug="default-provider-authorization-implicit-consent"
|
||||
)
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name="grafana",
|
||||
name=self.application_slug,
|
||||
client_type=ClientTypes.CONFIDENTIAL,
|
||||
client_id=self.client_id,
|
||||
client_secret=self.client_secret,
|
||||
rsa_key=CertificateKeyPair.objects.first(),
|
||||
redirect_uris="http://localhost:3000/login/generic_oauth",
|
||||
redirect_uris="http://localhost:9009/auth/callback",
|
||||
authorization_flow=authorization_flow,
|
||||
response_type=ResponseTypes.CODE,
|
||||
)
|
||||
@ -149,39 +143,31 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
|
||||
)
|
||||
provider.save()
|
||||
Application.objects.create(
|
||||
name="Grafana", slug="grafana", provider=provider,
|
||||
name=self.application_slug,
|
||||
slug=self.application_slug,
|
||||
provider=provider,
|
||||
)
|
||||
self.container = self.setup_client()
|
||||
|
||||
self.driver.get("http://localhost:9009")
|
||||
|
||||
self.driver.get("http://localhost:3000")
|
||||
self.driver.find_element(By.CLASS_NAME, "btn-service--oauth").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/profile')]").click()
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CLASS_NAME, "page-header__title").text,
|
||||
USER().name,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CSS_SELECTOR, "input[name=name]").get_attribute(
|
||||
"value"
|
||||
),
|
||||
USER().name,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, "input[name=email]"
|
||||
).get_attribute("value"),
|
||||
USER().email,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, "input[name=login]"
|
||||
).get_attribute("value"),
|
||||
USER().email,
|
||||
)
|
||||
|
||||
self.wait.until(ec.presence_of_element_located((By.CSS_SELECTOR, "pre")))
|
||||
body = loads(self.driver.find_element(By.CSS_SELECTOR, "pre").text)
|
||||
|
||||
self.assertEqual(body["IDTokenClaims"]["nickname"], USER().username)
|
||||
self.assertEqual(body["UserInfo"]["nickname"], USER().username)
|
||||
|
||||
self.assertEqual(body["IDTokenClaims"]["name"], USER().name)
|
||||
self.assertEqual(body["UserInfo"]["name"], USER().name)
|
||||
|
||||
self.assertEqual(body["IDTokenClaims"]["email"], USER().email)
|
||||
self.assertEqual(body["UserInfo"]["email"], USER().email)
|
||||
|
||||
def test_authorization_consent_explicit(self):
|
||||
"""test OpenID Provider flow (default authorization flow with explicit consent)"""
|
||||
@ -191,14 +177,14 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
|
||||
slug="default-provider-authorization-explicit-consent"
|
||||
)
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name="grafana",
|
||||
name=self.application_slug,
|
||||
authorization_flow=authorization_flow,
|
||||
response_type=ResponseTypes.CODE,
|
||||
client_type=ClientTypes.CONFIDENTIAL,
|
||||
client_id=self.client_id,
|
||||
client_secret=self.client_secret,
|
||||
rsa_key=CertificateKeyPair.objects.first(),
|
||||
redirect_uris="http://localhost:3000/login/generic_oauth",
|
||||
redirect_uris="http://localhost:9009/auth/callback",
|
||||
)
|
||||
provider.property_mappings.set(
|
||||
ScopeMapping.objects.filter(
|
||||
@ -207,22 +193,23 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
|
||||
)
|
||||
provider.save()
|
||||
app = Application.objects.create(
|
||||
name="Grafana", slug="grafana", provider=provider,
|
||||
name=self.application_slug,
|
||||
slug=self.application_slug,
|
||||
provider=provider,
|
||||
)
|
||||
self.container = self.setup_client()
|
||||
|
||||
self.driver.get("http://localhost:9009")
|
||||
|
||||
self.driver.get("http://localhost:3000")
|
||||
self.driver.find_element(By.CLASS_NAME, "btn-service--oauth").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
|
||||
self.assertIn(
|
||||
self.assertEqual(
|
||||
app.name,
|
||||
self.driver.find_element(
|
||||
By.XPATH, "/html/body/div[2]/div/main/div/form/div[2]/p[1]"
|
||||
).text,
|
||||
self.driver.find_element(By.ID, "application-name").text,
|
||||
)
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "[type=submit]"))
|
||||
@ -230,34 +217,17 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
|
||||
sleep(1)
|
||||
self.driver.find_element(By.CSS_SELECTOR, "[type=submit]").click()
|
||||
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located(
|
||||
(By.XPATH, "//a[contains(@href, '/profile')]")
|
||||
)
|
||||
)
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/profile')]").click()
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CLASS_NAME, "page-header__title").text,
|
||||
USER().name,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CSS_SELECTOR, "input[name=name]").get_attribute(
|
||||
"value"
|
||||
),
|
||||
USER().name,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, "input[name=email]"
|
||||
).get_attribute("value"),
|
||||
USER().email,
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(
|
||||
By.CSS_SELECTOR, "input[name=login]"
|
||||
).get_attribute("value"),
|
||||
USER().email,
|
||||
)
|
||||
self.wait.until(ec.presence_of_element_located((By.CSS_SELECTOR, "pre")))
|
||||
body = loads(self.driver.find_element(By.CSS_SELECTOR, "pre").text)
|
||||
|
||||
self.assertEqual(body["IDTokenClaims"]["nickname"], USER().username)
|
||||
self.assertEqual(body["UserInfo"]["nickname"], USER().username)
|
||||
|
||||
self.assertEqual(body["IDTokenClaims"]["name"], USER().name)
|
||||
self.assertEqual(body["UserInfo"]["name"], USER().name)
|
||||
|
||||
self.assertEqual(body["IDTokenClaims"]["email"], USER().email)
|
||||
self.assertEqual(body["UserInfo"]["email"], USER().email)
|
||||
|
||||
def test_authorization_denied(self):
|
||||
"""test OpenID Provider flow (default authorization with access deny)"""
|
||||
@ -267,14 +237,14 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
|
||||
slug="default-provider-authorization-explicit-consent"
|
||||
)
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name="grafana",
|
||||
name=self.application_slug,
|
||||
authorization_flow=authorization_flow,
|
||||
response_type=ResponseTypes.CODE,
|
||||
client_type=ClientTypes.CONFIDENTIAL,
|
||||
client_id=self.client_id,
|
||||
client_secret=self.client_secret,
|
||||
rsa_key=CertificateKeyPair.objects.first(),
|
||||
redirect_uris="http://localhost:3000/login/generic_oauth",
|
||||
redirect_uris="http://localhost:9009/auth/callback",
|
||||
)
|
||||
provider.property_mappings.set(
|
||||
ScopeMapping.objects.filter(
|
||||
@ -283,21 +253,28 @@ class TestProviderOAuth2OIDC(SeleniumTestCase):
|
||||
)
|
||||
provider.save()
|
||||
app = Application.objects.create(
|
||||
name="Grafana", slug="grafana", provider=provider,
|
||||
name=self.application_slug,
|
||||
slug=self.application_slug,
|
||||
provider=provider,
|
||||
)
|
||||
|
||||
negative_policy = ExpressionPolicy.objects.create(
|
||||
name="negative-static", expression="return False"
|
||||
)
|
||||
PolicyBinding.objects.create(target=app, policy=negative_policy, order=0)
|
||||
self.driver.get("http://localhost:3000")
|
||||
self.driver.find_element(By.CLASS_NAME, "btn-service--oauth").click()
|
||||
|
||||
self.container = self.setup_client()
|
||||
self.driver.get("http://localhost:9009")
|
||||
|
||||
self.driver.find_element(By.ID, "id_uid_field").click()
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.wait_for_url(self.url("passbook_flows:denied"))
|
||||
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "header > h1"))
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CSS_SELECTOR, "header > h1").text,
|
||||
"Permission denied",
|
||||
e2e/test_provider_proxy.py (new file, 155 lines)
@@ -0,0 +1,155 @@
"""Proxy and Outpost e2e tests"""
|
||||
from dataclasses import asdict
|
||||
from sys import platform
|
||||
from time import sleep
|
||||
from typing import Any, Dict, Optional
|
||||
from unittest.case import skipUnless
|
||||
|
||||
from channels.testing import ChannelsLiveServerTestCase
|
||||
from docker.client import DockerClient, from_env
|
||||
from docker.models.containers import Container
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.common.keys import Keys
|
||||
|
||||
from e2e.utils import USER, SeleniumTestCase
|
||||
from passbook import __version__
|
||||
from passbook.core.models import Application
|
||||
from passbook.flows.models import Flow
|
||||
from passbook.outposts.models import (
|
||||
Outpost,
|
||||
OutpostConfig,
|
||||
OutpostDeploymentType,
|
||||
OutpostType,
|
||||
)
|
||||
from passbook.providers.proxy.models import ProxyProvider
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestProviderProxy(SeleniumTestCase):
|
||||
"""Proxy and Outpost e2e tests"""
|
||||
|
||||
proxy_container: Container
|
||||
|
||||
def tearDown(self) -> None:
|
||||
super().tearDown()
|
||||
self.proxy_container.kill()
|
||||
|
||||
def get_container_specs(self) -> Optional[Dict[str, Any]]:
return {
"image": "traefik/whoami:latest",
"detach": True,
"network_mode": "host",
"auto_remove": True,
}

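get_container_specs returns plain keyword arguments for docker's containers.run, so the shared SeleniumTestCase base in e2e/utils.py (not shown in this hunk) can presumably own starting and stopping the client container. A sketch of how such a spec dict can be consumed, for illustration only:

    from docker import from_env
    from docker.models.containers import Container

    def run_from_specs(specs: dict) -> Container:
        # Start the container described by a get_container_specs() dict.
        client = from_env()
        return client.containers.run(**specs)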
def start_proxy(self, outpost: Outpost) -> Container:
|
||||
"""Start proxy container based on outpost created"""
|
||||
client: DockerClient = from_env()
|
||||
container = client.containers.run(
|
||||
image=f"beryju/passbook-proxy:{__version__}",
|
||||
detach=True,
|
||||
network_mode="host",
|
||||
auto_remove=True,
|
||||
environment={
|
||||
"PASSBOOK_HOST": self.live_server_url,
|
||||
"PASSBOOK_TOKEN": outpost.token.key,
|
||||
},
|
||||
)
|
||||
return container
|
||||
|
||||
def test_proxy_simple(self):
|
||||
"""Test simple outpost setup with single provider"""
|
||||
proxy: ProxyProvider = ProxyProvider.objects.create(
|
||||
name="proxy_provider",
|
||||
authorization_flow=Flow.objects.get(
|
||||
slug="default-provider-authorization-implicit-consent"
|
||||
),
|
||||
internal_host="http://localhost:80",
|
||||
external_host="http://localhost:4180",
|
||||
)
|
||||
# Ensure OAuth2 Params are set
|
||||
proxy.set_oauth_defaults()
|
||||
proxy.save()
|
||||
# we need to create an application to actually access the proxy
|
||||
Application.objects.create(name="proxy", slug="proxy", provider=proxy)
|
||||
outpost: Outpost = Outpost.objects.create(
|
||||
name="proxy_outpost",
|
||||
type=OutpostType.PROXY,
|
||||
deployment_type=OutpostDeploymentType.CUSTOM,
|
||||
)
|
||||
outpost.providers.add(proxy)
|
||||
outpost.save()
|
||||
|
||||
self.proxy_container = self.start_proxy(outpost)
|
||||
|
||||
# Wait until outpost healthcheck succeeds
healthcheck_retries = 0
while healthcheck_retries < 50:
if len(outpost.state) > 0:
state = outpost.state[0]
if state.last_seen:
break
healthcheck_retries += 1
sleep(0.5)

self.driver.get("http://localhost:4180")

self.driver.find_element(By.ID, "id_uid_field").click()
self.driver.find_element(By.ID, "id_uid_field").send_keys(USER().username)
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)

sleep(1)

full_body_text = self.driver.find_element(By.CSS_SELECTOR, "pre").text
self.assertIn("X-Forwarded-Preferred-Username: pbadmin", full_body_text)

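The same bounded polling loop appears again in TestProviderProxyConnect below. A sketch of a shared helper mirroring it; the name wait_for_outpost is hypothetical and not part of this change:

    def wait_for_outpost(outpost: Outpost, attempts: int = 50, interval: float = 0.5) -> bool:
        # Poll outpost.state until the proxy has checked in at least once.
        for _ in range(attempts):
            if len(outpost.state) > 0 and outpost.state[0].last_seen:
                return True
            sleep(interval)
        return False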
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestProviderProxyConnect(ChannelsLiveServerTestCase):
|
||||
"""Test Proxy connectivity over websockets"""
|
||||
|
||||
def test_proxy_connectivity(self):
|
||||
"""Test proxy connectivity over websocket"""
|
||||
SeleniumTestCase().apply_default_data()
|
||||
proxy: ProxyProvider = ProxyProvider.objects.create(
|
||||
name="proxy_provider",
|
||||
authorization_flow=Flow.objects.get(
|
||||
slug="default-provider-authorization-implicit-consent"
|
||||
),
|
||||
internal_host="http://localhost:80",
|
||||
external_host="http://localhost:4180",
|
||||
)
|
||||
# Ensure OAuth2 Params are set
|
||||
proxy.set_oauth_defaults()
|
||||
proxy.save()
|
||||
# we need to create an application to actually access the proxy
|
||||
Application.objects.create(name="proxy", slug="proxy", provider=proxy)
|
||||
outpost: Outpost = Outpost.objects.create(
|
||||
name="proxy_outpost",
|
||||
type=OutpostType.PROXY,
|
||||
deployment_type=OutpostDeploymentType.DOCKER,
|
||||
_config=asdict(
|
||||
OutpostConfig(passbook_host=self.live_server_url, log_level="debug")
|
||||
),
|
||||
)
|
||||
outpost.providers.add(proxy)
|
||||
outpost.save()
|
||||
|
||||
# Wait until outpost healthcheck succeeds
|
||||
healthcheck_retries = 0
|
||||
while healthcheck_retries < 50:
|
||||
if len(outpost.state) > 0:
|
||||
state = outpost.state[0]
|
||||
if state.last_seen and state.version:
|
||||
break
|
||||
healthcheck_retries += 1
|
||||
sleep(0.5)
|
||||
|
||||
state = outpost.state
|
||||
self.assertTrue(len(state), 1)
|
||||
self.assertEqual(state[0].version, __version__)
|
||||
|
||||
# Make sure to delete the outpost to remove the container
|
||||
outpost.delete()
|
@@ -1,11 +1,15 @@
|
||||
"""test SAML Provider flow"""
|
||||
from json import loads
|
||||
from sys import platform
|
||||
from time import sleep
|
||||
from unittest.case import skipUnless
|
||||
|
||||
from docker import DockerClient, from_env
|
||||
from docker.models.containers import Container
|
||||
from docker.types import Healthcheck
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.common.keys import Keys
|
||||
from selenium.webdriver.support import expected_conditions as ec
|
||||
from structlog import get_logger
|
||||
|
||||
from e2e.utils import USER, SeleniumTestCase
|
||||
@ -23,6 +27,7 @@ from passbook.providers.saml.models import (
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestProviderSAML(SeleniumTestCase):
|
||||
"""test SAML Provider flow"""
|
||||
|
||||
@ -31,6 +36,7 @@ class TestProviderSAML(SeleniumTestCase):
|
||||
def setup_client(self, provider: SAMLProvider) -> Container:
|
||||
"""Setup client saml-sp container which we test SAML against"""
|
||||
client: DockerClient = from_env()
|
||||
client.images.pull("beryju/oidc-test-client")
|
||||
container = client.containers.run(
|
||||
image="beryju/saml-test-sp",
|
||||
detach=True,
|
||||
@ -60,10 +66,6 @@ class TestProviderSAML(SeleniumTestCase):
|
||||
LOGGER.info("Container failed healthcheck")
|
||||
sleep(1)
|
||||
|
||||
def tearDown(self):
|
||||
self.container.kill()
|
||||
super().tearDown()
|
||||
|
||||
def test_sp_initiated_implicit(self):
|
||||
"""test SAML Provider flow SP-initiated flow (implicit consent)"""
|
||||
# Bootstrap all needed objects
|
||||
@ -82,7 +84,9 @@ class TestProviderSAML(SeleniumTestCase):
|
||||
provider.property_mappings.set(SAMLPropertyMapping.objects.all())
|
||||
provider.save()
|
||||
Application.objects.create(
|
||||
name="SAML", slug="passbook-saml", provider=provider,
|
||||
name="SAML",
|
||||
slug="passbook-saml",
|
||||
provider=provider,
|
||||
)
|
||||
self.container = self.setup_client(provider)
|
||||
self.driver.get("http://localhost:9009")
|
||||
@ -92,10 +96,14 @@ class TestProviderSAML(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.wait_for_url("http://localhost:9009/")
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.XPATH, "/html/body/pre").text,
|
||||
f"Hello, {USER().name}!",
|
||||
)
|
||||
|
||||
body = loads(self.driver.find_element(By.CSS_SELECTOR, "pre").text)
|
||||
|
||||
self.assertEqual(body["attr"]["cn"], [USER().name])
|
||||
self.assertEqual(body["attr"]["displayName"], [USER().username])
|
||||
self.assertEqual(body["attr"]["eduPersonPrincipalName"], [USER().email])
|
||||
self.assertEqual(body["attr"]["mail"], [USER().email])
|
||||
self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
|
||||
|
||||
def test_sp_initiated_explicit(self):
|
||||
"""test SAML Provider flow SP-initiated flow (explicit consent)"""
|
||||
@ -115,7 +123,9 @@ class TestProviderSAML(SeleniumTestCase):
|
||||
provider.property_mappings.set(SAMLPropertyMapping.objects.all())
|
||||
provider.save()
|
||||
app = Application.objects.create(
|
||||
name="SAML", slug="passbook-saml", provider=provider,
|
||||
name="SAML",
|
||||
slug="passbook-saml",
|
||||
provider=provider,
|
||||
)
|
||||
self.container = self.setup_client(provider)
|
||||
self.driver.get("http://localhost:9009")
|
||||
@ -124,19 +134,21 @@ class TestProviderSAML(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.assertIn(
|
||||
self.assertEqual(
|
||||
app.name,
|
||||
self.driver.find_element(
|
||||
By.XPATH, "/html/body/div[2]/div/main/div/form/div[2]/p[1]"
|
||||
).text,
|
||||
self.driver.find_element(By.ID, "application-name").text,
|
||||
)
|
||||
sleep(1)
|
||||
self.driver.find_element(By.CSS_SELECTOR, "[type=submit]").click()
|
||||
self.wait_for_url("http://localhost:9009/")
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.XPATH, "/html/body/pre").text,
|
||||
f"Hello, {USER().name}!",
|
||||
)
|
||||
|
||||
body = loads(self.driver.find_element(By.CSS_SELECTOR, "pre").text)
|
||||
|
||||
self.assertEqual(body["attr"]["cn"], [USER().name])
|
||||
self.assertEqual(body["attr"]["displayName"], [USER().username])
|
||||
self.assertEqual(body["attr"]["eduPersonPrincipalName"], [USER().email])
|
||||
self.assertEqual(body["attr"]["mail"], [USER().email])
|
||||
self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
|
||||
|
||||
def test_idp_initiated_implicit(self):
|
||||
"""test SAML Provider flow IdP-initiated flow (implicit consent)"""
|
||||
@ -156,7 +168,9 @@ class TestProviderSAML(SeleniumTestCase):
|
||||
provider.property_mappings.set(SAMLPropertyMapping.objects.all())
|
||||
provider.save()
|
||||
Application.objects.create(
|
||||
name="SAML", slug="passbook-saml", provider=provider,
|
||||
name="SAML",
|
||||
slug="passbook-saml",
|
||||
provider=provider,
|
||||
)
|
||||
self.container = self.setup_client(provider)
|
||||
self.driver.get(
|
||||
@ -170,11 +184,16 @@ class TestProviderSAML(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
sleep(1)
|
||||
self.wait_for_url("http://localhost:9009/")
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.XPATH, "/html/body/pre").text,
|
||||
f"Hello, {USER().name}!",
|
||||
)
|
||||
|
||||
body = loads(self.driver.find_element(By.CSS_SELECTOR, "pre").text)
|
||||
|
||||
self.assertEqual(body["attr"]["cn"], [USER().name])
|
||||
self.assertEqual(body["attr"]["displayName"], [USER().username])
|
||||
self.assertEqual(body["attr"]["eduPersonPrincipalName"], [USER().email])
|
||||
self.assertEqual(body["attr"]["mail"], [USER().email])
|
||||
self.assertEqual(body["attr"]["uid"], [str(USER().pk)])
|
||||
|
||||
def test_sp_initiated_denied(self):
|
||||
"""test SAML Provider flow SP-initiated flow (Policy denies access)"""
|
||||
@ -197,7 +216,9 @@ class TestProviderSAML(SeleniumTestCase):
|
||||
provider.property_mappings.set(SAMLPropertyMapping.objects.all())
|
||||
provider.save()
|
||||
app = Application.objects.create(
|
||||
name="SAML", slug="passbook-saml", provider=provider,
|
||||
name="SAML",
|
||||
slug="passbook-saml",
|
||||
provider=provider,
|
||||
)
|
||||
PolicyBinding.objects.create(target=app, policy=negative_policy, order=0)
|
||||
self.container = self.setup_client(provider)
|
||||
@ -207,7 +228,10 @@ class TestProviderSAML(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "id_uid_field").send_keys(Keys.ENTER)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(USER().username)
|
||||
self.driver.find_element(By.ID, "id_password").send_keys(Keys.ENTER)
|
||||
self.wait_for_url(self.url("passbook_flows:denied"))
|
||||
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "header > h1"))
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.CSS_SELECTOR, "header > h1").text,
|
||||
"Permission denied",
|
||||
e2e/test_source_oauth.py (new file, 339 lines)
@@ -0,0 +1,339 @@
"""test OAuth Source"""
|
||||
from os.path import abspath
|
||||
from sys import platform
|
||||
from time import sleep
|
||||
from typing import Any, Dict, Optional
|
||||
from unittest.case import skipUnless
|
||||
|
||||
from django.test import override_settings
|
||||
from docker.models.containers import Container
|
||||
from docker.types import Healthcheck
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.common.keys import Keys
|
||||
from selenium.webdriver.support import expected_conditions as ec
|
||||
from structlog import get_logger
|
||||
from yaml import safe_dump
|
||||
|
||||
from e2e.utils import SeleniumTestCase
|
||||
from passbook.flows.models import Flow
|
||||
from passbook.providers.oauth2.generators import (
|
||||
generate_client_id,
|
||||
generate_client_secret,
|
||||
)
|
||||
from passbook.sources.oauth.models import OAuthSource
|
||||
|
||||
CONFIG_PATH = "/tmp/dex.yml"
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestSourceOAuth2(SeleniumTestCase):
|
||||
"""test OAuth Source flow"""
|
||||
|
||||
container: Container
|
||||
|
||||
def setUp(self):
|
||||
self.client_secret = generate_client_secret()
|
||||
self.prepare_dex_config()
|
||||
super().setUp()
|
||||
|
||||
def prepare_dex_config(self):
|
||||
"""Since Dex does not document which environment
|
||||
variables can be used to configure clients"""
|
||||
config = {
|
||||
"enablePasswordDB": True,
|
||||
"issuer": "http://127.0.0.1:5556/dex",
|
||||
"logger": {"level": "debug"},
|
||||
"staticClients": [
|
||||
{
|
||||
"id": "example-app",
|
||||
"name": "Example App",
|
||||
"redirectURIs": [
|
||||
self.url(
|
||||
"passbook_sources_oauth:oauth-client-callback",
|
||||
source_slug="dex",
|
||||
)
|
||||
],
|
||||
"secret": self.client_secret,
|
||||
}
|
||||
],
|
||||
"staticPasswords": [
|
||||
{
|
||||
"email": "admin@example.com",
|
||||
# hash for password
|
||||
"hash": "$2a$10$2b2cU8CPhOTaGrs1HRQuAueS7JTT5ZHsHSzYiFPm1leZck7Mc8T4W",
|
||||
"userID": "08a8684b-db88-4b73-90a9-3cd1661f5466",
|
||||
"username": "admin",
|
||||
}
|
||||
],
|
||||
"storage": {"config": {"file": "/tmp/dex.db"}, "type": "sqlite3"},
|
||||
"web": {"http": "0.0.0.0:5556"},
|
||||
}
|
||||
with open(CONFIG_PATH, "w+") as _file:
|
||||
safe_dump(config, _file)
|
||||
|
||||
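The staticPasswords hash above is a bcrypt hash of the literal string "password" (per the inline comment). If the test credential ever needs to change, a replacement hash can be generated with the bcrypt package, which is not otherwise used by this test:

    import bcrypt

    # Prints a bcrypt hash suitable for Dex's staticPasswords "hash" field.
    print(bcrypt.hashpw(b"password", bcrypt.gensalt(rounds=10)).decode())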
def get_container_specs(self) -> Optional[Dict[str, Any]]:
|
||||
return {
|
||||
"image": "quay.io/dexidp/dex:v2.24.0",
|
||||
"detach": True,
|
||||
"network_mode": "host",
|
||||
"auto_remove": True,
|
||||
"command": "serve /config.yml",
|
||||
"healthcheck": Healthcheck(
|
||||
test=["CMD", "wget", "--spider", "http://localhost:5556/dex/healthz"],
|
||||
interval=5 * 100 * 1000000,
|
||||
start_period=1 * 100 * 1000000,
|
||||
),
|
||||
"volumes": {abspath(CONFIG_PATH): {"bind": "/config.yml", "mode": "ro"}},
|
||||
}
|
||||
|
||||
def create_objects(self):
"""Create required objects"""
# Bootstrap all needed objects
authentication_flow = Flow.objects.get(slug="default-source-authentication")
enrollment_flow = Flow.objects.get(slug="default-source-enrollment")

OAuthSource.objects.create(  # nosec
name="dex",
slug="dex",
authentication_flow=authentication_flow,
enrollment_flow=enrollment_flow,
provider_type="openid-connect",
authorization_url="http://127.0.0.1:5556/dex/auth",
access_token_url="http://127.0.0.1:5556/dex/token",
profile_url="http://127.0.0.1:5556/dex/userinfo",
consumer_key="example-app",
consumer_secret=self.client_secret,
)

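The three endpoints above are Dex's standard OIDC paths under the issuer configured in prepare_dex_config. If they ever drift, they can be cross-checked against Dex's discovery document; this manual check assumes the requests package and is not part of the test:

    import requests

    discovery = requests.get(
        "http://127.0.0.1:5556/dex/.well-known/openid-configuration", timeout=5
    ).json()
    print(discovery["authorization_endpoint"])
    print(discovery["token_endpoint"])
    print(discovery["userinfo_endpoint"])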
def test_oauth_enroll(self):
|
||||
"""test OAuth Source With With OIDC"""
|
||||
self.create_objects()
|
||||
self.driver.get(self.live_server_url)
|
||||
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located(
|
||||
(By.CLASS_NAME, "pf-c-login__main-footer-links-item-link")
|
||||
)
|
||||
)
|
||||
self.driver.find_element(
|
||||
By.CLASS_NAME, "pf-c-login__main-footer-links-item-link"
|
||||
).click()
|
||||
|
||||
# Now we should be at the IDP, wait for the login field
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "login")))
|
||||
self.driver.find_element(By.ID, "login").send_keys("admin@example.com")
|
||||
self.driver.find_element(By.ID, "password").send_keys("password")
|
||||
self.driver.find_element(By.ID, "password").send_keys(Keys.ENTER)
|
||||
|
||||
# Wait until we're logged in
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "button[type=submit]"))
|
||||
)
|
||||
self.driver.find_element(By.CSS_SELECTOR, "button[type=submit]").click()
|
||||
|
||||
self.wait.until(ec.presence_of_element_located((By.NAME, "username")))
|
||||
# At this point we've been redirected back
|
||||
# and we're asked for the username
|
||||
self.driver.find_element(By.NAME, "username").click()
|
||||
self.driver.find_element(By.NAME, "username").send_keys("foo")
|
||||
self.driver.find_element(By.NAME, "username").send_keys(Keys.ENTER)
|
||||
|
||||
# Wait until we've loaded the user info page
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "user-settings")))
|
||||
self.driver.get(self.url("passbook_core:user-settings"))
|
||||
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "user-settings").text,
|
||||
"foo",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_username").get_attribute("value"), "foo"
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_name").get_attribute("value"),
|
||||
"admin",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_email").get_attribute("value"),
|
||||
"admin@example.com",
|
||||
)
|
||||
|
||||
@override_settings(SESSION_COOKIE_SAMESITE="strict")
|
||||
def test_oauth_samesite_strict(self):
|
||||
"""test OAuth Source With SameSite set to strict
|
||||
(=will fail because session is not carried over)"""
|
||||
self.create_objects()
|
||||
self.driver.get(self.live_server_url)
|
||||
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located(
|
||||
(By.CLASS_NAME, "pf-c-login__main-footer-links-item-link")
|
||||
)
|
||||
)
|
||||
self.driver.find_element(
|
||||
By.CLASS_NAME, "pf-c-login__main-footer-links-item-link"
|
||||
).click()
|
||||
|
||||
# Now we should be at the IDP, wait for the login field
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "login")))
|
||||
self.driver.find_element(By.ID, "login").send_keys("admin@example.com")
|
||||
self.driver.find_element(By.ID, "password").send_keys("password")
|
||||
self.driver.find_element(By.ID, "password").send_keys(Keys.ENTER)
|
||||
|
||||
# Wait until we're logged in
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "button[type=submit]"))
|
||||
)
|
||||
self.driver.find_element(By.CSS_SELECTOR, "button[type=submit]").click()
|
||||
|
||||
self.wait.until(
ec.presence_of_element_located((By.CSS_SELECTOR, ".pf-c-alert__title"))
)
self.assertEqual(
self.driver.find_element(By.CSS_SELECTOR, ".pf-c-alert__title").text,
"Authentication Failed.",
)

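The assertion above documents expected behaviour: with SESSION_COOKIE_SAMESITE set to "strict", the browser withholds the session cookie on the top-level redirect back from Dex, so the half-finished flow cannot resume and passbook reports "Authentication Failed.". The flow only works with the laxer Django default, roughly:

    # Django's default (sketch); "Strict" breaks cross-site OAuth callbacks as tested above.
    SESSION_COOKIE_SAMESITE = "Lax"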
def test_oauth_enroll_auth(self):
|
||||
"""test OAuth Source With With OIDC (enroll and authenticate again)"""
|
||||
self.test_oauth_enroll()
|
||||
# We're logged in at the end of this, log out and re-login
|
||||
self.driver.find_element(By.ID, "logout").click()
|
||||
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located(
|
||||
(By.CLASS_NAME, "pf-c-login__main-footer-links-item-link")
|
||||
)
|
||||
)
|
||||
sleep(1)
|
||||
self.driver.find_element(
|
||||
By.CLASS_NAME, "pf-c-login__main-footer-links-item-link"
|
||||
).click()
|
||||
sleep(1)
|
||||
# Now we should be at the IDP, wait for the login field
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "login")))
|
||||
self.driver.find_element(By.ID, "login").send_keys("admin@example.com")
|
||||
self.driver.find_element(By.ID, "password").send_keys("password")
|
||||
self.driver.find_element(By.ID, "password").send_keys(Keys.ENTER)
|
||||
|
||||
# Wait until we're logged in
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "button[type=submit]"))
|
||||
)
|
||||
self.driver.find_element(By.CSS_SELECTOR, "button[type=submit]").click()
|
||||
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "user-settings")))
|
||||
self.driver.get(self.url("passbook_core:user-settings"))
|
||||
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "user-settings").text,
|
||||
"foo",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_username").get_attribute("value"), "foo"
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_name").get_attribute("value"),
|
||||
"admin",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_email").get_attribute("value"),
|
||||
"admin@example.com",
|
||||
)
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestSourceOAuth1(SeleniumTestCase):
|
||||
"""Test OAuth1 Source"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
self.client_id = generate_client_id()
|
||||
self.client_secret = generate_client_secret()
|
||||
self.source_slug = "oauth1-test"
|
||||
super().setUp()
|
||||
|
||||
def get_container_specs(self) -> Optional[Dict[str, Any]]:
|
||||
return {
|
||||
"image": "beryju/oauth1-test-server",
|
||||
"detach": True,
|
||||
"network_mode": "host",
|
||||
"auto_remove": True,
|
||||
"environment": {
|
||||
"OAUTH1_CLIENT_ID": self.client_id,
|
||||
"OAUTH1_CLIENT_SECRET": self.client_secret,
|
||||
"OAUTH1_REDIRECT_URI": (
|
||||
self.url(
|
||||
"passbook_sources_oauth:oauth-client-callback",
|
||||
source_slug=self.source_slug,
|
||||
)
|
||||
),
|
||||
},
|
||||
}
|
||||
|
||||
def create_objects(self):
|
||||
"""Create required objects"""
|
||||
# Bootstrap all needed objects
|
||||
authentication_flow = Flow.objects.get(slug="default-source-authentication")
|
||||
enrollment_flow = Flow.objects.get(slug="default-source-enrollment")
|
||||
|
||||
OAuthSource.objects.create( # nosec
|
||||
name="oauth1",
|
||||
slug=self.source_slug,
|
||||
authentication_flow=authentication_flow,
|
||||
enrollment_flow=enrollment_flow,
|
||||
provider_type="twitter",
|
||||
request_token_url="http://localhost:5000/oauth/request_token",
|
||||
access_token_url="http://localhost:5000/oauth/access_token",
|
||||
authorization_url="http://localhost:5000/oauth/authorize",
|
||||
profile_url="http://localhost:5000/api/me",
|
||||
consumer_key=self.client_id,
|
||||
consumer_secret=self.client_secret,
|
||||
)
|
||||
|
||||
def test_oauth_enroll(self):
|
||||
"""test OAuth Source With With OIDC"""
|
||||
self.create_objects()
|
||||
self.driver.get(self.live_server_url)
|
||||
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located(
|
||||
(By.CLASS_NAME, "pf-c-login__main-footer-links-item-link")
|
||||
)
|
||||
)
|
||||
self.driver.find_element(
|
||||
By.CLASS_NAME, "pf-c-login__main-footer-links-item-link"
|
||||
).click()
|
||||
|
||||
# Now we should be at the IDP, wait for the login field
|
||||
self.wait.until(ec.presence_of_element_located((By.NAME, "username")))
|
||||
self.driver.find_element(By.NAME, "username").send_keys("example-user")
|
||||
self.driver.find_element(By.NAME, "username").send_keys(Keys.ENTER)
|
||||
|
||||
# Wait until we're logged in
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "[name='confirm']"))
|
||||
)
|
||||
self.driver.find_element(By.CSS_SELECTOR, "[name='confirm']").click()
|
||||
|
||||
# Wait until we've loaded the user info page
|
||||
sleep(2)
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "user-settings")))
|
||||
self.driver.get(self.url("passbook_core:user-settings"))
|
||||
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "user-settings").text,
|
||||
"example-user",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_username").get_attribute("value"),
|
||||
"example-user",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_name").get_attribute("value"),
|
||||
"test name",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_email").get_attribute("value"),
|
||||
"foo@example.com",
|
||||
)
|
@@ -1,8 +1,9 @@
|
||||
"""test SAML Source"""
|
||||
from sys import platform
|
||||
from time import sleep
|
||||
from typing import Any, Dict, Optional
|
||||
from unittest.case import skipUnless
|
||||
|
||||
from docker import DockerClient, from_env
|
||||
from docker.models.containers import Container
|
||||
from docker.types import Healthcheck
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.common.keys import Keys
|
||||
@ -68,53 +69,38 @@ Sm75WXsflOxuTn08LbgGc4s=
|
||||
-----END PRIVATE KEY-----"""
|
||||
|
||||
|
||||
@skipUnless(platform.startswith("linux"), "requires local docker")
|
||||
class TestSourceSAML(SeleniumTestCase):
|
||||
"""test SAML Source flow"""
|
||||
|
||||
def setUp(self):
|
||||
self.container = self.setup_client()
|
||||
super().setUp()
|
||||
|
||||
def setup_client(self) -> Container:
|
||||
"""Setup test IdP container"""
|
||||
client: DockerClient = from_env()
|
||||
container = client.containers.run(
|
||||
image="kristophjunge/test-saml-idp:1.15",
|
||||
detach=True,
|
||||
network_mode="host",
|
||||
auto_remove=True,
|
||||
healthcheck=Healthcheck(
|
||||
def get_container_specs(self) -> Optional[Dict[str, Any]]:
|
||||
return {
|
||||
"image": "kristophjunge/test-saml-idp:1.15",
|
||||
"detach": True,
|
||||
"network_mode": "host",
|
||||
"auto_remove": True,
|
||||
"healthcheck": Healthcheck(
|
||||
test=["CMD", "curl", "http://localhost:8080"],
|
||||
interval=5 * 100 * 1000000,
|
||||
start_period=1 * 100 * 1000000,
|
||||
),
|
||||
environment={
|
||||
"environment": {
|
||||
"SIMPLESAMLPHP_SP_ENTITY_ID": "entity-id",
|
||||
"SIMPLESAMLPHP_SP_ASSERTION_CONSUMER_SERVICE": (
|
||||
f"{self.live_server_url}/source/saml/saml-idp-test/acs/"
|
||||
),
|
||||
},
|
||||
)
|
||||
while True:
|
||||
container.reload()
|
||||
status = container.attrs.get("State", {}).get("Health", {}).get("Status")
|
||||
if status == "healthy":
|
||||
return container
|
||||
LOGGER.info("Container failed healthcheck")
|
||||
sleep(1)
|
||||
|
||||
def tearDown(self):
|
||||
self.container.kill()
|
||||
super().tearDown()
|
||||
}
|
||||
|
||||
def test_idp_redirect(self):
|
||||
"""test SAML Source With redirect binding"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authentication_flow = Flow.objects.get(slug="default-source-authentication")
|
||||
enrollment_flow = Flow.objects.get(slug="default-source-enrollment")
|
||||
keypair = CertificateKeyPair.objects.create(
|
||||
name="test-idp-cert", certificate_data=IDP_CERT, key_data=IDP_KEY,
|
||||
name="test-idp-cert",
|
||||
certificate_data=IDP_CERT,
|
||||
key_data=IDP_KEY,
|
||||
)
|
||||
|
||||
SAMLSource.objects.create(
|
||||
@ -146,12 +132,8 @@ class TestSourceSAML(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "password").send_keys(Keys.ENTER)
|
||||
|
||||
# Wait until we're logged in
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located(
|
||||
(By.XPATH, "//a[contains(@href, '/-/user/')]")
|
||||
)
|
||||
)
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/-/user/')]").click()
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "user-settings")))
|
||||
self.driver.get(self.url("passbook_core:user-settings"))
|
||||
|
||||
# Wait until we've loaded the user info page
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "id_username")))
|
||||
@ -161,12 +143,13 @@ class TestSourceSAML(SeleniumTestCase):
|
||||
|
||||
def test_idp_post(self):
|
||||
"""test SAML Source With post binding"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authentication_flow = Flow.objects.get(slug="default-source-authentication")
|
||||
enrollment_flow = Flow.objects.get(slug="default-source-enrollment")
|
||||
keypair = CertificateKeyPair.objects.create(
|
||||
name="test-idp-cert", certificate_data=IDP_CERT, key_data=IDP_KEY,
|
||||
name="test-idp-cert",
|
||||
certificate_data=IDP_CERT,
|
||||
key_data=IDP_KEY,
|
||||
)
|
||||
|
||||
SAMLSource.objects.create(
|
||||
@ -200,12 +183,8 @@ class TestSourceSAML(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "password").send_keys(Keys.ENTER)
|
||||
|
||||
# Wait until we're logged in
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located(
|
||||
(By.XPATH, "//a[contains(@href, '/-/user/')]")
|
||||
)
|
||||
)
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/-/user/')]").click()
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "user-settings")))
|
||||
self.driver.get(self.url("passbook_core:user-settings"))
|
||||
|
||||
# Wait until we've loaded the user info page
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "id_username")))
|
||||
@ -215,12 +194,13 @@ class TestSourceSAML(SeleniumTestCase):
|
||||
|
||||
def test_idp_post_auto(self):
|
||||
"""test SAML Source With post binding (auto redirect)"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authentication_flow = Flow.objects.get(slug="default-source-authentication")
|
||||
enrollment_flow = Flow.objects.get(slug="default-source-enrollment")
|
||||
keypair = CertificateKeyPair.objects.create(
|
||||
name="test-idp-cert", certificate_data=IDP_CERT, key_data=IDP_KEY,
|
||||
name="test-idp-cert",
|
||||
certificate_data=IDP_CERT,
|
||||
key_data=IDP_KEY,
|
||||
)
|
||||
|
||||
SAMLSource.objects.create(
|
||||
@ -252,12 +232,8 @@ class TestSourceSAML(SeleniumTestCase):
|
||||
self.driver.find_element(By.ID, "password").send_keys(Keys.ENTER)
|
||||
|
||||
# Wait until we're logged in
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located(
|
||||
(By.XPATH, "//a[contains(@href, '/-/user/')]")
|
||||
)
|
||||
)
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/-/user/')]").click()
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "user-settings")))
|
||||
self.driver.get(self.url("passbook_core:user-settings"))
|
||||
|
||||
# Wait until we've loaded the user info page
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "id_username")))
|
||||
|
@@ -1,215 +0,0 @@
|
||||
"""test OAuth Source"""
|
||||
from os.path import abspath
|
||||
from time import sleep
|
||||
|
||||
from docker import DockerClient, from_env
|
||||
from docker.models.containers import Container
|
||||
from docker.types import Healthcheck
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.common.keys import Keys
|
||||
from selenium.webdriver.support import expected_conditions as ec
|
||||
from structlog import get_logger
|
||||
from yaml import safe_dump
|
||||
|
||||
from e2e.utils import SeleniumTestCase
|
||||
from passbook.flows.models import Flow
|
||||
from passbook.providers.oauth2.generators import generate_client_secret
|
||||
from passbook.sources.oauth.models import OAuthSource
|
||||
|
||||
TOKEN_URL = "http://127.0.0.1:5556/dex/token"
|
||||
CONFIG_PATH = "/tmp/dex.yml"
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class TestSourceOAuth(SeleniumTestCase):
|
||||
"""test OAuth Source flow"""
|
||||
|
||||
container: Container
|
||||
|
||||
def setUp(self):
|
||||
self.client_secret = generate_client_secret()
|
||||
self.container = self.setup_client()
|
||||
super().setUp()
|
||||
|
||||
def prepare_dex_config(self):
|
||||
"""Since Dex does not document which environment
|
||||
variables can be used to configure clients"""
|
||||
config = {
|
||||
"enablePasswordDB": True,
|
||||
"issuer": "http://127.0.0.1:5556/dex",
|
||||
"logger": {"level": "debug"},
|
||||
"staticClients": [
|
||||
{
|
||||
"id": "example-app",
|
||||
"name": "Example App",
|
||||
"redirectURIs": [
|
||||
self.url(
|
||||
"passbook_sources_oauth:oauth-client-callback",
|
||||
source_slug="dex",
|
||||
)
|
||||
],
|
||||
"secret": self.client_secret,
|
||||
}
|
||||
],
|
||||
"staticPasswords": [
|
||||
{
|
||||
"email": "admin@example.com",
|
||||
# hash for password
|
||||
"hash": "$2a$10$2b2cU8CPhOTaGrs1HRQuAueS7JTT5ZHsHSzYiFPm1leZck7Mc8T4W",
|
||||
"userID": "08a8684b-db88-4b73-90a9-3cd1661f5466",
|
||||
"username": "admin",
|
||||
}
|
||||
],
|
||||
"storage": {"config": {"file": "/tmp/dex.db"}, "type": "sqlite3"},
|
||||
"web": {"http": "0.0.0.0:5556"},
|
||||
}
|
||||
with open(CONFIG_PATH, "w+") as _file:
|
||||
safe_dump(config, _file)
|
||||
|
||||
def setup_client(self) -> Container:
|
||||
"""Setup test Dex container"""
|
||||
self.prepare_dex_config()
|
||||
client: DockerClient = from_env()
|
||||
container = client.containers.run(
|
||||
image="quay.io/dexidp/dex:v2.24.0",
|
||||
detach=True,
|
||||
network_mode="host",
|
||||
auto_remove=True,
|
||||
command="serve /config.yml",
|
||||
healthcheck=Healthcheck(
|
||||
test=["CMD", "wget", "--spider", "http://localhost:5556/dex/healthz"],
|
||||
interval=5 * 100 * 1000000,
|
||||
start_period=1 * 100 * 1000000,
|
||||
),
|
||||
volumes={abspath(CONFIG_PATH): {"bind": "/config.yml", "mode": "ro"}},
|
||||
)
|
||||
while True:
|
||||
container.reload()
|
||||
status = container.attrs.get("State", {}).get("Health", {}).get("Status")
|
||||
if status == "healthy":
|
||||
return container
|
||||
LOGGER.info("Container failed healthcheck")
|
||||
sleep(1)
|
||||
|
||||
def create_objects(self):
|
||||
"""Create required objects"""
|
||||
sleep(1)
|
||||
# Bootstrap all needed objects
|
||||
authentication_flow = Flow.objects.get(slug="default-source-authentication")
|
||||
enrollment_flow = Flow.objects.get(slug="default-source-enrollment")
|
||||
|
||||
OAuthSource.objects.create(
|
||||
name="dex",
|
||||
slug="dex",
|
||||
authentication_flow=authentication_flow,
|
||||
enrollment_flow=enrollment_flow,
|
||||
provider_type="openid-connect",
|
||||
authorization_url="http://127.0.0.1:5556/dex/auth",
|
||||
access_token_url=TOKEN_URL,
|
||||
profile_url="http://127.0.0.1:5556/dex/userinfo",
|
||||
consumer_key="example-app",
|
||||
consumer_secret=self.client_secret,
|
||||
)
|
||||
|
||||
def tearDown(self):
|
||||
self.container.kill()
|
||||
super().tearDown()
|
||||
|
||||
def test_oauth_enroll(self):
|
||||
"""test OAuth Source With With OIDC"""
|
||||
self.create_objects()
|
||||
self.driver.get(self.live_server_url)
|
||||
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located(
|
||||
(By.CLASS_NAME, "pf-c-login__main-footer-links-item-link")
|
||||
)
|
||||
)
|
||||
self.driver.find_element(
|
||||
By.CLASS_NAME, "pf-c-login__main-footer-links-item-link"
|
||||
).click()
|
||||
|
||||
# Now we should be at the IDP, wait for the login field
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "login")))
|
||||
self.driver.find_element(By.ID, "login").send_keys("admin@example.com")
|
||||
self.driver.find_element(By.ID, "password").send_keys("password")
|
||||
self.driver.find_element(By.ID, "password").send_keys(Keys.ENTER)
|
||||
|
||||
# Wait until we're logged in
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "button[type=submit]"))
|
||||
)
|
||||
self.driver.find_element(By.CSS_SELECTOR, "button[type=submit]").click()
|
||||
|
||||
# At this point we've been redirected back
|
||||
# and we're asked for the username
|
||||
self.driver.find_element(By.NAME, "username").click()
|
||||
self.driver.find_element(By.NAME, "username").send_keys("foo")
|
||||
self.driver.find_element(By.NAME, "username").send_keys(Keys.ENTER)
|
||||
|
||||
# Wait until we've loaded the user info page
|
||||
self.wait.until(ec.presence_of_element_located((By.LINK_TEXT, "foo")))
|
||||
self.driver.find_element(By.LINK_TEXT, "foo").click()
|
||||
|
||||
self.wait_for_url(self.url("passbook_core:user-settings"))
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/-/user/')]").text,
|
||||
"foo",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_username").get_attribute("value"), "foo"
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_name").get_attribute("value"), "admin",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_email").get_attribute("value"),
|
||||
"admin@example.com",
|
||||
)
|
||||
|
||||
def test_oauth_enroll_auth(self):
|
||||
"""test OAuth Source With With OIDC (enroll and authenticate again)"""
|
||||
self.test_oauth_enroll()
|
||||
# We're logged in at the end of this, log out and re-login
|
||||
self.driver.find_element(By.CSS_SELECTOR, "[aria-label=logout]").click()
|
||||
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located(
|
||||
(By.CLASS_NAME, "pf-c-login__main-footer-links-item-link")
|
||||
)
|
||||
)
|
||||
self.driver.find_element(
|
||||
By.CLASS_NAME, "pf-c-login__main-footer-links-item-link"
|
||||
).click()
|
||||
|
||||
# Now we should be at the IDP, wait for the login field
|
||||
self.wait.until(ec.presence_of_element_located((By.ID, "login")))
|
||||
self.driver.find_element(By.ID, "login").send_keys("admin@example.com")
|
||||
self.driver.find_element(By.ID, "password").send_keys("password")
|
||||
self.driver.find_element(By.ID, "password").send_keys(Keys.ENTER)
|
||||
|
||||
# Wait until we're logged in
|
||||
self.wait.until(
|
||||
ec.presence_of_element_located((By.CSS_SELECTOR, "button[type=submit]"))
|
||||
)
|
||||
self.driver.find_element(By.CSS_SELECTOR, "button[type=submit]").click()
|
||||
|
||||
# Wait until we've loaded the user info page
|
||||
self.wait.until(ec.presence_of_element_located((By.LINK_TEXT, "foo")))
|
||||
self.driver.find_element(By.LINK_TEXT, "foo").click()
|
||||
|
||||
self.wait_for_url(self.url("passbook_core:user-settings"))
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.XPATH, "//a[contains(@href, '/-/user/')]").text,
|
||||
"foo",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_username").get_attribute("value"), "foo"
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_name").get_attribute("value"), "admin",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.driver.find_element(By.ID, "id_email").get_attribute("value"),
|
||||
"admin@example.com",
|
||||
)
|
34
e2e/utils.py
@ -1,16 +1,18 @@
|
||||
"""passbook e2e testing utilities"""
|
||||
from functools import lru_cache
|
||||
from glob import glob
|
||||
from importlib.util import module_from_spec, spec_from_file_location
|
||||
from inspect import getmembers, isfunction
|
||||
from os import environ, makedirs
|
||||
from time import time
|
||||
from time import sleep, time
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from django.apps import apps
|
||||
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
|
||||
from django.db import connection, transaction
|
||||
from django.db.utils import IntegrityError
|
||||
from django.shortcuts import reverse
|
||||
from docker import DockerClient, from_env
|
||||
from docker.models.containers import Container
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
|
||||
from selenium.webdriver.remote.webdriver import WebDriver
|
||||
@ -20,7 +22,6 @@ from structlog import get_logger
|
||||
from passbook.core.models import User
|
||||
|
||||
|
||||
@lru_cache
|
||||
# pylint: disable=invalid-name
|
||||
def USER() -> User: # noqa
|
||||
"""Cached function that always returns pbadmin"""
|
||||
@ -30,15 +31,38 @@ def USER() -> User: # noqa
|
||||
class SeleniumTestCase(StaticLiveServerTestCase):
|
||||
"""StaticLiveServerTestCase which automatically creates a Webdriver instance"""
|
||||
|
||||
container: Optional[Container] = None
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
makedirs("selenium_screenshots/", exist_ok=True)
|
||||
self.driver = self._get_driver()
|
||||
self.driver.maximize_window()
|
||||
self.driver.implicitly_wait(30)
|
||||
self.wait = WebDriverWait(self.driver, 50)
|
||||
self.wait = WebDriverWait(self.driver, 60)
|
||||
self.apply_default_data()
|
||||
self.logger = get_logger()
|
||||
if specs := self.get_container_specs():
|
||||
self.container = self._start_container(specs)
|
||||
|
||||
def _start_container(self, specs: Dict[str, Any]) -> Container:
|
||||
client: DockerClient = from_env()
|
||||
client.images.pull(specs["image"])
|
||||
container = client.containers.run(**specs)
|
||||
if "healthcheck" not in specs:
|
||||
return container
|
||||
while True:
|
||||
container.reload()
|
||||
status = container.attrs.get("State", {}).get("Health", {}).get("Status")
|
||||
if status == "healthy":
|
||||
return container
|
||||
self.logger.info("Container failed healthcheck")
|
||||
sleep(1)
|
||||
|
||||
def get_container_specs(self) -> Optional[Dict[str, Any]]:
|
||||
"""Optionally get container specs which will launched on setup, wait for the container to
|
||||
be healthy, and deleted again on tearDown"""
|
||||
return None
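A subclass opts into this container lifecycle by returning a keyword dict that `_start_container` passes straight to `client.containers.run(**specs)`. A minimal sketch, assuming the import path `e2e.utils` and an illustrative image; the keys mirror the ones used for the Dex container above:

# Sketch of a test case using get_container_specs(); the image and module path are assumptions.
from typing import Any, Dict, Optional

from e2e.utils import SeleniumTestCase  # assumed import path for the e2e utilities

class ExampleContainerTestCase(SeleniumTestCase):
    """Example test case that gets a throw-away service container per test"""

    def get_container_specs(self) -> Optional[Dict[str, Any]]:
        # Everything returned here is passed to client.containers.run(**specs)
        return {
            "image": "mailhog/mailhog:v1.0.1",  # illustrative image
            "detach": True,
            "network_mode": "host",
            "auto_remove": True,
        }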
|
||||
|
||||
def _get_driver(self) -> WebDriver:
|
||||
return webdriver.Remote(
|
||||
@ -57,6 +81,8 @@ class SeleniumTestCase(StaticLiveServerTestCase):
|
||||
self.logger.warning(
|
||||
line["message"], source=line["source"], level=line["level"]
|
||||
)
|
||||
if self.container:
|
||||
self.container.kill()
|
||||
self.driver.quit()
|
||||
super().tearDown()
|
||||
|
||||
|
@ -1,15 +1,15 @@
|
||||
apiVersion: v2
|
||||
appVersion: "0.10.0-rc4"
|
||||
appVersion: "0.12.2-stable"
|
||||
description: A Helm chart for passbook.
|
||||
name: passbook
|
||||
version: "0.10.0-rc4"
|
||||
icon: https://github.com/BeryJu/passbook/blob/master/passbook/static/static/passbook/logo.svg
|
||||
version: "0.12.2-stable"
|
||||
icon: https://github.com/BeryJu/passbook/blob/master/docs/images/logo.svg
|
||||
dependencies:
|
||||
- name: postgresql
|
||||
version: 9.3.2
|
||||
version: 9.4.1
|
||||
repository: https://charts.bitnami.com/bitnami
|
||||
condition: install.postgresql
|
||||
- name: redis
|
||||
version: 10.7.16
|
||||
version: 10.9.0
|
||||
repository: https://charts.bitnami.com/bitnami
|
||||
condition: install.redis
|
||||
|
@ -7,13 +7,14 @@ data:
|
||||
POSTGRESQL__NAME: "{{ .Values.postgresql.postgresqlDatabase }}"
|
||||
POSTGRESQL__USER: "{{ .Values.postgresql.postgresqlUsername }}"
|
||||
{{- if .Values.backup }}
|
||||
POSTGRESQL__BACKUP__ACCESS_KEY: "{{ .Values.backup.access_key }}"
|
||||
POSTGRESQL__BACKUP__SECRET_KEY: "{{ .Values.backup.secret_key }}"
|
||||
POSTGRESQL__BACKUP__BUCKET: "{{ .Values.backup.bucket }}"
|
||||
POSTGRESQL__BACKUP__HOST: "{{ .Values.backup.host }}"
|
||||
POSTGRESQL__S3_BACKUP__ACCESS_KEY: "{{ .Values.backup.accessKey }}"
|
||||
POSTGRESQL__S3_BACKUP__SECRET_KEY: "{{ .Values.backup.secretKey }}"
|
||||
POSTGRESQL__S3_BACKUP__BUCKET: "{{ .Values.backup.bucket }}"
|
||||
POSTGRESQL__S3_BACKUP__REGION: "{{ .Values.backup.region }}"
|
||||
POSTGRESQL__S3_BACKUP__HOST: "{{ .Values.backup.host }}"
|
||||
{{- end}}
|
||||
REDIS__HOST: "{{ .Release.Name }}-redis-master"
|
||||
ERROR_REPORTING__ENABLED: "{{ .Values.config.error_reporting.enabled }}"
|
||||
ERROR_REPORTING__ENVIRONMENT: "{{ .Values.config.error_reporting.environment }}"
|
||||
ERROR_REPORTING__SEND_PII: "{{ .Values.config.error_reporting.send_pii }}"
|
||||
LOG_LEVEL: "{{ .Values.config.log_level }}"
|
||||
ERROR_REPORTING__ENABLED: "{{ .Values.config.errorReporting.enabled }}"
|
||||
ERROR_REPORTING__ENVIRONMENT: "{{ .Values.config.errorReporting.environment }}"
|
||||
ERROR_REPORTING__SEND_PII: "{{ .Values.config.errorReporting.sendPii }}"
|
||||
LOG_LEVEL: "{{ .Values.config.logLevel }}"
|
||||
|
42
helm/templates/cronjob-backup.yaml
Normal file
@ -0,0 +1,42 @@
|
||||
{{- if .Values.backup }}
|
||||
apiVersion: batch/v1beta1
|
||||
kind: CronJob
|
||||
metadata:
|
||||
name: {{ include "passbook.fullname" . }}-backup
|
||||
labels:
|
||||
app.kubernetes.io/name: {{ include "passbook.name" . }}
|
||||
helm.sh/chart: {{ include "passbook.chart" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
spec:
|
||||
schedule: "0 0 * * *"
|
||||
jobTemplate:
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
restartPolicy: Never
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
image: "{{ .Values.image.name }}:{{ .Values.image.tag }}"
|
||||
args: [server]
|
||||
envFrom:
|
||||
- configMapRef:
|
||||
name: {{ include "passbook.fullname" . }}-config
|
||||
prefix: PASSBOOK_
|
||||
env:
|
||||
- name: PASSBOOK_SECRET_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ include "passbook.fullname" . }}-secret-key"
|
||||
key: "secret_key"
|
||||
- name: PASSBOOK_REDIS__PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Release.Name }}-redis"
|
||||
key: "redis-password"
|
||||
- name: PASSBOOK_POSTGRESQL__PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Release.Name }}-postgresql"
|
||||
key: "postgresql-password"
|
||||
{{- end}}
|
@ -5,8 +5,8 @@ metadata:
|
||||
name: {{ include "passbook.fullname" . }}-secret-key
|
||||
data:
|
||||
monitoring_username: bW9uaXRvcg== # monitor in base64
|
||||
{{- if .Values.config.secret_key }}
|
||||
secret_key: {{ .Values.config.secret_key | b64enc | quote }}
|
||||
{{- if .Values.config.secretKey }}
|
||||
secret_key: {{ .Values.config.secretKey | b64enc | quote }}
|
||||
{{- else }}
|
||||
secret_key: {{ randAlphaNum 50 | b64enc | quote}}
|
||||
{{- end }}
|
||||
|
64
helm/templates/service-account.yaml
Normal file
@ -0,0 +1,64 @@
|
||||
{{- if .Values.kubernetesIntegration }}
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRole
|
||||
metadata:
|
||||
name: {{ include "passbook.fullname" . }}-sa-role
|
||||
rules:
|
||||
- apiGroups:
|
||||
- ""
|
||||
resources:
|
||||
- secrets
|
||||
- services
|
||||
verbs:
|
||||
- "get"
|
||||
- "create"
|
||||
- "delete"
|
||||
- "read"
|
||||
- "patch"
|
||||
- apiGroups:
|
||||
- "extensions"
|
||||
- "apps"
|
||||
resources:
|
||||
- "deployments"
|
||||
verbs:
|
||||
- "get"
|
||||
- "create"
|
||||
- "delete"
|
||||
- "read"
|
||||
- "patch"
|
||||
- apiGroups:
|
||||
- "extensions"
|
||||
- "networking.k8s.io"
|
||||
resources:
|
||||
- "ingresses"
|
||||
verbs:
|
||||
- "get"
|
||||
- "create"
|
||||
- "delete"
|
||||
- "read"
|
||||
- "patch"
|
||||
- apiGroups:
|
||||
- ""
|
||||
resources:
|
||||
- namespaces
|
||||
verbs:
|
||||
- list
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: {{ include "passbook.fullname" . }}-sa
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRoleBinding
|
||||
metadata:
|
||||
name: {{ include "passbook.fullname" . }}-sa-role-binding
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: ClusterRole
|
||||
name: {{ include "passbook.fullname" . }}-sa-role
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: {{ include "passbook.fullname" . }}-sa
|
||||
namespace: {{ .Release.Namespace }}
|
||||
{{- end }}
|
@ -9,7 +9,7 @@ metadata:
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
k8s.passbook.beryju.org/component: web
|
||||
spec:
|
||||
replicas: {{ serverReplicas }}
|
||||
replicas: {{ .Values.serverReplicas }}
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/name: {{ include "passbook.name" . }}
|
||||
@ -22,10 +22,29 @@ spec:
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
k8s.passbook.beryju.org/component: web
|
||||
spec:
|
||||
affinity:
|
||||
podAntiAffinity:
|
||||
preferredDuringSchedulingIgnoredDuringExecution:
|
||||
- weight: 1
|
||||
podAffinityTerm:
|
||||
labelSelector:
|
||||
matchExpressions:
|
||||
- key: app.kubernetes.io/name
|
||||
operator: In
|
||||
values:
|
||||
- {{ include "passbook.name" . }}
|
||||
- key: app.kubernetes.io/instance
|
||||
operator: In
|
||||
values:
|
||||
- {{ .Release.Name }}
|
||||
- key: k8s.passbook.beryju.org/component
|
||||
operator: In
|
||||
values:
|
||||
- web
|
||||
topologyKey: "kubernetes.io/hostname"
|
||||
initContainers:
|
||||
- name: passbook-database-migrations
|
||||
image: "{{ .Values.image.name }}:{{ .Values.image.tag }}"
|
||||
imagePullPolicy: Always
|
||||
args: [migrate]
|
||||
envFrom:
|
||||
- configMapRef:
|
||||
@ -50,7 +69,6 @@ spec:
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
image: "{{ .Values.image.name }}:{{ .Values.image.tag }}"
|
||||
imagePullPolicy: Always
|
||||
args: [server]
|
||||
envFrom:
|
||||
- configMapRef:
|
||||
@ -82,18 +100,18 @@ spec:
|
||||
port: http
|
||||
httpHeaders:
|
||||
- name: Host
|
||||
value: kubernetes-healthcheck-host
|
||||
value: passbook-healthcheck-host
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
httpHeaders:
|
||||
- name: Host
|
||||
value: kubernetes-healthcheck-host
|
||||
value: passbook-healthcheck-host
|
||||
resources:
|
||||
requests:
|
||||
cpu: 100m
|
||||
memory: 200M
|
||||
memory: 300M
|
||||
limits:
|
||||
cpu: 300m
|
||||
memory: 350M
|
||||
memory: 500M
|
||||
|
@ -9,7 +9,7 @@ metadata:
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
k8s.passbook.beryju.org/component: worker
|
||||
spec:
|
||||
replicas: {{ workerReplicas }}
|
||||
replicas: {{ .Values.workerReplicas }}
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/name: {{ include "passbook.name" . }}
|
||||
@ -22,6 +22,29 @@ spec:
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
k8s.passbook.beryju.org/component: worker
|
||||
spec:
|
||||
{{- if .Values.kubernetesIntegration }}
|
||||
serviceAccountName: {{ include "passbook.fullname" . }}-sa
|
||||
{{- end }}
|
||||
affinity:
|
||||
podAntiAffinity:
|
||||
preferredDuringSchedulingIgnoredDuringExecution:
|
||||
- weight: 1
|
||||
podAffinityTerm:
|
||||
labelSelector:
|
||||
matchExpressions:
|
||||
- key: app.kubernetes.io/name
|
||||
operator: In
|
||||
values:
|
||||
- {{ include "passbook.name" . }}
|
||||
- key: app.kubernetes.io/instance
|
||||
operator: In
|
||||
values:
|
||||
- {{ .Release.Name }}
|
||||
- key: k8s.passbook.beryju.org/component
|
||||
operator: In
|
||||
values:
|
||||
- worker
|
||||
topologyKey: "kubernetes.io/hostname"
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
image: "{{ .Values.image.name }}:{{ .Values.image.tag }}"
|
||||
@ -50,7 +73,7 @@ spec:
|
||||
resources:
|
||||
requests:
|
||||
cpu: 150m
|
||||
memory: 300M
|
||||
memory: 400M
|
||||
limits:
|
||||
cpu: 300m
|
||||
memory: 500M
|
||||
memory: 600M
|
||||
|
21
helm/values.test.yaml
Normal file
@ -0,0 +1,21 @@
|
||||
image:
|
||||
tag: gh-master
|
||||
|
||||
serverReplicas: 1
|
||||
workerReplicas: 1
|
||||
|
||||
config:
|
||||
# Log level used by web and worker
|
||||
# Can be either debug, info, warning, error
|
||||
logLevel: debug
|
||||
|
||||
ingress:
|
||||
hosts:
|
||||
- passbook.127.0.0.1.nip.io
|
||||
|
||||
# These values influence the bundled postgresql and redis charts, but are also used by passbook to connect
|
||||
postgresql:
|
||||
postgresqlPassword: EK-5jnKfjrGRm<77
|
||||
|
||||
redis:
|
||||
password: password
|
@ -4,32 +4,48 @@
|
||||
image:
|
||||
name: beryju/passbook
|
||||
name_static: beryju/passbook-static
|
||||
tag: 0.10.0-rc4
|
||||
tag: 0.12.2-stable
|
||||
|
||||
nameOverride: ""
|
||||
|
||||
serverReplicas: 1
|
||||
workerReplicas: 1
|
||||
|
||||
# Enable the Kubernetes integration which lets passbook deploy outposts into kubernetes
|
||||
kubernetesIntegration: true
|
||||
|
||||
config:
|
||||
# Optionally specify fixed secret_key, otherwise generated automatically
|
||||
# secret_key: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
|
||||
# secretKey: _k*@6h2u2@q-dku57hhgzb7tnx*ba9wodcb^s9g0j59@=y(@_o
|
||||
# Enable error reporting
|
||||
error_reporting:
|
||||
errorReporting:
|
||||
enabled: false
|
||||
environment: customer
|
||||
send_pii: false
|
||||
sendPii: false
|
||||
# Log level used by web and worker
|
||||
# Can be either debug, info, warning, error
|
||||
log_level: warning
|
||||
logLevel: warning
|
||||
|
||||
# Enable Database Backups to S3
|
||||
# backup:
|
||||
# access_key: access-key
|
||||
# secret_key: secret-key
|
||||
# accessKey: access-key
|
||||
# secretKey: secret-key
|
||||
# bucket: s3-bucket
|
||||
# region: eu-central-1
|
||||
# host: s3-host
|
||||
|
||||
ingress:
|
||||
annotations: {}
|
||||
# kubernetes.io/ingress.class: nginx
|
||||
# kubernetes.io/tls-acme: "true"
|
||||
path: /
|
||||
hosts:
|
||||
- passbook.k8s.local
|
||||
tls: []
|
||||
# - secretName: chart-example-tls
|
||||
# hosts:
|
||||
# - passbook.k8s.local
|
||||
|
||||
###################################
|
||||
# Values controlling dependencies
|
||||
###################################
|
||||
@ -50,15 +66,3 @@ redis:
|
||||
enabled: false
|
||||
# https://stackoverflow.com/a/59189742
|
||||
disableCommands: []
|
||||
|
||||
ingress:
|
||||
annotations: {}
|
||||
# kubernetes.io/ingress.class: nginx
|
||||
# kubernetes.io/tls-acme: "true"
|
||||
path: /
|
||||
hosts:
|
||||
- passbook.k8s.local
|
||||
tls: []
|
||||
# - secretName: chart-example-tls
|
||||
# hosts:
|
||||
# - passbook.k8s.local
|
||||
|
@ -1,14 +1,20 @@
|
||||
#!/bin/bash -e
|
||||
python -m lifecycle.wait_for_db
|
||||
printf '{"event": "Bootstrap completed", "level": "info", "logger": "bootstrap", "command": "%s"}\n' "$@"
|
||||
printf '{"event": "Bootstrap completed", "level": "info", "logger": "bootstrap", "command": "%s"}\n' "$@" > /dev/stderr
|
||||
if [[ "$1" == "server" ]]; then
|
||||
gunicorn -c /lifecycle/gunicorn.conf.py passbook.root.asgi:application
|
||||
elif [[ "$1" == "worker" ]]; then
|
||||
celery worker --autoscale=10,3 -E -B -A=passbook.root.celery -s=/tmp/celerybeat-schedule
|
||||
celery -A passbook.root.celery worker --autoscale 3,1 -E -B -s /tmp/celerybeat-schedule -Q passbook,passbook_scheduled
|
||||
elif [[ "$1" == "migrate" ]]; then
|
||||
# Run system migrations first, run normal migrations after
|
||||
python -m lifecycle.migrate
|
||||
python -m manage migrate
|
||||
elif [[ "$1" == "backup" ]]; then
|
||||
python -m manage dbbackup --clean
|
||||
elif [[ "$1" == "restore" ]]; then
|
||||
python -m manage dbrestore ${@:2}
|
||||
elif [[ "$1" == "bash" ]]; then
|
||||
/bin/bash
|
||||
else
|
||||
python -m manage "$@"
|
||||
fi
|
||||
|
@ -1,9 +1,10 @@
|
||||
"""Gunicorn config"""
|
||||
from multiprocessing import cpu_count
|
||||
from pathlib import Path
|
||||
|
||||
import structlog
|
||||
|
||||
bind = "0.0.0.0:8000"
|
||||
workers = 2
|
||||
threads = 4
|
||||
|
||||
user = "passbook"
|
||||
group = "passbook"
|
||||
@ -40,3 +41,11 @@ logconfig_dict = {
|
||||
"gunicorn": {"handlers": ["console"], "level": "INFO", "propagate": False},
|
||||
},
|
||||
}
|
||||
|
||||
# if we're running in kubernetes, use fixed workers because we can scale with more pods
|
||||
# otherwise (assume docker-compose), use as many workers as we can
|
||||
if Path("/var/run/secrets/kubernetes.io").exists():
|
||||
workers = 2
|
||||
else:
|
||||
workers = cpu_count() * 2 + 1
|
||||
threads = 4
|
||||
|
@ -47,7 +47,9 @@ if __name__ == "__main__":
|
||||
# pyright: reportGeneralTypeIssues=false
|
||||
spec.loader.exec_module(mod)
|
||||
|
||||
for _, sub in getmembers(mod, isclass):
|
||||
for name, sub in getmembers(mod, isclass):
|
||||
if name != "Migration":
|
||||
continue
|
||||
migration = sub(curr, conn)
|
||||
if migration.needs_migration():
|
||||
LOGGER.info("Migration needs to be applied", migration=sub)
|
||||
|
@ -25,7 +25,7 @@ delete from django_migrations where app = 'passbook_stages_password' and
|
||||
name = '0002_passwordstage_change_flow';"""
|
||||
|
||||
|
||||
class To010Migration(BaseMigration):
|
||||
class Migration(BaseMigration):
|
||||
def needs_migration(self) -> bool:
|
||||
self.cur.execute(
|
||||
"select * from information_schema.tables where table_name='oidc_provider_client'"
|
||||
|
@ -1,16 +1,28 @@
|
||||
#!/usr/bin/env python
|
||||
"""This file needs to be run from the root of the project to correctly
|
||||
import passbook. This is done by the dockerfile."""
|
||||
from json import dumps
|
||||
from sys import stderr
|
||||
from time import sleep
|
||||
|
||||
from psycopg2 import OperationalError, connect
|
||||
from redis import Redis
|
||||
from redis.exceptions import RedisError
|
||||
from structlog import get_logger
|
||||
|
||||
from passbook.lib.config import CONFIG
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
def j_print(event: str, log_level: str = "info", **kwargs):
|
||||
"""Print event in the same format as structlog with JSON.
|
||||
Used before structlog is configured."""
|
||||
data = {
|
||||
"event": event,
|
||||
"level": log_level,
|
||||
"logger": __name__,
|
||||
}
|
||||
data.update(**kwargs)
|
||||
print(dumps(data), file=stderr)
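`j_print` mimics the JSON lines structlog emits, for the window before structlog is configured. A usage sketch (the extra keyword is illustrative; when run via `python -m lifecycle.wait_for_db`, `__name__` is `"__main__"`):

# Usage sketch for j_print; the "host" keyword is illustrative.
j_print("PostgreSQL Connection failed, retrying...", host="postgres")
# writes to stderr:
# {"event": "PostgreSQL Connection failed, retrying...", "level": "info", "logger": "__main__", "host": "postgres"}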
|
||||
|
||||
|
||||
while True:
|
||||
try:
|
||||
@ -24,7 +36,7 @@ while True:
|
||||
break
|
||||
except OperationalError:
|
||||
sleep(1)
|
||||
LOGGER.warning("PostgreSQL Connection failed, retrying...")
|
||||
j_print("PostgreSQL Connection failed, retrying...")
|
||||
|
||||
while True:
|
||||
try:
|
||||
@ -38,4 +50,4 @@ while True:
|
||||
break
|
||||
except RedisError:
|
||||
sleep(1)
|
||||
LOGGER.warning("Redis Connection failed, retrying...")
|
||||
j_print("Redis Connection failed, retrying...")
|
||||
|
57
mkdocs.yml
@ -8,29 +8,34 @@ nav:
|
||||
- Installation:
|
||||
- docker-compose: installation/docker-compose.md
|
||||
- Kubernetes: installation/kubernetes.md
|
||||
- Reverse Proxy: installation/reverse-proxy.md
|
||||
- Flows:
|
||||
Overview: flow/flows.md
|
||||
Examples: flow/examples/examples.md
|
||||
- Stages:
|
||||
- Captcha Stage: flow/stages/captcha/index.md
|
||||
- Dummy Stage: flow/stages/dummy/index.md
|
||||
- Email Stage: flow/stages/email/index.md
|
||||
- Identification Stage: flow/stages/identification/index.md
|
||||
- Invitation Stage: flow/stages/invitation/index.md
|
||||
- OTP Stage: flow/stages/otp/index.md
|
||||
- Password Stage: flow/stages/password/index.md
|
||||
- Prompt Stage: flow/stages/prompt/index.md
|
||||
- Prompt Stage Validation: flow/stages/prompt/validation.md
|
||||
- User Delete Stage: flow/stages/user_delete.md
|
||||
- User Login Stage: flow/stages/user_login.md
|
||||
- User Logout Stage: flow/stages/user_logout.md
|
||||
- User Write Stage: flow/stages/user_write.md
|
||||
- Captcha Stage: flow/stages/captcha/index.md
|
||||
- Dummy Stage: flow/stages/dummy/index.md
|
||||
- Email Stage: flow/stages/email/index.md
|
||||
- Identification Stage: flow/stages/identification/index.md
|
||||
- Invitation Stage: flow/stages/invitation/index.md
|
||||
- OTP Stage: flow/stages/otp/index.md
|
||||
- Password Stage: flow/stages/password/index.md
|
||||
- Prompt Stage: flow/stages/prompt/index.md
|
||||
- Prompt Stage Validation: flow/stages/prompt/validation.md
|
||||
- User Delete Stage: flow/stages/user_delete.md
|
||||
- User Login Stage: flow/stages/user_login.md
|
||||
- User Logout Stage: flow/stages/user_logout.md
|
||||
- User Write Stage: flow/stages/user_write.md
|
||||
- Sources: sources.md
|
||||
- Providers:
|
||||
- OAuth2: providers/oauth2.md
|
||||
- SAML: providers/saml.md
|
||||
- Proxy: providers/proxy.md
|
||||
- Outposts: outposts/outposts.md
|
||||
- Outposts:
|
||||
- Overview: outposts/outposts.md
|
||||
- Upgrading: outposts/upgrading.md
|
||||
- Deploy on docker-compose: outposts/deploy-docker-compose.md
|
||||
- Deploy on Kubernetes: outposts/deploy-kubernetes.md
|
||||
- Expressions:
|
||||
- Overview: expressions/index.md
|
||||
- Reference:
|
||||
@ -42,16 +47,28 @@ nav:
|
||||
- Overview: policies/index.md
|
||||
- Expression: policies/expression.md
|
||||
- Integrations:
|
||||
- as Source:
|
||||
- Active Directory: integrations/sources/active-directory/index.md
|
||||
- as Provider:
|
||||
- Amazon Web Services: integrations/services/aws/index.md
|
||||
- GitLab: integrations/services/gitlab/index.md
|
||||
- Rancher: integrations/services/rancher/index.md
|
||||
- Harbor: integrations/services/harbor/index.md
|
||||
- Sentry: integrations/services/sentry/index.md
|
||||
- Ansible Tower/AWX: integrations/services/tower-awx/index.md
|
||||
- Amazon Web Services: integrations/services/aws/index.md
|
||||
- GitLab: integrations/services/gitlab/index.md
|
||||
- Rancher: integrations/services/rancher/index.md
|
||||
- Harbor: integrations/services/harbor/index.md
|
||||
- Sentry: integrations/services/sentry/index.md
|
||||
- Ansible Tower/AWX: integrations/services/tower-awx/index.md
|
||||
- VMware vCenter: integrations/services/vmware-vcenter/index.md
|
||||
- Ubuntu Landscape: integrations/services/ubuntu-landscape/index.md
|
||||
- Sonarr: integrations/services/sonarr/index.md
|
||||
- Tautulli: integrations/services/tautulli/index.md
|
||||
- Maintenance:
|
||||
- Backups: maintenance/backups/index.md
|
||||
- Upgrading:
|
||||
- to 0.9: upgrading/to-0.9.md
|
||||
- to 0.10: upgrading/to-0.10.md
|
||||
- to 0.11: upgrading/to-0.11.md
|
||||
- to 0.12: upgrading/to-0.12.md
|
||||
- Troubleshooting:
|
||||
- Access problems: troubleshooting/access.md
|
||||
|
||||
repo_name: "BeryJu/passbook"
|
||||
repo_url: https://github.com/BeryJu/passbook
|
||||
|
@ -1,2 +1,2 @@
|
||||
"""passbook"""
|
||||
__version__ = "0.10.0-rc4"
|
||||
__version__ = "0.12.2-stable"
|
||||
|
0
passbook/admin/api/__init__.py
Normal file
80
passbook/admin/api/overview.py
Normal file
@ -0,0 +1,80 @@
|
||||
"""passbook administration overview"""
|
||||
from django.core.cache import cache
|
||||
from django.http import response
|
||||
from drf_yasg2.utils import swagger_auto_schema
|
||||
from rest_framework.fields import SerializerMethodField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import Serializer
|
||||
from rest_framework.viewsets import ViewSet
|
||||
|
||||
from passbook import __version__
|
||||
from passbook.admin.tasks import VERSION_CACHE_KEY, update_latest_version
|
||||
from passbook.core.models import Provider
|
||||
from passbook.policies.models import Policy
|
||||
from passbook.root.celery import CELERY_APP
|
||||
|
||||
|
||||
class AdministrationOverviewSerializer(Serializer):
|
||||
"""Overview View"""
|
||||
|
||||
version = SerializerMethodField()
|
||||
version_latest = SerializerMethodField()
|
||||
worker_count = SerializerMethodField()
|
||||
providers_without_application = SerializerMethodField()
|
||||
policies_without_binding = SerializerMethodField()
|
||||
cached_policies = SerializerMethodField()
|
||||
cached_flows = SerializerMethodField()
|
||||
|
||||
def get_version(self, _) -> str:
|
||||
"""Get current version"""
|
||||
return __version__
|
||||
|
||||
def get_version_latest(self, _) -> str:
|
||||
"""Get latest version from cache"""
|
||||
version_in_cache = cache.get(VERSION_CACHE_KEY)
|
||||
if not version_in_cache:
|
||||
update_latest_version.delay()
|
||||
return __version__
|
||||
return version_in_cache
|
||||
|
||||
def get_worker_count(self, _) -> int:
|
||||
"""Ping workers"""
|
||||
return len(CELERY_APP.control.ping(timeout=0.5))
|
||||
|
||||
def get_providers_without_application(self, _) -> int:
|
||||
"""Count of providers without application"""
|
||||
return len(Provider.objects.filter(application=None))
|
||||
|
||||
def get_policies_without_binding(self, _) -> int:
|
||||
"""Count of policies not bound or use in prompt stages"""
|
||||
return len(
|
||||
Policy.objects.filter(bindings__isnull=True, promptstage__isnull=True)
|
||||
)
|
||||
|
||||
def get_cached_policies(self, _) -> int:
|
||||
"""Get cached policy count"""
|
||||
return len(cache.keys("policy_*"))
|
||||
|
||||
def get_cached_flows(self, _) -> int:
|
||||
"""Get cached flow count"""
|
||||
return len(cache.keys("flow_*"))
|
||||
|
||||
def create(self, request: Request) -> response:
|
||||
raise NotImplementedError
|
||||
|
||||
def update(self, request: Request) -> Response:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class AdministrationOverviewViewSet(ViewSet):
|
||||
"""Return single instance of AdministrationOverviewSerializer"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
|
||||
@swagger_auto_schema(responses={200: AdministrationOverviewSerializer(many=True)})
|
||||
def list(self, request: Request) -> Response:
|
||||
"""Return single instance of AdministrationOverviewSerializer"""
|
||||
serializer = AdministrationOverviewSerializer(True)
|
||||
return Response(serializer.data)
|
80
passbook/admin/api/overview_metrics.py
Normal file
@ -0,0 +1,80 @@
|
||||
"""passbook administration overview"""
|
||||
import time
|
||||
from collections import Counter
|
||||
from datetime import timedelta
|
||||
from typing import Dict, List
|
||||
|
||||
from django.db.models import Count, ExpressionWrapper, F
|
||||
from django.db.models.fields import DurationField
|
||||
from django.db.models.functions import ExtractHour
|
||||
from django.http import response
|
||||
from django.utils.timezone import now
|
||||
from drf_yasg2.utils import swagger_auto_schema
|
||||
from rest_framework.fields import SerializerMethodField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import Serializer
|
||||
from rest_framework.viewsets import ViewSet
|
||||
|
||||
from passbook.audit.models import Event, EventAction
|
||||
|
||||
|
||||
class AdministrationMetricsSerializer(Serializer):
|
||||
"""Overview View"""
|
||||
|
||||
logins_per_1h = SerializerMethodField()
|
||||
logins_failed_per_1h = SerializerMethodField()
|
||||
|
||||
def get_events_per_1h(self, action: str) -> List[Dict[str, int]]:
|
||||
"""Get event count by hour in the last day, fill with zeros"""
|
||||
date_from = now() - timedelta(days=1)
|
||||
result = (
|
||||
Event.objects.filter(action=action, created__gte=date_from)
|
||||
.annotate(
|
||||
age=ExpressionWrapper(
|
||||
now() - F("created"), output_field=DurationField()
|
||||
)
|
||||
)
|
||||
.annotate(age_hours=ExtractHour("age"))
|
||||
.values("age_hours")
|
||||
.annotate(count=Count("pk"))
|
||||
.order_by("age_hours")
|
||||
)
|
||||
data = Counter({d["age_hours"]: d["count"] for d in result})
|
||||
results = []
|
||||
_now = now()
|
||||
for hour in range(0, -24, -1):
|
||||
results.append(
|
||||
{
|
||||
"x": time.mktime((_now + timedelta(hours=hour)).timetuple()) * 1000,
|
||||
"y": data[hour * -1],
|
||||
}
|
||||
)
|
||||
return results
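`get_events_per_1h` always returns 24 points, newest bucket first, with `x` as a millisecond epoch timestamp and `y` as the event count for that hour (hours with no events stay at 0 thanks to the `Counter`). An illustrative sketch of the shape the frontend receives; the numbers are made up:

# Illustrative shape of the return value of get_events_per_1h(); values are made up.
points = [
    {"x": 1600000000000.0, "y": 4},  # the current hour bucket
    {"x": 1599996400000.0, "y": 0},  # one hour earlier, no events
    # ... 22 more entries, one per hour, newest first
]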
|
||||
|
||||
def get_logins_per_1h(self, _):
|
||||
"""Get successful logins per hour for the last 24 hours"""
|
||||
return self.get_events_per_1h(EventAction.LOGIN)
|
||||
|
||||
def get_logins_failed_per_1h(self, _):
|
||||
"""Get failed logins per hour for the last 24 hours"""
|
||||
return self.get_events_per_1h(EventAction.LOGIN_FAILED)
|
||||
|
||||
def create(self, request: Request) -> response:
|
||||
raise NotImplementedError
|
||||
|
||||
def update(self, request: Request) -> Response:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class AdministrationMetricsViewSet(ViewSet):
|
||||
"""Return single instance of AdministrationMetricsSerializer"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
|
||||
@swagger_auto_schema(responses={200: AdministrationMetricsSerializer(many=True)})
|
||||
def list(self, request: Request) -> Response:
|
||||
"""Return single instance of AdministrationMetricsSerializer"""
|
||||
serializer = AdministrationMetricsSerializer(True)
|
||||
return Response(serializer.data)
|
64
passbook/admin/api/tasks.py
Normal file
@ -0,0 +1,64 @@
|
||||
"""Tasks API"""
|
||||
from importlib import import_module
|
||||
|
||||
from django.contrib import messages
|
||||
from django.http.response import Http404
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_yasg2.utils import swagger_auto_schema
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import CharField, DateTimeField, IntegerField, ListField
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import Serializer
|
||||
from rest_framework.viewsets import ViewSet
|
||||
|
||||
from passbook.lib.tasks import TaskInfo
|
||||
|
||||
|
||||
class TaskSerializer(Serializer):
|
||||
"""Serialize TaskInfo and TaskResult"""
|
||||
|
||||
task_name = CharField()
|
||||
task_description = CharField()
|
||||
task_finish_timestamp = DateTimeField(source="finish_timestamp")
|
||||
|
||||
status = IntegerField(source="result.status.value")
|
||||
messages = ListField(source="result.messages")
|
||||
|
||||
def create(self, request: Request) -> Response:
|
||||
raise NotImplementedError
|
||||
|
||||
def update(self, request: Request) -> Response:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class TaskViewSet(ViewSet):
|
||||
"""Read-only view set that returns all background tasks"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
|
||||
@swagger_auto_schema(responses={200: TaskSerializer(many=True)})
|
||||
def list(self, request: Request) -> Response:
|
||||
"""List current messages and pass into Serializer"""
|
||||
return Response(TaskSerializer(TaskInfo.all().values(), many=True).data)
|
||||
|
||||
@action(detail=True, methods=["post"])
|
||||
# pylint: disable=invalid-name
|
||||
def retry(self, request: Request, pk=None) -> Response:
|
||||
"""Retry task"""
|
||||
task = TaskInfo.by_name(pk)
|
||||
if not task:
|
||||
raise Http404
|
||||
task_module = import_module(task.task_call_module)
|
||||
task_func = getattr(task_module, task.task_call_func)
|
||||
task_func.delay(*task.task_call_args, **task.task_call_kwargs)
|
||||
messages.success(
|
||||
self.request,
|
||||
_("Successfully re-scheduled Task %(name)s!" % {"name": task.task_name}),
|
||||
)
|
||||
return Response(
|
||||
{
|
||||
"successful": True,
|
||||
}
|
||||
)
|
@ -1,9 +1,35 @@
|
||||
"""YAML fields"""
|
||||
"""Additional fields"""
|
||||
import yaml
|
||||
from django import forms
|
||||
from django.utils.datastructures import MultiValueDict
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
class ArrayFieldSelectMultiple(forms.SelectMultiple):
|
||||
"""This is a Form Widget for use with a Postgres ArrayField. It implements
|
||||
a multi-select interface that can be given a set of `choices`.
|
||||
You can provide a `delimiter` keyword argument to specify the delimiter used.
|
||||
|
||||
https://gist.github.com/stephane/00e73c0002de52b1c601"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Accept a `delimiter` argument, and grab it (defaulting to a comma)
|
||||
self.delimiter = kwargs.pop("delimiter", ",")
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def value_from_datadict(self, data, files, name):
|
||||
if isinstance(data, MultiValueDict):
|
||||
# Normally, we'd want a list here, which is what we get from the
|
||||
# SelectMultiple superclass, but the SimpleArrayField expects to
|
||||
# get a delimited string, so we're doing a little extra work.
|
||||
return self.delimiter.join(data.getlist(name))
|
||||
|
||||
return data.get(name)
|
||||
|
||||
def get_context(self, name, value, attrs):
|
||||
return super().get_context(name, value.split(self.delimiter), attrs)
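The widget is meant to sit in front of a Postgres `ArrayField` form field, which expects a single delimited string rather than a list. A minimal usage sketch, assuming the widget is imported from this module; the form, field name, and choices are illustrative:

# Sketch: pairing ArrayFieldSelectMultiple with SimpleArrayField; names are illustrative.
from django import forms
from django.contrib.postgres.forms import SimpleArrayField

PROTOCOL_CHOICES = [("http", "HTTP"), ("ldap", "LDAP")]

class ExampleForm(forms.Form):
    protocols = SimpleArrayField(
        forms.CharField(),
        delimiter=",",
        widget=ArrayFieldSelectMultiple(choices=PROTOCOL_CHOICES, delimiter=","),
    )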
|
||||
|
||||
|
||||
class CodeMirrorWidget(forms.Textarea):
|
||||
"""Custom Textarea-based Widget that triggers a CodeMirror editor"""
|
||||
|
||||
@ -15,11 +41,8 @@ class CodeMirrorWidget(forms.Textarea):
|
||||
self.mode = mode
|
||||
|
||||
def render(self, *args, **kwargs):
|
||||
if "attrs" not in kwargs:
|
||||
kwargs["attrs"] = {}
|
||||
attrs = kwargs["attrs"]
|
||||
if "class" not in attrs:
|
||||
attrs["class"] = ""
|
||||
attrs = kwargs.setdefault("attrs", {})
|
||||
attrs.setdefault("class", "")
|
||||
attrs["class"] += " codemirror"
|
||||
attrs["data-cm-mode"] = self.mode
|
||||
return super().render(*args, **kwargs)
|
||||
@ -52,10 +75,14 @@ class YAMLField(forms.JSONField):
|
||||
converted = yaml.safe_load(value)
|
||||
except yaml.YAMLError:
|
||||
raise forms.ValidationError(
|
||||
self.error_messages["invalid"], code="invalid", params={"value": value},
|
||||
self.error_messages["invalid"],
|
||||
code="invalid",
|
||||
params={"value": value},
|
||||
)
|
||||
if isinstance(converted, str):
|
||||
return YAMLString(converted)
|
||||
if converted is None:
|
||||
return {}
|
||||
return converted
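The branches above normalize the three shapes `yaml.safe_load` can return for typical input: a mapping is passed through, a bare string is wrapped in `YAMLString`, and `None` (empty input) becomes an empty dict. For reference:

# What yaml.safe_load hands back for the cases handled above:
import yaml

yaml.safe_load("foo: bar")   # -> {"foo": "bar"}   returned unchanged
yaml.safe_load("just text")  # -> "just text"      wrapped in YAMLString by to_python
yaml.safe_load("")           # -> None             normalized to {}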
|
||||
|
||||
def bound_data(self, data, initial):
|
||||
|
@ -12,7 +12,7 @@ class UserForm(forms.ModelForm):
|
||||
class Meta:
|
||||
|
||||
model = User
|
||||
fields = ["username", "name", "email", "is_staff", "is_active", "attributes"]
|
||||
fields = ["username", "name", "email", "is_active", "attributes"]
|
||||
widgets = {
|
||||
"name": forms.TextInput,
|
||||
"attributes": CodeMirrorWidget,
|
||||
|
@ -1,26 +0,0 @@
|
||||
"""passbook admin Middleware to impersonate users"""
|
||||
|
||||
from passbook.core.models import User
|
||||
|
||||
|
||||
def impersonate(get_response):
|
||||
"""Middleware to impersonate users"""
|
||||
|
||||
def middleware(request):
|
||||
"""Middleware to impersonate users"""
|
||||
|
||||
# User is superuser and has __impersonate ID set
|
||||
if request.user.is_superuser and "__impersonate" in request.GET:
|
||||
request.session["impersonate_id"] = request.GET["__impersonate"]
|
||||
# user wants to stop impersonation
|
||||
elif "__unimpersonate" in request.GET and "impersonate_id" in request.session:
|
||||
del request.session["impersonate_id"]
|
||||
|
||||
# Actually impersonate user
|
||||
if request.user.is_superuser and "impersonate_id" in request.session:
|
||||
request.user = User.objects.get(pk=request.session["impersonate_id"])
|
||||
|
||||
response = get_response(request)
|
||||
return response
|
||||
|
||||
return middleware
|
@ -1,5 +1,10 @@
|
||||
"""passbook admin settings"""
|
||||
from celery.schedules import crontab
|
||||
|
||||
MIDDLEWARE = [
|
||||
"passbook.admin.middleware.impersonate",
|
||||
]
|
||||
CELERY_BEAT_SCHEDULE = {
|
||||
"admin_latest_version": {
|
||||
"task": "passbook.admin.tasks.update_latest_version",
|
||||
"schedule": crontab(minute=0), # Run every hour
|
||||
"options": {"queue": "passbook_scheduled"},
|
||||
}
|
||||
}
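`crontab(minute=0)` fires at the top of every hour, and the `options` dict routes the task onto the dedicated `passbook_scheduled` queue that the celery worker now consumes (see the `-Q passbook,passbook_scheduled` flag in the bootstrap script above). A sketch of how another periodic task could reuse the same pattern; the task path is hypothetical:

# Sketch: scheduling another hourly task on the scheduled queue.
# "passbook.example.tasks.cleanup" is a hypothetical task path.
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "example_cleanup": {
        "task": "passbook.example.tasks.cleanup",
        "schedule": crontab(minute=0),  # top of every hour
        "options": {"queue": "passbook_scheduled"},
    },
}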
|
||||
|
30
passbook/admin/tasks.py
Normal file
@ -0,0 +1,30 @@
|
||||
"""passbook admin tasks"""
|
||||
from django.core.cache import cache
|
||||
from requests import RequestException, get
|
||||
from structlog import get_logger
|
||||
|
||||
from passbook.lib.tasks import MonitoredTask, TaskResult, TaskResultStatus
|
||||
from passbook.root.celery import CELERY_APP
|
||||
|
||||
LOGGER = get_logger()
|
||||
VERSION_CACHE_KEY = "passbook_latest_version"
|
||||
VERSION_CACHE_TIMEOUT = 2 * 60 * 60 # 2 hours
|
||||
|
||||
|
||||
@CELERY_APP.task(bind=True, base=MonitoredTask)
|
||||
def update_latest_version(self: MonitoredTask):
|
||||
"""Update latest version info"""
|
||||
try:
|
||||
data = get(
|
||||
"https://api.github.com/repos/beryju/passbook/releases/latest"
|
||||
).json()
|
||||
tag_name = data.get("tag_name")
|
||||
cache.set(VERSION_CACHE_KEY, tag_name.split("/")[1], VERSION_CACHE_TIMEOUT)
|
||||
self.set_status(
|
||||
TaskResult(
|
||||
TaskResultStatus.SUCCESSFUL, ["Successfully updated latest Version"]
|
||||
)
|
||||
)
|
||||
except (RequestException, IndexError) as exc:
|
||||
cache.set(VERSION_CACHE_KEY, "0.0.0", VERSION_CACHE_TIMEOUT)
|
||||
self.set_status(TaskResult(TaskResultStatus.ERROR).with_error(exc))
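The cache stores whatever comes after the slash in the release's `tag_name`, so the task assumes tags shaped like `version/<version>`; a tag without a slash raises the `IndexError` handled above and falls back to caching `"0.0.0"`. A parse sketch under that assumption, with illustrative tag values:

# Parse sketch, assuming release tags of the form "version/<version>"; values are illustrative.
tag_name = "version/0.12.2-stable"
tag_name.split("/")[1]   # -> "0.12.2-stable", cached under VERSION_CACHE_KEY

tag_name = "v0.12.2"     # a tag without a slash...
# tag_name.split("/")[1] # ...raises IndexError, so "0.0.0" gets cached instead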
|
@ -18,6 +18,7 @@
|
||||
{% if object_list %}
|
||||
<div class="pf-c-toolbar">
|
||||
<div class="pf-c-toolbar__content">
|
||||
{% include 'partials/toolbar_search.html' %}
|
||||
<div class="pf-c-toolbar__bulk-select">
|
||||
<a href="{% url 'passbook_admin:application-create' %}?back={{ request.get_full_path }}" class="pf-c-button pf-m-primary" type="button">{% trans 'Create' %}</a>
|
||||
</div>
|
||||
@ -62,18 +63,27 @@
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="pf-c-toolbar" id="page-layout-table-simple-toolbar-bottom">
|
||||
<div class="pf-c-pagination pf-m-bottom">
|
||||
{% include 'partials/pagination.html' %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="pf-c-toolbar">
|
||||
<div class="pf-c-toolbar__content">
|
||||
{% include 'partials/toolbar_search.html' %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="pf-c-empty-state">
|
||||
<div class="pf-c-empty-state__content">
|
||||
<i class="fas fa-cubes pf-c-empty-state__icon" aria-hidden="true"></i>
|
||||
<i class="pf-icon pf-icon-applications pf-c-empty-state__icon" aria-hidden="true"></i>
|
||||
<h1 class="pf-c-title pf-m-lg">
|
||||
{% trans 'No Applications.' %}
|
||||
</h1>
|
||||
<div class="pf-c-empty-state__body">
|
||||
{% if request.GET.search != "" %}
|
||||
{% trans "Your search query doesn't match any application." %}
|
||||
{% else %}
|
||||
{% trans 'Currently no applications exist. Click the button below to create one.' %}
|
||||
{% endif %}
|
||||
</div>
|
||||
<a href="{% url 'passbook_admin:application-create' %}?back={{ request.get_full_path }}" class="pf-c-button pf-m-primary" type="button">{% trans 'Create' %}</a>
|
||||
</div>
|
||||
|
@ -25,7 +25,7 @@
|
||||
<li class="pf-c-nav__item">
|
||||
<a href="{% url 'passbook_admin:overview' %}"
|
||||
class="pf-c-nav__link {% is_active 'passbook_admin:overview' %}">
|
||||
{% trans 'System Status' %}
|
||||
{% trans 'Overview' %}
|
||||
</a>
|
||||
</li>
|
||||
<li class="pf-c-nav__item">
|
||||
@ -58,7 +58,7 @@
|
||||
{% trans 'Property Mappings' %}
|
||||
</a>
|
||||
</li>
|
||||
<li class="pf-c-nav__item pf-m-expandable pf-m-expanded">
|
||||
<li class="pf-c-nav__item pf-m-expanded">
|
||||
<a href="#" class="pf-c-nav__link" aria-expanded="true">{% trans 'Flows' %}
|
||||
<span class="pf-c-nav__toggle">
|
||||
<i class="fas fa-angle-right" aria-hidden="true"></i>
|
||||
@ -99,7 +99,7 @@
|
||||
</ul>
|
||||
</section>
|
||||
</li>
|
||||
<li class="pf-c-nav__item pf-m-expandable pf-m-expanded">
|
||||
<li class="pf-c-nav__item pf-m-expanded">
|
||||
<a href="#" class="pf-c-nav__link" aria-expanded="true">{% trans 'Policies' %}
|
||||
<span class="pf-c-nav__toggle">
|
||||
<i class="fas fa-angle-right" aria-hidden="true"></i>
|
||||
@ -146,6 +146,12 @@
|
||||
{% trans 'Groups' %}
|
||||
</a>
|
||||
</li>
|
||||
<li class="pf-c-nav__item">
|
||||
<a href="{% url 'passbook_admin:tasks' %}"
|
||||
class="pf-c-nav__link {% is_active 'passbook_admin:tasks' %}">
|
||||
{% trans 'System Tasks' %}
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
</div>
|
||||
|
@ -18,6 +18,7 @@
|
||||
{% if object_list %}
|
||||
<div class="pf-c-toolbar">
|
||||
<div class="pf-c-toolbar__content">
|
||||
{% include 'partials/toolbar_search.html' %}
|
||||
<div class="pf-c-toolbar__bulk-select">
|
||||
<a href="{% url 'passbook_admin:certificatekeypair-create' %}?back={{ request.get_full_path }}" class="pf-c-button pf-m-primary" type="button">{% trans 'Create' %}</a>
|
||||
</div>
|
||||
@ -64,18 +65,27 @@
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="pf-c-toolbar" id="page-layout-table-simple-toolbar-bottom">
|
||||
<div class="pf-c-pagination pf-m-bottom">
|
||||
{% include 'partials/pagination.html' %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="pf-c-toolbar">
|
||||
<div class="pf-c-toolbar__content">
|
||||
{% include 'partials/toolbar_search.html' %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="pf-c-empty-state">
|
||||
<div class="pf-c-empty-state__content">
|
||||
<i class="fas fa-cubes pf-c-empty-state__icon" aria-hidden="true"></i>
|
||||
<i class="pf-icon pf-icon-key pf-c-empty-state__icon" aria-hidden="true"></i>
|
||||
<h1 class="pf-c-title pf-m-lg">
|
||||
{% trans 'No Certificates.' %}
|
||||
</h1>
|
||||
<div class="pf-c-empty-state__body">
|
||||
{% if request.GET.search != "" %}
|
||||
{% trans "Your search query doesn't match any certificates." %}
|
||||
{% else %}
|
||||
{% trans 'Currently no certificates exist. Click the button below to create one.' %}
|
||||
{% endif %}
|
||||
</div>
|
||||
<a href="{% url 'passbook_admin:certificatekeypair-create' %}?back={{ request.get_full_path }}" class="pf-c-button pf-m-primary" type="button">{% trans 'Create' %}</a>
|
||||
</div>
|
||||
|
@ -18,6 +18,7 @@
|
||||
{% if object_list %}
|
||||
<div class="pf-c-toolbar">
|
||||
<div class="pf-c-toolbar__content">
|
||||
{% include 'partials/toolbar_search.html' %}
|
||||
<div class="pf-c-toolbar__bulk-select">
|
||||
<a href="{% url 'passbook_admin:flow-create' %}?back={{ request.get_full_path }}" class="pf-c-button pf-m-primary" type="button">{% trans 'Create' %}</a>
|
||||
<a href="{% url 'passbook_admin:flow-import' %}?back={{ request.get_full_path }}" class="pf-c-button pf-m-secondary" type="button">{% trans 'Import' %}</a>
|
||||
@ -69,18 +70,27 @@
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="pf-c-toolbar" id="page-layout-table-simple-toolbar-bottom">
|
||||
<div class="pf-c-pagination pf-m-bottom">
|
||||
{% include 'partials/pagination.html' %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="pf-c-toolbar">
|
||||
<div class="pf-c-toolbar__content">
|
||||
{% include 'partials/toolbar_search.html' %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="pf-c-empty-state">
|
||||
<div class="pf-c-empty-state__content">
|
||||
<i class="fas fa-cubes pf-c-empty-state__icon" aria-hidden="true"></i>
|
||||
<i class="pf-icon pf-icon-process-automation pf-c-empty-state__icon" aria-hidden="true"></i>
|
||||
<h1 class="pf-c-title pf-m-lg">
|
||||
{% trans 'No Flows.' %}
|
||||
</h1>
|
||||
<div class="pf-c-empty-state__body">
|
||||
{% if request.GET.search != "" %}
|
||||
{% trans "Your search query doesn't match any flows." %}
|
||||
{% else %}
|
||||
{% trans 'Currently no flows exist. Click the button below to create one.' %}
|
||||
{% endif %}
|
||||
</div>
|
||||
<a href="{% url 'passbook_admin:flow-create' %}?back={{ request.get_full_path }}" class="pf-c-button pf-m-primary" type="button">{% trans 'Create' %}</a>
|
||||
<a href="{% url 'passbook_admin:flow-import' %}?back={{ request.get_full_path }}" class="pf-c-button pf-m-primary" type="button">{% trans 'Import' %}</a>
|
||||
|