Compare commits
45 commits
version/0. ... version/0.

Commits (SHA1):
ec0a6e7854, 6904608e6f, cb3732cb2b, 57de6cbafc, b1dda764a9, 5ec2102487, 9f8fb7378a, 98cd646044, 0cba1b4c45, 53918462b6, 8a7e74b523, 4dc7065e97, 3c93bb9f9f, 8143fae2d6, 3cfe45d3cb, 8e5c3f2f31, 5a3b2fdd49, e47b9f0d57, 146dd747f1, f2ce56063b, b26f378e4c, 9072b836c6, 2fa57d064e, 146705c60a, 5029a99df6, e7129d18f6, d2bf9f81d6, 30acf0660b, dda41af5c8, 9b5b03647b, 940b3eb943, 16eb629b71, 755045b226, 61478db94e, f69f959bdb, 146edb45d4, 045a802365, c90d8ddcff, 3ff2ec929f, a3ef26b7ad, 19cd1624c1, 366ef352c6, a9031a6abc, 15aaeda475, 8536ef9e23
@@ -1,10 +1,10 @@
 [bumpversion]
-current_version = 0.1.28-beta
+current_version = 0.1.34-beta
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)
 serialize = {major}.{minor}.{patch}-{release}
-message = bump version: {current_version} -> {new_version}
+message = new release: {new_version}
 tag_name = version/{new_version}

 [bumpversion:part:release]
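The `parse`/`serialize` pair above defines how a version string such as `0.1.34-beta` is decomposed and re-assembled. As a minimal, illustrative round-trip in Python (this is only a sketch of the regex/format-string mechanics, not a re-implementation of bumpversion's part-reset logic):

```python
import re

# Patterns copied verbatim from the [bumpversion] section above.
PARSE = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)"
SERIALIZE = "{major}.{minor}.{patch}-{release}"

def bump_patch(version: str) -> str:
    """Parse a version with the configured regex, bump the patch part,
    and re-serialize it with the configured format string."""
    parts = re.match(PARSE, version).groupdict()
    parts["patch"] = str(int(parts["patch"]) + 1)
    return SERIALIZE.format(**parts)

print(bump_patch("0.1.34-beta"))  # -> 0.1.35-beta
```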
@@ -9,3 +9,6 @@ insert_final_newline = true

[html]
indent_size = 2

[yaml]
indent_size = 2
.gitlab-ci.yml (225 changes)
@@ -1,156 +1,109 @@
# Global Variables
before_script:
  - "python3 -m pip install -U virtualenv"
  - "virtualenv env"
  - "source env/bin/activate"
  - "pip3 install -U -r requirements-dev.txt"
stages:
  - test
  - build
  - docs
  - deploy
image: python:3.6
  - build-buildimage
  - test
  - build
  - docs
  - deploy
image: docker.beryju.org/passbook/build-base:latest
services:
  - postgres:latest
  - redis:latest
  - postgres:latest
  - redis:latest

variables:
  POSTGRES_DB: passbook
  POSTGRES_USER: passbook
  POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
  POSTGRES_DB: passbook
  POSTGRES_USER: passbook
  POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"

include:
  - /client-packages/allauth/.gitlab-ci.yml
create-build-image:
  image:
    name: gcr.io/kaniko-project/executor:debug
    entrypoint: [""]
  before_script:
    - echo "{\"auths\":{\"docker.$NEXUS_URL\":{\"auth\":\"$NEXUS_AUTH\"}}}" > /kaniko/.docker/config.json
  script:
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile.build-base --destination docker.beryju.org/passbook/build-base:latest --destination docker.beryju.org/passbook/build-base:0.1.34-beta
  stage: build-buildimage
  only:
    refs:
      - tags
      - /^version/.*$/

isort:
  script:
    - isort -c -sg env
  stage: test
  script:
    - isort -c -sg env
  stage: test
migrations:
  script:
    - python manage.py migrate
  stage: test
  script:
    - python manage.py migrate
  stage: test
prospector:
  script:
    - prospector
  stage: test
  script:
    - prospector
  stage: test
pylint:
  script:
    - pylint passbook
  stage: test
  script:
    - pylint passbook
  stage: test
coverage:
  script:
    - coverage run manage.py test
    - coverage report
  stage: test
  script:
    - python manage.py collectstatic --no-input
    - coverage run manage.py test
    - coverage report
  stage: test
bandit:
  script:
    - bandit -r passbook
  stage: test
  script:
    - bandit -r passbook
  stage: test

package-docker:
  image:
    name: gcr.io/kaniko-project/executor:debug
    entrypoint: [""]
  before_script:
    - echo "{\"auths\":{\"docker.$NEXUS_URL\":{\"auth\":\"$NEXUS_AUTH\"}}}" > /kaniko/.docker/config.json
  script:
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination docker.pkg.beryju.org/passbook:latest --destination docker.pkg.beryju.org/passbook:0.1.28-beta
  stage: build
  only:
    - tags
    - /^version/.*$/
  image:
    name: gcr.io/kaniko-project/executor:debug
    entrypoint: [""]
  before_script:
    - echo "{\"auths\":{\"docker.$NEXUS_URL\":{\"auth\":\"$NEXUS_AUTH\"}}}" > /kaniko/.docker/config.json
  script:
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination docker.beryju.org/passbook/server:latest --destination docker.beryju.org/passbook/server:0.1.34-beta
  stage: build
  only:
    - tags
    - /^version/.*$/
package-helm:
  stage: build
  script:
    - curl https://raw.githubusercontent.com/helm/helm/master/scripts/get | bash
    - helm init --client-only
    - helm package helm/passbook
    - ./manage.py nexus_upload --method put --url $NEXUS_URL --auth $NEXUS_AUTH --repo helm *.tgz
  only:
    - tags
    - /^version/.*$/
package-debian:
  before_script:
    - apt update
    - apt install -y --no-install-recommends build-essential debhelper devscripts equivs python3 python3-dev python3-pip libsasl2-dev libldap2-dev
    - mk-build-deps debian/control
    - apt install ./*build-deps*deb -f -y
    - python3 -m pip install -U virtualenv pip
    - virtualenv env
    - source env/bin/activate
    - pip3 install -U -r requirements.txt -r requirements-dev.txt
    - ./manage.py collectstatic --no-input
  image: ubuntu:18.04
  script:
    - debuild -us -uc
    - cp ../passbook*.deb .
    - ./manage.py nexus_upload --method post --url $NEXUS_URL --auth $NEXUS_AUTH --repo apt passbook*deb
  artifacts:
    paths:
      - passbook*deb
    expire_in: 2 days
  stage: build
  only:
    - tags
    - /^version/.*$/
  stage: build
  script:
    - curl https://raw.githubusercontent.com/helm/helm/master/scripts/get | bash
    - helm init --client-only
    - helm package helm/passbook
  artifacts:
    paths:
      - passbook-*.tgz
    expire_in: 2 days
  only:
    - tags
    - /^version/.*$/

package-client-package-allauth:
  script:
    - cd client-packages/allauth
    - python setup.py sdist
    - twine upload --username $TWINE_USERNAME --password $TWINE_PASSWORD dist/*
  stage: build
  only:
    refs:
      - tags
      - /^version/.*$/
    changes:
      - client-packages/allauth/**
  script:
    - cd client-packages/allauth
    - python setup.py sdist
    - twine upload --username $TWINE_USERNAME --password $TWINE_PASSWORD dist/*
  stage: build
  only:
    refs:
      - tags
      - /^version/.*$/
    changes:
      - client-packages/allauth/**

package-client-package-sentry:
  script:
    - cd client-packages/sentry-auth-passbook
    - python setup.py sdist
    - twine upload --username $TWINE_USERNAME --password $TWINE_PASSWORD dist/*
  stage: build
  only:
    refs:
      - tags
      - /^version/.*$/
    changes:
      - client-packages/sentry-auth-passbook/**

# docs:
#   stage: docs
#   only:
#     - master
#     - tags
#     - /^debian/.*$/
#   environment:
#     name: docs
#     url: "https://passbook.beryju.org/docs/"
#   script:
#     - apt update
#     - apt install -y rsync
#     - "mkdir ~/.ssh"
#     - "cp .gitlab/known_hosts ~/.ssh/"
#     - "pip3 install -U -r requirements-docs.txt"
#     - "eval $(ssh-agent -s)"
#     - "echo \"${CI_SSH_PRIVATE}\" | ssh-add -"
#     - mkdocs build
#     - 'rsync -avh --delete web/* "beryjuorg@ory1-web-prod-1.ory1.beryju.org:passbook.beryju.org/"'
#     - 'rsync -avh --delete site/* "beryjuorg@ory1-web-prod-1.ory1.beryju.org:passbook.beryju.org/docs/"'

# deploy:
#   environment:
#     name: production
#     url: https://passbook-prod.default.k8s.beryju.org/
#   stage: deploy
#   only:
#     - tags
#     - /^version/.*$/
#   script:
#     - curl https://raw.githubusercontent.com/helm/helm/master/scripts/get | bash
#     - helm init
#     - helm upgrade passbook-prod helm/passbook --devel
  script:
    - cd client-packages/sentry-auth-passbook
    - python setup.py sdist
    - twine upload --username $TWINE_USERNAME --password $TWINE_PASSWORD dist/*
  stage: build
  only:
    refs:
      - tags
      - /^version/.*$/
    changes:
      - client-packages/sentry-auth-passbook/**
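The kaniko jobs above build their registry credentials by hand-escaping JSON inside `echo`. Purely as an illustrative aside (not part of the change itself), the same `config.json` payload can be produced with Python's `json` module, which avoids the escaping; the registry host and auth values below are placeholders standing in for `$NEXUS_URL` and `$NEXUS_AUTH`:

```python
import json
import os

# Hypothetical stand-ins for the CI variables referenced in the job above.
nexus_url = os.environ.get("NEXUS_URL", "example.org")
nexus_auth = os.environ.get("NEXUS_AUTH", "base64-user-pass")

# Equivalent of: {"auths":{"docker.$NEXUS_URL":{"auth":"$NEXUS_AUTH"}}}
config = {"auths": {f"docker.{nexus_url}": {"auth": nexus_auth}}}

# kaniko reads this from /kaniko/.docker/config.json in the job container.
with open("config.json", "w") as handle:
    json.dump(config, handle)
```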
.vscode/settings.json (3 changes, vendored)
@@ -4,6 +4,9 @@
    "[html]": {
        "editor.tabSize": 2
    },
    "[yml]": {
        "editor.tabSize": 2
    },
    "cSpell.words": [
        "SAML",
        "passbook"
Dockerfile.build-base (12 lines, new file)
@@ -0,0 +1,12 @@
FROM python:3.6

COPY ./passbook/ /app/passbook
COPY ./client-packages/ /app/client-packages
COPY ./requirements.txt /app/
COPY ./requirements-dev.txt /app/

WORKDIR /app/

RUN apt-get update && apt-get install libssl-dev libffi-dev libpq-dev -y && \
    pip install -U -r requirements-dev.txt && \
    rm -rf /app/*
LICENSE (2 changes)
@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2018 BeryJu.org
+Copyright (c) 2019 BeryJu.org

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -1,27 +0,0 @@ (deleted file)
# Global Variables
before_script:
  - cd allauth/
  - "python3 -m pip install -U virtualenv"
  - "virtualenv env"
  - "source env/bin/activate"
  - "pip3 install -U -r requirements-dev.txt"
stages:
  - test-allauth
image: python:3.6

isort:
  script:
    - isort -c -sg env
  stage: test-allauth
prospector:
  script:
    - prospector
  stage: test-allauth
pylint:
  script:
    - pylint passbook
  stage: test-allauth
bandit:
  script:
    - bandit -r allauth_passbook
  stage: test-allauth
@@ -3,7 +3,7 @@ from setuptools import setup

 setup(
     name='django-allauth-passbook',
-    version='0.1.28-beta',
+    version='0.1.34-beta',
     description='passbook support for django-allauth',
     # long_description='\n'.join(read_simple('docs/index.md')[2:]),
     long_description_content_type='text/markdown',
@@ -18,7 +18,7 @@ tests_require = [

 setup(
     name='sentry-auth-passbook',
-    version='0.1.28-beta',
+    version='0.1.34-beta',
     author='BeryJu.org',
     author_email='support@beryju.org',
     url='https://passbook.beryju.org',
debian/changelog (203 changes, vendored, deleted)
@@ -1,203 +0,0 @@
passbook (0.1.28) stable; urgency=medium

  * bump version: 0.1.26-beta -> 0.1.27-beta
  * fix allauth client's formatting
  * switch from raven to sentry_sdk
  * add ability to have non-expiring nonces, clean up expired nonces
  * fully remove raven and switch WSGI and logging to sentry_sdk
  * fix failing CI
  * trigger autoreload from config files
  * Choose upstream more cleverly
  * Move code from django-revproxy to app_gw to fix cookie bug
  * Implement websocket proxy
  * switch kubernetes deployment to daphne server
  * set default log level to warn, fix clean_nonces not working

 -- Jens Langhammer <jens.langhammer@beryju.org>  Thu, 11 Apr 2019 08:46:44 +0000

passbook (0.1.27) stable; urgency=medium

  * bump version: 0.1.25-beta -> 0.1.26-beta
  * fix broken app_gw

 -- Jens Langhammer <jens.langhammer@beryju.org>  Fri, 22 Mar 2019 13:50:31 +0000

passbook (0.1.26) stable; urgency=medium

  * bump version: 0.1.24-beta -> 0.1.25-beta
  * always parse url instead of once
  * validate upstream in form
  * add custom template views

 -- Jens Langhammer <jens.langhammer@beryju.org>  Fri, 22 Mar 2019 11:47:08 +0000

passbook (0.1.25) stable; urgency=medium

  * initial implementation of reverse proxy, using django-revproxy from within a middleware
  * fix TypeError: can only concatenate list (not "str") to list
  * bump version: 0.1.23-beta -> 0.1.24-beta
  * add redis dependency back in for caching
  * utilise cache in PolicyEngine
  * explicitly use redis db
  * invalidate cache when policy is saved
  * add redis as service in CI for unittests
  * add timeout field to policy to prevent stuck policies
  * Don't use LoginRequired for PermissionDenied View
  * Check for policies in app_gw
  * Better handle policy timeouts
  * cleanup post-migration mess
  * prevent ZeroDivisionError
  * Redirect to login on reverse proxy
  * cleanup property_mapping list
  * add compiled regex to RewriteRule
  * implement actual Rewriting logic
  * Invalidate cache when ApplicationGateway instance is saved
  * validate server_name in form

 -- Jens Langhammer <jens.langhammer@beryju.org>  Thu, 21 Mar 2019 15:47:58 +0000

passbook (0.1.24) stable; urgency=medium

  * bump version: 0.1.22-beta -> 0.1.23-beta
  * add modal for OAuth Providers showing the URLs
  * remove user field from form. Closes #32

 -- Jens Langhammer <jens.langhammer@beryju.org>  Wed, 20 Mar 2019 21:59:21 +0000

passbook (0.1.23) stable; urgency=medium

  * add support for OpenID-Connect Discovery

 -- Jens Langhammer <jens.langhammer@beryju.org>  Thu, 18 Mar 2019 20:19:27 +0000

passbook (0.1.22) stable; urgency=medium

  * bump version: 0.1.20-beta -> 0.1.21-beta
  * fix missing debug template
  * move icons to single folder, cleanup
  * fix layout when on mobile viewport and scrolling
  * fix delete form not working
  * point to correct icons
  * add Azure AD Source
  * Fix OAuth Client's disconnect view having invalid URL names

 -- Jens Langhammer <jens.langhammer@beryju.org>  Thu, 14 Mar 2019 20:19:27 +0000

passbook (0.1.21) stable; urgency=medium

  * bump version: 0.1.19-beta -> 0.1.20-beta
  * add request debug view
  * detect HTTPS from reverse proxy

 -- Jens Langhammer <jens.langhammer@beryju.org>  Thu, 14 Mar 2019 17:01:49 +0000

passbook (0.1.20) stable; urgency=medium

  * bump version: 0.1.18-beta -> 0.1.19-beta
  * fix GitHub Pretend again
  * add user settings for Sources

 -- Jens Langhammer <jens.langhammer@beryju.org>  Wed, 13 Mar 2019 15:49:44 +0000

passbook (0.1.18) stable; urgency=medium

  * bump version: 0.1.16-beta -> 0.1.17-beta
  * fix Server Error when downloading metadata
  * add sentry client
  * fix included yaml file
  * adjust versions for client packages, auto build client-packages
  * bump version: 0.1.17-beta -> 0.1.18-beta
  * fix API Call for sentry-client, add missing template
  * fix GitHub Pretend throwing a 500 error

 -- Jens Langhammer <jens.langhammer@beryju.org>  Wed, 13 Mar 2019 14:14:10 +0000

passbook (0.1.17) stable; urgency=medium

  * bump version: 0.1.15-beta -> 0.1.16-beta
  * remove Application.user_is_authorized
  * don't use celery heartbeat, use TCP keepalive instead
  * switch to vertical navigation

 -- Jens Langhammer <jens.langhammer@beryju.org>  Tue, 12 Mar 2019 14:54:27 +0000

passbook (0.1.16) stable; urgency=medium

  * Replace redis with RabbitMQ
  * updated debian package to suggest RabbitMQ
  * update helm chart to require RabbitMQ
  * fix invalid default config in debian package

 -- Jens Langhammer <jens.langhammer@beryju.org>  Mon, 11 Mar 2019 10:28:36 +0000

passbook (0.1.14) stable; urgency=medium

  * bump version: 0.1.11-beta -> 0.1.12-beta
  * Fix DoesNotExist error when running PolicyEngine against None user
  * allow custom email server for helm installs
  * fix UserChangePasswordView not requiring Login

 -- Jens Langhammer <jens.langhammer@beryju.org>  Mon, 11 Mar 2019 10:28:36 +0000

passbook (0.1.12) stable; urgency=medium

  * bump version: 0.1.10-beta -> 0.1.11-beta
  * rewrite PasswordFactor to use backends setting instead of trying all backends
  * install updated helm release from local folder
  * disable automatic k8s deployment for now
  * fix OAuth Authorization View not requiring authentication

 -- Jens Langhammer <jens.langhammer@beryju.org>  Mon, 11 Mar 2019 08:50:29 +0000

passbook (0.1.11) stable; urgency=medium

  * add group administration
  * bump version: 0.1.9-beta -> 0.1.10-beta
  * fix helm labels being on deployments and not pods
  * automatically deploy after release
  * use Django's Admin FilteredSelectMultiple for Group Membership
  * always use FilteredSelectMultiple for many-to-many fields
  * Add Group Member policy
  * add LDAP Group Membership Policy

 -- Jens Langhammer <jens.langhammer@beryju.org>  Sun, 10 Mar 2019 18:55:31 +0000

passbook (0.1.10) stable; urgency=high

  * bump version: 0.1.7-beta -> 0.1.8-beta
  * consistently using PolicyEngine
  * add more Verbosity to PolicyEngine, rewrite SAML Authorisation check
  * slightly refactor Factor View, add more unittests
  * add impersonation middleware, add to templates
  * bump version: 0.1.8-beta -> 0.1.9-beta
  * fix k8s service routing http traffic to workers
  * Fix button on policy test page
  * better show loading state when testing a policy

 -- Jens Langhammer <jens.langhammer@beryju.org>  Sun, 10 Mar 2019 14:52:40 +0000

passbook (0.1.7) stable; urgency=medium

  * bump version: 0.1.3-beta -> 0.1.4-beta
  * implicitly add kubernetes-healthcheck-host in helm configmap
  * fix debian build (again)
  * add PropertyMapping Model, add Subclass for SAML, test with AWS
  * add custom DynamicArrayField to better handle arrays
  * format data before inserting it
  * bump version: 0.1.4-beta -> 0.1.5-beta
  * fix static files missing for debian package
  * fix password not getting set on user import
  * remove audit's login attempt
  * add passing property to PolicyEngine
  * fix captcha factor not loading keys from Factor class
  * bump version: 0.1.5-beta -> 0.1.6-beta
  * fix MATCH_EXACT not working as intended
  * Improve access control for saml

 -- Jens Langhammer <jens.langhammer@beryju.org>  Fri, 08 Mar 2019 20:37:05 +0000

passbook (0.1.4) stable; urgency=medium

  * initial debian package release

 -- Jens Langhammer <jens.langhammer@beryju.org>  Wed, 06 Mar 2019 18:22:41 +0000
debian/compat (1 change, vendored, deleted)
@@ -1 +0,0 @@
10
debian/config (20 changes, vendored, deleted)
@@ -1,20 +0,0 @@
#!/bin/sh
# config maintainer script for passbook
set -e

# source debconf stuff
. /usr/share/debconf/confmodule

dbc_first_version=1.0.0
dbc_dbuser=passbook
dbc_dbname=passbook

# source dbconfig-common shell library, and call the hook function
if [ -f /usr/share/dbconfig-common/dpkg/config.pgsql ]; then
    . /usr/share/dbconfig-common/dpkg/config.pgsql
    dbc_go passbook "$@"
fi

#DEBHELPER#

exit 0
debian/control (14 changes, vendored, deleted)
@@ -1,14 +0,0 @@
Source: passbook
Section: admin
Priority: optional
Maintainer: BeryJu.org <support@beryju.org>
Uploaders: Jens Langhammer <jens@beryju.org>, BeryJu.org <support@beryju.org>
Build-Depends: debhelper (>= 10), dh-systemd (>= 1.5), dh-exec, wget, dh-exec, python3 (>= 3.5) | python3.6 | python3.7
Standards-Version: 3.9.6

Package: passbook
Architecture: all
Recommends: mysql-server, rabbitmq-server, redis-server
Pre-Depends: adduser, libldap2-dev, libsasl2-dev
Depends: python3 (>= 3.5) | python3.6 | python3.7, python3-pip, dbconfig-pgsql | dbconfig-no-thanks, ${misc:Depends}
Description: Authentication Provider/Proxy supporting protocols like SAML, OAuth, LDAP and more.
debian/copyright (22 changes, vendored, deleted)
@@ -1,22 +0,0 @@
MIT License

Copyright (c) 2019 BeryJu.org

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
debian/dirs (4 changes, vendored, deleted)
@@ -1,4 +0,0 @@
etc/passbook/
etc/passbook/config.d/
var/log/passbook/
usr/share/passbook/
debian/etc/passbook/config.yml (81 changes, vendored, deleted)
@@ -1,81 +0,0 @@
http:
host: 0.0.0.0
port: 8000
secret_key_file: /etc/passbook/secret_key
log:
level:
console: INFO
file: DEBUG
file: /var/log/passbook/passbook.log
debug: false
secure_proxy_header:
HTTP_X_FORWARDED_PROTO: https
rabbitmq: guest:guest@localhost/passbook
redis: localhost/0

# Error reporting, sends stacktrace to sentry.services.beryju.org
error_report_enabled: true

primary_domain: passbook.local

passbook:
sign_up:
# Enables signup, created users are stored in internal Database and created in LDAP if ldap.create_users is true
enabled: true
password_reset:
# Enable password reset, passwords are reset in internal Database and in LDAP if ldap.reset_password is true
enabled: true
# Verification the user has to provide in order to be able to reset passwords. Can be any combination of `email`, `2fa`, `security_questions`
verification:
- email
# Text used in title, on login page and multiple other places
branding: passbook
login:
# Override URL used for logo
logo_url: null
# Override URL used for Background on Login page
bg_url: null
# Optionally add a subtext, placed below logo on the login page
subtext: null
footer:
links:
# Optionally add links to the footer on the login page
# - name: test
#   href: https://test
# Specify which fields can be used to authenticate. Can be any combination of `username` and `email`
uid_fields:
- username
- email
session:
remember_age: 2592000 # 60 * 60 * 24 * 30, one month
# Provider-specific settings
ldap:
# Which field from `uid_fields` maps to which LDAP Attribute
login_field_map:
username: sAMAccountName
email: mail # or userPrincipalName
user_attribute_map:
active_directory:
username: "%(sAMAccountName)s"
email: "%(mail)s"
name: "%(displayName)"
oauth_client:
# List of python packages with sources types to load.
types:
- passbook.oauth_client.source_types.discord
- passbook.oauth_client.source_types.facebook
- passbook.oauth_client.source_types.github
- passbook.oauth_client.source_types.google
- passbook.oauth_client.source_types.reddit
- passbook.oauth_client.source_types.supervisr
- passbook.oauth_client.source_types.twitter
saml_idp:
# List of python packages with provider types to load.
types:
- passbook.saml_idp.processors.generic
- passbook.saml_idp.processors.aws
- passbook.saml_idp.processors.gitlab
- passbook.saml_idp.processors.nextcloud
- passbook.saml_idp.processors.salesforce
- passbook.saml_idp.processors.shibboleth
- passbook.saml_idp.processors.wordpress_orange
debian/gbp.conf (2 changes, vendored, deleted)
@@ -1,2 +0,0 @@
[buildpackage]
export-dir=../build-area
debian/install (8 changes, vendored, deleted)
@@ -1,8 +0,0 @@
passbook /usr/share/passbook/
static /usr/share/passbook/
manage.py /usr/share/passbook/
passbook.sh /usr/share/passbook/
vendor /usr/share/passbook/

debian/etc/passbook /etc/
debian/templates/database.yml /usr/share/passbook/
debian/links (0 changes, vendored, deleted)
debian/passbook-worker.service (14 changes, vendored, deleted)
@@ -1,14 +0,0 @@
[Unit]
Description=passbook - Authentication Provider/Proxy (Background worker)
After=network.target
Requires=network.target

[Service]
User=passbook
Group=passbook
WorkingDirectory=/usr/share/passbook
Type=simple
ExecStart=/usr/share/passbook/passbook.sh worker

[Install]
WantedBy=multi-user.target
debian/passbook.service (14 changes, vendored, deleted)
@@ -1,14 +0,0 @@
[Unit]
Description=passbook - Authentication Provider/Proxy
After=network.target
Requires=network.target

[Service]
User=passbook
Group=passbook
WorkingDirectory=/usr/share/passbook
Type=simple
ExecStart=/usr/share/passbook/passbook.sh web

[Install]
WantedBy=multi-user.target
debian/postinst (36 changes, vendored, deleted)
@@ -1,36 +0,0 @@
#!/bin/bash

set -e

. /usr/share/debconf/confmodule
. /usr/share/dbconfig-common/dpkg/postinst.pgsql

# you can set the default database encoding to something else
dbc_pgsql_createdb_encoding="UTF8"
dbc_generate_include=template:/etc/passbook/config.d/database.yml
dbc_generate_include_args="-o template_infile=/usr/share/passbook/database.yml"
dbc_go passbook "$@"

if [ -z "`getent group passbook`" ]; then
    addgroup --quiet --system passbook
fi
if [ -z "`getent passwd passbook`" ]; then
    echo " * Creating user and group passbook..."
    adduser --quiet --system --home /usr/share/passbook --shell /bin/false --ingroup passbook --disabled-password --disabled-login --gecos "passbook User" passbook >> /var/log/passbook/passbook.log 2>&1
fi
echo " * Updating binary packages (psycopg2)"
python3 -m pip install --target=/usr/share/passbook/vendor/ --no-cache-dir --upgrade --force-reinstall psycopg2 >> /var/log/passbook/passbook.log 2>&1
if [ ! -f '/etc/passbook/secret_key' ]; then
    echo " * Generating Secret Key"
    python3 -c 'import random; result = "".join([random.choice("abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)") for i in range(50)]); print(result)' > /etc/passbook/secret_key 2> /dev/null
fi
chown -R passbook: /usr/share/passbook/
chown -R passbook: /etc/passbook/
chown -R passbook: /var/log/passbook/
chmod 440 /etc/passbook/secret_key
echo " * Running Database Migration"
/usr/share/passbook/passbook.sh migrate
echo " * A superuser can be created with this command '/usr/share/passbook/passbook.sh createsuperuser'"
echo " * You should probably also adjust your settings in '/etc/passbook/config.yml'"

#DEBHELPER#
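The secret-key step in this postinst shells out to a Python one-liner. Expanded for readability, it amounts to the following sketch (character set and key length of 50 copied verbatim from the one-liner above):

```python
import random

# Character set taken from the postinst one-liner above.
ALPHABET = "abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)"

def generate_secret_key(length: int = 50) -> str:
    """Build a random key the same way the maintainer script does."""
    return "".join(random.choice(ALPHABET) for _ in range(length))

if __name__ == "__main__":
    # The script redirects this output into /etc/passbook/secret_key.
    print(generate_secret_key())
```

In new code, `secrets.choice` would be the more appropriate primitive for key material; the sketch simply mirrors the original `random.choice` call.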
debian/postrm (24 changes, vendored, deleted)
@@ -1,24 +0,0 @@
#!/bin/sh

set -e

if [ -f /usr/share/debconf/confmodule ]; then
    . /usr/share/debconf/confmodule
fi
if [ -f /usr/share/dbconfig-common/dpkg/postrm.pgsql ]; then
    . /usr/share/dbconfig-common/dpkg/postrm.pgsql
    dbc_go passbook "$@"
fi


if [ "$1" = "purge" ]; then
    if which ucf >/dev/null 2>&1; then
        ucf --purge /etc/passbook/config.d/database.yml
        ucfr --purge passbook /etc/passbook/config.d/database.yml
    fi
    rm -rf /etc/passbook/
    rm -rf /usr/share/passbook/
fi

#DEBHELPER#
debian/prerm (10 changes, vendored, deleted)
@@ -1,10 +0,0 @@
#!/bin/sh

set -e

. /usr/share/debconf/confmodule
. /usr/share/dbconfig-common/dpkg/prerm.pgsql
dbc_go passbook "$@"

#DEBHELPER#
debian/rules (27 changes, vendored, deleted)
@@ -1,27 +0,0 @@
#!/usr/bin/make -f

# Uncomment this to turn on verbose mode.
# export DH_VERBOSE=1

%:
	dh $@ --with=systemd

build-arch:
	python3 -m pip install setuptools
	python3 -m pip install --target=vendor/ -r requirements.txt

override_dh_strip:
	dh_strip --exclude=psycopg2

override_dh_shlibdeps:
	dh_shlibdeps --exclude=psycopg2

override_dh_installinit:
	dh_installinit --name=passbook
	dh_installinit --name=passbook-worker
	dh_systemd_enable --name=passbook
	dh_systemd_enable --name=passbook-worker
	dh_systemd_start

# override_dh_usrlocal to do nothing
override_dh_usrlocal:
debian/source/format (1 change, vendored, deleted)
@@ -1 +0,0 @@
3.0 (native)
debian/templates/database.yml (8 changes, vendored, deleted)
@@ -1,8 +0,0 @@
databases:
    default:
        engine: django.db.backends.postgresql
        name: _DBC_DBNAME_
        user: _DBC_DBUSER_
        password: _DBC_DBPASS_
        host: _DBC_DBSERVER_
        port: _DBC_DBPORT_
@@ -1,6 +1,6 @@
 apiVersion: v1
-appVersion: "0.1.28-beta"
+appVersion: "0.1.34-beta"
 description: A Helm chart for passbook.
 name: passbook
-version: "0.1.28-beta"
-icon: https://passbook.beryju.org/images/logo.png
+version: "0.1.34-beta"
+icon: https://git.beryju.org/uploads/-/system/project/avatar/108/logo.png
helm/passbook/app-readme.md (1 line, new file)
@@ -0,0 +1 @@
# passbook
helm/passbook/questions.yml (98 lines, new file)
@@ -0,0 +1,98 @@
---
categories:
- Authentication
- SSO
questions:
- default: "true"
  variable: config.error_reporting
  type: boolean
  description: "Enable error-reporting to sentry.services.beryju.org"
  group: "passbook Configuration"
  label: "Error Reporting"
####################################################################
### PostgreSQL
####################################################################
- variable: postgresql.enabled
  default: true
  description: "Deploy a database server as part of this deployment, or set to false and configure an external database connection."
  type: boolean
  required: true
  label: Install PostgreSQL
  show_subquestion_if: true
  group: "Database Settings"
  subquestions:
  - variable: postgresql.postgresqlDatabase
    default: "passbook"
    description: "Database name to create"
    type: string
    label: PostgreSQL Database
  - variable: postgresql.postgresqlUsername
    default: "passbook"
    description: "Database user to create"
    type: string
    label: PostgreSQL User
  - variable: postgresql.postgresqlPassword
    default: ""
    description: "password will be auto-generated if not specified"
    type: password
    label: PostgreSQL Password
- variable: externalDatabase.host
  default: ""
  description: "Host of the external database"
  type: string
  label: External Database Host
  show_if: "postgresql.enabled=false"
  group: "Database Settings"
- variable: externalDatabase.user
  default: ""
  description: "Existing username in the external DB"
  type: string
  label: External Database username
  show_if: "postgresql.enabled=false"
  group: "Database Settings"
- variable: externalDatabase.password
  default: ""
  description: "External database password"
  type: password
  label: External Database password
  show_if: "postgresql.enabled=false"
  group: "Database Settings"
- variable: externalDatabase.database
  default: ""
  description: "Name of the existing database"
  type: string
  label: External Database
  show_if: "postgresql.enabled=false"
  group: "Database Settings"
- variable: externalDatabase.port
  default: "3306"
  description: "External database port number"
  type: string
  label: External Database Port
  show_if: "postgresql.enabled=false"
  group: "Database Settings"
- variable: postgresql.persistence.enabled
  default: false
  description: "Enable persistent volume for PostgreSQL"
  type: boolean
  required: true
  label: PostgreSQL Persistent Volume Enabled
  show_if: "postgresql.enabled=true"
  show_subquestion_if: true
  group: "Database Settings"
  subquestions:
  - variable: postgresql.master.persistence.size
    default: "8Gi"
    description: "PostgreSQL Persistent Volume Size"
    type: string
    label: PostgreSQL Volume Size
  - variable: postgresql.master.persistence.storageClass
    default: ""
    description: "If undefined or null, uses the default StorageClass. Default to null"
    type: storageclass
    label: Default StorageClass for PostgreSQL
  - variable: postgresql.master.persistence.existingClaim
    default: ""
    description: "If not empty, uses the specified existing PVC instead of creating new one"
    type: string
    label: Existing Persistent Volume Claim for PostgreSQL
@@ -134,9 +134,4 @@ data:
# List of python packages with provider types to load.
types:
- passbook.saml_idp.processors.generic
- passbook.saml_idp.processors.aws
- passbook.saml_idp.processors.gitlab
- passbook.saml_idp.processors.nextcloud
- passbook.saml_idp.processors.salesforce
- passbook.saml_idp.processors.shibboleth
- passbook.saml_idp.processors.wordpress_orange
@@ -26,10 +26,10 @@ spec:
 name: {{ include "passbook.fullname" . }}-config
 containers:
 - name: {{ .Chart.Name }}
-image: "docker.pkg.beryju.org/passbook:{{ .Values.image.tag }}"
+image: "docker.beryju.org/passbook/server:{{ .Values.image.tag }}"
 imagePullPolicy: IfNotPresent
 command: ["/bin/sh","-c"]
-args: ["./manage.py migrate && daphne -p 8000 passbook.core.asgi:application"]
+args: ["./manage.py migrate && ./manage.py web"]
 ports:
 - name: http
 containerPort: 8000
@@ -26,7 +26,7 @@ spec:
 name: {{ include "passbook.fullname" . }}-config
 containers:
 - name: {{ .Chart.Name }}
-image: "docker.pkg.beryju.org/passbook:{{ .Values.image.tag }}"
+image: "docker.beryju.org/passbook/server:{{ .Values.image.tag }}"
 imagePullPolicy: IfNotPresent
 command: ["./manage.py", "worker"]
 ports:
@@ -5,7 +5,7 @@
 replicaCount: 1

 image:
-  tag: 0.1.28-beta
+  tag: 0.1.34-beta

 nameOverride: ""
@@ -1,7 +0,0 @@ (deleted file)
#!/bin/bash

# Check if this file is a symlink, if so, read real base dir
BASE_DIR=$(dirname $(readlink -f ${BASH_SOURCE[0]}))

cd $BASE_DIR
PYTHONPATH="${BASE_DIR}/vendor/" python3 manage.py $@
@@ -1,2 +1,2 @@
 """passbook"""
-__version__ = '0.1.28-beta'
+__version__ = '0.1.34-beta'
@@ -1,2 +1,2 @@
 """passbook admin"""
-__version__ = '0.1.28-beta'
+__version__ = '0.1.34-beta'
@@ -1,2 +1,2 @@
 """passbook api"""
-__version__ = '0.1.28-beta'
+__version__ = '0.1.34-beta'
@@ -1,2 +1,2 @@
 """passbook Application Security Gateway Header"""
-__version__ = '0.1.28-beta'
+__version__ = '0.1.34-beta'
@@ -1,238 +1,33 @@
"""passbook app_gw middleware"""
import mimetypes
from logging import getLogger
from random import SystemRandom
from urllib.parse import urlparse

import certifi
import urllib3
from django.core.cache import cache
from django.utils.http import urlencode
from django.views.generic import RedirectView
from revproxy.exceptions import InvalidUpstream

from passbook.app_gw.models import ApplicationGatewayProvider
from passbook.app_gw.proxy.response import get_django_response
from passbook.app_gw.proxy.utils import encode_items, normalize_request_headers
from passbook.app_gw.rewrite import Rewriter
from passbook.core.models import Application
from passbook.core.policies import PolicyEngine
from passbook.app_gw.proxy.handler import RequestHandler
from passbook.lib.config import CONFIG

SESSION_UPSTREAM_KEY = 'passbook_app_gw_upstream'
IGNORED_HOSTNAMES_KEY = 'passbook_app_gw_ignored'
LOGGER = getLogger(__name__)
QUOTE_SAFE = r'<.;>\(}*+|~=-$/_:^@)[{]&\'!,"`'
ERRORS_MESSAGES = {
    'upstream-no-scheme': ("Upstream URL scheme must be either "
                           "'http' or 'https' (%s).")
}

# pylint: disable=too-many-instance-attributes
class ApplicationGatewayMiddleware:
    """Check if request should be proxied or handeled normally"""

    ignored_hosts = []
    request = None
    app_gw = None
    http = None
    http_no_verify = None
    host_header = ''

    _parsed_url = None
    _request_headers = None
    _app_gw_cache = {}

    def __init__(self, get_response):
        self.get_response = get_response
        self.ignored_hosts = cache.get(IGNORED_HOSTNAMES_KEY, [])
        self.http_no_verify = urllib3.PoolManager()
        self.http = urllib3.PoolManager(
            cert_reqs='CERT_REQUIRED',
            ca_certs=certifi.where())

    def precheck(self, request):
        """Check if a request should be proxied or forwarded to passbook"""
        # Check if hostname is in cached list of ignored hostnames
        # This saves us having to query the database on each request
        self.host_header = request.META.get('HTTP_HOST')
        if self.host_header in self.ignored_hosts:
            LOGGER.debug("%s is ignored", self.host_header)
            return True, None
        # Look through all ApplicationGatewayProviders and check hostnames
        matches = ApplicationGatewayProvider.objects.filter(
            server_name__contains=[self.host_header],
            enabled=True)
        if not matches.exists():
            # Mo matching Providers found, add host header to ignored list
            self.ignored_hosts.append(self.host_header)
            cache.set(IGNORED_HOSTNAMES_KEY, self.ignored_hosts)
            LOGGER.debug("Ignoring %s", self.host_header)
            return True, None
        # At this point we're certain there's a matching ApplicationGateway
        if len(matches) > 1:
            # This should never happen
            raise ValueError
        app_gw = matches.first()
        try:
            # Check if ApplicationGateway is associated with application
            getattr(app_gw, 'application')
            return False, app_gw
        except Application.DoesNotExist:
            LOGGER.debug("ApplicationGateway not associated with Application")
            return True, None
        return True, None

    def __call__(self, request):
        forward, self.app_gw = self.precheck(request)
        if forward:
            return self.get_response(request)
        self.request = request
        return self.dispatch(request)
        # Rudimentary cache
        host_header = request.META.get('HTTP_HOST')
        if host_header not in self._app_gw_cache:
            self._app_gw_cache[host_header] = RequestHandler.find_app_gw_for_request(request)
        if self._app_gw_cache[host_header]:
            return self.dispatch(request, self._app_gw_cache[host_header])
        return self.get_response(request)

    def _get_upstream(self):
        """Choose random upstream and save in session"""
        if SESSION_UPSTREAM_KEY not in self.request.session:
            self.request.session[SESSION_UPSTREAM_KEY] = {}
        if self.app_gw.pk not in self.request.session[SESSION_UPSTREAM_KEY]:
            upstream_index = SystemRandom().randrange(len(self.app_gw.upstream))
            self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk] = upstream_index
        return self.app_gw.upstream[self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk]]

    def get_upstream(self):
        """Get upstream as parsed url"""
        upstream = self._get_upstream()

        self._parsed_url = urlparse(upstream)

        if self._parsed_url.scheme not in ('http', 'https'):
            raise InvalidUpstream(ERRORS_MESSAGES['upstream-no-scheme'] %
                                  upstream)

        return upstream

    def _format_path_to_redirect(self, request):
        LOGGER.debug("Path before: %s", request.get_full_path())
        rewriter = Rewriter(self.app_gw, request)
        after = rewriter.build()
        LOGGER.debug("Path after: %s", after)
        return after

    def get_proxy_request_headers(self, request):
        """Get normalized headers for the upstream
        Gets all headers from the original request and normalizes them.
        Normalization occurs by removing the prefix ``HTTP_`` and
        replacing and ``_`` by ``-``. Example: ``HTTP_ACCEPT_ENCODING``
        becames ``Accept-Encoding``.
        .. versionadded:: 0.9.1
        :param request: The original HTTPRequest instance
        :returns: Normalized headers for the upstream
        """
        return normalize_request_headers(request)

    def get_request_headers(self):
        """Return request headers that will be sent to upstream.
        The header REMOTE_USER is set to the current user
        if AuthenticationMiddleware is enabled and
        the view's add_remote_user property is True.
        .. versionadded:: 0.9.8
        """
        request_headers = self.get_proxy_request_headers(self.request)
        request_headers[self.app_gw.authentication_header] = self.request.user.get_username()
        LOGGER.info("%s set", self.app_gw.authentication_header)

        return request_headers

    def check_permission(self):
        """Check if user is authenticated and has permission to access app"""
        if not hasattr(self.request, 'user'):
            return False
        if not self.request.user.is_authenticated:
            return False
        policy_engine = PolicyEngine(self.app_gw.application.policies.all())
        policy_engine.for_user(self.request.user).with_request(self.request).build()
        passing, _messages = policy_engine.result

        return passing

    def get_encoded_query_params(self):
        """Return encoded query params to be used in proxied request"""
        get_data = encode_items(self.request.GET.lists())
        return urlencode(get_data)

    def _created_proxy_response(self, request, path):
        request_payload = request.body

        LOGGER.debug("Request headers: %s", self._request_headers)

        request_url = self.get_upstream() + path
        LOGGER.debug("Request URL: %s", request_url)

        if request.GET:
            request_url += '?' + self.get_encoded_query_params()
            LOGGER.debug("Request URL: %s", request_url)

        http = self.http
        if not self.app_gw.upstream_ssl_verification:
            http = self.http_no_verify

        try:
            proxy_response = http.urlopen(request.method,
                                          request_url,
                                          redirect=False,
                                          retries=None,
                                          headers=self._request_headers,
                                          body=request_payload,
                                          decode_content=False,
                                          preload_content=False)
            LOGGER.debug("Proxy response header: %s",
                         proxy_response.getheaders())
        except urllib3.exceptions.HTTPError as error:
            LOGGER.exception(error)
            raise

        return proxy_response

    def _replace_host_on_redirect_location(self, request, proxy_response):
        location = proxy_response.headers.get('Location')
        if location:
            if request.is_secure():
                scheme = 'https://'
            else:
                scheme = 'http://'
            request_host = scheme + self.host_header

            upstream_host_http = 'http://' + self._parsed_url.netloc
            upstream_host_https = 'https://' + self._parsed_url.netloc

            location = location.replace(upstream_host_http, request_host)
            location = location.replace(upstream_host_https, request_host)
            proxy_response.headers['Location'] = location
            LOGGER.debug("Proxy response LOCATION: %s",
                         proxy_response.headers['Location'])

    def _set_content_type(self, request, proxy_response):
        content_type = proxy_response.headers.get('Content-Type')
        if not content_type:
            content_type = (mimetypes.guess_type(request.path)[0] or
                            self.app_gw.default_content_type)
            proxy_response.headers['Content-Type'] = content_type
            LOGGER.debug("Proxy response CONTENT-TYPE: %s",
                         proxy_response.headers['Content-Type'])

    def dispatch(self, request):
    def dispatch(self, request, app_gw):
        """Build proxied request and pass to upstream"""
        if not self.check_permission():
        handler = RequestHandler(app_gw, request)

        if not handler.check_permission():
            to_url = 'https://%s/?next=%s' % (CONFIG.get('domains')[0], request.get_full_path())
            return RedirectView.as_view(url=to_url)(request)

        self._request_headers = self.get_request_headers()

        path = self._format_path_to_redirect(request)
        proxy_response = self._created_proxy_response(request, path)

        self._replace_host_on_redirect_location(request, proxy_response)
        self._set_content_type(request, proxy_response)
        response = get_django_response(proxy_response, strict_cookies=False)

        LOGGER.debug("RESPONSE RETURNED: %s", response)
        return response
        return handler.get_response()
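Condensed from the added lines visible in the hunk above, the slimmed-down middleware now only memoizes the matching gateway per host header and delegates everything else to `RequestHandler`. A minimal sketch of that flow, assembled from the new lines shown (not a verbatim copy of the resulting file):

```python
from django.views.generic import RedirectView

from passbook.app_gw.proxy.handler import RequestHandler
from passbook.lib.config import CONFIG


class ApplicationGatewayMiddleware:
    """Check if a request should be proxied or handled normally."""

    _app_gw_cache = {}

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        # Rudimentary per-host cache so the database is not hit on every request.
        host_header = request.META.get('HTTP_HOST')
        if host_header not in self._app_gw_cache:
            self._app_gw_cache[host_header] = RequestHandler.find_app_gw_for_request(request)
        if self._app_gw_cache[host_header]:
            return self.dispatch(request, self._app_gw_cache[host_header])
        return self.get_response(request)

    def dispatch(self, request, app_gw):
        """Build proxied request and pass to upstream"""
        handler = RequestHandler(app_gw, request)
        if not handler.check_permission():
            to_url = 'https://%s/?next=%s' % (CONFIG.get('domains')[0], request.get_full_path())
            return RedirectView.as_view(url=to_url)(request)
        return handler.get_response()
```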
passbook/app_gw/migrations/0003_auto_20190411_1314.py (18 lines, new file)
@@ -0,0 +1,18 @@
# Generated by Django 2.2 on 2019-04-11 13:14

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_app_gw', '0002_auto_20190321_1521'),
    ]

    operations = [
        migrations.AlterField(
            model_name='applicationgatewayprovider',
            name='authentication_header',
            field=models.TextField(blank=True, default='X-Remote-User'),
        ),
    ]
@@ -15,7 +15,7 @@ class ApplicationGatewayProvider(Provider):
     upstream = ArrayField(models.TextField())
     enabled = models.BooleanField(default=True)

-    authentication_header = models.TextField(default='X-Remote-User')
+    authentication_header = models.TextField(default='X-Remote-User', blank=True)
     default_content_type = models.TextField(default='application/octet-stream')
     upstream_ssl_verification = models.BooleanField(default=True)
225
passbook/app_gw/proxy/handler.py
Normal file
225
passbook/app_gw/proxy/handler.py
Normal file
@ -0,0 +1,225 @@
|
||||
"""passbook app_gw request handler"""
|
||||
import mimetypes
|
||||
from logging import getLogger
|
||||
from random import SystemRandom
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import certifi
|
||||
import urllib3
|
||||
from django.core.cache import cache
|
||||
from django.utils.http import urlencode
|
||||
|
||||
from passbook.app_gw.models import ApplicationGatewayProvider
|
||||
from passbook.app_gw.proxy.exceptions import InvalidUpstream
|
||||
from passbook.app_gw.proxy.response import get_django_response
|
||||
from passbook.app_gw.proxy.rewrite import Rewriter
|
||||
from passbook.app_gw.proxy.utils import encode_items, normalize_request_headers
|
||||
from passbook.core.models import Application
|
||||
from passbook.core.policies import PolicyEngine
|
||||
|
||||
SESSION_UPSTREAM_KEY = 'passbook_app_gw_upstream'
|
||||
IGNORED_HOSTNAMES_KEY = 'passbook_app_gw_ignored'
|
||||
LOGGER = getLogger(__name__)
|
||||
QUOTE_SAFE = r'<.;>\(}*+|~=-$/_:^@)[{]&\'!,"`'
|
||||
ERRORS_MESSAGES = {
|
||||
'upstream-no-scheme': ("Upstream URL scheme must be either "
|
||||
"'http' or 'https' (%s).")
|
||||
}
|
||||
HTTP_NO_VERIFY = urllib3.PoolManager()
|
||||
HTTP = urllib3.PoolManager(
|
||||
cert_reqs='CERT_REQUIRED',
|
||||
ca_certs=certifi.where())
|
||||
IGNORED_HOSTS = cache.get(IGNORED_HOSTNAMES_KEY, [])
|
||||
POLICY_CACHE = {}
|
||||
|
||||
class RequestHandler:
|
||||
"""Forward requests"""
|
||||
|
||||
_parsed_url = None
|
||||
_request_headers = None
|
||||
|
||||
def __init__(self, app_gw, request):
|
||||
self.app_gw = app_gw
|
||||
self.request = request
|
||||
if self.app_gw.pk not in POLICY_CACHE:
|
||||
POLICY_CACHE[self.app_gw.pk] = self.app_gw.application.policies.all()
|
||||
|
||||
@staticmethod
|
||||
def find_app_gw_for_request(request):
|
||||
"""Check if a request should be proxied or forwarded to passbook"""
|
||||
# Check if hostname is in cached list of ignored hostnames
|
||||
# This saves us having to query the database on each request
|
||||
host_header = request.META.get('HTTP_HOST')
|
||||
if host_header in IGNORED_HOSTS:
|
||||
# LOGGER.debug("%s is ignored", host_header)
|
||||
return False
|
||||
# Look through all ApplicationGatewayProviders and check hostnames
|
||||
matches = ApplicationGatewayProvider.objects.filter(
|
||||
server_name__contains=[host_header],
|
||||
enabled=True)
|
||||
if not matches.exists():
|
||||
# Mo matching Providers found, add host header to ignored list
|
||||
IGNORED_HOSTS.append(host_header)
|
||||
cache.set(IGNORED_HOSTNAMES_KEY, IGNORED_HOSTS)
|
||||
# LOGGER.debug("Ignoring %s", host_header)
|
||||
return False
|
||||
# At this point we're certain there's a matching ApplicationGateway
|
||||
if len(matches) > 1:
|
||||
# This should never happen
|
||||
raise ValueError
|
||||
app_gw = matches.first()
|
||||
try:
|
||||
# Check if ApplicationGateway is associated with application
|
||||
getattr(app_gw, 'application')
|
||||
if app_gw:
|
||||
return app_gw
|
||||
except Application.DoesNotExist:
|
||||
pass
|
||||
# LOGGER.debug("ApplicationGateway not associated with Application")
|
||||
return True
|
||||
|
||||
def _get_upstream(self):
|
||||
"""Choose random upstream and save in session"""
|
||||
if SESSION_UPSTREAM_KEY not in self.request.session:
|
||||
self.request.session[SESSION_UPSTREAM_KEY] = {}
|
||||
if self.app_gw.pk not in self.request.session[SESSION_UPSTREAM_KEY]:
|
||||
upstream_index = int(SystemRandom().random() * len(self.app_gw.upstream))
|
||||
self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk] = upstream_index
|
||||
        return self.app_gw.upstream[self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk]]

    def get_upstream(self):
        """Get upstream as parsed url"""
        upstream = self._get_upstream()

        self._parsed_url = urlparse(upstream)

        if self._parsed_url.scheme not in ('http', 'https'):
            raise InvalidUpstream(ERRORS_MESSAGES['upstream-no-scheme'] %
                                  upstream)

        return upstream

    def _format_path_to_redirect(self):
        # LOGGER.debug("Path before: %s", self.request.get_full_path())
        rewriter = Rewriter(self.app_gw, self.request)
        after = rewriter.build()
        # LOGGER.debug("Path after: %s", after)
        return after

    def get_proxy_request_headers(self):
        """Get normalized headers for the upstream

        Gets all headers from the original request and normalizes them.
        Normalization occurs by removing the prefix ``HTTP_`` and
        replacing ``_`` by ``-``. Example: ``HTTP_ACCEPT_ENCODING``
        becomes ``Accept-Encoding``.

        .. versionadded:: 0.9.1

        :param request: The original HTTPRequest instance
        :returns: Normalized headers for the upstream
        """
        return normalize_request_headers(self.request)

    def get_request_headers(self):
        """Return request headers that will be sent to upstream.

        The header REMOTE_USER is set to the current user
        if AuthenticationMiddleware is enabled and
        the view's add_remote_user property is True.

        .. versionadded:: 0.9.8
        """
        request_headers = self.get_proxy_request_headers()
        if not self.app_gw.authentication_header:
            return request_headers
        request_headers[self.app_gw.authentication_header] = self.request.user.get_username()
        # LOGGER.debug("%s set", self.app_gw.authentication_header)

        return request_headers

    def check_permission(self):
        """Check if user is authenticated and has permission to access app"""
        if not hasattr(self.request, 'user'):
            return False
        if not self.request.user.is_authenticated:
            return False
        policy_engine = PolicyEngine(POLICY_CACHE[self.app_gw.pk])
        policy_engine.for_user(self.request.user).with_request(self.request).build()
        passing, _messages = policy_engine.result

        return passing

    def get_encoded_query_params(self):
        """Return encoded query params to be used in proxied request"""
        get_data = encode_items(self.request.GET.lists())
        return urlencode(get_data)

    def _created_proxy_response(self, path):
        request_payload = self.request.body

        # LOGGER.debug("Request headers: %s", self._request_headers)

        request_url = self.get_upstream() + path
        # LOGGER.debug("Request URL: %s", request_url)

        if self.request.GET:
            request_url += '?' + self.get_encoded_query_params()
            # LOGGER.debug("Request URL: %s", request_url)

        http = HTTP
        if not self.app_gw.upstream_ssl_verification:
            http = HTTP_NO_VERIFY

        try:
            proxy_response = http.urlopen(self.request.method,
                                          request_url,
                                          redirect=False,
                                          retries=None,
                                          headers=self._request_headers,
                                          body=request_payload,
                                          decode_content=False,
                                          preload_content=False)
            # LOGGER.debug("Proxy response header: %s",
            #              proxy_response.getheaders())
        except urllib3.exceptions.HTTPError as error:
            LOGGER.exception(error)
            raise

        return proxy_response

    def _replace_host_on_redirect_location(self, proxy_response):
        location = proxy_response.headers.get('Location')
        if location:
            if self.request.is_secure():
                scheme = 'https://'
            else:
                scheme = 'http://'
            request_host = scheme + self.request.META.get('HTTP_HOST')

            upstream_host_http = 'http://' + self._parsed_url.netloc
            upstream_host_https = 'https://' + self._parsed_url.netloc

            location = location.replace(upstream_host_http, request_host)
            location = location.replace(upstream_host_https, request_host)
            proxy_response.headers['Location'] = location
            # LOGGER.debug("Proxy response LOCATION: %s",
            #              proxy_response.headers['Location'])

    def _set_content_type(self, proxy_response):
        content_type = proxy_response.headers.get('Content-Type')
        if not content_type:
            content_type = (mimetypes.guess_type(self.request.path)[0] or
                            self.app_gw.default_content_type)
            proxy_response.headers['Content-Type'] = content_type
            # LOGGER.debug("Proxy response CONTENT-TYPE: %s",
            #              proxy_response.headers['Content-Type'])

    def get_response(self):
        """Pass request to upstream and return response"""
        self._request_headers = self.get_request_headers()

        path = self._format_path_to_redirect()
        proxy_response = self._created_proxy_response(path)

        self._replace_host_on_redirect_location(proxy_response)
        self._set_content_type(proxy_response)
        response = get_django_response(proxy_response, strict_cookies=False)

        # LOGGER.debug("RESPONSE RETURNED: %s", response)
        return response
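HTTP and HTTP_NO_VERIFY are referenced but not defined in this hunk; a plausible reading, given the urllib3 except clause above, is that they are module-level urllib3 pool managers with and without certificate verification. A minimal sketch under that assumption (the names and the example URL are illustrative, not taken from the diff):

# Sketch only: assumes HTTP / HTTP_NO_VERIFY are module-level urllib3 pools.
import urllib3

# Pool that verifies upstream TLS certificates (default behaviour).
HTTP = urllib3.PoolManager()
# Pool that skips verification, used when upstream_ssl_verification is False.
HTTP_NO_VERIFY = urllib3.PoolManager(cert_reqs='CERT_NONE')

# urlopen() mirrors the call in _created_proxy_response() above, e.g.:
# HTTP.urlopen('GET', 'http://upstream.example/', redirect=False, preload_content=False)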
@ -2,6 +2,7 @@

from passbook.app_gw.models import RewriteRule

RULE_CACHE = {}

class Context:
    """Empty class which we dynamically add attributes to"""
@ -15,6 +16,9 @@ class Rewriter:
    def __init__(self, application, request):
        self.__application = application
        self.__request = request
        if self.__application.pk not in RULE_CACHE:
            RULE_CACHE[self.__application.pk] = RewriteRule.objects.filter(
                provider__in=[self.__application])

    def __build_context(self, matches):
        """Build object with .0, .1, etc as groups and give access to request"""
@ -27,7 +31,7 @@ class Rewriter:
    def build(self):
        """Run all rules over path and return final path"""
        path = self.__request.get_full_path()
        for rule in RewriteRule.objects.filter(provider__in=[self.__application]):
        for rule in RULE_CACHE[self.__application.pk]:
            matches = rule.compiled_matcher.search(path)
            if not matches:
                continue
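The change above swaps a per-request RewriteRule.objects.filter(...) call for a module-level RULE_CACHE keyed by the provider's primary key, so the rule set is looked up once per provider and reused. A small sketch of that memoisation pattern in isolation (the generic names here are illustrative, not from the diff):

# Illustrative memoisation pattern: fetch once per key, reuse afterwards.
_CACHE = {}

def rules_for(provider_pk, fetch):
    """Return cached rules for provider_pk, calling fetch() only on a cache miss."""
    if provider_pk not in _CACHE:
        _CACHE[provider_pk] = list(fetch())  # evaluate the queryset once
    return _CACHE[provider_pk]

# usage: rules_for(application.pk, lambda: RewriteRule.objects.filter(provider__in=[application]))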
@ -6,8 +6,8 @@ from django.core.cache import cache
from django.db.models.signals import post_save
from django.dispatch import receiver

from passbook.app_gw.middleware import IGNORED_HOSTNAMES_KEY
from passbook.app_gw.models import ApplicationGatewayProvider
from passbook.app_gw.proxy.handler import IGNORED_HOSTNAMES_KEY

LOGGER = getLogger(__name__)
@ -1,2 +1,2 @@
"""passbook audit Header"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'

@ -1,2 +1,2 @@
"""passbook captcha_factor Header"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'

@ -1,2 +1,2 @@
"""passbook core"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'
@ -2,11 +2,11 @@

from logging import getLogger

import cherrypy
from django.conf import settings
from daphne.cli import CommandLineInterface
from django.core.management.base import BaseCommand
from django.utils import autoreload

from passbook.core.wsgi import application
from passbook.lib.config import CONFIG

LOGGER = getLogger(__name__)

@ -15,20 +15,15 @@ class Command(BaseCommand):
    """Run CherryPy webserver"""

    def handle(self, *args, **options):
        """passbook cherrypy server"""
        config = settings.CHERRYPY_SERVER
        config.update(**options)
        cherrypy.config.update(config)
        cherrypy.tree.graft(application, '/')
        # Mount NullObject to serve static files
        cherrypy.tree.mount(None, '/static', config={
            '/': {
                'tools.staticdir.on': True,
                'tools.staticdir.dir': settings.STATIC_ROOT,
                'tools.expires.on': True,
                'tools.expires.secs': 86400,
                'tools.gzip.on': True,
            }
        })
        cherrypy.engine.start()
        cherrypy.engine.block()
        """passbook daphne server"""
        autoreload.run_with_reloader(self.daphne_server)

    def daphne_server(self):
        """Run daphne server within autoreload"""
        autoreload.raise_last_exception()
        CommandLineInterface().run([
            '-p', str(CONFIG.y('web.port', 8000)),
            '-b', CONFIG.y('web.listen', '0.0.0.0'),  # nosec
            '--access-log', '/dev/null',
            'passbook.core.asgi:application'
        ])
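django.utils.autoreload.run_with_reloader() restarts the wrapped callable whenever source files change, which is how the command above keeps the daphne server running during development. A minimal standalone sketch of the same pattern (server_loop is a stand-in, not from the diff):

# Minimal sketch of wrapping a long-running server in Django's autoreloader.
# Assumes DJANGO_SETTINGS_MODULE is configured, as it is inside a manage.py command.
from django.utils import autoreload

def server_loop():
    # Re-raise any exception from the previous reload cycle, as the command above does.
    autoreload.raise_last_exception()
    print("starting server...")  # stand-in for CommandLineInterface().run([...])

if __name__ == '__main__':
    autoreload.run_with_reloader(server_loop)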
@ -3,6 +3,7 @@
from logging import getLogger

from django.core.management.base import BaseCommand
from django.utils import autoreload

from passbook.core.celery import CELERY_APP

@ -14,4 +15,9 @@ class Command(BaseCommand):

    def handle(self, *args, **options):
        """celery worker"""
        autoreload.run_with_reloader(self.celery_worker)

    def celery_worker(self):
        """Run celery worker within autoreload"""
        autoreload.raise_last_exception()
        CELERY_APP.worker_main(['worker', '--autoscale=10,3', '-E', '-B'])
17
passbook/core/migrations/0023_remove_user_applications.py
Normal file
@ -0,0 +1,17 @@
# Generated by Django 2.2 on 2019-04-13 15:51

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_core', '0022_nonce_expiring'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='user',
            name='applications',
        ),
    ]
@ -47,7 +47,6 @@ class User(AbstractUser):
    name = models.TextField()

    sources = models.ManyToManyField('Source', through='UserSourceConnection')
    applications = models.ManyToManyField('Application')
    groups = models.ManyToManyField('Group')
    password_change_date = models.DateTimeField(auto_now_add=True)
@ -1,6 +1,5 @@
"""passbook core policy engine"""
from logging import getLogger

# from logging import getLogger
from amqp.exceptions import UnexpectedFrame
from celery import group
from celery.exceptions import TimeoutError as CeleryTimeoutError
@ -10,7 +9,7 @@ from ipware import get_client_ip
from passbook.core.celery import CELERY_APP
from passbook.core.models import Policy, User

LOGGER = getLogger(__name__)
# LOGGER = getLogger(__name__)

def _cache_key(policy, user):
    return "%s#%s" % (policy.uuid, user.pk)
@ -24,8 +23,8 @@ def _policy_engine_task(user_pk, policy_pk, **kwargs):
    user_obj = User.objects.get(pk=user_pk)
    for key, value in kwargs.items():
        setattr(user_obj, key, value)
    LOGGER.debug("Running policy `%s`#%s for user %s...", policy_obj.name,
                 policy_obj.pk.hex, user_obj)
    # LOGGER.debug("Running policy `%s`#%s for user %s...", policy_obj.name,
    #              policy_obj.pk.hex, user_obj)
    policy_result = policy_obj.passes(user_obj)
    # Handle policy result correctly if result, message or just result
    message = None
@ -34,10 +33,10 @@ def _policy_engine_task(user_pk, policy_pk, **kwargs):
    # Invert result if policy.negate is set
    if policy_obj.negate:
        policy_result = not policy_result
    LOGGER.debug("Policy %r#%s got %s", policy_obj.name, policy_obj.pk.hex, policy_result)
    # LOGGER.debug("Policy %r#%s got %s", policy_obj.name, policy_obj.pk.hex, policy_result)
    cache_key = _cache_key(policy_obj, user_obj)
    cache.set(cache_key, (policy_obj.action, policy_result, message))
    LOGGER.debug("Cached entry as %s", cache_key)
    # LOGGER.debug("Cached entry as %s", cache_key)
    return policy_obj.action, policy_result, message

class PolicyEngine:
@ -82,16 +81,16 @@ class PolicyEngine:
        for policy in self.policies:
            cached_policy = cache.get(_cache_key(policy, self.__user), None)
            if cached_policy:
                LOGGER.debug("Taking result from cache for %s", policy.pk.hex)
                # LOGGER.debug("Taking result from cache for %s", policy.pk.hex)
                cached_policies.append(cached_policy)
            else:
                LOGGER.debug("Evaluating policy %s", policy.pk.hex)
                # LOGGER.debug("Evaluating policy %s", policy.pk.hex)
                signatures.append(_policy_engine_task.signature(
                    args=(self.__user.pk, policy.pk.hex),
                    kwargs=kwargs,
                    time_limit=policy.timeout))
                self.__get_timeout += policy.timeout
        LOGGER.debug("Set total policy timeout to %r", self.__get_timeout)
        # LOGGER.debug("Set total policy timeout to %r", self.__get_timeout)
        # If all policies are cached, we have an empty list here.
        if signatures:
            self.__group = group(signatures)()
@ -120,7 +119,7 @@ class PolicyEngine:
        for policy_action, policy_result, policy_message in result:
            passing = (policy_action == Policy.ACTION_ALLOW and policy_result) or \
                      (policy_action == Policy.ACTION_DENY and not policy_result)
            LOGGER.debug('Action=%s, Result=%r => %r', policy_action, policy_result, passing)
            # LOGGER.debug('Action=%s, Result=%r => %r', policy_action, policy_result, passing)
            if policy_message:
                messages.append(policy_message)
            if not passing:
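The engine above fans policy checks out as a Celery group and memoises each (policy, user) result in the Django cache under the "uuid#pk" key built by _cache_key(). A short sketch of just the caching half, assuming a configured Django cache backend (policy and user are stand-ins with uuid/pk attributes, and run is whatever dispatches the task):

# Sketch of the per-(policy, user) result cache used above.
from django.core.cache import cache

def cache_key(policy, user):
    # Same shape as _cache_key() in the diff: "<policy uuid>#<user pk>".
    return "%s#%s" % (policy.uuid, user.pk)

def cached_or_run(policy, user, run):
    """Return a cached (action, result, message) tuple, or compute and store it."""
    key = cache_key(policy, user)
    hit = cache.get(key, None)
    if hit is not None:
        return hit
    outcome = run(policy, user)  # e.g. dispatch the Celery task and wait for it
    cache.set(key, outcome)
    return outcome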
@ -1,6 +1,6 @@
celery
cherrypy
colorlog
django-guardian
django-ipware
django-model-utils
django-redis
@ -12,3 +12,4 @@ psycopg2
PyYAML
sentry-sdk
pip
whitenoise
@ -24,6 +24,7 @@ from sentry_sdk.integrations.logging import LoggingIntegration

from passbook import __version__
from passbook.lib.config import CONFIG
from passbook.lib.sentry import before_send

VERSION = __version__

@ -58,7 +59,8 @@ SESSION_CACHE_ALIAS = "default"
LANGUAGE_COOKIE_NAME = 'passbook_language'

AUTHENTICATION_BACKENDS = [
    'django.contrib.auth.backends.ModelBackend'
    'django.contrib.auth.backends.ModelBackend',
    'guardian.backends.ObjectPermissionBackend',
]

# Application definition
@ -73,6 +75,7 @@ INSTALLED_APPS = [
    'django.contrib.postgres',
    'rest_framework',
    'drf_yasg',
    'guardian',
    'passbook.core.apps.PassbookCoreConfig',
    'passbook.admin.apps.PassbookAdminConfig',
    'passbook.api.apps.PassbookAPIConfig',
@ -121,6 +124,7 @@ CACHES = {

MIDDLEWARE = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'whitenoise.middleware.WhiteNoiseMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'passbook.app_gw.middleware.ApplicationGatewayMiddleware',
    'django.middleware.security.SecurityMiddleware',
@ -216,38 +220,28 @@ CELERY_BEAT_SCHEDULE = {
    }
}

sentry_init(
    dsn=("https://55b5dd780bc14f4c96bba69b7a9abbcc:449af483bd0745"
         "0d83be640d834e5458@sentry.services.beryju.org/8"),
    integrations=[
        DjangoIntegration(),
        CeleryIntegration(),
        LoggingIntegration(
            level=logging.INFO,
            event_level=logging.ERROR
        )
    ],
    send_default_pii=True
)


# CherryPY settings
with CONFIG.cd('web'):
    CHERRYPY_SERVER = {
        'server.socket_host': CONFIG.get('listen', '0.0.0.0'),  # nosec
        'server.socket_port': CONFIG.get('port', 8000),
        'server.thread_pool': CONFIG.get('threads', 30),
        'log.screen': False,
        'log.access_file': '',
        'log.error_file': '',
    }
if not DEBUG:
    sentry_init(
        dsn="https://55b5dd780bc14f4c96bba69b7a9abbcc@sentry.services.beryju.org/8",
        integrations=[
            DjangoIntegration(),
            CeleryIntegration(),
            LoggingIntegration(
                level=logging.INFO,
                event_level=logging.ERROR
            )
        ],
        send_default_pii=True,
        before_send=before_send,
        release='p2@%s' % __version__
    )

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/

STATIC_URL = '/static/'

LOG_HANDLERS = ['console', 'syslog', 'file']
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

with CONFIG.cd('log'):
    LOGGING = {
@ -291,38 +285,52 @@ with CONFIG.cd('log'):
                'formatter': 'verbose',
                'filename': CONFIG.get('file'),
            },
            'queue': {
                'level': CONFIG.get('level').get('console'),
                'class': 'passbook.lib.log.QueueListenerHandler',
                'handlers': [
                    'cfg://handlers.console',
                    # 'cfg://handlers.syslog',
                    'cfg://handlers.file',
                ],
            }
        },
        'loggers': {
            'passbook': {
                'handlers': LOG_HANDLERS,
                'handlers': ['queue'],
                'level': 'DEBUG',
                'propagate': True,
            },
            'django': {
                'handlers': LOG_HANDLERS,
                'handlers': ['queue'],
                'level': 'INFO',
                'propagate': True,
            },
            'tasks': {
                'handlers': LOG_HANDLERS,
                'handlers': ['queue'],
                'level': 'DEBUG',
                'propagate': True,
            },
            'cherrypy': {
                'handlers': LOG_HANDLERS,
                'handlers': ['queue'],
                'level': 'DEBUG',
                'propagate': True,
            },
            'oauthlib': {
                'handlers': LOG_HANDLERS,
                'handlers': ['queue'],
                'level': 'DEBUG',
                'propagate': True,
            },
            'oauth2_provider': {
                'handlers': LOG_HANDLERS,
                'handlers': ['queue'],
                'level': 'DEBUG',
                'propagate': True,
            },
            'daphne': {
                'handlers': ['queue'],
                'level': 'INFO',
                'propagate': True,
            }
        }
    }
@ -68,7 +68,7 @@
{% is_active_app 'passbook_admin' as is_admin %}
{% if user.is_superuser %}
<li class="list-group-item {% is_active_app 'passbook_admin' %} secondary-nav-item-pf">
    <a href="{% url 'passbook_admin:overview' %}">
    <a>
        <span class="pficon pficon-user" data-toggle="tooltip" title=""
              data-original-title="{% trans 'Administration' %}"></span>
        <span class="list-group-item-value dropdown-title">{% trans 'Administration' %}</span>
@ -2,8 +2,7 @@

from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView

from passbook.core.models import Application
from guardian.shortcuts import get_objects_for_user


class OverviewView(LoginRequiredMixin, TemplateView):
@ -13,7 +12,6 @@ class OverviewView(LoginRequiredMixin, TemplateView):
    template_name = 'overview/index.html'

    def get_context_data(self, **kwargs):
        kwargs['applications'] = self.request.user.applications.all()
        if self.request.user.is_superuser:
            kwargs['applications'] = Application.objects.all()
        kwargs['applications'] = get_objects_for_user(self.request.user,
                                                      'passbook_core.view_application')
        return super().get_context_data(**kwargs)
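The overview view now relies on django-guardian object permissions instead of the removed User.applications relation; get_objects_for_user() returns only the applications a user was explicitly granted (and, by default, everything for superusers). A short sketch of granting and querying such a permission, assuming django-guardian is installed as in the settings diff above (the username is a placeholder):

# Sketch: grant a user object-level access to one Application, then list what they can see.
from guardian.shortcuts import assign_perm, get_objects_for_user

from passbook.core.models import Application, User

user = User.objects.get(username='alice')   # 'alice' is a placeholder
app = Application.objects.first()

assign_perm('passbook_core.view_application', user, app)
visible = get_objects_for_user(user, 'passbook_core.view_application')
# visible is a queryset containing only the applications granted to this user.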
@ -1,2 +1,2 @@
"""passbook hibp_policy"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'

@ -1,2 +1,2 @@
"""Passbook ldap app Header"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'

@ -1,2 +1,2 @@
"""passbook lib"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'
@ -95,9 +95,4 @@ saml_idp:
  # List of python packages with provider types to load.
  types:
    - passbook.saml_idp.processors.generic
    - passbook.saml_idp.processors.aws
    - passbook.saml_idp.processors.gitlab
    - passbook.saml_idp.processors.nextcloud
    - passbook.saml_idp.processors.salesforce
    - passbook.saml_idp.processors.shibboleth
    - passbook.saml_idp.processors.wordpress_orange
37
passbook/lib/log.py
Normal file
@ -0,0 +1,37 @@
"""QueueListener that can be configured from logging.dictConfig"""
from atexit import register
from logging.config import ConvertingList
from logging.handlers import QueueHandler, QueueListener
from queue import Queue


def _resolve_handlers(_list):
    """Evaluates ConvertingList by iterating over it"""
    if not isinstance(_list, ConvertingList):
        return _list

    # Indexing the list performs the evaluation.
    return [_list[i] for i in range(len(_list))]


class QueueListenerHandler(QueueHandler):
    """QueueListener that can be configured from logging.dictConfig"""

    def __init__(self, handlers, auto_run=True, queue=Queue(-1)):
        super().__init__(queue)
        handlers = _resolve_handlers(handlers)
        self._listener = QueueListener(
            self.queue,
            *handlers,
            respect_handler_level=True)
        if auto_run:
            self.start()
            register(self.stop)

    def start(self):
        """start background thread"""
        self._listener.start()

    def stop(self):
        """stop background thread"""
        self._listener.stop()
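For context, a minimal dictConfig that routes a logger through this handler, mirroring the 'queue' handler added to settings.py above (the logger name and levels here are illustrative):

# Sketch: wiring QueueListenerHandler into logging.dictConfig, as settings.py does above.
import logging.config

logging.config.dictConfig({
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {'class': 'logging.StreamHandler'},
        'queue': {
            'class': 'passbook.lib.log.QueueListenerHandler',
            # cfg:// references are resolved lazily; _resolve_handlers() above evaluates them.
            'handlers': ['cfg://handlers.console'],
        },
    },
    'loggers': {
        'passbook': {'handlers': ['queue'], 'level': 'DEBUG'},
    },
})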
17
passbook/lib/sentry.py
Normal file
@ -0,0 +1,17 @@
"""passbook sentry integration"""


def before_send(event, hint):
    """Check if error is database error, and ignore if so"""
    from django.core.exceptions import OperationalError
    from django_redis.exceptions import ConnectionInterrupted

    ignored_classes = [
        OperationalError,
        ConnectionInterrupted,
    ]
    if 'exc_info' in hint:
        _exc_type, exc_value, _ = hint['exc_info']
        if isinstance(exc_value, ignored_classes):
            return None
    return event
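before_send is hooked into sentry_init() in the settings diff above so that database and Redis connection blips are dropped instead of reported. A hedged usage sketch with a placeholder DSN (note that isinstance() expects a tuple of exception types, so a filter like the one above is usually written with a tuple rather than a list):

# Sketch: registering a before_send filter with the Sentry SDK.
import sentry_sdk

from passbook.lib.sentry import before_send

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    before_send=before_send,  # returning None from the hook drops the event
)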
@ -1,2 +1,2 @@
"""passbook oauth_client Header"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'

@ -1,2 +1,2 @@
"""passbook oauth_provider Header"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'
@ -36,6 +36,13 @@ class PassbookAuthorizationView(AccessMixin, AuthorizationView):

    _application = None

    def _inject_response_type(self):
        """Inject response_type into querystring if not set"""
        LOGGER.debug("response_type not set, defaulting to 'code'")
        querystring = urlencode(self.request.GET)
        querystring += '&response_type=code'
        return redirect(reverse('passbook_oauth_provider:oauth2-ok-authorize') + '?' + querystring)

    def dispatch(self, request, *args, **kwargs):
        """Update OAuth2Provider's skip_authorization state"""
        # Get client_id to get provider, so we can update skip_authorization field
@ -55,6 +62,9 @@ class PassbookAuthorizationView(AccessMixin, AuthorizationView):
            for policy_meaage in policy_meaages:
                messages.error(request, policy_meaage)
            return redirect('passbook_oauth_provider:oauth2-permission-denied')
        # Some clients don't pass response_type, so we default to code
        if 'response_type' not in request.GET:
            return self._inject_response_type()
        actual_response = super().dispatch(request, *args, **kwargs)
        if actual_response.status_code == 400:
            LOGGER.debug(request.GET.get('redirect_uri'))
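The _inject_response_type() helper simply re-encodes the incoming query string and appends response_type=code before redirecting back to the authorize endpoint. A tiny worked example of that string handling (the client_id and redirect_uri values are made up):

# Worked example of the querystring rewrite performed by _inject_response_type().
from urllib.parse import urlencode

params = {'client_id': 'abc123', 'redirect_uri': 'https://sp.example.com/callback'}
querystring = urlencode(params)
querystring += '&response_type=code'
# -> 'client_id=abc123&redirect_uri=https%3A%2F%2Fsp.example.com%2Fcallback&response_type=code'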
@ -1,2 +1,2 @@
"""passbook otp Header"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'

@ -1,2 +1,2 @@
"""passbook password_expiry"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'

@ -1,2 +1,2 @@
"""passbook saml_idp Header"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'
@ -65,7 +65,7 @@ class Processor:
        self._remote = remote
        self._logger = getLogger(__name__)
        self._system_params['ISSUER'] = self._remote.issuer
        self._logger.info('processor configured')
        self._logger.debug('processor configured')

    def _build_assertion(self):
        """Builds _assertion_params."""
@ -116,11 +116,7 @@ class Processor:

    def _determine_audience(self):
        """Determines the _audience."""
        self._audience = self._request_params.get('DESTINATION', None)

        if not self._audience:
            self._audience = self._request_params.get('PROVIDER_NAME', None)

        self._audience = self._remote.audience
        self._logger.info('determined audience')

    def _determine_response_id(self):
@ -295,6 +291,7 @@ class Processor:
    def generate_response(self):
        """Processes request and returns template variables suitable for a response."""
        # Build the assertion and response.
        self.can_handle(self._django_request)
        self._validate_user()
        self._build_assertion()
        self._format_assertion()
@ -25,7 +25,7 @@ class SAMLProviderForm(forms.ModelForm):
    class Meta:

        model = SAMLProvider
        fields = ['name', 'property_mappings', 'acs_url', 'processor_path', 'issuer',
        fields = ['name', 'property_mappings', 'acs_url', 'audience', 'processor_path', 'issuer',
                  'assertion_valid_for', 'signing', 'signing_cert', 'signing_key', ]
        labels = {
            'acs_url': 'ACS URL',
@ -33,6 +33,7 @@ class SAMLProviderForm(forms.ModelForm):
        }
        widgets = {
            'name': forms.TextInput(),
            'audience': forms.TextInput(),
            'issuer': forms.TextInput(),
            'property_mappings': FilteredSelectMultiple(_('Property Mappings'), False)
        }
18
passbook/saml_idp/migrations/0003_samlprovider_audience.py
Normal file
@ -0,0 +1,18 @@
# Generated by Django 2.2 on 2019-04-18 09:09

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_saml_idp', '0002_samlpropertymapping'),
    ]

    operations = [
        migrations.AddField(
            model_name='samlprovider',
            name='audience',
            field=models.TextField(blank=True, default=''),
        ),
    ]
18
passbook/saml_idp/migrations/0004_auto_20190418_0918.py
Normal file
@ -0,0 +1,18 @@
# Generated by Django 2.2 on 2019-04-18 09:18

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_saml_idp', '0003_samlprovider_audience'),
    ]

    operations = [
        migrations.AlterField(
            model_name='samlprovider',
            name='audience',
            field=models.TextField(default=''),
        ),
    ]
@ -15,6 +15,7 @@ class SAMLProvider(Provider):

    name = models.TextField()
    acs_url = models.URLField()
    audience = models.TextField(default='')
    processor_path = models.CharField(max_length=255, choices=[])
    issuer = models.TextField()
    assertion_valid_for = models.IntegerField(default=86400)
@ -33,7 +34,10 @@ class SAMLProvider(Provider):
    def processor(self):
        """Return selected processor as instance"""
        if not self._processor:
            self._processor = path_to_class(self.processor_path)(self)
            try:
                self._processor = path_to_class(self.processor_path)(self)
            except ModuleNotFoundError:
                self._processor = None
        return self._processor

    def __str__(self):
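path_to_class() is a passbook helper that is not shown in this diff; presumably it resolves the dotted processor_path into a class so the provider can instantiate it, and the new try/except returns None when the configured module no longer exists (for example the processor modules deleted below). A minimal sketch of such a dotted-path resolver, stated as an assumption about its behaviour rather than its actual implementation:

# Sketch of a dotted-path resolver similar to what path_to_class() presumably does.
from importlib import import_module

def load_dotted(path):
    """Import 'pkg.module.ClassName' and return the class object."""
    module_name, _, class_name = path.rpartition('.')
    module = import_module(module_name)   # raises ModuleNotFoundError if the module is gone
    return getattr(module, class_name)

# e.g. load_dotted('passbook.saml_idp.base.Processor') returns the Processor class.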
@ -1,22 +0,0 @@
"""AWS Processor"""

from passbook.saml_idp.base import Processor, xml_render


class AWSProcessor(Processor):
    """AWS Response Handler Processor for testing against django-saml2-sp."""

    def _determine_audience(self):
        self._audience = 'urn:amazon:webservices'

    def _format_assertion(self):
        """Formats _assertion_params as _assertion_xml."""
        super()._format_assertion()
        self._assertion_params['ATTRIBUTES'].append(
            {
                'Name': 'https://aws.amazon.com/SAML/Attributes/RoleSessionName',
                'Value': self._django_request.user.username,
            }
        )
        self._assertion_xml = xml_render.get_assertion_xml(
            'saml/xml/assertions/generic.xml', self._assertion_params, signed=True)
@ -1,10 +0,0 @@
"""GitLab Processor"""

from passbook.saml_idp.base import Processor


class GitLabProcessor(Processor):
    """GitLab Response Handler Processor for testing against django-saml2-sp."""

    def _determine_audience(self):
        self._audience = self._remote.acs_url.replace('/users/auth/saml/callback', '')

@ -1,11 +0,0 @@
"""NextCloud Processor"""
from passbook.saml_idp.base import Processor


class NextCloudProcessor(Processor):
    """Nextcloud SAML 2.0 AuthnRequest to Response Handler Processor."""

    def _determine_audience(self):
        # Nextcloud expects an audience in this format
        # https://<host>/index.php/apps/user_saml/saml/metadata
        self._audience = self._remote.acs_url.replace('acs', 'metadata')

@ -1,11 +0,0 @@
"""Shibboleth Processor"""

from passbook.saml_idp.base import Processor


class ShibbolethProcessor(Processor):
    """Shibboleth-specific Processor"""

    def _determine_audience(self):
        """Determines the _audience."""
        self._audience = "https://sp.testshib.org/shibboleth-sp"

@ -1,13 +0,0 @@
"""WordpressOrange Processor"""

from passbook.saml_idp.base import Processor


class WordpressOrangeProcessor(Processor):
    """WordpressOrange Response Handler Processor for testing against django-saml2-sp."""

    def _determine_audience(self):
        # Orange expects an audience in this format
        # https://<host>/wp-content/plugins/miniorange-saml-20-single-sign-on/
        self._audience = self._remote.acs_url + \
            'wp-content/plugins/miniorange-saml-20-single-sign-on/'
@ -17,7 +17,6 @@ from signxml.util import strip_pem_header
from passbook.audit.models import AuditEntry
from passbook.core.models import Application
from passbook.core.policies import PolicyEngine
from passbook.lib.config import CONFIG
from passbook.lib.mixins import CSRFExemptMixin
from passbook.lib.utils.template import render_to_string
from passbook.saml_idp import exceptions
@ -205,7 +204,7 @@ class DescriptorDownloadView(AccessRequiredView):

    def get(self, request, application):
        """Replies with the XML Metadata IDSSODescriptor."""
        entity_id = CONFIG.y('saml_idp.issuer')
        entity_id = self.provider.issuer
        slo_url = request.build_absolute_uri(reverse('passbook_saml_idp:saml-logout', kwargs={
            'application': application
        }))
@ -1,2 +1,2 @@
"""passbook suspicious_policy"""
__version__ = '0.1.28-beta'
__version__ = '0.1.34-beta'