Compare commits
118 Commits
version/0.
...
version/0.
SHA1 | Author | Date | |
---|---|---|---|
11b5860d4a | |||
9bdbff4cda | |||
e0d597eeac | |||
f576985cc9 | |||
22a6aef60b | |||
ec0a6e7854 | |||
6904608e6f | |||
cb3732cb2b | |||
57de6cbafc | |||
b1dda764a9 | |||
5ec2102487 | |||
9f8fb7378a | |||
98cd646044 | |||
0cba1b4c45 | |||
53918462b6 | |||
8a7e74b523 | |||
4dc7065e97 | |||
3c93bb9f9f | |||
8143fae2d6 | |||
3cfe45d3cb | |||
8e5c3f2f31 | |||
5a3b2fdd49 | |||
e47b9f0d57 | |||
146dd747f1 | |||
f2ce56063b | |||
b26f378e4c | |||
9072b836c6 | |||
2fa57d064e | |||
146705c60a | |||
5029a99df6 | |||
e7129d18f6 | |||
d2bf9f81d6 | |||
30acf0660b | |||
dda41af5c8 | |||
9b5b03647b | |||
940b3eb943 | |||
16eb629b71 | |||
755045b226 | |||
61478db94e | |||
f69f959bdb | |||
146edb45d4 | |||
045a802365 | |||
c90d8ddcff | |||
3ff2ec929f | |||
a3ef26b7ad | |||
19cd1624c1 | |||
366ef352c6 | |||
a9031a6abc | |||
a1a5223b58 | |||
c723b0233f | |||
b369eb28f1 | |||
9b8f390e31 | |||
11630c9a74 | |||
c9ac10f6f6 | |||
04d613cb28 | |||
40866f9ecd | |||
d8585eb872 | |||
15aaeda475 | |||
8536ef9e23 | |||
35b6bb6b3f | |||
eaa573c715 | |||
660972e303 | |||
a21012bf0c | |||
8dbafa4bda | |||
80049413f0 | |||
2739442d4a | |||
c679f0a67c | |||
d9a952dd03 | |||
9a1a0f0aa8 | |||
4d6bb60134 | |||
80e6d59382 | |||
81ac951872 | |||
f33e553cfd | |||
9b0240dc26 | |||
c327310392 | |||
457375287c | |||
7e87bfef5b | |||
a7af5268de | |||
6d916029bb | |||
81fdcbadad | |||
ec1e25fe71 | |||
b5306e4a94 | |||
801b8a1e59 | |||
3a52059793 | |||
10b7d99b37 | |||
6be8d0cbb2 | |||
5b8e3689ec | |||
25a5d8f5da | |||
883d439544 | |||
1c3b5889e5 | |||
87012b65e1 | |||
29913773a7 | |||
0bc6a4fed4 | |||
4645d8353f | |||
260c5555fa | |||
6f7b917c38 | |||
1456ee6d3e | |||
ae3d3d0295 | |||
c23ceacd0b | |||
5155204283 | |||
5509ec9b0f | |||
d6f9b2e47d | |||
67aa4aef11 | |||
9e46c8bfec | |||
1eaa9b9733 | |||
ee05834b69 | |||
fccc8f4959 | |||
c721620f96 | |||
c9f73d718e | |||
bfa58be721 | |||
4bb602149e | |||
81ab9092fc | |||
29d5962c4c | |||
5c75339946 | |||
4774d9a46c | |||
dbe16ba4fd | |||
6972cf00a0 | |||
0445be9712 |
@ -1,10 +1,10 @@
|
||||
[bumpversion]
|
||||
current_version = 0.1.21-beta
|
||||
current_version = 0.1.35-beta
|
||||
tag = True
|
||||
commit = True
|
||||
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)
|
||||
serialize = {major}.{minor}.{patch}-{release}
|
||||
message = bump version: {current_version} -> {new_version}
|
||||
message = new release: {new_version}
|
||||
tag_name = version/{new_version}
|
||||
|
||||
[bumpversion:part:release]
|
||||
@ -53,3 +53,7 @@ values =
|
||||
|
||||
[bumpversion:file:passbook/otp/__init__.py]
|
||||
|
||||
[bumpversion:file:passbook/app_gw/__init__.py]
|
||||
|
||||
[bumpversion:file:passbook/suspicious_policy/__init__.py]
|
||||
|
||||
|
@ -9,3 +9,6 @@ insert_final_newline = true
|
||||
|
||||
[html]
|
||||
indent_size = 2
|
||||
|
||||
[yaml]
|
||||
indent_size = 2
|
||||
|
224
.gitlab-ci.yml
@ -1,155 +1,109 @@
|
||||
# Global Variables
|
||||
before_script:
|
||||
- "python3 -m pip install -U virtualenv"
|
||||
- "virtualenv env"
|
||||
- "source env/bin/activate"
|
||||
- "pip3 install -U -r requirements-dev.txt"
|
||||
stages:
|
||||
- test
|
||||
- build
|
||||
- docs
|
||||
- deploy
|
||||
image: python:3.6
|
||||
- build-buildimage
|
||||
- test
|
||||
- build
|
||||
- docs
|
||||
- deploy
|
||||
image: docker.beryju.org/passbook/build-base:latest
|
||||
services:
|
||||
- postgres:latest
|
||||
- postgres:latest
|
||||
- redis:latest
|
||||
|
||||
variables:
|
||||
POSTGRES_DB: passbook
|
||||
POSTGRES_USER: passbook
|
||||
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
|
||||
POSTGRES_DB: passbook
|
||||
POSTGRES_USER: passbook
|
||||
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
|
||||
|
||||
include:
|
||||
- /client-packages/allauth/.gitlab-ci.yml
|
||||
create-build-image:
|
||||
image:
|
||||
name: gcr.io/kaniko-project/executor:debug
|
||||
entrypoint: [""]
|
||||
before_script:
|
||||
- echo "{\"auths\":{\"docker.beryju.org\":{\"auth\":\"$DOCKER_AUTH\"}}}" > /kaniko/.docker/config.json
|
||||
script:
|
||||
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile.build-base --destination docker.beryju.org/passbook/build-base:latest --destination docker.beryju.org/passbook/build-base:0.1.35-beta
|
||||
stage: build-buildimage
|
||||
only:
|
||||
refs:
|
||||
- tags
|
||||
- /^version/.*$/
|
||||
|
||||
isort:
|
||||
script:
|
||||
- isort -c -sg env
|
||||
stage: test
|
||||
script:
|
||||
- isort -c -sg env
|
||||
stage: test
|
||||
migrations:
|
||||
script:
|
||||
- python manage.py migrate
|
||||
stage: test
|
||||
script:
|
||||
- python manage.py migrate
|
||||
stage: test
|
||||
prospector:
|
||||
script:
|
||||
- prospector
|
||||
stage: test
|
||||
script:
|
||||
- prospector
|
||||
stage: test
|
||||
pylint:
|
||||
script:
|
||||
- pylint passbook
|
||||
stage: test
|
||||
script:
|
||||
- pylint passbook
|
||||
stage: test
|
||||
coverage:
|
||||
script:
|
||||
- coverage run manage.py test
|
||||
- coverage report
|
||||
stage: test
|
||||
script:
|
||||
- python manage.py collectstatic --no-input
|
||||
- coverage run manage.py test
|
||||
- coverage report
|
||||
stage: test
|
||||
bandit:
|
||||
script:
|
||||
- bandit -r passbook
|
||||
stage: test
|
||||
script:
|
||||
- bandit -r passbook
|
||||
stage: test
|
||||
|
||||
package-docker:
|
||||
image:
|
||||
name: gcr.io/kaniko-project/executor:debug
|
||||
entrypoint: [""]
|
||||
before_script:
|
||||
- echo "{\"auths\":{\"docker.$NEXUS_URL\":{\"auth\":\"$NEXUS_AUTH\"}}}" > /kaniko/.docker/config.json
|
||||
script:
|
||||
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination docker.pkg.beryju.org/passbook:latest --destination docker.pkg.beryju.org/passbook:0.1.21-beta
|
||||
stage: build
|
||||
only:
|
||||
- tags
|
||||
- /^version/.*$/
|
||||
image:
|
||||
name: gcr.io/kaniko-project/executor:debug
|
||||
entrypoint: [""]
|
||||
before_script:
|
||||
- echo "{\"auths\":{\"docker.beryju.org\":{\"auth\":\"$DOCKER_AUTH\"}}}" > /kaniko/.docker/config.json
|
||||
script:
|
||||
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination docker.beryju.org/passbook/server:latest --destination docker.beryju.org/passbook/server:0.1.35-beta
|
||||
stage: build
|
||||
only:
|
||||
- tags
|
||||
- /^version/.*$/
|
||||
package-helm:
|
||||
stage: build
|
||||
script:
|
||||
- curl https://raw.githubusercontent.com/helm/helm/master/scripts/get | bash
|
||||
- helm init --client-only
|
||||
- helm package helm/passbook
|
||||
- ./manage.py nexus_upload --method put --url $NEXUS_URL --auth $NEXUS_AUTH --repo helm *.tgz
|
||||
only:
|
||||
- tags
|
||||
- /^version/.*$/
|
||||
package-debian:
|
||||
before_script:
|
||||
- apt update
|
||||
- apt install -y --no-install-recommends build-essential debhelper devscripts equivs python3 python3-dev python3-pip libsasl2-dev libldap2-dev
|
||||
- mk-build-deps debian/control
|
||||
- apt install ./*build-deps*deb -f -y
|
||||
- python3 -m pip install -U virtualenv pip
|
||||
- virtualenv env
|
||||
- source env/bin/activate
|
||||
- pip3 install -U -r requirements.txt -r requirements-dev.txt
|
||||
- ./manage.py collectstatic --no-input
|
||||
image: ubuntu:18.04
|
||||
script:
|
||||
- debuild -us -uc
|
||||
- cp ../passbook*.deb .
|
||||
- ./manage.py nexus_upload --method post --url $NEXUS_URL --auth $NEXUS_AUTH --repo apt passbook*deb
|
||||
artifacts:
|
||||
paths:
|
||||
- passbook*deb
|
||||
expire_in: 2 days
|
||||
stage: build
|
||||
only:
|
||||
- tags
|
||||
- /^version/.*$/
|
||||
stage: build
|
||||
script:
|
||||
- curl https://raw.githubusercontent.com/helm/helm/master/scripts/get | bash
|
||||
- helm init --client-only
|
||||
- helm package helm/passbook
|
||||
artifacts:
|
||||
paths:
|
||||
- passbook-*.tgz
|
||||
expire_in: 2 days
|
||||
only:
|
||||
- tags
|
||||
- /^version/.*$/
|
||||
|
||||
package-client-package-allauth:
|
||||
script:
|
||||
- cd client-packages/allauth
|
||||
- python setup.py sdist
|
||||
- twine upload --username $TWINE_USERNAME --password $TWINE_PASSWORD dist/*
|
||||
stage: build
|
||||
only:
|
||||
refs:
|
||||
- tags
|
||||
- /^version/.*$/
|
||||
changes:
|
||||
- client-packages/allauth/**
|
||||
script:
|
||||
- cd client-packages/allauth
|
||||
- python setup.py sdist
|
||||
- twine upload --username $TWINE_USERNAME --password $TWINE_PASSWORD dist/*
|
||||
stage: build
|
||||
only:
|
||||
refs:
|
||||
- tags
|
||||
- /^version/.*$/
|
||||
changes:
|
||||
- client-packages/allauth/**
|
||||
|
||||
package-client-package-sentry:
|
||||
script:
|
||||
- cd client-packages/sentry-auth-passbook
|
||||
- python setup.py sdist
|
||||
- twine upload --username $TWINE_USERNAME --password $TWINE_PASSWORD dist/*
|
||||
stage: build
|
||||
only:
|
||||
refs:
|
||||
- tags
|
||||
- /^version/.*$/
|
||||
changes:
|
||||
- client-packages/sentry-auth-passbook/**
|
||||
|
||||
# docs:
|
||||
# stage: docs
|
||||
# only:
|
||||
# - master
|
||||
# - tags
|
||||
# - /^debian/.*$/
|
||||
# environment:
|
||||
# name: docs
|
||||
# url: "https://passbook.beryju.org/docs/"
|
||||
# script:
|
||||
# - apt update
|
||||
# - apt install -y rsync
|
||||
# - "mkdir ~/.ssh"
|
||||
# - "cp .gitlab/known_hosts ~/.ssh/"
|
||||
# - "pip3 install -U -r requirements-docs.txt"
|
||||
# - "eval $(ssh-agent -s)"
|
||||
# - "echo \"${CI_SSH_PRIVATE}\" | ssh-add -"
|
||||
# - mkdocs build
|
||||
# - 'rsync -avh --delete web/* "beryjuorg@ory1-web-prod-1.ory1.beryju.org:passbook.beryju.org/"'
|
||||
# - 'rsync -avh --delete site/* "beryjuorg@ory1-web-prod-1.ory1.beryju.org:passbook.beryju.org/docs/"'
|
||||
|
||||
# deploy:
|
||||
# environment:
|
||||
# name: production
|
||||
# url: https://passbook-prod.default.k8s.beryju.org/
|
||||
# stage: deploy
|
||||
# only:
|
||||
# - tags
|
||||
# - /^version/.*$/
|
||||
# script:
|
||||
# - curl https://raw.githubusercontent.com/helm/helm/master/scripts/get | bash
|
||||
# - helm init
|
||||
# - helm upgrade passbook-prod helm/passbook --devel
|
||||
script:
|
||||
- cd client-packages/sentry-auth-passbook
|
||||
- python setup.py sdist
|
||||
- twine upload --username $TWINE_USERNAME --password $TWINE_PASSWORD dist/*
|
||||
stage: build
|
||||
only:
|
||||
refs:
|
||||
- tags
|
||||
- /^version/.*$/
|
||||
changes:
|
||||
- client-packages/sentry-auth-passbook/**
|
||||
|
3
.vscode/settings.json
vendored
@ -4,6 +4,9 @@
|
||||
"[html]": {
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[yml]": {
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"cSpell.words": [
|
||||
"SAML",
|
||||
"passbook"
|
||||
|
@ -6,7 +6,7 @@ COPY ./requirements.txt /app/
|
||||
|
||||
WORKDIR /app/
|
||||
|
||||
RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev -y && \
|
||||
RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev libpq-dev -y && \
|
||||
mkdir /app/static/ && \
|
||||
pip install -r requirements.txt && \
|
||||
pip install psycopg2 && \
|
||||
@ -23,7 +23,7 @@ COPY --from=build /app/static /app/static/
|
||||
|
||||
WORKDIR /app/
|
||||
|
||||
RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev -y && \
|
||||
RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev libpq-dev -y && \
|
||||
pip install -r requirements.txt && \
|
||||
pip install psycopg2 && \
|
||||
adduser --system --home /app/ passbook && \
|
||||
|
12
Dockerfile.build-base
Normal file
@ -0,0 +1,12 @@
|
||||
FROM python:3.6
|
||||
|
||||
COPY ./passbook/ /app/passbook
|
||||
COPY ./client-packages/ /app/client-packages
|
||||
COPY ./requirements.txt /app/
|
||||
COPY ./requirements-dev.txt /app/
|
||||
|
||||
WORKDIR /app/
|
||||
|
||||
RUN apt-get update && apt-get install libssl-dev libffi-dev libpq-dev -y && \
|
||||
pip install -U -r requirements-dev.txt && \
|
||||
rm -rf /app/*
|
2
LICENSE
@ -1,6 +1,6 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2018 BeryJu.org
|
||||
Copyright (c) 2019 BeryJu.org
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
@ -1,27 +0,0 @@
|
||||
# Global Variables
|
||||
before_script:
|
||||
- cd allauth/
|
||||
- "python3 -m pip install -U virtualenv"
|
||||
- "virtualenv env"
|
||||
- "source env/bin/activate"
|
||||
- "pip3 install -U -r requirements-dev.txt"
|
||||
stages:
|
||||
- test-allauth
|
||||
image: python:3.6
|
||||
|
||||
isort:
|
||||
script:
|
||||
- isort -c -sg env
|
||||
stage: test-allauth
|
||||
prospector:
|
||||
script:
|
||||
- prospector
|
||||
stage: test-allauth
|
||||
pylint:
|
||||
script:
|
||||
- pylint passbook
|
||||
stage: test-allauth
|
||||
bandit:
|
||||
script:
|
||||
- bandit -r allauth_passbook
|
||||
stage: test-allauth
|
@ -3,7 +3,7 @@ from setuptools import setup
|
||||
|
||||
setup(
|
||||
name='django-allauth-passbook',
|
||||
version='0.1.21-beta',
|
||||
version='0.1.35-beta',
|
||||
description='passbook support for django-allauth',
|
||||
# long_description='\n'.join(read_simple('docs/index.md')[2:]),
|
||||
long_description_content_type='text/markdown',
|
||||
|
@ -18,7 +18,7 @@ tests_require = [
|
||||
|
||||
setup(
|
||||
name='sentry-auth-passbook',
|
||||
version='0.1.21-beta',
|
||||
version='0.1.35-beta',
|
||||
author='BeryJu.org',
|
||||
author_email='support@beryju.org',
|
||||
url='https://passbook.beryju.org',
|
||||
|
118
debian/changelog
vendored
@ -1,118 +0,0 @@
|
||||
passbook (0.1.21) stable; urgency=medium
|
||||
|
||||
* bump version: 0.1.19-beta -> 0.1.20-beta
|
||||
* add request debug view
|
||||
* detect HTTPS from reverse proxy
|
||||
|
||||
-- Jens Langhammer <jens.langhammer@beryju.org> Thu, 14 Mar 2019 17:01:49 +0000
|
||||
|
||||
passbook (0.1.20) stable; urgency=medium
|
||||
|
||||
* bump version: 0.1.18-beta -> 0.1.19-beta
|
||||
* fix GitHub Pretend again
|
||||
* add user settings for Sources
|
||||
|
||||
-- Jens Langhammer <jens.langhammer@beryju.org> Wed, 13 Mar 2019 15:49:44 +0000
|
||||
|
||||
passbook (0.1.18) stable; urgency=medium
|
||||
|
||||
* bump version: 0.1.16-beta -> 0.1.17-beta
|
||||
* fix Server Error when downloading metadata
|
||||
* add sentry client
|
||||
* fix included yaml file
|
||||
* adjust versions for client packages, auto build client-packages
|
||||
* bump version: 0.1.17-beta -> 0.1.18-beta
|
||||
* fix API Call for sentry-client, add missing template
|
||||
* fix GitHub Pretend throwing a 500 error
|
||||
|
||||
-- Jens Langhammer <jens.langhammer@beryju.org> Wed, 13 Mar 2019 14:14:10 +0000
|
||||
|
||||
passbook (0.1.17) stable; urgency=medium
|
||||
|
||||
* bump version: 0.1.15-beta -> 0.1.16-beta
|
||||
* remove Application.user_is_authorized
|
||||
* don't use celery heartbeat, use TCP keepalive instead
|
||||
* switch to vertical navigation
|
||||
|
||||
-- Jens Langhammer <jens.langhammer@beryju.org> Tue, 12 Mar 2019 14:54:27 +0000
|
||||
|
||||
passbook (0.1.16) stable; urgency=medium
|
||||
|
||||
* Replace redis with RabbitMQ
|
||||
* updated debian package to suggest RabbitMQ
|
||||
* update helm chart to require RabbitMQ
|
||||
* fix invalid default config in debian package
|
||||
|
||||
-- Jens Langhammer <jens.langhammer@beryju.org> Mon, 11 Mar 2019 10:28:36 +0000
|
||||
|
||||
passbook (0.1.14) stable; urgency=medium
|
||||
|
||||
* bump version: 0.1.11-beta -> 0.1.12-beta
|
||||
* Fix DoesNotExist error when running PolicyEngine against None user
|
||||
* allow custom email server for helm installs
|
||||
* fix UserChangePasswordView not requiring Login
|
||||
|
||||
-- Jens Langhammer <jens.langhammer@beryju.org> Mon, 11 Mar 2019 10:28:36 +0000
|
||||
|
||||
passbook (0.1.12) stable; urgency=medium
|
||||
|
||||
* bump version: 0.1.10-beta -> 0.1.11-beta
|
||||
* rewrite PasswordFactor to use backends setting instead of trying all backends
|
||||
* install updated helm release from local folder
|
||||
* disable automatic k8s deployment for now
|
||||
* fix OAuth Authorization View not requiring authentication
|
||||
|
||||
-- Jens Langhammer <jens.langhammer@beryju.org> Mon, 11 Mar 2019 08:50:29 +0000
|
||||
|
||||
passbook (0.1.11) stable; urgency=medium
|
||||
|
||||
* add group administration
|
||||
* bump version: 0.1.9-beta -> 0.1.10-beta
|
||||
* fix helm labels being on deployments and not pods
|
||||
* automatically deploy after release
|
||||
* use Django's Admin FilteredSelectMultiple for Group Membership
|
||||
* always use FilteredSelectMultiple for many-to-many fields
|
||||
* Add Group Member policy
|
||||
* add LDAP Group Membership Policy
|
||||
|
||||
-- Jens Langhammer <jens.langhammer@beryju.org> Sun, 10 Mar 2019 18:55:31 +0000
|
||||
|
||||
passbook (0.1.10) stable; urgency=high
|
||||
|
||||
* bump version: 0.1.7-beta -> 0.1.8-beta
|
||||
* consistently using PolicyEngine
|
||||
* add more Verbosity to PolicyEngine, rewrite SAML Authorisation check
|
||||
* slightly refactor Factor View, add more unittests
|
||||
* add impersonation middleware, add to templates
|
||||
* bump version: 0.1.8-beta -> 0.1.9-beta
|
||||
* fix k8s service routing http traffic to workers
|
||||
* Fix button on policy test page
|
||||
* better show loading state when testing a policy
|
||||
|
||||
-- Jens Langhammer <jens.langhammer@beryju.org> Sun, 10 Mar 2019 14:52:40 +0000
|
||||
|
||||
passbook (0.1.7) stable; urgency=medium
|
||||
|
||||
* bump version: 0.1.3-beta -> 0.1.4-beta
|
||||
* implicitly add kubernetes-healthcheck-host in helm configmap
|
||||
* fix debian build (again)
|
||||
* add PropertyMapping Model, add Subclass for SAML, test with AWS
|
||||
* add custom DynamicArrayField to better handle arrays
|
||||
* format data before inserting it
|
||||
* bump version: 0.1.4-beta -> 0.1.5-beta
|
||||
* fix static files missing for debian package
|
||||
* fix password not getting set on user import
|
||||
* remove audit's login attempt
|
||||
* add passing property to PolicyEngine
|
||||
* fix captcha factor not loading keys from Factor class
|
||||
* bump version: 0.1.5-beta -> 0.1.6-beta
|
||||
* fix MATCH_EXACT not working as intended
|
||||
* Improve access control for saml
|
||||
|
||||
-- Jens Langhammer <jens.langhammer@beryju.org> Fri, 08 Mar 2019 20:37:05 +0000
|
||||
|
||||
passbook (0.1.4) stable; urgency=medium
|
||||
|
||||
* initial debian package release
|
||||
|
||||
-- Jens Langhammer <jens.langhammer@beryju.org> Wed, 06 Mar 2019 18:22:41 +0000
|
1
debian/compat
vendored
@ -1 +0,0 @@
|
||||
10
|
20
debian/config
vendored
@ -1,20 +0,0 @@
|
||||
#!/bin/sh
|
||||
# config maintainer script for passbook
|
||||
set -e
|
||||
|
||||
# source debconf stuff
|
||||
. /usr/share/debconf/confmodule
|
||||
|
||||
dbc_first_version=1.0.0
|
||||
dbc_dbuser=passbook
|
||||
dbc_dbname=passbook
|
||||
|
||||
# source dbconfig-common shell library, and call the hook function
|
||||
if [ -f /usr/share/dbconfig-common/dpkg/config.pgsql ]; then
|
||||
. /usr/share/dbconfig-common/dpkg/config.pgsql
|
||||
dbc_go passbook "$@"
|
||||
fi
|
||||
|
||||
#DEBHELPER#
|
||||
|
||||
exit 0
|
14
debian/control
vendored
@ -1,14 +0,0 @@
|
||||
Source: passbook
|
||||
Section: admin
|
||||
Priority: optional
|
||||
Maintainer: BeryJu.org <support@beryju.org>
|
||||
Uploaders: Jens Langhammer <jens@beryju.org>, BeryJu.org <support@beryju.org>
|
||||
Build-Depends: debhelper (>= 10), dh-systemd (>= 1.5), dh-exec, wget, dh-exec, python3 (>= 3.5) | python3.6 | python3.7
|
||||
Standards-Version: 3.9.6
|
||||
|
||||
Package: passbook
|
||||
Architecture: all
|
||||
Recommends: mysql-server, rabbitmq-server
|
||||
Pre-Depends: adduser, libldap2-dev, libsasl2-dev
|
||||
Depends: python3 (>= 3.5) | python3.6 | python3.7, python3-pip, dbconfig-pgsql | dbconfig-no-thanks, ${misc:Depends}
|
||||
Description: Authentication Provider/Proxy supporting protocols like SAML, OAuth, LDAP and more.
|
22
debian/copyright
vendored
@ -1,22 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2019 BeryJu.org
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
4
debian/dirs
vendored
@ -1,4 +0,0 @@
|
||||
etc/passbook/
|
||||
etc/passbook/config.d/
|
||||
var/log/passbook/
|
||||
usr/share/passbook/
|
77
debian/etc/passbook/config.yml
vendored
@ -1,77 +0,0 @@
|
||||
http:
|
||||
host: 0.0.0.0
|
||||
port: 8000
|
||||
secret_key_file: /etc/passbook/secret_key
|
||||
log:
|
||||
level:
|
||||
console: INFO
|
||||
file: DEBUG
|
||||
file: /var/log/passbook/passbook.log
|
||||
debug: false
|
||||
secure_proxy_header:
|
||||
HTTP_X_FORWARDED_PROTO: https
|
||||
rabbitmq: guest:guest@localhost/passbook
|
||||
# Error reporting, sends stacktrace to sentry.services.beryju.org
|
||||
error_report_enabled: true
|
||||
|
||||
passbook:
|
||||
sign_up:
|
||||
# Enables signup, created users are stored in internal Database and created in LDAP if ldap.create_users is true
|
||||
enabled: true
|
||||
password_reset:
|
||||
# Enable password reset, passwords are reset in internal Database and in LDAP if ldap.reset_password is true
|
||||
enabled: true
|
||||
# Verification the user has to provide in order to be able to reset passwords. Can be any combination of `email`, `2fa`, `security_questions`
|
||||
verification:
|
||||
- email
|
||||
# Text used in title, on login page and multiple other places
|
||||
branding: passbook
|
||||
login:
|
||||
# Override URL used for logo
|
||||
logo_url: null
|
||||
# Override URL used for Background on Login page
|
||||
bg_url: null
|
||||
# Optionally add a subtext, placed below logo on the login page
|
||||
subtext: null
|
||||
footer:
|
||||
links:
|
||||
# Optionally add links to the footer on the login page
|
||||
# - name: test
|
||||
# href: https://test
|
||||
# Specify which fields can be used to authenticate. Can be any combination of `username` and `email`
|
||||
uid_fields:
|
||||
- username
|
||||
- email
|
||||
session:
|
||||
remember_age: 2592000 # 60 * 60 * 24 * 30, one month
|
||||
# Provider-specific settings
|
||||
ldap:
|
||||
# Which field from `uid_fields` maps to which LDAP Attribute
|
||||
login_field_map:
|
||||
username: sAMAccountName
|
||||
email: mail # or userPrincipalName
|
||||
user_attribute_map:
|
||||
active_directory:
|
||||
username: "%(sAMAccountName)s"
|
||||
email: "%(mail)s"
|
||||
name: "%(displayName)"
|
||||
oauth_client:
|
||||
# List of python packages with sources types to load.
|
||||
types:
|
||||
- passbook.oauth_client.source_types.discord
|
||||
- passbook.oauth_client.source_types.facebook
|
||||
- passbook.oauth_client.source_types.github
|
||||
- passbook.oauth_client.source_types.google
|
||||
- passbook.oauth_client.source_types.reddit
|
||||
- passbook.oauth_client.source_types.supervisr
|
||||
- passbook.oauth_client.source_types.twitter
|
||||
saml_idp:
|
||||
# List of python packages with provider types to load.
|
||||
types:
|
||||
- passbook.saml_idp.processors.generic
|
||||
- passbook.saml_idp.processors.aws
|
||||
- passbook.saml_idp.processors.gitlab
|
||||
- passbook.saml_idp.processors.nextcloud
|
||||
- passbook.saml_idp.processors.salesforce
|
||||
- passbook.saml_idp.processors.shibboleth
|
||||
- passbook.saml_idp.processors.wordpress_orange
|
2
debian/gbp.conf
vendored
@ -1,2 +0,0 @@
|
||||
[buildpackage]
|
||||
export-dir=../build-area
|
8
debian/install
vendored
@ -1,8 +0,0 @@
|
||||
passbook /usr/share/passbook/
|
||||
static /usr/share/passbook/
|
||||
manage.py /usr/share/passbook/
|
||||
passbook.sh /usr/share/passbook/
|
||||
vendor /usr/share/passbook/
|
||||
|
||||
debian/etc/passbook /etc/
|
||||
debian/templates/database.yml /usr/share/passbook/
|
14
debian/passbook-worker.service
vendored
@ -1,14 +0,0 @@
|
||||
[Unit]
|
||||
Description=passbook - Authentication Provider/Proxy (Background worker)
|
||||
After=network.target
|
||||
Requires=network.target
|
||||
|
||||
[Service]
|
||||
User=passbook
|
||||
Group=passbook
|
||||
WorkingDirectory=/usr/share/passbook
|
||||
Type=simple
|
||||
ExecStart=/usr/share/passbook/passbook.sh worker
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
14
debian/passbook.service
vendored
@ -1,14 +0,0 @@
|
||||
[Unit]
|
||||
Description=passbook - Authentication Provider/Proxy
|
||||
After=network.target
|
||||
Requires=network.target
|
||||
|
||||
[Service]
|
||||
User=passbook
|
||||
Group=passbook
|
||||
WorkingDirectory=/usr/share/passbook
|
||||
Type=simple
|
||||
ExecStart=/usr/share/passbook/passbook.sh web
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
36
debian/postinst
vendored
@ -1,36 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
. /usr/share/debconf/confmodule
|
||||
. /usr/share/dbconfig-common/dpkg/postinst.pgsql
|
||||
|
||||
# you can set the default database encoding to something else
|
||||
dbc_pgsql_createdb_encoding="UTF8"
|
||||
dbc_generate_include=template:/etc/passbook/config.d/database.yml
|
||||
dbc_generate_include_args="-o template_infile=/usr/share/passbook/database.yml"
|
||||
dbc_go passbook "$@"
|
||||
|
||||
if [ -z "`getent group passbook`" ]; then
|
||||
addgroup --quiet --system passbook
|
||||
fi
|
||||
if [ -z "`getent passwd passbook`" ]; then
|
||||
echo " * Creating user and group passbook..."
|
||||
adduser --quiet --system --home /usr/share/passbook --shell /bin/false --ingroup passbook --disabled-password --disabled-login --gecos "passbook User" passbook >> /var/log/passbook/passbook.log 2>&1
|
||||
fi
|
||||
echo " * Updating binary packages (psycopg2)"
|
||||
python3 -m pip install --target=/usr/share/passbook/vendor/ --no-cache-dir --upgrade --force-reinstall psycopg2 >> /var/log/passbook/passbook.log 2>&1
|
||||
if [ ! -f '/etc/passbook/secret_key' ]; then
|
||||
echo " * Generating Secret Key"
|
||||
python3 -c 'import random; result = "".join([random.choice("abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)") for i in range(50)]); print(result)' > /etc/passbook/secret_key 2> /dev/null
|
||||
fi
|
||||
chown -R passbook: /usr/share/passbook/
|
||||
chown -R passbook: /etc/passbook/
|
||||
chown -R passbook: /var/log/passbook/
|
||||
chmod 440 /etc/passbook/secret_key
|
||||
echo " * Running Database Migration"
|
||||
/usr/share/passbook/passbook.sh migrate
|
||||
echo " * A superuser can be created with this command '/usr/share/passbook/passbook.sh createsuperuser'"
|
||||
echo " * You should probably also adjust your settings in '/etc/passbook/config.yml'"
|
||||
|
||||
#DEBHELPER#
|
24
debian/postrm
vendored
@ -1,24 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
|
||||
if [ -f /usr/share/debconf/confmodule ]; then
|
||||
. /usr/share/debconf/confmodule
|
||||
fi
|
||||
if [ -f /usr/share/dbconfig-common/dpkg/postrm.pgsql ]; then
|
||||
. /usr/share/dbconfig-common/dpkg/postrm.pgsql
|
||||
dbc_go passbook "$@"
|
||||
fi
|
||||
|
||||
|
||||
if [ "$1" = "purge" ]; then
|
||||
if which ucf >/dev/null 2>&1; then
|
||||
ucf --purge /etc/passbook/config.d/database.yml
|
||||
ucfr --purge passbook /etc/passbook/config.d/database.yml
|
||||
fi
|
||||
rm -rf /etc/passbook/
|
||||
rm -rf /usr/share/passbook/
|
||||
fi
|
||||
|
||||
#DEBHELPER#
|
||||
|
10
debian/prerm
vendored
@ -1,10 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
|
||||
. /usr/share/debconf/confmodule
|
||||
. /usr/share/dbconfig-common/dpkg/prerm.pgsql
|
||||
dbc_go passbook "$@"
|
||||
|
||||
#DEBHELPER#
|
||||
|
27
debian/rules
vendored
@ -1,27 +0,0 @@
|
||||
#!/usr/bin/make -f
|
||||
|
||||
# Uncomment this to turn on verbose mode.
|
||||
# export DH_VERBOSE=1
|
||||
|
||||
%:
|
||||
dh $@ --with=systemd
|
||||
|
||||
build-arch:
|
||||
python3 -m pip install setuptools
|
||||
python3 -m pip install --target=vendor/ -r requirements.txt
|
||||
|
||||
override_dh_strip:
|
||||
dh_strip --exclude=psycopg2
|
||||
|
||||
override_dh_shlibdeps:
|
||||
dh_shlibdeps --exclude=psycopg2
|
||||
|
||||
override_dh_installinit:
|
||||
dh_installinit --name=passbook
|
||||
dh_installinit --name=passbook-worker
|
||||
dh_systemd_enable --name=passbook
|
||||
dh_systemd_enable --name=passbook-worker
|
||||
dh_systemd_start
|
||||
|
||||
# override_dh_usrlocal to do nothing
|
||||
override_dh_usrlocal:
|
1
debian/source/format
vendored
@ -1 +0,0 @@
|
||||
3.0 (native)
|
8
debian/templates/database.yml
vendored
@ -1,8 +0,0 @@
|
||||
databases:
|
||||
default:
|
||||
engine: django.db.backends.postgresql
|
||||
name: _DBC_DBNAME_
|
||||
user: _DBC_DBUSER_
|
||||
password: _DBC_DBPASS_
|
||||
host: _DBC_DBSERVER_
|
||||
port: _DBC_DBPORT_
|
@ -1,6 +1,6 @@
|
||||
apiVersion: v1
|
||||
appVersion: "0.1.21-beta"
|
||||
appVersion: "0.1.35-beta"
|
||||
description: A Helm chart for passbook.
|
||||
name: passbook
|
||||
version: "0.1.21-beta"
|
||||
icon: https://passbook.beryju.org/images/logo.png
|
||||
version: "0.1.35-beta"
|
||||
icon: https://git.beryju.org/uploads/-/system/project/avatar/108/logo.png
|
||||
|
1
helm/passbook/app-readme.md
Normal file
@ -0,0 +1 @@
|
||||
# passbook
|
BIN
helm/passbook/charts/redis-5.1.0.tgz
Normal file
98
helm/passbook/questions.yml
Normal file
@ -0,0 +1,98 @@
|
||||
---
|
||||
categories:
|
||||
- Authentication
|
||||
- SSO
|
||||
questions:
|
||||
- default: "true"
|
||||
variable: config.error_reporting
|
||||
type: boolean
|
||||
description: "Enable error-reporting to sentry.services.beryju.org"
|
||||
group: "passbook Configuration"
|
||||
label: "Error Reporting"
|
||||
####################################################################
|
||||
### PostgreSQL
|
||||
####################################################################
|
||||
- variable: postgresql.enabled
|
||||
default: true
|
||||
description: "Deploy a database server as part of this deployment, or set to false and configure an external database connection."
|
||||
type: boolean
|
||||
required: true
|
||||
label: Install PostgreSQL
|
||||
show_subquestion_if: true
|
||||
group: "Database Settings"
|
||||
subquestions:
|
||||
- variable: postgresql.postgresqlDatabase
|
||||
default: "passbook"
|
||||
description: "Database name to create"
|
||||
type: string
|
||||
label: PostgreSQL Database
|
||||
- variable: postgresql.postgresqlUsername
|
||||
default: "passbook"
|
||||
description: "Database user to create"
|
||||
type: string
|
||||
label: PostgreSQL User
|
||||
- variable: postgresql.postgresqlPassword
|
||||
default: ""
|
||||
description: "password will be auto-generated if not specified"
|
||||
type: password
|
||||
label: PostgreSQL Password
|
||||
- variable: externalDatabase.host
|
||||
default: ""
|
||||
description: "Host of the external database"
|
||||
type: string
|
||||
label: External Database Host
|
||||
show_if: "postgresql.enabled=false"
|
||||
group: "Database Settings"
|
||||
- variable: externalDatabase.user
|
||||
default: ""
|
||||
description: "Existing username in the external DB"
|
||||
type: string
|
||||
label: External Database username
|
||||
show_if: "postgresql.enabled=false"
|
||||
group: "Database Settings"
|
||||
- variable: externalDatabase.password
|
||||
default: ""
|
||||
description: "External database password"
|
||||
type: password
|
||||
label: External Database password
|
||||
show_if: "postgresql.enabled=false"
|
||||
group: "Database Settings"
|
||||
- variable: externalDatabase.database
|
||||
default: ""
|
||||
description: "Name of the existing database"
|
||||
type: string
|
||||
label: External Database
|
||||
show_if: "postgresql.enabled=false"
|
||||
group: "Database Settings"
|
||||
- variable: externalDatabase.port
|
||||
default: "3306"
|
||||
description: "External database port number"
|
||||
type: string
|
||||
label: External Database Port
|
||||
show_if: "postgresql.enabled=false"
|
||||
group: "Database Settings"
|
||||
- variable: postgresql.persistence.enabled
|
||||
default: false
|
||||
description: "Enable persistent volume for PostgreSQL"
|
||||
type: boolean
|
||||
required: true
|
||||
label: PostgreSQL Persistent Volume Enabled
|
||||
show_if: "postgresql.enabled=true"
|
||||
show_subquestion_if: true
|
||||
group: "Database Settings"
|
||||
subquestions:
|
||||
- variable: postgresql.master.persistence.size
|
||||
default: "8Gi"
|
||||
description: "PostgreSQL Persistent Volume Size"
|
||||
type: string
|
||||
label: PostgreSQL Volume Size
|
||||
- variable: postgresql.master.persistence.storageClass
|
||||
default: ""
|
||||
description: "If undefined or null, uses the default StorageClass. Default to null"
|
||||
type: storageclass
|
||||
label: Default StorageClass for PostgreSQL
|
||||
- variable: postgresql.master.persistence.existingClaim
|
||||
default: ""
|
||||
description: "If not empty, uses the specified existing PVC instead of creating new one"
|
||||
type: string
|
||||
label: Existing Persistent Volume Claim for PostgreSQL
|
@ -5,5 +5,8 @@ dependencies:
|
||||
- name: postgresql
|
||||
repository: https://kubernetes-charts.storage.googleapis.com/
|
||||
version: 3.10.1
|
||||
digest: sha256:c36e054785f7d706d7d3f525eb1b167dbc89b42f84da7fc167a18bbb6542c999
|
||||
generated: 2019-03-11T20:36:35.125079+01:00
|
||||
- name: redis
|
||||
repository: https://kubernetes-charts.storage.googleapis.com/
|
||||
version: 5.1.0
|
||||
digest: sha256:8bf68bc928a2e3c0f05139635be05fa0840554c7bde4cecd624fac78fb5fa5a3
|
||||
generated: 2019-03-21T11:06:51.553379+01:00
|
||||
|
@ -5,3 +5,6 @@ dependencies:
|
||||
- name: postgresql
|
||||
version: 3.10.1
|
||||
repository: https://kubernetes-charts.storage.googleapis.com/
|
||||
- name: redis
|
||||
version: 5.1.0
|
||||
repository: https://kubernetes-charts.storage.googleapis.com/
|
||||
|
@ -15,8 +15,8 @@ data:
|
||||
port: ''
|
||||
log:
|
||||
level:
|
||||
console: DEBUG
|
||||
file: DEBUG
|
||||
console: WARNING
|
||||
file: WARNING
|
||||
file: /dev/null
|
||||
syslog:
|
||||
host: 127.0.0.1
|
||||
@ -37,6 +37,7 @@ data:
|
||||
secure_proxy_header:
|
||||
HTTP_X_FORWARDED_PROTO: https
|
||||
rabbitmq: "user:{{ .Values.rabbitmq.rabbitmq.password }}@{{ .Release.Name }}-rabbitmq"
|
||||
redis: ":{{ .Values.redis.password }}@{{ .Release.Name }}-redis-master/0"
|
||||
# Error reporting, sends stacktrace to sentry.services.beryju.org
|
||||
error_report_enabled: {{ .Values.config.error_reporting }}
|
||||
|
||||
@ -46,6 +47,7 @@ data:
|
||||
secret_key: {{ randAlphaNum 50 }}
|
||||
{{- end }}
|
||||
|
||||
primary_domain: {{ .Values.primary_domain }}
|
||||
domains:
|
||||
{{- range .Values.ingress.hosts }}
|
||||
- {{ . | quote }}
|
||||
@ -123,6 +125,7 @@ data:
|
||||
- passbook.oauth_client.source_types.reddit
|
||||
- passbook.oauth_client.source_types.supervisr
|
||||
- passbook.oauth_client.source_types.twitter
|
||||
- passbook.oauth_client.source_types.azure_ad
|
||||
saml_idp:
|
||||
signing: true
|
||||
autosubmit: false
|
||||
@ -131,9 +134,4 @@ data:
|
||||
# List of python packages with provider types to load.
|
||||
types:
|
||||
- passbook.saml_idp.processors.generic
|
||||
- passbook.saml_idp.processors.aws
|
||||
- passbook.saml_idp.processors.gitlab
|
||||
- passbook.saml_idp.processors.nextcloud
|
||||
- passbook.saml_idp.processors.salesforce
|
||||
- passbook.saml_idp.processors.shibboleth
|
||||
- passbook.saml_idp.processors.wordpress_orange
|
||||
|
@ -26,7 +26,7 @@ spec:
|
||||
name: {{ include "passbook.fullname" . }}-config
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
image: "docker.pkg.beryju.org/passbook:{{ .Values.image.tag }}"
|
||||
image: "docker.beryju.org/passbook/server:{{ .Values.image.tag }}"
|
||||
imagePullPolicy: IfNotPresent
|
||||
command: ["/bin/sh","-c"]
|
||||
args: ["./manage.py migrate && ./manage.py web"]
|
||||
|
@ -26,7 +26,7 @@ spec:
|
||||
name: {{ include "passbook.fullname" . }}-config
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
image: "docker.pkg.beryju.org/passbook:{{ .Values.image.tag }}"
|
||||
image: "docker.beryju.org/passbook/server:{{ .Values.image.tag }}"
|
||||
imagePullPolicy: IfNotPresent
|
||||
command: ["./manage.py", "worker"]
|
||||
ports:
|
||||
|
@ -5,7 +5,7 @@
|
||||
replicaCount: 1
|
||||
|
||||
image:
|
||||
tag: 0.1.21-beta
|
||||
tag: 0.1.35-beta
|
||||
|
||||
nameOverride: ""
|
||||
|
||||
|
@ -1,7 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Check if this file is a symlink, if so, read real base dir
|
||||
BASE_DIR=$(dirname $(readlink -f ${BASH_SOURCE[0]}))
|
||||
|
||||
cd $BASE_DIR
|
||||
PYTHONPATH="${BASE_DIR}/vendor/" python3 manage.py $@
|
@ -1,2 +1,2 @@
|
||||
"""passbook"""
|
||||
__version__ = '0.1.21-beta'
|
||||
__version__ = '0.1.35-beta'
|
||||
|
@ -1,2 +1,2 @@
|
||||
"""passbook admin"""
|
||||
__version__ = '0.1.21-beta'
|
||||
__version__ = '0.1.35-beta'
|
||||
|
31
passbook/admin/templates/administration/debug/request.html
Normal file
@ -0,0 +1,31 @@
|
||||
{% extends "administration/base.html" %}
|
||||
|
||||
{% load i18n %}
|
||||
{% load utils %}
|
||||
|
||||
{% block title %}
|
||||
{% title %}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="container">
|
||||
<h1><span class="pficon-applications"></span> {% trans "Request" %}</h1>
|
||||
<hr>
|
||||
<table class="table table-striped table-bordered">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>{% trans 'Key' %}</th>
|
||||
<th>{% trans 'Value' %}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for key, value in request_dict.items %}
|
||||
<tr>
|
||||
<td>{{ key }}</td>
|
||||
<td>{{ value }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{% endblock %}
|
@ -152,10 +152,8 @@
|
||||
<div class="col-xs-6 col-sm-2 col-md-2">
|
||||
<div class="card-pf card-pf-accented card-pf-aggregate-status">
|
||||
<h2 class="card-pf-title">
|
||||
<a href="#">
|
||||
<span class="pficon-bundle"></span>
|
||||
<span class="card-pf-aggregate-status-count"></span> {% trans 'Version' %}
|
||||
</a>
|
||||
<span class="pficon-bundle"></span>
|
||||
<span class="card-pf-aggregate-status-count"></span> {% trans 'Version' %}
|
||||
</h2>
|
||||
<div class="card-pf-body">
|
||||
<p class="card-pf-aggregate-status-notifications">
|
||||
@ -192,5 +190,59 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-xs-6 col-sm-2 col-md-2">
|
||||
<div class="card-pf card-pf-accented card-pf-aggregate-status">
|
||||
<h2 class="card-pf-title">
|
||||
<span class="pficon-server"></span>
|
||||
<span class="card-pf-aggregate-status-count"></span> {% trans 'Cached Policies' %}
|
||||
</h2>
|
||||
<div class="card-pf-body">
|
||||
<p class="card-pf-aggregate-status-notifications">
|
||||
<span class="card-pf-aggregate-status-notification">
|
||||
<a href="#" data-toggle="modal" data-target="#clearCacheMOdal">
|
||||
{% if cached_policies < 1 %}
|
||||
<span class="pficon-warning-triangle-o" data-toggle="tooltip" data-placement="right"
|
||||
title="{% trans 'No policies cached. Users may experience slow response times.' %}"></span> {{ cached_policies }}
|
||||
{% else %}
|
||||
<span class="pficon pficon-ok"></span>{{ cached_policies }}
|
||||
{% endif %}
|
||||
</a>
|
||||
</span>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal fade" id="clearCacheMOdal" tabindex="-1" role="dialog" aria-labelledby="clearCacheMOdalLabel" aria-hidden="true">
|
||||
<div class="modal-dialog">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">
|
||||
<span class="pficon pficon-close"></span>
|
||||
</button>
|
||||
<h4 class="modal-title" id="clearCacheMOdalLabel">{% trans 'Clear Cache' %}</h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<form method="post" id="clearForm">
|
||||
{% csrf_token %}
|
||||
<input type="hidden" name="clear">
|
||||
<p>
|
||||
{% blocktrans %}
|
||||
Are you sure you want to clear the cache? This includes all user sessions and all cached Policy results.
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
<h3>
|
||||
{% blocktrans %}
|
||||
This will also log you out.
|
||||
{% endblocktrans %}
|
||||
</h3>
|
||||
</form>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
|
||||
<button form="clearForm" type="submit" type="button" class="btn btn-danger">{% trans 'Clear' %}</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
@ -36,7 +36,7 @@
|
||||
<tbody>
|
||||
{% for property_mapping in object_list %}
|
||||
<tr>
|
||||
<td>{{ property_mapping.name }} ({{ property_mapping.slug }})</td>
|
||||
<td>{{ property_mapping.name }}</td>
|
||||
<td>{{ property_mapping|verbose_name }}</td>
|
||||
<td>
|
||||
<a class="btn btn-default btn-sm"
|
||||
|
@ -57,6 +57,10 @@
|
||||
<a class="btn btn-default btn-sm"
|
||||
href="{{ href }}?back={{ request.get_full_path }}">{% trans name %}</a>
|
||||
{% endfor %}
|
||||
{% get_htmls provider as htmls %}
|
||||
{% for html in htmls %}
|
||||
{{ html|safe }}
|
||||
{% endfor %}
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
|
@ -5,6 +5,8 @@ from logging import getLogger
|
||||
from django import template
|
||||
from django.db.models import Model
|
||||
|
||||
from passbook.lib.utils.template import render_to_string
|
||||
|
||||
register = template.Library()
|
||||
LOGGER = getLogger(__name__)
|
||||
|
||||
@ -29,3 +31,24 @@ def get_links(model_instance):
|
||||
pass
|
||||
|
||||
return links
|
||||
|
||||
|
||||
@register.simple_tag(takes_context=True)
|
||||
def get_htmls(context, model_instance):
|
||||
"""Find all html_ methods on an object instance, run them and return as dict"""
|
||||
prefix = 'html_'
|
||||
htmls = []
|
||||
|
||||
if not isinstance(model_instance, Model):
|
||||
LOGGER.warning("Model %s is not instance of Model", model_instance)
|
||||
return htmls
|
||||
|
||||
try:
|
||||
for name, method in inspect.getmembers(model_instance, predicate=inspect.ismethod):
|
||||
if name.startswith(prefix):
|
||||
template, _context = method(context.get('request'))
|
||||
htmls.append(render_to_string(template, _context))
|
||||
except NotImplementedError:
|
||||
pass
|
||||
|
||||
return htmls
|
||||
|
@ -1,4 +1,6 @@
|
||||
"""passbook administration overview"""
|
||||
from django.core.cache import cache
|
||||
from django.shortcuts import redirect, reverse
|
||||
from django.views.generic import TemplateView
|
||||
|
||||
from passbook.admin.mixins import AdminRequiredMixin
|
||||
@ -13,6 +15,13 @@ class AdministrationOverviewView(AdminRequiredMixin, TemplateView):
|
||||
|
||||
template_name = 'administration/overview.html'
|
||||
|
||||
def post(self, *args, **kwargs):
|
||||
"""Handle post (clear cache from modal)"""
|
||||
if 'clear' in self.request.POST:
|
||||
cache.clear()
|
||||
return redirect(reverse('passbook_core:auth-login'))
|
||||
return self.get(*args, **kwargs)
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
kwargs['application_count'] = len(Application.objects.all())
|
||||
kwargs['policy_count'] = len(Policy.objects.all())
|
||||
@ -25,4 +34,6 @@ class AdministrationOverviewView(AdminRequiredMixin, TemplateView):
|
||||
kwargs['worker_count'] = len(CELERY_APP.control.ping(timeout=0.5))
|
||||
kwargs['providers_without_application'] = Provider.objects.filter(application=None)
|
||||
kwargs['policies_without_attachment'] = len(Policy.objects.filter(policymodel__isnull=True))
|
||||
kwargs['cached_policies'] = len(cache.keys('policy_*'))
|
||||
print(cache.keys('*'))
|
||||
return super().get_context_data(**kwargs)
|
||||
|
@ -1,2 +1,2 @@
|
||||
"""passbook api"""
|
||||
__version__ = '0.1.21-beta'
|
||||
__version__ = '0.1.35-beta'
|
||||
|
BIN
passbook/app_gw/.DS_Store
vendored
Normal file
2
passbook/app_gw/__init__.py
Normal file
@ -0,0 +1,2 @@
|
||||
"""passbook Application Security Gateway Header"""
|
||||
__version__ = '0.1.35-beta'
|
5
passbook/app_gw/admin.py
Normal file
@ -0,0 +1,5 @@
|
||||
"""passbook Application Security Gateway model admin"""
|
||||
|
||||
from passbook.lib.admin import admin_autoregister
|
||||
|
||||
admin_autoregister('passbook_app_gw')
|
16
passbook/app_gw/apps.py
Normal file
@ -0,0 +1,16 @@
|
||||
"""passbook Application Security Gateway app"""
|
||||
from importlib import import_module
|
||||
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class PassbookApplicationApplicationGatewayConfig(AppConfig):
|
||||
"""passbook app_gw app"""
|
||||
|
||||
name = 'passbook.app_gw'
|
||||
label = 'passbook_app_gw'
|
||||
verbose_name = 'passbook Application Security Gateway'
|
||||
mountpoint = 'app_gw/'
|
||||
|
||||
def ready(self):
|
||||
import_module('passbook.app_gw.signals')
|
66
passbook/app_gw/forms.py
Normal file
@ -0,0 +1,66 @@
|
||||
"""passbook Application Security Gateway Forms"""
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from django import forms
|
||||
from django.contrib.admin.widgets import FilteredSelectMultiple
|
||||
from django.forms import ValidationError
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from passbook.app_gw.models import ApplicationGatewayProvider, RewriteRule
|
||||
from passbook.lib.fields import DynamicArrayField
|
||||
|
||||
|
||||
class ApplicationGatewayProviderForm(forms.ModelForm):
|
||||
"""Security Gateway Provider form"""
|
||||
|
||||
def clean_server_name(self):
|
||||
"""Check if server_name is in DB already, since
|
||||
Postgres ArrayField doesn't support keys."""
|
||||
current = self.cleaned_data.get('server_name')
|
||||
if ApplicationGatewayProvider.objects \
|
||||
.filter(server_name__overlap=current) \
|
||||
.exclude(pk=self.instance.pk).exists():
|
||||
raise ValidationError(_("Server Name already in use."))
|
||||
return current
|
||||
|
||||
def clean_upstream(self):
|
||||
"""Check that upstream begins with http(s)"""
|
||||
for upstream in self.cleaned_data.get('upstream'):
|
||||
_parsed_url = urlparse(upstream)
|
||||
|
||||
if _parsed_url.scheme not in ('http', 'https'):
|
||||
raise ValidationError(_("URL Scheme must be either http or https"))
|
||||
return self.cleaned_data.get('upstream')
|
||||
|
||||
class Meta:
|
||||
|
||||
model = ApplicationGatewayProvider
|
||||
fields = ['server_name', 'upstream', 'enabled', 'authentication_header',
|
||||
'default_content_type', 'upstream_ssl_verification', 'property_mappings']
|
||||
widgets = {
|
||||
'authentication_header': forms.TextInput(),
|
||||
'default_content_type': forms.TextInput(),
|
||||
'property_mappings': FilteredSelectMultiple(_('Property Mappings'), False)
|
||||
}
|
||||
field_classes = {
|
||||
'server_name': DynamicArrayField,
|
||||
'upstream': DynamicArrayField
|
||||
}
|
||||
labels = {
|
||||
'upstream_ssl_verification': _('Verify upstream SSL Certificates?'),
|
||||
'property_mappings': _('Rewrite Rules')
|
||||
}
|
||||
|
||||
class RewriteRuleForm(forms.ModelForm):
|
||||
"""Rewrite Rule Form"""
|
||||
|
||||
class Meta:
|
||||
|
||||
model = RewriteRule
|
||||
fields = ['name', 'match', 'halt', 'replacement', 'redirect', 'conditions']
|
||||
widgets = {
|
||||
'name': forms.TextInput(),
|
||||
'match': forms.TextInput(attrs={'data-is-monospace': True}),
|
||||
'replacement': forms.TextInput(attrs={'data-is-monospace': True}),
|
||||
'conditions': FilteredSelectMultiple(_('Conditions'), False)
|
||||
}
|
33
passbook/app_gw/middleware.py
Normal file
@ -0,0 +1,33 @@
|
||||
"""passbook app_gw middleware"""
|
||||
from django.views.generic import RedirectView
|
||||
|
||||
from passbook.app_gw.proxy.handler import RequestHandler
|
||||
from passbook.lib.config import CONFIG
|
||||
|
||||
|
||||
class ApplicationGatewayMiddleware:
|
||||
"""Check if request should be proxied or handeled normally"""
|
||||
|
||||
_app_gw_cache = {}
|
||||
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
# Rudimentary cache
|
||||
host_header = request.META.get('HTTP_HOST')
|
||||
if host_header not in self._app_gw_cache:
|
||||
self._app_gw_cache[host_header] = RequestHandler.find_app_gw_for_request(request)
|
||||
if self._app_gw_cache[host_header]:
|
||||
return self.dispatch(request, self._app_gw_cache[host_header])
|
||||
return self.get_response(request)
|
||||
|
||||
def dispatch(self, request, app_gw):
|
||||
"""Build proxied request and pass to upstream"""
|
||||
handler = RequestHandler(app_gw, request)
|
||||
|
||||
if not handler.check_permission():
|
||||
to_url = 'https://%s/?next=%s' % (CONFIG.get('domains')[0], request.get_full_path())
|
||||
return RedirectView.as_view(url=to_url)(request)
|
||||
|
||||
return handler.get_response()
|
BIN
passbook/app_gw/migrations/.DS_Store
vendored
Normal file
50
passbook/app_gw/migrations/0001_initial.py
Normal file
@ -0,0 +1,50 @@
|
||||
# Generated by Django 2.1.7 on 2019-03-20 21:38
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0020_groupmembershippolicy'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='ApplicationGatewayProvider',
|
||||
fields=[
|
||||
('provider_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.Provider')),
|
||||
('server_name', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), size=None)),
|
||||
('upstream', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), size=None)),
|
||||
('enabled', models.BooleanField(default=True)),
|
||||
('authentication_header', models.TextField(default='X-Remote-User')),
|
||||
('default_content_type', models.TextField(default='application/octet-stream')),
|
||||
('upstream_ssl_verification', models.BooleanField(default=True)),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Application Gateway Provider',
|
||||
'verbose_name_plural': 'Application Gateway Providers',
|
||||
},
|
||||
bases=('passbook_core.provider',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='RewriteRule',
|
||||
fields=[
|
||||
('propertymapping_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.PropertyMapping')),
|
||||
('match', models.TextField()),
|
||||
('halt', models.BooleanField(default=False)),
|
||||
('replacement', models.TextField()),
|
||||
('redirect', models.CharField(choices=[('internal', 'Internal'), (301, 'Moved Permanently'), (302, 'Found')], max_length=50)),
|
||||
('conditions', models.ManyToManyField(to='passbook_core.Policy')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Rewrite Rule',
|
||||
'verbose_name_plural': 'Rewrite Rules',
|
||||
},
|
||||
bases=('passbook_core.propertymapping',),
|
||||
),
|
||||
]
|
18
passbook/app_gw/migrations/0002_auto_20190321_1521.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Generated by Django 2.1.7 on 2019-03-21 15:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_app_gw', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='rewriterule',
|
||||
name='conditions',
|
||||
field=models.ManyToManyField(blank=True, to='passbook_core.Policy'),
|
||||
),
|
||||
]
|
18
passbook/app_gw/migrations/0003_auto_20190411_1314.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Generated by Django 2.2 on 2019-04-11 13:14
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_app_gw', '0002_auto_20190321_1521'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='applicationgatewayprovider',
|
||||
name='authentication_header',
|
||||
field=models.TextField(blank=True, default='X-Remote-User'),
|
||||
),
|
||||
]
|
74
passbook/app_gw/models.py
Normal file
@ -0,0 +1,74 @@
|
||||
"""passbook app_gw models"""
|
||||
import re
|
||||
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from passbook.core.models import Policy, PropertyMapping, Provider
|
||||
|
||||
|
||||
class ApplicationGatewayProvider(Provider):
|
||||
"""Virtual server which proxies requests to any hostname in server_name to upstream"""
|
||||
|
||||
server_name = ArrayField(models.TextField())
|
||||
upstream = ArrayField(models.TextField())
|
||||
enabled = models.BooleanField(default=True)
|
||||
|
||||
authentication_header = models.TextField(default='X-Remote-User', blank=True)
|
||||
default_content_type = models.TextField(default='application/octet-stream')
|
||||
upstream_ssl_verification = models.BooleanField(default=True)
|
||||
|
||||
form = 'passbook.app_gw.forms.ApplicationGatewayProviderForm'
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""since this model has no name property, return a joined list of server_names as name"""
|
||||
return ', '.join(self.server_name)
|
||||
|
||||
def __str__(self):
|
||||
return "Application Gateway %s" % ', '.join(self.server_name)
|
||||
|
||||
class Meta:
|
||||
|
||||
verbose_name = _('Application Gateway Provider')
|
||||
verbose_name_plural = _('Application Gateway Providers')
|
||||
|
||||
|
||||
class RewriteRule(PropertyMapping):
|
||||
"""Rewrite requests matching `match` with `replacement`, if all polcies in `conditions` apply"""
|
||||
|
||||
REDIRECT_INTERNAL = 'internal'
|
||||
REDIRECT_PERMANENT = 301
|
||||
REDIRECT_FOUND = 302
|
||||
|
||||
REDIRECTS = (
|
||||
(REDIRECT_INTERNAL, _('Internal')),
|
||||
(REDIRECT_PERMANENT, _('Moved Permanently')),
|
||||
(REDIRECT_FOUND, _('Found')),
|
||||
)
|
||||
|
||||
match = models.TextField()
|
||||
halt = models.BooleanField(default=False)
|
||||
conditions = models.ManyToManyField(Policy, blank=True)
|
||||
replacement = models.TextField() # python formatted strings, use {match.1}
|
||||
redirect = models.CharField(max_length=50, choices=REDIRECTS)
|
||||
|
||||
form = 'passbook.app_gw.forms.RewriteRuleForm'
|
||||
|
||||
_matcher = None
|
||||
|
||||
@property
|
||||
def compiled_matcher(self):
|
||||
"""Cache the compiled regex in memory"""
|
||||
if not self._matcher:
|
||||
self._matcher = re.compile(self.match)
|
||||
return self._matcher
|
||||
|
||||
def __str__(self):
|
||||
return "Rewrite Rule %s" % self.name
|
||||
|
||||
class Meta:
|
||||
|
||||
verbose_name = _('Rewrite Rule')
|
||||
verbose_name_plural = _('Rewrite Rules')
|
0
passbook/app_gw/proxy/__init__.py
Normal file
8
passbook/app_gw/proxy/exceptions.py
Normal file
@ -0,0 +1,8 @@
|
||||
"""Exception classes"""
|
||||
|
||||
class ReverseProxyException(Exception):
|
||||
"""Base for revproxy exception"""
|
||||
|
||||
|
||||
class InvalidUpstream(ReverseProxyException):
|
||||
"""Invalid upstream set"""
|
225
passbook/app_gw/proxy/handler.py
Normal file
@ -0,0 +1,225 @@
|
||||
"""passbook app_gw request handler"""
|
||||
import mimetypes
|
||||
from logging import getLogger
|
||||
from random import SystemRandom
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import certifi
|
||||
import urllib3
|
||||
from django.core.cache import cache
|
||||
from django.utils.http import urlencode
|
||||
|
||||
from passbook.app_gw.models import ApplicationGatewayProvider
|
||||
from passbook.app_gw.proxy.exceptions import InvalidUpstream
|
||||
from passbook.app_gw.proxy.response import get_django_response
|
||||
from passbook.app_gw.proxy.rewrite import Rewriter
|
||||
from passbook.app_gw.proxy.utils import encode_items, normalize_request_headers
|
||||
from passbook.core.models import Application
|
||||
from passbook.core.policies import PolicyEngine
|
||||
|
||||
SESSION_UPSTREAM_KEY = 'passbook_app_gw_upstream'
|
||||
IGNORED_HOSTNAMES_KEY = 'passbook_app_gw_ignored'
|
||||
LOGGER = getLogger(__name__)
|
||||
QUOTE_SAFE = r'<.;>\(}*+|~=-$/_:^@)[{]&\'!,"`'
|
||||
ERRORS_MESSAGES = {
|
||||
'upstream-no-scheme': ("Upstream URL scheme must be either "
|
||||
"'http' or 'https' (%s).")
|
||||
}
|
||||
HTTP_NO_VERIFY = urllib3.PoolManager()
|
||||
HTTP = urllib3.PoolManager(
|
||||
cert_reqs='CERT_REQUIRED',
|
||||
ca_certs=certifi.where())
|
||||
IGNORED_HOSTS = cache.get(IGNORED_HOSTNAMES_KEY, [])
|
||||
POLICY_CACHE = {}
|
||||
|
||||
class RequestHandler:
|
||||
"""Forward requests"""
|
||||
|
||||
_parsed_url = None
|
||||
_request_headers = None
|
||||
|
||||
def __init__(self, app_gw, request):
|
||||
self.app_gw = app_gw
|
||||
self.request = request
|
||||
if self.app_gw.pk not in POLICY_CACHE:
|
||||
POLICY_CACHE[self.app_gw.pk] = self.app_gw.application.policies.all()
|
||||
|
||||
@staticmethod
|
||||
def find_app_gw_for_request(request):
|
||||
"""Check if a request should be proxied or forwarded to passbook"""
|
||||
# Check if hostname is in cached list of ignored hostnames
|
||||
# This saves us having to query the database on each request
|
||||
host_header = request.META.get('HTTP_HOST')
|
||||
if host_header in IGNORED_HOSTS:
|
||||
# LOGGER.debug("%s is ignored", host_header)
|
||||
return False
|
||||
# Look through all ApplicationGatewayProviders and check hostnames
|
||||
matches = ApplicationGatewayProvider.objects.filter(
|
||||
server_name__contains=[host_header],
|
||||
enabled=True)
|
||||
if not matches.exists():
|
||||
# No matching Providers found, add host header to ignored list
|
||||
IGNORED_HOSTS.append(host_header)
|
||||
cache.set(IGNORED_HOSTNAMES_KEY, IGNORED_HOSTS)
|
||||
# LOGGER.debug("Ignoring %s", host_header)
|
||||
return False
|
||||
# At this point we're certain there's a matching ApplicationGateway
|
||||
if len(matches) > 1:
|
||||
# This should never happen
|
||||
raise ValueError
|
||||
app_gw = matches.first()
|
||||
try:
|
||||
# Check if ApplicationGateway is associated with application
|
||||
getattr(app_gw, 'application')
|
||||
if app_gw:
|
||||
return app_gw
|
||||
except Application.DoesNotExist:
|
||||
pass
|
||||
# LOGGER.debug("ApplicationGateway not associated with Application")
|
||||
return True
|
||||
|
||||
def _get_upstream(self):
|
||||
"""Choose random upstream and save in session"""
|
||||
if SESSION_UPSTREAM_KEY not in self.request.session:
|
||||
self.request.session[SESSION_UPSTREAM_KEY] = {}
|
||||
if self.app_gw.pk not in self.request.session[SESSION_UPSTREAM_KEY]:
|
||||
upstream_index = int(SystemRandom().random() * len(self.app_gw.upstream))
|
||||
self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk] = upstream_index
|
||||
return self.app_gw.upstream[self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk]]
|
||||
|
||||
def get_upstream(self):
|
||||
"""Get upstream as parsed url"""
|
||||
upstream = self._get_upstream()
|
||||
|
||||
self._parsed_url = urlparse(upstream)
|
||||
|
||||
if self._parsed_url.scheme not in ('http', 'https'):
|
||||
raise InvalidUpstream(ERRORS_MESSAGES['upstream-no-scheme'] %
|
||||
upstream)
|
||||
|
||||
return upstream
|
||||
|
||||
def _format_path_to_redirect(self):
|
||||
# LOGGER.debug("Path before: %s", self.request.get_full_path())
|
||||
rewriter = Rewriter(self.app_gw, self.request)
|
||||
after = rewriter.build()
|
||||
# LOGGER.debug("Path after: %s", after)
|
||||
return after
|
||||
|
||||
def get_proxy_request_headers(self):
|
||||
"""Get normalized headers for the upstream
|
||||
Gets all headers from the original request and normalizes them.
|
||||
Normalization occurs by removing the ``HTTP_`` prefix and
replacing ``_`` with ``-``. Example: ``HTTP_ACCEPT_ENCODING``
becomes ``Accept-Encoding``.
|
||||
.. versionadded:: 0.9.1
|
||||
:param request: The original HTTPRequest instance
|
||||
:returns: Normalized headers for the upstream
|
||||
"""
|
||||
return normalize_request_headers(self.request)
|
||||
|
||||
def get_request_headers(self):
|
||||
"""Return request headers that will be sent to upstream.
|
||||
The header REMOTE_USER is set to the current user
|
||||
if AuthenticationMiddleware is enabled and
|
||||
the view's add_remote_user property is True.
|
||||
.. versionadded:: 0.9.8
|
||||
"""
|
||||
request_headers = self.get_proxy_request_headers()
|
||||
if not self.app_gw.authentication_header:
|
||||
return request_headers
|
||||
request_headers[self.app_gw.authentication_header] = self.request.user.get_username()
|
||||
# LOGGER.debug("%s set", self.app_gw.authentication_header)
|
||||
|
||||
return request_headers
|
||||
|
||||
def check_permission(self):
|
||||
"""Check if user is authenticated and has permission to access app"""
|
||||
if not hasattr(self.request, 'user'):
|
||||
return False
|
||||
if not self.request.user.is_authenticated:
|
||||
return False
|
||||
policy_engine = PolicyEngine(POLICY_CACHE[self.app_gw.pk])
|
||||
policy_engine.for_user(self.request.user).with_request(self.request).build()
|
||||
passing, _messages = policy_engine.result
|
||||
|
||||
return passing
|
||||
|
||||
def get_encoded_query_params(self):
|
||||
"""Return encoded query params to be used in proxied request"""
|
||||
get_data = encode_items(self.request.GET.lists())
|
||||
return urlencode(get_data)
|
||||
|
||||
def _created_proxy_response(self, path):
|
||||
request_payload = self.request.body
|
||||
|
||||
# LOGGER.debug("Request headers: %s", self._request_headers)
|
||||
|
||||
request_url = self.get_upstream() + path
|
||||
# LOGGER.debug("Request URL: %s", request_url)
|
||||
|
||||
if self.request.GET:
|
||||
request_url += '?' + self.get_encoded_query_params()
|
||||
# LOGGER.debug("Request URL: %s", request_url)
|
||||
|
||||
http = HTTP
|
||||
if not self.app_gw.upstream_ssl_verification:
|
||||
http = HTTP_NO_VERIFY
|
||||
|
||||
try:
|
||||
proxy_response = http.urlopen(self.request.method,
|
||||
request_url,
|
||||
redirect=False,
|
||||
retries=None,
|
||||
headers=self._request_headers,
|
||||
body=request_payload,
|
||||
decode_content=False,
|
||||
preload_content=False)
|
||||
# LOGGER.debug("Proxy response header: %s",
|
||||
# proxy_response.getheaders())
|
||||
except urllib3.exceptions.HTTPError as error:
|
||||
LOGGER.exception(error)
|
||||
raise
|
||||
|
||||
return proxy_response
|
||||
|
||||
def _replace_host_on_redirect_location(self, proxy_response):
|
||||
location = proxy_response.headers.get('Location')
|
||||
if location:
|
||||
if self.request.is_secure():
|
||||
scheme = 'https://'
|
||||
else:
|
||||
scheme = 'http://'
|
||||
request_host = scheme + self.request.META.get('HTTP_HOST')
|
||||
|
||||
upstream_host_http = 'http://' + self._parsed_url.netloc
|
||||
upstream_host_https = 'https://' + self._parsed_url.netloc
|
||||
|
||||
location = location.replace(upstream_host_http, request_host)
|
||||
location = location.replace(upstream_host_https, request_host)
|
||||
proxy_response.headers['Location'] = location
|
||||
# LOGGER.debug("Proxy response LOCATION: %s",
|
||||
# proxy_response.headers['Location'])
|
||||
|
||||
def _set_content_type(self, proxy_response):
|
||||
content_type = proxy_response.headers.get('Content-Type')
|
||||
if not content_type:
|
||||
content_type = (mimetypes.guess_type(self.request.path)[0] or
|
||||
self.app_gw.default_content_type)
|
||||
proxy_response.headers['Content-Type'] = content_type
|
||||
# LOGGER.debug("Proxy response CONTENT-TYPE: %s",
|
||||
# proxy_response.headers['Content-Type'])
|
||||
|
||||
def get_response(self):
|
||||
"""Pass request to upstream and return response"""
|
||||
self._request_headers = self.get_request_headers()
|
||||
|
||||
path = self._format_path_to_redirect()
|
||||
proxy_response = self._created_proxy_response(path)
|
||||
|
||||
self._replace_host_on_redirect_location(proxy_response)
|
||||
self._set_content_type(proxy_response)
|
||||
response = get_django_response(proxy_response, strict_cookies=False)
|
||||
|
||||
# LOGGER.debug("RESPONSE RETURNED: %s", response)
|
||||
return response
|
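The middleware that wires this handler into Django ('passbook.app_gw.middleware.ApplicationGatewayMiddleware', referenced in the settings diff further down) is not part of this diff; the following is only a rough sketch of how the methods above could compose:

# Rough sketch only; the real ApplicationGatewayMiddleware is not shown in this diff.
from django.http import HttpResponse

from passbook.app_gw.proxy.handler import RequestHandler


class ApplicationGatewayMiddlewareSketch:
    """Hand matching requests to RequestHandler, let everything else pass through."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        app_gw = RequestHandler.find_app_gw_for_request(request)
        if not app_gw or app_gw is True:
            # Hostname is ignored, or the gateway has no Application attached:
            # fall through to normal passbook view handling.
            return self.get_response(request)
        handler = RequestHandler(app_gw, request)
        if not handler.check_permission():
            return HttpResponse('Forbidden', status=403)
        return handler.get_response()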
63
passbook/app_gw/proxy/response.py
Normal file
@ -0,0 +1,63 @@
|
||||
"""response functions from django-revproxy"""
|
||||
import logging
|
||||
|
||||
from django.http import HttpResponse, StreamingHttpResponse
|
||||
|
||||
from passbook.app_gw.proxy.utils import (cookie_from_string,
|
||||
set_response_headers, should_stream)
|
||||
|
||||
#: Default number of bytes read per chunk when streaming a response
|
||||
DEFAULT_AMT = 2 ** 16
|
||||
|
||||
logger = logging.getLogger('revproxy.response')
|
||||
|
||||
|
||||
def get_django_response(proxy_response, strict_cookies=False):
|
||||
"""This method is used to create an appropriate response based on the
|
||||
Content-Length of the proxy_response. If the content is bigger than
|
||||
MIN_STREAMING_LENGTH, which is found on utils.py,
|
||||
than django.http.StreamingHttpResponse will be created,
|
||||
else a django.http.HTTPResponse will be created instead
|
||||
|
||||
:param proxy_response: An Instance of urllib3.response.HTTPResponse that
|
||||
will create an appropriate response
|
||||
:param strict_cookies: Whether to only accept RFC-compliant cookies
|
||||
:returns: Returns an appropriate response based on the proxy_response
|
||||
content-length
|
||||
"""
|
||||
status = proxy_response.status
|
||||
headers = proxy_response.headers
|
||||
|
||||
logger.debug('Proxy response headers: %s', headers)
|
||||
|
||||
content_type = headers.get('Content-Type')
|
||||
|
||||
logger.debug('Content-Type: %s', content_type)
|
||||
|
||||
if should_stream(proxy_response):
|
||||
logger.info('Content-Length is bigger than %s', DEFAULT_AMT)
|
||||
response = StreamingHttpResponse(proxy_response.stream(DEFAULT_AMT),
|
||||
status=status,
|
||||
content_type=content_type)
|
||||
else:
|
||||
content = proxy_response.data or b''
|
||||
response = HttpResponse(content, status=status,
|
||||
content_type=content_type)
|
||||
|
||||
logger.info('Normalizing response headers')
|
||||
set_response_headers(response, headers)
|
||||
|
||||
logger.debug('Response headers: %s', getattr(response, '_headers'))
|
||||
|
||||
cookies = proxy_response.headers.getlist('set-cookie')
|
||||
logger.info('Checking for invalid cookies')
|
||||
for cookie_string in cookies:
|
||||
cookie_dict = cookie_from_string(cookie_string,
|
||||
strict_cookies=strict_cookies)
|
||||
# if cookie is invalid cookie_dict will be None
|
||||
if cookie_dict:
|
||||
response.set_cookie(**cookie_dict)
|
||||
|
||||
logger.debug('Response cookies: %s', response.cookies)
|
||||
|
||||
return response
|
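A hedged usage sketch of the function above, pairing it with urllib3 the same way handler.py does; the upstream URL is a placeholder and error handling is omitted:

# Sketch only; 'http://upstream.local/some/path' is a made-up upstream URL.
import urllib3

from passbook.app_gw.proxy.response import get_django_response

http = urllib3.PoolManager()
proxy_response = http.urlopen(
    'GET', 'http://upstream.local/some/path',
    redirect=False,
    preload_content=False,  # leave the body unread so large responses can be streamed
    decode_content=False,
)
django_response = get_django_response(proxy_response, strict_cookies=False)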
42
passbook/app_gw/proxy/rewrite.py
Normal file
@ -0,0 +1,42 @@
|
||||
"""passbook app_gw rewriter"""
|
||||
|
||||
from passbook.app_gw.models import RewriteRule
|
||||
|
||||
RULE_CACHE = {}
|
||||
|
||||
class Context:
|
||||
"""Empty class which we dynamically add attributes to"""
|
||||
|
||||
class Rewriter:
|
||||
"""Apply rewrites"""
|
||||
|
||||
__application = None
|
||||
__request = None
|
||||
|
||||
def __init__(self, application, request):
|
||||
self.__application = application
|
||||
self.__request = request
|
||||
if self.__application.pk not in RULE_CACHE:
|
||||
RULE_CACHE[self.__application.pk] = RewriteRule.objects.filter(
|
||||
provider__in=[self.__application])
|
||||
|
||||
def __build_context(self, matches):
|
||||
"""Build object with .0, .1, etc as groups and give access to request"""
|
||||
context = Context()
|
||||
for index, group_match in enumerate(matches.groups()):
|
||||
setattr(context, "g%d" % (index + 1), group_match)
|
||||
setattr(context, 'request', self.__request)
|
||||
return context
|
||||
|
||||
def build(self):
|
||||
"""Run all rules over path and return final path"""
|
||||
path = self.__request.get_full_path()
|
||||
for rule in RULE_CACHE[self.__application.pk]:
|
||||
matches = rule.compiled_matcher.search(path)
|
||||
if not matches:
|
||||
continue
|
||||
replace_context = self.__build_context(matches)
|
||||
path = rule.replacement.format(context=replace_context)
|
||||
if rule.halt:
|
||||
return path
|
||||
return path
|
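A self-contained illustration of the group-to-attribute trick used by __build_context and build, without the Django parts; the pattern and path are made up:

# Standalone illustration of how {context.g1} gets its value; no Django required.
import re


class Context:
    """Empty container for dynamically added attributes"""


pattern = re.compile(r'^/old/(.*)$')
matches = pattern.search('/old/profile?tab=ssh')

context = Context()
for index, group_match in enumerate(matches.groups()):
    setattr(context, "g%d" % (index + 1), group_match)

print('/new/{context.g1}'.format(context=context))  # -> /new/profile?tab=ssh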
227
passbook/app_gw/proxy/utils.py
Normal file
@ -0,0 +1,227 @@
|
||||
"""Utils from django-revproxy, slightly adjusted"""
|
||||
import logging
|
||||
import re
|
||||
from wsgiref.util import is_hop_by_hop
|
||||
|
||||
try:
|
||||
from http.cookies import SimpleCookie
|
||||
COOKIE_PREFIX = ''
|
||||
except ImportError:
|
||||
from Cookie import SimpleCookie
|
||||
COOKIE_PREFIX = 'Set-Cookie: '
|
||||
|
||||
|
||||
#: List containing string constant that are used to represent headers that can
|
||||
#: be ignored in the required_header function
|
||||
IGNORE_HEADERS = (
|
||||
'HTTP_ACCEPT_ENCODING', # We want content to be uncompressed so
|
||||
# we remove the Accept-Encoding from
|
||||
# original request
|
||||
'HTTP_HOST',
|
||||
'HTTP_REMOTE_USER',
|
||||
)
|
||||
|
||||
|
||||
# Default from HTTP RFC 2616
|
||||
# See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
|
||||
#: Variable that represent the default charset used
|
||||
DEFAULT_CHARSET = 'latin-1'
|
||||
|
||||
#: List containing string constants that represents possible html content type
|
||||
HTML_CONTENT_TYPES = (
|
||||
'text/html',
|
||||
'application/xhtml+xml'
|
||||
)
|
||||
|
||||
#: Variable used to represent a minimal content size required for response
|
||||
#: to be turned into stream
|
||||
MIN_STREAMING_LENGTH = 4 * 1024 # 4KB
|
||||
|
||||
#: Regex used to find charset in a html content type
|
||||
_get_charset_re = re.compile(r';\s*charset=(?P<charset>[^\s;]+)', re.I)
|
||||
|
||||
|
||||
def is_html_content_type(content_type):
|
||||
"""Function used to verify if the parameter is a proper html content type
|
||||
|
||||
:param content_type: String variable that represent a content-type
|
||||
:returns: A boolean value stating if the content_type is a valid html
|
||||
content type
|
||||
"""
|
||||
for html_content_type in HTML_CONTENT_TYPES:
|
||||
if content_type.startswith(html_content_type):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def should_stream(proxy_response):
|
||||
"""Function to verify if the proxy_response must be converted into
|
||||
a stream.This will be done by checking the proxy_response content-length
|
||||
and verify if its length is bigger than one stipulated
|
||||
by MIN_STREAMING_LENGTH.
|
||||
|
||||
:param proxy_response: An Instance of urllib3.response.HTTPResponse
|
||||
:returns: A boolean stating if the proxy_response should
|
||||
be treated as a stream
|
||||
"""
|
||||
content_type = proxy_response.headers.get('Content-Type')
|
||||
|
||||
if is_html_content_type(content_type):
|
||||
return False
|
||||
|
||||
try:
|
||||
content_length = int(proxy_response.headers.get('Content-Length', 0))
|
||||
except ValueError:
|
||||
content_length = 0
|
||||
|
||||
if not content_length or content_length > MIN_STREAMING_LENGTH:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def get_charset(content_type):
|
||||
"""Function used to retrieve the charset from a content-type.If there is no
|
||||
charset in the content type then the charset defined on DEFAULT_CHARSET
|
||||
will be returned
|
||||
|
||||
:param content_type: A string containing a Content-Type header
|
||||
:returns: A string containing the charset
|
||||
"""
|
||||
if not content_type:
|
||||
return DEFAULT_CHARSET
|
||||
|
||||
matched = _get_charset_re.search(content_type)
|
||||
if matched:
|
||||
# Extract the charset and strip its double quotes
|
||||
return matched.group('charset').replace('"', '')
|
||||
return DEFAULT_CHARSET
|
||||
|
||||
|
||||
def required_header(header):
|
||||
"""Function that verify if the header parameter is a essential header
|
||||
|
||||
:param header: A string represented a header
|
||||
:returns: A boolean value that represent if the header is required
|
||||
"""
|
||||
if header in IGNORE_HEADERS:
|
||||
return False
|
||||
|
||||
if header.startswith('HTTP_') or header == 'CONTENT_TYPE':
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def set_response_headers(response, response_headers):
|
||||
"""Set response's header"""
|
||||
for header, value in response_headers.items():
|
||||
if is_hop_by_hop(header) or header.lower() == 'set-cookie':
|
||||
continue
|
||||
|
||||
response[header.title()] = value
|
||||
|
||||
logger.debug('Response headers: %s', getattr(response, '_headers'))
|
||||
|
||||
|
||||
def normalize_request_headers(request):
|
||||
"""Function used to transform header, replacing 'HTTP\\_' to ''
|
||||
and replace '_' to '-'
|
||||
|
||||
:param request: A HttpRequest that will be transformed
|
||||
:returns: A dictionary with the normalized headers
|
||||
"""
|
||||
norm_headers = {}
|
||||
for header, value in request.META.items():
|
||||
if required_header(header):
|
||||
norm_header = header.replace('HTTP_', '').title().replace('_', '-')
|
||||
norm_headers[norm_header] = value
|
||||
|
||||
return norm_headers
|
||||
|
||||
|
||||
def encode_items(items):
|
||||
"""Function that encode all elements in the list of items passed as
|
||||
a parameter
|
||||
|
||||
:param items: A list of tuple
|
||||
:returns: A list of tuple with all items encoded in 'utf-8'
|
||||
"""
|
||||
encoded = []
|
||||
for key, values in items:
|
||||
for value in values:
|
||||
encoded.append((key.encode('utf-8'), value.encode('utf-8')))
|
||||
return encoded
|
||||
|
||||
|
||||
logger = logging.getLogger('revproxy.cookies')
|
||||
|
||||
|
||||
def cookie_from_string(cookie_string, strict_cookies=False):
|
||||
"""Parser for HTTP header set-cookie
|
||||
The return from this function will be used as parameters for
|
||||
django's response.set_cookie method. Because set_cookie doesn't
|
||||
have parameter comment, this cookie attribute will be ignored.
|
||||
|
||||
:param cookie_string: A string representing a valid cookie
|
||||
:param strict_cookies: Whether to only accept RFC-compliant cookies
|
||||
:returns: A dictionary containing the cookie_string attributes
|
||||
"""
|
||||
|
||||
if strict_cookies:
|
||||
|
||||
cookies = SimpleCookie(COOKIE_PREFIX + cookie_string)
|
||||
if not cookies.keys():
|
||||
return None
|
||||
cookie_name, = cookies.keys()
|
||||
cookie_dict = {k: v for k, v in cookies[cookie_name].items()
|
||||
if v and k != 'comment'}
|
||||
cookie_dict['key'] = cookie_name
|
||||
cookie_dict['value'] = cookies[cookie_name].value
|
||||
return cookie_dict
|
||||
valid_attrs = ('path', 'domain', 'comment', 'expires',
|
||||
'max_age', 'httponly', 'secure')
|
||||
|
||||
cookie_dict = {}
|
||||
|
||||
cookie_parts = cookie_string.split(';')
|
||||
try:
|
||||
cookie_dict['key'], cookie_dict['value'] = \
|
||||
cookie_parts[0].split('=', 1)
|
||||
cookie_dict['value'] = cookie_dict['value'].replace('"', '')
|
||||
|
||||
except ValueError:
|
||||
logger.warning('Invalid cookie: `%s`', cookie_string)
|
||||
return None
|
||||
|
||||
if cookie_dict['value'].startswith('='):
|
||||
logger.warning('Invalid cookie: `%s`', cookie_string)
|
||||
return None
|
||||
|
||||
for part in cookie_parts[1:]:
|
||||
if '=' in part:
|
||||
attr, value = part.split('=', 1)
|
||||
value = value.strip()
|
||||
else:
|
||||
attr = part
|
||||
value = ''
|
||||
|
||||
attr = attr.strip().lower()
|
||||
if not attr:
|
||||
continue
|
||||
|
||||
if attr in valid_attrs:
|
||||
if attr in ('httponly', 'secure'):
|
||||
cookie_dict[attr] = True
|
||||
elif attr == 'comment':
|
||||
# ignoring comment attr as explained in the
|
||||
# function docstring
|
||||
continue
|
||||
else:
|
||||
cookie_dict[attr] = value
|
||||
else:
|
||||
logger.warning('Unknown cookie attribute %s', attr)
|
||||
|
||||
return cookie_dict
|
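A worked example of the non-strict branch of cookie_from_string above; the cookie values are invented, and the resulting dictionary is what response.set_cookie receives:

# Illustrative call; the Set-Cookie string below is made up.
from passbook.app_gw.proxy.utils import cookie_from_string

cookie = cookie_from_string('sessionid=abc123; Path=/; HttpOnly')
# -> {'key': 'sessionid', 'value': 'abc123', 'path': '/', 'httponly': True}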
7
passbook/app_gw/requirements.txt
Normal file
@ -0,0 +1,7 @@
|
||||
django-revproxy
urllib3[secure]
channels
service_identity
websocket-client
daphne<2.3.0
asgiref~=2.3
|
5
passbook/app_gw/settings.py
Normal file
@ -0,0 +1,5 @@
|
||||
"""Application Security Gateway settings"""
|
||||
INSTALLED_APPS = [
|
||||
'channels'
|
||||
]
|
||||
ASGI_APPLICATION = "passbook.app_gw.websocket.routing.application"
|
20
passbook/app_gw/signals.py
Normal file
@ -0,0 +1,20 @@
|
||||
"""passbook app_gw cache clean signals"""
|
||||
|
||||
from logging import getLogger
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.db.models.signals import post_save
|
||||
from django.dispatch import receiver
|
||||
|
||||
from passbook.app_gw.models import ApplicationGatewayProvider
|
||||
from passbook.app_gw.proxy.handler import IGNORED_HOSTNAMES_KEY
|
||||
|
||||
LOGGER = getLogger(__name__)
|
||||
|
||||
@receiver(post_save)
|
||||
# pylint: disable=unused-argument
|
||||
def invalidate_app_gw_cache(sender, instance, **kwargs):
|
||||
"""Invalidate Policy cache when app_gw is updated"""
|
||||
if isinstance(instance, ApplicationGatewayProvider):
|
||||
LOGGER.debug("Invalidating cache for ignored hostnames")
|
||||
cache.delete(IGNORED_HOSTNAMES_KEY)
|
2
passbook/app_gw/urls.py
Normal file
@ -0,0 +1,2 @@
|
||||
"""passbook app_gw urls"""
|
||||
urlpatterns = []
|
0
passbook/app_gw/websocket/__init__.py
Normal file
83
passbook/app_gw/websocket/consumer.py
Normal file
@ -0,0 +1,83 @@
|
||||
"""websocket proxy consumer"""
|
||||
import threading
|
||||
from logging import getLogger
|
||||
from ssl import CERT_NONE
|
||||
|
||||
import websocket
|
||||
from channels.generic.websocket import WebsocketConsumer
|
||||
|
||||
from passbook.app_gw.models import ApplicationGatewayProvider
|
||||
|
||||
LOGGER = getLogger(__name__)
|
||||
|
||||
class ProxyConsumer(WebsocketConsumer):
|
||||
"""Proxy websocket connection to upstream"""
|
||||
|
||||
_headers_dict = {}
|
||||
_app_gw = None
|
||||
_client = None
|
||||
_thread = None
|
||||
|
||||
def _fix_headers(self, input_dict):
|
||||
"""Fix headers from bytestrings to normal strings"""
|
||||
return {
|
||||
key.decode('utf-8'): value.decode('utf-8')
|
||||
for key, value in dict(input_dict).items()
|
||||
}
|
||||
|
||||
def connect(self):
|
||||
"""Extract host header, lookup in database and proxy connection"""
|
||||
self._headers_dict = self._fix_headers(dict(self.scope.get('headers')))
|
||||
host = self._headers_dict.pop('host')
|
||||
query_string = self.scope.get('query_string').decode('utf-8')
|
||||
matches = ApplicationGatewayProvider.objects.filter(
|
||||
server_name__contains=[host],
|
||||
enabled=True)
|
||||
if matches.exists():
|
||||
self._app_gw = matches.first()
|
||||
# TODO: Get upstream that starts with wss:// or ws://
|
||||
upstream = self._app_gw.upstream[0].replace('http', 'ws') + self.scope.get('path')
|
||||
if query_string:
|
||||
upstream += '?' + query_string
|
||||
sslopt = {}
|
||||
if not self._app_gw.upstream_ssl_verification:
|
||||
sslopt = {"cert_reqs": CERT_NONE}
|
||||
self._client = websocket.WebSocketApp(
|
||||
url=upstream,
|
||||
subprotocols=self.scope.get('subprotocols'),
|
||||
header=self._headers_dict,
|
||||
on_message=self._client_on_message_handler(),
|
||||
on_error=self._client_on_error_handler(),
|
||||
on_close=self._client_on_close_handler(),
|
||||
on_open=self._client_on_open_handler())
|
||||
LOGGER.debug("Accepting connection for %s", host)
|
||||
self._thread = threading.Thread(target=lambda: self._client.run_forever(sslopt=sslopt))
|
||||
self._thread.start()
|
||||
|
||||
def _client_on_open_handler(self):
|
||||
return lambda ws: self.accept(self._client.sock.handshake_response.subprotocol)
|
||||
|
||||
def _client_on_message_handler(self):
|
||||
# pylint: disable=unused-argument,invalid-name
|
||||
def message_handler(ws, message):
|
||||
if isinstance(message, str):
|
||||
self.send(text_data=message)
|
||||
else:
|
||||
self.send(bytes_data=message)
|
||||
return message_handler
|
||||
|
||||
def _client_on_error_handler(self):
|
||||
return lambda ws, error: print(error)
|
||||
|
||||
def _client_on_close_handler(self):
|
||||
return lambda ws: self.disconnect(0)
|
||||
|
||||
def disconnect(self, code):
|
||||
self._client.close()
|
||||
|
||||
def receive(self, text_data=None, bytes_data=None):
|
||||
if text_data:
|
||||
opcode = websocket.ABNF.OPCODE_TEXT
|
||||
if bytes_data:
|
||||
opcode = websocket.ABNF.OPCODE_BINARY
|
||||
self._client.send(text_data or bytes_data, opcode)
|
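The http-to-ws URL conversion in connect() relies on simple string replacement; a standalone illustration with placeholder values:

# Standalone illustration of the upstream URL conversion; all values are placeholders.
upstream = 'https://127.0.0.1:8443'.replace('http', 'ws')  # -> 'wss://127.0.0.1:8443'
path = '/ws/notifications/'
query_string = 'room=general'

url = upstream + path
if query_string:
    url += '?' + query_string
print(url)  # wss://127.0.0.1:8443/ws/notifications/?room=general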
17
passbook/app_gw/websocket/routing.py
Normal file
@ -0,0 +1,17 @@
|
||||
"""app_gw websocket proxy"""
|
||||
from channels.auth import AuthMiddlewareStack
|
||||
from channels.routing import ProtocolTypeRouter, URLRouter
|
||||
from django.conf.urls import url
|
||||
|
||||
from passbook.app_gw.websocket.consumer import ProxyConsumer
|
||||
|
||||
websocket_urlpatterns = [
|
||||
url(r'^(.*)$', ProxyConsumer),
|
||||
]
|
||||
|
||||
application = ProtocolTypeRouter({
|
||||
# (http->django views is added by default)
|
||||
'websocket': AuthMiddlewareStack(
|
||||
URLRouter(websocket_urlpatterns)
|
||||
),
|
||||
})
|
@ -1,2 +1,2 @@
|
||||
"""passbook audit Header"""
|
||||
__version__ = '0.1.21-beta'
|
||||
__version__ = '0.1.35-beta'
|
||||
|
@ -1,2 +1,2 @@
|
||||
"""passbook captcha_factor Header"""
|
||||
__version__ = '0.1.21-beta'
|
||||
__version__ = '0.1.35-beta'
|
||||
|
@ -1,2 +1,2 @@
|
||||
"""passbook core"""
|
||||
__version__ = '0.1.21-beta'
|
||||
__version__ = '0.1.35-beta'
|
||||
|
13
passbook/core/asgi.py
Normal file
@ -0,0 +1,13 @@
|
||||
"""
|
||||
ASGI entrypoint. Configures Django and then runs the application
|
||||
defined in the ASGI_APPLICATION setting.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import django
|
||||
from channels.routing import get_default_application
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.core.settings")
|
||||
django.setup()
|
||||
application = get_default_application()
|
@ -3,10 +3,8 @@
|
||||
import logging
|
||||
import os
|
||||
|
||||
import celery
|
||||
from celery import Celery, signals
|
||||
from django.conf import settings
|
||||
from raven import Client
|
||||
from raven.contrib.celery import register_logger_signal, register_signal
|
||||
|
||||
# set the default Django settings module for the 'celery' program.
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.core.settings")
|
||||
@ -14,31 +12,18 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.core.settings")
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Celery(celery.Celery):
|
||||
"""Custom Celery class with Raven configured"""
|
||||
|
||||
# pylint: disable=method-hidden
|
||||
def on_configure(self):
|
||||
"""Update raven client"""
|
||||
try:
|
||||
client = Client(settings.RAVEN_CONFIG.get('dsn'))
|
||||
# register a custom filter to filter out duplicate logs
|
||||
register_logger_signal(client)
|
||||
# hook into the Celery error handler
|
||||
register_signal(client)
|
||||
except RecursionError: # This error happens when pdoc is running
|
||||
pass
|
||||
CELERY_APP = Celery('passbook')
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
@celery.signals.setup_logging.connect
|
||||
@signals.setup_logging.connect
|
||||
def config_loggers(*args, **kwargs):
|
||||
"""Apply logging settings from settings.py to celery"""
|
||||
logging.config.dictConfig(settings.LOGGING)
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
@celery.signals.after_task_publish.connect
|
||||
@signals.after_task_publish.connect
|
||||
def after_task_publish(sender=None, headers=None, body=None, **kwargs):
|
||||
"""Log task_id after it was published"""
|
||||
info = headers if 'task' in headers else body
|
||||
@ -46,22 +31,20 @@ def after_task_publish(sender=None, headers=None, body=None, **kwargs):
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
@celery.signals.task_prerun.connect
|
||||
@signals.task_prerun.connect
|
||||
def task_prerun(task_id, task, *args, **kwargs):
|
||||
"""Log task_id on worker"""
|
||||
LOGGER.debug('%-40s started (name=%s)', task_id, task.__name__)
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
@celery.signals.task_postrun.connect
|
||||
@signals.task_postrun.connect
|
||||
def task_postrun(task_id, task, *args, retval=None, state=None, **kwargs):
|
||||
"""Log task_id on worker"""
|
||||
LOGGER.debug('%-40s finished (name=%s, state=%s)',
|
||||
task_id, task.__name__, state)
|
||||
|
||||
|
||||
CELERY_APP = Celery('passbook')
|
||||
|
||||
# Using a string here means the worker doesn't have to serialize
|
||||
# the configuration object to child processes.
|
||||
# - namespace='CELERY' means all celery-related configuration keys
|
||||
|
@ -7,7 +7,7 @@ from passbook.core.models import (DebugPolicy, FieldMatcherPolicy,
|
||||
GroupMembershipPolicy, PasswordPolicy,
|
||||
WebhookPolicy)
|
||||
|
||||
GENERAL_FIELDS = ['name', 'action', 'negate', 'order', ]
|
||||
GENERAL_FIELDS = ['name', 'action', 'negate', 'order', 'timeout']
|
||||
|
||||
class FieldMatcherPolicyForm(forms.ModelForm):
|
||||
"""FieldMatcherPolicy Form"""
|
||||
|
@ -2,11 +2,11 @@
|
||||
|
||||
from logging import getLogger
|
||||
|
||||
import cherrypy
|
||||
from django.conf import settings
|
||||
from daphne.cli import CommandLineInterface
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import autoreload
|
||||
|
||||
from passbook.core.wsgi import application
|
||||
from passbook.lib.config import CONFIG
|
||||
|
||||
LOGGER = getLogger(__name__)
|
||||
|
||||
@ -15,20 +15,15 @@ class Command(BaseCommand):
|
||||
"""Run CherryPy webserver"""
|
||||
|
||||
def handle(self, *args, **options):
|
||||
"""passbook cherrypy server"""
|
||||
config = settings.CHERRYPY_SERVER
|
||||
config.update(**options)
|
||||
cherrypy.config.update(config)
|
||||
cherrypy.tree.graft(application, '/')
|
||||
# Mount NullObject to serve static files
|
||||
cherrypy.tree.mount(None, '/static', config={
|
||||
'/': {
|
||||
'tools.staticdir.on': True,
|
||||
'tools.staticdir.dir': settings.STATIC_ROOT,
|
||||
'tools.expires.on': True,
|
||||
'tools.expires.secs': 86400,
|
||||
'tools.gzip.on': True,
|
||||
}
|
||||
})
|
||||
cherrypy.engine.start()
|
||||
cherrypy.engine.block()
|
||||
"""passbook daphne server"""
|
||||
autoreload.run_with_reloader(self.daphne_server)
|
||||
|
||||
def daphne_server(self):
|
||||
"""Run daphne server within autoreload"""
|
||||
autoreload.raise_last_exception()
|
||||
CommandLineInterface().run([
|
||||
'-p', str(CONFIG.y('web.port', 8000)),
|
||||
'-b', CONFIG.y('web.listen', '0.0.0.0'), # nosec
|
||||
'--access-log', '/dev/null',
|
||||
'passbook.core.asgi:application'
|
||||
])
|
||||
|
@ -3,6 +3,7 @@
|
||||
from logging import getLogger
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import autoreload
|
||||
|
||||
from passbook.core.celery import CELERY_APP
|
||||
|
||||
@ -14,4 +15,9 @@ class Command(BaseCommand):
|
||||
|
||||
def handle(self, *args, **options):
|
||||
"""celery worker"""
|
||||
CELERY_APP.worker_main(['worker', '--autoscale=10,3', '-E'])
|
||||
autoreload.run_with_reloader(self.celery_worker)
|
||||
|
||||
def celery_worker(self):
|
||||
"""Run celery worker within autoreload"""
|
||||
autoreload.raise_last_exception()
|
||||
CELERY_APP.worker_main(['worker', '--autoscale=10,3', '-E', '-B'])
|
||||
|
18
passbook/core/migrations/0021_policy_timeout.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Generated by Django 2.1.7 on 2019-03-21 12:03
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0020_groupmembershippolicy'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='policy',
|
||||
name='timeout',
|
||||
field=models.IntegerField(default=30),
|
||||
),
|
||||
]
|
18
passbook/core/migrations/0022_nonce_expiring.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Generated by Django 2.1.7 on 2019-04-04 19:42
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0021_policy_timeout'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='nonce',
|
||||
name='expiring',
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
17
passbook/core/migrations/0023_remove_user_applications.py
Normal file
@ -0,0 +1,17 @@
|
||||
# Generated by Django 2.2 on 2019-04-13 15:51
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0022_nonce_expiring'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='applications',
|
||||
),
|
||||
]
|
@ -47,7 +47,6 @@ class User(AbstractUser):
|
||||
name = models.TextField()
|
||||
|
||||
sources = models.ManyToManyField('Source', through='UserSourceConnection')
|
||||
applications = models.ManyToManyField('Application')
|
||||
groups = models.ManyToManyField('Group')
|
||||
password_change_date = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
@ -220,6 +219,7 @@ class Policy(UUIDModel, CreatedUpdatedModel):
|
||||
action = models.CharField(max_length=20, choices=ACTIONS)
|
||||
negate = models.BooleanField(default=False)
|
||||
order = models.IntegerField(default=0)
|
||||
timeout = models.IntegerField(default=30)
|
||||
|
||||
objects = InheritanceManager()
|
||||
|
||||
@ -436,6 +436,7 @@ class Nonce(UUIDModel):
|
||||
|
||||
expires = models.DateTimeField(default=default_nonce_duration)
|
||||
user = models.ForeignKey('User', on_delete=models.CASCADE)
|
||||
expiring = models.BooleanField(default=True)
|
||||
|
||||
def __str__(self):
|
||||
return "Nonce %s (expires=%s)" % (self.uuid.hex, self.expires)
|
||||
|
@ -1,7 +1,10 @@
|
||||
"""passbook core policy engine"""
|
||||
from logging import getLogger
|
||||
|
||||
from amqp.exceptions import UnexpectedFrame
|
||||
from celery import group
|
||||
from celery.exceptions import TimeoutError as CeleryTimeoutError
|
||||
from django.core.cache import cache
|
||||
from ipware import get_client_ip
|
||||
|
||||
from passbook.core.celery import CELERY_APP
|
||||
@ -9,6 +12,9 @@ from passbook.core.models import Policy, User
|
||||
|
||||
LOGGER = getLogger(__name__)
|
||||
|
||||
def _cache_key(policy, user):
|
||||
return "policy_%s#%s" % (policy.uuid, user.pk)
|
||||
|
||||
@CELERY_APP.task()
|
||||
def _policy_engine_task(user_pk, policy_pk, **kwargs):
|
||||
"""Task wrapper to run policy checking"""
|
||||
@ -29,58 +35,89 @@ def _policy_engine_task(user_pk, policy_pk, **kwargs):
|
||||
if policy_obj.negate:
|
||||
policy_result = not policy_result
|
||||
LOGGER.debug("Policy %r#%s got %s", policy_obj.name, policy_obj.pk.hex, policy_result)
|
||||
cache_key = _cache_key(policy_obj, user_obj)
|
||||
cache.set(cache_key, (policy_obj.action, policy_result, message))
|
||||
LOGGER.debug("Cached entry as %s", cache_key)
|
||||
return policy_obj.action, policy_result, message
|
||||
|
||||
class PolicyEngine:
|
||||
"""Orchestrate policy checking, launch tasks and return result"""
|
||||
|
||||
__group = None
|
||||
__cached = None
|
||||
|
||||
policies = None
|
||||
_group = None
|
||||
_request = None
|
||||
_user = None
|
||||
__get_timeout = 0
|
||||
__request = None
|
||||
__user = None
|
||||
|
||||
def __init__(self, policies):
|
||||
self.policies = policies
|
||||
self._request = None
|
||||
self._user = None
|
||||
self.__request = None
|
||||
self.__user = None
|
||||
|
||||
def for_user(self, user):
|
||||
"""Check policies for user"""
|
||||
self._user = user
|
||||
self.__user = user
|
||||
return self
|
||||
|
||||
def with_request(self, request):
|
||||
"""Set request"""
|
||||
self._request = request
|
||||
self.__request = request
|
||||
return self
|
||||
|
||||
def build(self):
|
||||
"""Build task group"""
|
||||
if not self._user:
|
||||
if not self.__user:
|
||||
raise ValueError("User not set.")
|
||||
signatures = []
|
||||
cached_policies = []
|
||||
kwargs = {
|
||||
'__password__': getattr(self._user, '__password__', None),
|
||||
'__password__': getattr(self.__user, '__password__', None),
|
||||
}
|
||||
if self._request:
|
||||
kwargs['remote_ip'], _ = get_client_ip(self._request)
|
||||
if self.__request:
|
||||
kwargs['remote_ip'], _ = get_client_ip(self.__request)
|
||||
if not kwargs['remote_ip']:
|
||||
kwargs['remote_ip'] = '255.255.255.255'
|
||||
for policy in self.policies:
|
||||
signatures.append(_policy_engine_task.s(self._user.pk, policy.pk.hex, **kwargs))
|
||||
self._group = group(signatures)()
|
||||
cached_policy = cache.get(_cache_key(policy, self.__user), None)
|
||||
if cached_policy:
|
||||
LOGGER.debug("Taking result from cache for %s", policy.pk.hex)
|
||||
cached_policies.append(cached_policy)
|
||||
else:
|
||||
LOGGER.debug("Evaluating policy %s", policy.pk.hex)
|
||||
signatures.append(_policy_engine_task.signature(
|
||||
args=(self.__user.pk, policy.pk.hex),
|
||||
kwargs=kwargs,
|
||||
time_limit=policy.timeout))
|
||||
self.__get_timeout += policy.timeout
|
||||
LOGGER.debug("Set total policy timeout to %r", self.__get_timeout)
|
||||
# If all policies are cached, we have an empty list here.
|
||||
if signatures:
|
||||
self.__group = group(signatures)()
|
||||
self.__get_timeout += 3
|
||||
self.__get_timeout = (self.__get_timeout / len(self.policies)) * 1.5
|
||||
self.__cached = cached_policies
|
||||
return self
|
||||
|
||||
@property
|
||||
def result(self):
|
||||
"""Get policy-checking result"""
|
||||
messages = []
|
||||
result = []
|
||||
try:
|
||||
# ValueError can be thrown from _policy_engine_task when user is None
|
||||
group_result = self._group.get()
|
||||
if self.__group:
|
||||
# ValueError can be thrown from _policy_engine_task when user is None
|
||||
result += self.__group.get(timeout=self.__get_timeout)
|
||||
result += self.__cached
|
||||
except ValueError as exc:
|
||||
return False, str(exc)
|
||||
for policy_action, policy_result, policy_message in group_result:
|
||||
# ValueError can be thrown from _policy_engine_task when user is None
|
||||
return False, [str(exc)]
|
||||
except UnexpectedFrame as exc:
|
||||
return False, [str(exc)]
|
||||
except CeleryTimeoutError as exc:
|
||||
return False, [str(exc)]
|
||||
for policy_action, policy_result, policy_message in result:
|
||||
passing = (policy_action == Policy.ACTION_ALLOW and policy_result) or \
|
||||
(policy_action == Policy.ACTION_DENY and not policy_result)
|
||||
LOGGER.debug('Action=%s, Result=%r => %r', policy_action, policy_result, passing)
|
||||
|
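A hedged sketch of the call pattern the reworked engine expects, matching how handler.py's check_permission and the core signals use it; how `application` and `request` are obtained is out of scope here:

# Sketch of the PolicyEngine call pattern; `application` and `request` are placeholders.
from passbook.core.policies import PolicyEngine


def user_may_access(application, request):
    """Return whether the request's user passes all policies bound to the application."""
    policy_engine = PolicyEngine(application.policies.all())
    policy_engine.for_user(request.user).with_request(request).build()
    passing, _messages = policy_engine.result
    return passing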
@ -1,12 +1,15 @@
|
||||
django>=2.0
|
||||
django-model-utils
|
||||
django-ipware
|
||||
djangorestframework
|
||||
PyYAML
|
||||
raven
|
||||
markdown
|
||||
colorlog
|
||||
celery
|
||||
psycopg2
|
||||
colorlog
|
||||
django-ipware
|
||||
django-model-utils
|
||||
django-redis
|
||||
django>=2.0
|
||||
djangorestframework
|
||||
idna<2.8,>=2.5
|
||||
cherrypy
|
||||
markdown
|
||||
psycopg2
|
||||
PyYAML
|
||||
sentry-sdk
|
||||
pip
|
||||
whitenoise
|
||||
urllib3<1.25,>=1.21.1
|
||||
|
@ -11,13 +11,20 @@ https://docs.djangoproject.com/en/2.1/ref/settings/
|
||||
"""
|
||||
|
||||
import importlib
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
from celery.schedules import crontab
|
||||
from django.contrib import messages
|
||||
from sentry_sdk import init as sentry_init
|
||||
from sentry_sdk.integrations.celery import CeleryIntegration
|
||||
from sentry_sdk.integrations.django import DjangoIntegration
|
||||
from sentry_sdk.integrations.logging import LoggingIntegration
|
||||
|
||||
from passbook import __version__
|
||||
from passbook.lib.config import CONFIG
|
||||
from passbook.lib.sentry import before_send
|
||||
|
||||
VERSION = __version__
|
||||
|
||||
@ -34,7 +41,8 @@ SECRET_KEY = CONFIG.get('secret_key')
|
||||
# SECURITY WARNING: don't run with debug turned on in production!
|
||||
DEBUG = CONFIG.get('debug')
|
||||
INTERNAL_IPS = ['127.0.0.1']
|
||||
ALLOWED_HOSTS = CONFIG.get('domains', [])
|
||||
# ALLOWED_HOSTS = CONFIG.get('domains', []) + [CONFIG.get('primary_domain')]
|
||||
ALLOWED_HOSTS = ['*']
|
||||
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
|
||||
|
||||
LOGIN_URL = 'passbook_core:auth-login'
|
||||
@ -45,10 +53,14 @@ AUTH_USER_MODEL = 'passbook_core.User'
|
||||
|
||||
CSRF_COOKIE_NAME = 'passbook_csrf'
|
||||
SESSION_COOKIE_NAME = 'passbook_session'
|
||||
SESSION_COOKIE_DOMAIN = CONFIG.get('primary_domain')
|
||||
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
|
||||
SESSION_CACHE_ALIAS = "default"
|
||||
LANGUAGE_COOKIE_NAME = 'passbook_language'
|
||||
|
||||
AUTHENTICATION_BACKENDS = [
|
||||
'django.contrib.auth.backends.ModelBackend'
|
||||
'django.contrib.auth.backends.ModelBackend',
|
||||
'guardian.backends.ObjectPermissionBackend',
|
||||
]
|
||||
|
||||
# Application definition
|
||||
@ -63,7 +75,7 @@ INSTALLED_APPS = [
|
||||
'django.contrib.postgres',
|
||||
'rest_framework',
|
||||
'drf_yasg',
|
||||
'raven.contrib.django.raven_compat',
|
||||
'guardian',
|
||||
'passbook.core.apps.PassbookCoreConfig',
|
||||
'passbook.admin.apps.PassbookAdminConfig',
|
||||
'passbook.api.apps.PassbookAPIConfig',
|
||||
@ -79,6 +91,7 @@ INSTALLED_APPS = [
|
||||
'passbook.pretend.apps.PassbookPretendConfig',
|
||||
'passbook.password_expiry_policy.apps.PassbookPasswordExpiryPolicyConfig',
|
||||
'passbook.suspicious_policy.apps.PassbookSuspiciousPolicyConfig',
|
||||
'passbook.app_gw.apps.PassbookApplicationApplicationGatewayConfig',
|
||||
]
|
||||
|
||||
# Message Tag fix for bootstrap CSS Classes
|
||||
@ -99,15 +112,26 @@ REST_FRAMEWORK = {
|
||||
]
|
||||
}
|
||||
|
||||
CACHES = {
|
||||
"default": {
|
||||
"BACKEND": "django_redis.cache.RedisCache",
|
||||
"LOCATION": "redis://%s" % CONFIG.get('redis'),
|
||||
"OPTIONS": {
|
||||
"CLIENT_CLASS": "django_redis.client.DefaultClient",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
MIDDLEWARE = [
|
||||
'django.middleware.security.SecurityMiddleware',
|
||||
'django.contrib.sessions.middleware.SessionMiddleware',
|
||||
'whitenoise.middleware.WhiteNoiseMiddleware',
|
||||
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||
'passbook.app_gw.middleware.ApplicationGatewayMiddleware',
|
||||
'django.middleware.security.SecurityMiddleware',
|
||||
'django.middleware.common.CommonMiddleware',
|
||||
'django.middleware.csrf.CsrfViewMiddleware',
|
||||
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||
'django.contrib.messages.middleware.MessageMiddleware',
|
||||
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
||||
'raven.contrib.django.raven_compat.middleware.SentryResponseErrorIdMiddleware',
|
||||
]
|
||||
|
||||
ROOT_URLCONF = 'passbook.core.urls'
|
||||
@ -189,32 +213,35 @@ CELERY_BROKER_URL = 'amqp://%s' % CONFIG.get('rabbitmq')
|
||||
CELERY_RESULT_BACKEND = 'rpc://'
|
||||
CELERY_ACKS_LATE = True
|
||||
CELERY_BROKER_HEARTBEAT = 0
|
||||
|
||||
# Raven settings
|
||||
RAVEN_CONFIG = {
|
||||
'dsn': ('https://55b5dd780bc14f4c96bba69b7a9abbcc:449af483bd0745'
|
||||
'0d83be640d834e5458@sentry.services.beryju.org/8'),
|
||||
'release': VERSION,
|
||||
'environment': 'dev' if DEBUG else 'production',
|
||||
CELERY_BEAT_SCHEDULE = {
|
||||
'cleanup-expired-nonces': {
|
||||
'task': 'passbook.core.tasks.clean_nonces',
|
||||
'schedule': crontab(hour=1, minute=1)
|
||||
}
|
||||
}
|
||||
|
||||
# CherryPY settings
|
||||
with CONFIG.cd('web'):
|
||||
CHERRYPY_SERVER = {
|
||||
'server.socket_host': CONFIG.get('listen', '0.0.0.0'), # nosec
|
||||
'server.socket_port': CONFIG.get('port', 8000),
|
||||
'server.thread_pool': CONFIG.get('threads', 30),
|
||||
'log.screen': False,
|
||||
'log.access_file': '',
|
||||
'log.error_file': '',
|
||||
}
|
||||
|
||||
if not DEBUG:
|
||||
sentry_init(
|
||||
dsn="https://55b5dd780bc14f4c96bba69b7a9abbcc@sentry.services.beryju.org/8",
|
||||
integrations=[
|
||||
DjangoIntegration(),
|
||||
CeleryIntegration(),
|
||||
LoggingIntegration(
|
||||
level=logging.INFO,
|
||||
event_level=logging.ERROR
|
||||
)
|
||||
],
|
||||
send_default_pii=True,
|
||||
before_send=before_send,
|
||||
release='p2@%s' % __version__
|
||||
)
|
||||
|
||||
# Static files (CSS, JavaScript, Images)
|
||||
# https://docs.djangoproject.com/en/2.1/howto/static-files/
|
||||
|
||||
STATIC_URL = '/static/'
|
||||
|
||||
LOG_HANDLERS = ['console', 'syslog', 'file', 'sentry']
|
||||
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
|
||||
|
||||
with CONFIG.cd('log'):
|
||||
LOGGING = {
|
||||
@ -245,10 +272,6 @@ with CONFIG.cd('log'):
|
||||
'class': 'logging.StreamHandler',
|
||||
'formatter': 'color',
|
||||
},
|
||||
'sentry': {
|
||||
'level': 'ERROR',
|
||||
'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler',
|
||||
},
|
||||
'syslog': {
|
||||
'level': CONFIG.get('level').get('file'),
|
||||
'class': 'logging.handlers.SysLogHandler',
|
||||
@ -262,38 +285,52 @@ with CONFIG.cd('log'):
|
||||
'formatter': 'verbose',
|
||||
'filename': CONFIG.get('file'),
|
||||
},
|
||||
'queue': {
|
||||
'level': CONFIG.get('level').get('console'),
|
||||
'class': 'passbook.lib.log.QueueListenerHandler',
|
||||
'handlers': [
|
||||
'cfg://handlers.console',
|
||||
# 'cfg://handlers.syslog',
|
||||
'cfg://handlers.file',
|
||||
],
|
||||
}
|
||||
},
|
||||
'loggers': {
|
||||
'passbook': {
|
||||
'handlers': LOG_HANDLERS,
|
||||
'handlers': ['queue'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': True,
|
||||
},
|
||||
'django': {
|
||||
'handlers': LOG_HANDLERS,
|
||||
'handlers': ['queue'],
|
||||
'level': 'INFO',
|
||||
'propagate': True,
|
||||
},
|
||||
'tasks': {
|
||||
'handlers': LOG_HANDLERS,
|
||||
'handlers': ['queue'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': True,
|
||||
},
|
||||
'cherrypy': {
|
||||
'handlers': LOG_HANDLERS,
|
||||
'handlers': ['queue'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': True,
|
||||
},
|
||||
'oauthlib': {
|
||||
'handlers': LOG_HANDLERS,
|
||||
'handlers': ['queue'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': True,
|
||||
},
|
||||
'oauth2_provider': {
|
||||
'handlers': LOG_HANDLERS,
|
||||
'handlers': ['queue'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': True,
|
||||
},
|
||||
'daphne': {
|
||||
'handlers': ['queue'],
|
||||
'level': 'INFO',
|
||||
'propagate': True,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,10 +1,15 @@
|
||||
"""passbook core signals"""
|
||||
from logging import getLogger
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.core.signals import Signal
|
||||
from django.db.models.signals import post_save
|
||||
from django.dispatch import receiver
|
||||
|
||||
from passbook.core.exceptions import PasswordPolicyInvalid
|
||||
|
||||
LOGGER = getLogger(__name__)
|
||||
|
||||
user_signed_up = Signal(providing_args=['request', 'user'])
|
||||
invitation_created = Signal(providing_args=['request', 'invitation'])
|
||||
invitation_used = Signal(providing_args=['request', 'invitation', 'user'])
|
||||
@ -24,3 +29,14 @@ def password_policy_checker(sender, password, **kwargs):
|
||||
passing, messages = policy_engine.result
|
||||
if not passing:
|
||||
raise PasswordPolicyInvalid(*messages)
|
||||
|
||||
@receiver(post_save)
|
||||
# pylint: disable=unused-argument
|
||||
def invalidate_policy_cache(sender, instance, **kwargs):
|
||||
"""Invalidate Policy cache when policy is updated"""
|
||||
from passbook.core.models import Policy
|
||||
if isinstance(instance, Policy):
|
||||
LOGGER.debug("Invalidating cache for %s", instance.pk)
|
||||
keys = cache.keys("%s#*" % instance.pk)
|
||||
cache.delete_many(keys)
|
||||
LOGGER.debug("Deleted %d keys", len(keys))
|
||||
|
@ -195,3 +195,7 @@ form .form-row p.datetime {
|
||||
.selector-remove {
|
||||
background: url(../admin/img/selector-icons.svg) 0 -64px no-repeat;
|
||||
}
|
||||
|
||||
input[data-is-monospace] {
|
||||
font-family: monospace;
|
||||
}
|
||||
|
@ -1,19 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 22.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
width="20px" height="20px" viewBox="0 0 20 20" style="enable-background:new 0 0 20 20;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FBBB00;}
|
||||
.st1{fill:#518EF8;}
|
||||
.st2{fill:#28B446;}
|
||||
.st3{fill:#F14336;}
|
||||
</style>
|
||||
<path class="st0" d="M4.4,12.1l-0.7,2.6l-2.5,0.1C0.4,13.3,0,11.7,0,10c0-1.7,0.4-3.2,1.1-4.6h0l2.3,0.4l1,2.3
|
||||
C4.2,8.7,4.1,9.3,4.1,10C4.1,10.7,4.2,11.4,4.4,12.1z"/>
|
||||
<path class="st1" d="M19.8,8.1C19.9,8.7,20,9.4,20,10c0,0.7-0.1,1.4-0.2,2.1c-0.5,2.3-1.8,4.3-3.5,5.7l0,0l-2.9-0.1L13,15.1
|
||||
c1.2-0.7,2.1-1.8,2.6-3h-5.3v-4h5.4H19.8L19.8,8.1z"/>
|
||||
<path class="st2" d="M16.3,17.8L16.3,17.8C14.5,19.2,12.4,20,10,20c-3.8,0-7.1-2.1-8.8-5.3l3.2-2.7c0.8,2.3,3,3.9,5.6,3.9
|
||||
c1.1,0,2.1-0.3,3-0.8L16.3,17.8z"/>
|
||||
<path class="st3" d="M16.4,2.3L13.1,5c-0.9-0.6-2-0.9-3.1-0.9c-2.6,0-4.8,1.7-5.6,4L1.1,5.4h0C2.8,2.2,6.1,0,10,0
|
||||
C12.4,0,14.7,0.9,16.4,2.3z"/>
|
||||
</svg>
|
1
passbook/core/static/img/logos/azure ad.svg
Normal file
@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="21" height="21" viewBox="0 0 21 21"><title>MS-SymbolLockup</title><rect x="1" y="1" width="9" height="9" fill="#f25022"/><rect x="1" y="11" width="9" height="9" fill="#00a4ef"/><rect x="11" y="1" width="9" height="9" fill="#7fba00"/><rect x="11" y="11" width="9" height="9" fill="#ffb900"/></svg>
|
After Width: | Height: | Size: 343 B |