Compare commits
105 Commits
version/0. ... version/0.
Author | SHA1 | Date | |
---|---|---|---|
34ed0b3594 | |||
f008a3e20c | |||
9de950220f | |||
567c90b4c6 | |||
ae19236366 | |||
f9babe7089 | |||
78c74cd469 | |||
32abb27e61 | |||
8478b03892 | |||
e972f2b289 | |||
22c4fb1414 | |||
0154def916 | |||
fc69b6851d | |||
44a3c7fa5f | |||
4e6653e299 | |||
c782585287 | |||
7718b3b3b8 | |||
8ff9e72972 | |||
ef6ef68a39 | |||
48a04744e0 | |||
6446ca8bb2 | |||
b9991465ee | |||
3d8242be06 | |||
ca3bcc565d | |||
432176ea2f | |||
c1dae0b599 | |||
e70d3b6286 | |||
17e6bc921b | |||
46111e7cac | |||
3b7e47dbe2 | |||
fff99f0e3d | |||
2e15b24f0a | |||
088b9592cd | |||
b1e4e32b83 | |||
d91a852eda | |||
171c5b9759 | |||
64290b2a37 | |||
72769b8a0a | |||
1018309413 | |||
6d0ecd228e | |||
40a651e66c | |||
a390bb7b59 | |||
245ec65cbb | |||
17eea4a10c | |||
862fb0f5d2 | |||
ec73b53340 | |||
9110f7fee3 | |||
54cc1fdeef | |||
8f42a7f0b4 | |||
2c221ea819 | |||
93e0441b58 | |||
7f1455cb12 | |||
59fc223a85 | |||
0a6f555c23 | |||
6a4233d6fd | |||
15fa7e9652 | |||
f2acc154cd | |||
d21ec6c9a5 | |||
43dd858cd5 | |||
34cbf5f702 | |||
3c6e94b6a8 | |||
1cd149c815 | |||
4c6f562805 | |||
e59c4ec1c7 | |||
1169db7530 | |||
1453008796 | |||
2209b6d603 | |||
ccbc0384f9 | |||
a48924c896 | |||
dc8d8dd2b6 | |||
afca94ceb8 | |||
0b86231a36 | |||
c0df1f38b8 | |||
2b8fed8f4e | |||
c7322a32a0 | |||
64b75cab84 | |||
f58bc61999 | |||
fb8ccc0283 | |||
c38012f147 | |||
3676ff21c2 | |||
920e705d75 | |||
de0b137b1e | |||
d44ac6e2a3 | |||
71039a4012 | |||
8745ac7932 | |||
7f70048423 | |||
97dbfc8885 | |||
149ea22a93 | |||
404ed5406d | |||
b8656858ec | |||
6b0f0e8993 | |||
aec1ccd88d | |||
bee5c200b6 | |||
9d640efc88 | |||
f0907841dd | |||
2bffc12ef9 | |||
2ff9ec6522 | |||
43a54f5c54 | |||
7bff2734aa | |||
84768c0ec6 | |||
f4499a5459 | |||
b3aede5bba | |||
531ea1c039 | |||
c2c5ff6912 | |||
9cddab8fd5 |
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.2.8-beta
current_version = 0.6.6-beta
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)
@@ -23,5 +23,5 @@ values =

[bumpversion:file:passbook/__init__.py]

[bumpversion:file:passbook/core/nginx.conf]
[bumpversion:file:docker/nginx.conf]
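The `parse` value above is an ordinary Python regular expression with named groups. As a quick illustration (not part of the diff), this is roughly how it splits the new version string:

```python
import re

# Same pattern as the "parse" setting in .bumpversion.cfg above.
PARSE = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)")

match = PARSE.match("0.6.6-beta")
print(match.groupdict())
# -> {'major': '0', 'minor': '6', 'patch': '6', 'release': 'beta'}
```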
@@ -1,7 +1,6 @@
[run]
source = passbook
omit =
    env/
    */wsgi.py
    manage.py
    */migrations/*
@@ -2,3 +2,4 @@ env
helm
passbook-ui
static
*.env.yml
1 .gitignore (vendored)
@@ -191,3 +191,4 @@ pip-selfcheck.json
# End of https://www.gitignore.io/api/python,django
/static/
local.env.yml
.vscode/
@@ -13,9 +13,12 @@ variables:
  POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"

before_script:
  - pip install pipenv
  # Ensure all dependencies are installed, even those not included in passbook/dev
  - pip install -r requirements.txt
  - pip install -r requirements-dev.txt
  # According to pipenv docs, -d outputs all packages, however it actually does not
  - pipenv lock -r > requirements-all.txt
  - pipenv lock -rd >> requirements-all.txt
  - pip install -r requirements-all.txt

create-base-image:
  image:
@@ -24,7 +27,7 @@ create-base-image:
  before_script:
    - echo "{\"auths\":{\"docker.beryju.org\":{\"auth\":\"$DOCKER_AUTH\"}}}" > /kaniko/.docker/config.json
  script:
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile.base --destination docker.beryju.org/passbook/base:latest --destination docker.beryju.org/passbook/base:0.2.8-beta
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/base.Dockerfile --destination docker.beryju.org/passbook/base:latest
  stage: build-base-image
  only:
    refs:
@@ -38,7 +41,7 @@ build-dev-image:
  before_script:
    - echo "{\"auths\":{\"docker.beryju.org\":{\"auth\":\"$DOCKER_AUTH\"}}}" > /kaniko/.docker/config.json
  script:
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile.dev --destination docker.beryju.org/passbook/dev:latest --destination docker.beryju.org/passbook/dev:0.2.8-beta
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/dev.Dockerfile --destination docker.beryju.org/passbook/dev:latest
  stage: build-dev-image
  only:
    refs:
@@ -60,13 +63,13 @@ migrations:
  services:
    - postgres:latest
    - redis:latest
prospector:
  script:
    - prospector
  stage: test
  services:
    - postgres:latest
    - redis:latest
# prospector:
#   script:
#     - prospector
#   stage: test
#   services:
#     - postgres:latest
#     - redis:latest
pylint:
  script:
    - pylint passbook
@@ -84,15 +87,15 @@ coverage:
    - postgres:latest
    - redis:latest

package-passbook-server:
build-passbook-server:
  stage: build
  image:
    name: gcr.io/kaniko-project/executor:debug
    entrypoint: [""]
  before_script:
    - echo "{\"auths\":{\"docker.beryju.org\":{\"auth\":\"$DOCKER_AUTH\"}}}" > /kaniko/.docker/config.json
  script:
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination docker.beryju.org/passbook/server:latest --destination docker.beryju.org/passbook/server:0.2.8-beta
  stage: build
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination docker.beryju.org/passbook/server:latest --destination docker.beryju.org/passbook/server:0.6.6-beta
  only:
    - tags
    - /^version/.*$/
@@ -104,7 +107,7 @@ build-passbook-static:
  before_script:
    - echo "{\"auths\":{\"docker.beryju.org\":{\"auth\":\"$DOCKER_AUTH\"}}}" > /kaniko/.docker/config.json
  script:
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile.static --destination docker.beryju.org/passbook/static:latest --destination docker.beryju.org/passbook/static:0.2.8-beta
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/static.Dockerfile --destination docker.beryju.org/passbook/static:latest --destination docker.beryju.org/passbook/static:0.6.6-beta
  only:
    - tags
    - /^version/.*$/
@@ -121,7 +124,7 @@ package-helm:
    - curl https://raw.githubusercontent.com/helm/helm/master/scripts/get | bash
  script:
    - helm init --client-only
    - helm dependency build helm/passbook
    - helm dependency update helm/passbook
    - helm package helm/passbook
  artifacts:
    paths:
@@ -3,11 +3,9 @@ test-warnings: true
doc-warnings: false

ignore-paths:
- env
- migrations
- docs
- node_modules
- client-packages

uses:
- django
@@ -2,9 +2,10 @@

disable=redefined-outer-name,arguments-differ,no-self-use,cyclic-import,fixme,locally-disabled,unpacking-non-sequence,too-many-ancestors,too-many-branches,too-few-public-methods
load-plugins=pylint_django,pylint.extensions.bad_builtin
#,pylint.extensions.docparams
extension-pkg-whitelist=lxml
const-rgx=[a-zA-Z0-9_]{1,40}$
ignored-modules=django-otp
jobs=4

[SIMILARITIES]
114 .vscode/.ropeproject/config.py (vendored)
@ -1,114 +0,0 @@
|
||||
# The default ``config.py``
|
||||
# flake8: noqa
|
||||
|
||||
|
||||
def set_prefs(prefs):
|
||||
"""This function is called before opening the project"""
|
||||
|
||||
# Specify which files and folders to ignore in the project.
|
||||
# Changes to ignored resources are not added to the history and
|
||||
# VCSs. Also they are not returned in `Project.get_files()`.
|
||||
# Note that ``?`` and ``*`` match all characters but slashes.
|
||||
# '*.pyc': matches 'test.pyc' and 'pkg/test.pyc'
|
||||
# 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc'
|
||||
# '.svn': matches 'pkg/.svn' and all of its children
|
||||
# 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o'
|
||||
# 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o'
|
||||
prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
|
||||
'.hg', '.svn', '_svn', '.git', '.tox']
|
||||
|
||||
# Specifies which files should be considered python files. It is
|
||||
# useful when you have scripts inside your project. Only files
|
||||
# ending with ``.py`` are considered to be python files by
|
||||
# default.
|
||||
# prefs['python_files'] = ['*.py']
|
||||
|
||||
# Custom source folders: By default rope searches the project
|
||||
# for finding source folders (folders that should be searched
|
||||
# for finding modules). You can add paths to that list. Note
|
||||
# that rope guesses project source folders correctly most of the
|
||||
# time; use this if you have any problems.
|
||||
# The folders should be relative to project root and use '/' for
|
||||
# separating folders regardless of the platform rope is running on.
|
||||
# 'src/my_source_folder' for instance.
|
||||
# prefs.add('source_folders', 'src')
|
||||
|
||||
# You can extend python path for looking up modules
|
||||
# prefs.add('python_path', '~/python/')
|
||||
|
||||
# Should rope save object information or not.
|
||||
prefs['save_objectdb'] = True
|
||||
prefs['compress_objectdb'] = False
|
||||
|
||||
# If `True`, rope analyzes each module when it is being saved.
|
||||
prefs['automatic_soa'] = True
|
||||
# The depth of calls to follow in static object analysis
|
||||
prefs['soa_followed_calls'] = 0
|
||||
|
||||
# If `False` when running modules or unit tests "dynamic object
|
||||
# analysis" is turned off. This makes them much faster.
|
||||
prefs['perform_doa'] = True
|
||||
|
||||
# Rope can check the validity of its object DB when running.
|
||||
prefs['validate_objectdb'] = True
|
||||
|
||||
# How many undos to hold?
|
||||
prefs['max_history_items'] = 32
|
||||
|
||||
# Shows whether to save history across sessions.
|
||||
prefs['save_history'] = True
|
||||
prefs['compress_history'] = False
|
||||
|
||||
# Set the number spaces used for indenting. According to
|
||||
# :PEP:`8`, it is best to use 4 spaces. Since most of rope's
|
||||
# unit-tests use 4 spaces it is more reliable, too.
|
||||
prefs['indent_size'] = 4
|
||||
|
||||
# Builtin and c-extension modules that are allowed to be imported
|
||||
# and inspected by rope.
|
||||
prefs['extension_modules'] = []
|
||||
|
||||
# Add all standard c-extensions to extension_modules list.
|
||||
prefs['import_dynload_stdmods'] = True
|
||||
|
||||
# If `True` modules with syntax errors are considered to be empty.
|
||||
# The default value is `False`; When `False` syntax errors raise
|
||||
# `rope.base.exceptions.ModuleSyntaxError` exception.
|
||||
prefs['ignore_syntax_errors'] = False
|
||||
|
||||
# If `True`, rope ignores unresolvable imports. Otherwise, they
|
||||
# appear in the importing namespace.
|
||||
prefs['ignore_bad_imports'] = False
|
||||
|
||||
# If `True`, rope will insert new module imports as
|
||||
# `from <package> import <module>` by default.
|
||||
prefs['prefer_module_from_imports'] = False
|
||||
|
||||
# If `True`, rope will transform a comma list of imports into
|
||||
# multiple separate import statements when organizing
|
||||
# imports.
|
||||
prefs['split_imports'] = False
|
||||
|
||||
# If `True`, rope will remove all top-level import statements and
|
||||
# reinsert them at the top of the module when making changes.
|
||||
prefs['pull_imports_to_top'] = True
|
||||
|
||||
# If `True`, rope will sort imports alphabetically by module name instead
|
||||
# of alphabetically by import statement, with from imports after normal
|
||||
# imports.
|
||||
prefs['sort_imports_alphabetically'] = False
|
||||
|
||||
# Location of implementation of
|
||||
# rope.base.oi.type_hinting.interfaces.ITypeHintingFactory In general
|
||||
# case, you don't have to change this value, unless you're an rope expert.
|
||||
# Change this value to inject you own implementations of interfaces
|
||||
# listed in module rope.base.oi.type_hinting.providers.interfaces
|
||||
# For example, you can add you own providers for Django Models, or disable
|
||||
# the search type-hinting in a class hierarchy, etc.
|
||||
prefs['type_hinting_factory'] = (
|
||||
'rope.base.oi.type_hinting.factory.default_type_hinting_factory')
|
||||
|
||||
|
||||
def project_opened(project):
|
||||
"""This function is called after opening the project"""
|
||||
# Do whatever you like here!
|
BIN .vscode/.ropeproject/objectdb (vendored)
Binary file not shown.
14 .vscode/settings.json (vendored)
@ -1,14 +0,0 @@
|
||||
{
|
||||
"python.pythonPath": "env/bin/python",
|
||||
"editor.tabSize": 4,
|
||||
"[html]": {
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[yml]": {
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"cSpell.words": [
|
||||
"SAML",
|
||||
"passbook"
|
||||
]
|
||||
}
|
@@ -2,7 +2,8 @@ FROM docker.beryju.org/passbook/base:latest

COPY ./passbook/ /app/passbook
COPY ./manage.py /app/

USER passbook
COPY ./docker/uwsgi.ini /app/

WORKDIR /app/

USER passbook
@ -1,11 +0,0 @@
|
||||
FROM python:3.7-alpine
|
||||
|
||||
COPY ./requirements.txt /app/
|
||||
|
||||
WORKDIR /app/
|
||||
|
||||
RUN apk update && \
|
||||
apk add --no-cache openssl-dev build-base libxml2-dev libxslt-dev libffi-dev gcc musl-dev libgcc zlib-dev postgresql-dev && \
|
||||
pip install -r /app/requirements.txt --no-cache-dir && \
|
||||
adduser -S passbook && \
|
||||
chown -R passbook /app
|
@ -1,14 +0,0 @@
|
||||
FROM docker.beryju.org/passbook/dev:latest as static-build
|
||||
|
||||
COPY ./passbook/ /app/passbook
|
||||
COPY ./manage.py /app/
|
||||
COPY ./requirements.txt /app/
|
||||
|
||||
WORKDIR /app/
|
||||
|
||||
RUN ./manage.py collectstatic --no-input
|
||||
|
||||
FROM nginx:latest
|
||||
|
||||
COPY --from=static-build /app/static /static/static/
|
||||
COPY ./passbook/core/nginx.conf /etc/nginx/nginx.conf
|
54 Pipfile (new file)
@ -0,0 +1,54 @@
|
||||
[[source]]
|
||||
name = "pypi"
|
||||
url = "https://pypi.org/simple"
|
||||
verify_ssl = true
|
||||
|
||||
[packages]
|
||||
celery = "*"
|
||||
cherrypy = "*"
|
||||
defusedxml = "*"
|
||||
django = "*"
|
||||
kombu = "==4.5.0"
|
||||
django-cors-middleware = "*"
|
||||
django-filters = "*"
|
||||
django-ipware = "*"
|
||||
django-model-utils = "*"
|
||||
django-oauth-toolkit = "*"
|
||||
django-oidc-provider = "*"
|
||||
django-otp = "*"
|
||||
django-recaptcha = "*"
|
||||
django-redis = "*"
|
||||
django-rest-framework = "*"
|
||||
drf-yasg = "*"
|
||||
ldap3 = "*"
|
||||
lxml = "*"
|
||||
markdown = "*"
|
||||
oauthlib = "*"
|
||||
packaging = "*"
|
||||
psycopg2-binary = "*"
|
||||
pycryptodome = "*"
|
||||
pyyaml = "*"
|
||||
qrcode = "*"
|
||||
requests-oauthlib = "*"
|
||||
sentry-sdk = "*"
|
||||
service_identity = "*"
|
||||
signxml = "*"
|
||||
urllib3 = {extras = ["secure"],version = "*"}
|
||||
structlog = "*"
|
||||
pyuwsgi = "*"
|
||||
|
||||
[requires]
|
||||
python_version = "3.7"
|
||||
|
||||
[dev-packages]
|
||||
coverage = "*"
|
||||
isort = "*"
|
||||
pylint = "==2.3.1"
|
||||
pylint-django = "*"
|
||||
prospector = "*"
|
||||
django-debug-toolbar = "*"
|
||||
bumpversion = "*"
|
||||
unittest-xml-reporting = "*"
|
||||
autopep8 = "*"
|
||||
bandit = "*"
|
||||
colorama = "*"
|
1101 Pipfile.lock (generated, new file)
File diff suppressed because it is too large.
11 README.md (new file)
@@ -0,0 +1,11 @@
# passbook

## Quick instance

```
export PASSBOOK_DOMAIN=domain.tld
docker-compose pull
docker-compose up -d
docker-compose exec server ./manage.py migrate
docker-compose exec server ./manage.py createsuperuser
```
20 base.Dockerfile (new file)
@@ -0,0 +1,20 @@
FROM python:3.7-slim-buster as locker

COPY ./Pipfile /app/
COPY ./Pipfile.lock /app/

WORKDIR /app/

RUN pip install pipenv && \
    pipenv lock -r > requirements.txt && \
    pipenv lock -rd > requirements-dev.txt

FROM python:3.7-slim-buster

COPY --from=locker /app/requirements.txt /app/
COPY --from=locker /app/requirements-dev.txt /app/

WORKDIR /app/

RUN pip install -r requirements.txt --no-cache-dir && \
    adduser --system --no-create-home --uid 1000 --group --home /app passbook
@@ -1,5 +1,3 @@
FROM docker.beryju.org/passbook/base:latest

COPY ./requirements-dev.txt /app/

RUN pip install -r /app/requirements-dev.txt --no-cache-dir
90 docker-compose.yml (new file)
@ -0,0 +1,90 @@
|
||||
---
|
||||
version: '3.2'
|
||||
|
||||
services:
|
||||
postgresql:
|
||||
image: postgres
|
||||
volumes:
|
||||
- database:/var/lib/postgresql/data
|
||||
networks:
|
||||
- internal
|
||||
environment:
|
||||
- POSTGRES_PASSWORD=${PG_PASS:-thisisnotagoodpassword}
|
||||
- POSTGRES_USER=passbook
|
||||
- POSTGRES_DB=passbook
|
||||
labels:
|
||||
- traefik.enable=false
|
||||
redis:
|
||||
image: redis
|
||||
networks:
|
||||
- internal
|
||||
labels:
|
||||
- traefik.enable=false
|
||||
server:
|
||||
build:
|
||||
context: .
|
||||
image: docker.beryju.org/passbook/server:${SERVER_TAG:-latest}
|
||||
command:
|
||||
- uwsgi
|
||||
- uwsgi.ini
|
||||
environment:
|
||||
- PASSBOOK_DOMAIN=${PASSBOOK_DOMAIN}
|
||||
- PASSBOOK_REDIS__HOST=redis
|
||||
- PASSBOOK_POSTGRESQL__HOST=postgresql
|
||||
- PASSBOOK_POSTGRESQL__PASSWORD=${PG_PASS:-thisisnotagoodpassword}
|
||||
ports:
|
||||
- 8000
|
||||
networks:
|
||||
- internal
|
||||
labels:
|
||||
- traefik.port=8000
|
||||
- traefik.docker.network=internal
|
||||
- traefik.frontend.rule=PathPrefix:/
|
||||
worker:
|
||||
image: docker.beryju.org/passbook/server:${SERVER_TAG:-latest}
|
||||
command:
|
||||
- celery
|
||||
- worker
|
||||
- --autoscale=10,3
|
||||
- -E
|
||||
- -B
|
||||
- -A=passbook.root.celery
|
||||
- -s=/tmp/celerybeat-schedule
|
||||
networks:
|
||||
- internal
|
||||
labels:
|
||||
- traefik.enable=false
|
||||
environment:
|
||||
- PASSBOOK_DOMAIN=${PASSBOOK_DOMAIN}
|
||||
- PASSBOOK_REDIS__HOST=redis
|
||||
- PASSBOOK_POSTGRESQL__HOST=postgresql
|
||||
- PASSBOOK_POSTGRESQL__PASSWORD=${PG_PASS:-thisisnotagoodpassword}
|
||||
static:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: static.Dockerfile
|
||||
image: docker.beryju.org/passbook/static:latest
|
||||
networks:
|
||||
- internal
|
||||
labels:
|
||||
- traefik.frontend.rule=PathPrefix:/static, /robots.txt
|
||||
- traefik.port=80
|
||||
- traefik.docker.network=internal
|
||||
traefik:
|
||||
image: traefik:1.7
|
||||
command: --api --docker
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
ports:
|
||||
- "0.0.0.0:80:80"
|
||||
- "0.0.0.0:443:443"
|
||||
- "0.0.0.0:8080:8080"
|
||||
networks:
|
||||
- internal
|
||||
|
||||
volumes:
|
||||
database:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
internal: {}
|
@@ -39,9 +39,9 @@ http {
    gzip on;
    gzip_types application/javascript image/* text/css;
    gunzip on;
    add_header X-passbook-Version 0.2.8-beta;
    add_header X-passbook-Version 0.6.6-beta;
    add_header Vary X-passbook-Version;
    root /static/;
    root /data/;

    location /_/healthz {
        return 204;
10 docker/uwsgi.ini (new file)
@@ -0,0 +1,10 @@
[uwsgi]
http = 0.0.0.0:8000
chdir = /app
wsgi-file = passbook/root/wsgi.py
processes = 2
master = true
threads = 2
enable-threads = true
uid = passbook
gid = passbook
9 helm/passbook/Chart.lock (new file)
@@ -0,0 +1,9 @@
dependencies:
- name: postgresql
  repository: https://kubernetes-charts.storage.googleapis.com/
  version: 6.3.10
- name: redis
  repository: https://kubernetes-charts.storage.googleapis.com/
  version: 9.2.1
digest: sha256:bdde250e1401dccdd5161e39c807f9e88b05e3e8e72e74df767a1bbb5fc39a4a
generated: "2019-10-01T10:46:06.542706+02:00"
@@ -1,6 +1,6 @@
apiVersion: v1
appVersion: "0.2.8-beta"
appVersion: "0.6.6-beta"
description: A Helm chart for passbook.
name: passbook
version: "0.2.8-beta"
version: "0.6.6-beta"
icon: https://git.beryju.org/uploads/-/system/project/avatar/108/logo.png
@@ -1 +0,0 @@
# passbook
Binary file not shown.
BIN helm/passbook/charts/postgresql-4.2.2.tgz (new file)
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN helm/passbook/charts/redis-9.2.1.tgz (new file)
Binary file not shown.
@ -1,98 +0,0 @@
|
||||
---
|
||||
categories:
|
||||
- Authentication
|
||||
- SSO
|
||||
questions:
|
||||
- default: "true"
|
||||
variable: config.error_reporting
|
||||
type: boolean
|
||||
description: "Enable error-reporting to sentry.services.beryju.org"
|
||||
group: "passbook Configuration"
|
||||
label: "Error Reporting"
|
||||
####################################################################
|
||||
### PostgreSQL
|
||||
####################################################################
|
||||
- variable: postgresql.enabled
|
||||
default: true
|
||||
description: "Deploy a database server as part of this deployment, or set to false and configure an external database connection."
|
||||
type: boolean
|
||||
required: true
|
||||
label: Install PostgreSQL
|
||||
show_subquestion_if: true
|
||||
group: "Database Settings"
|
||||
subquestions:
|
||||
- variable: postgresql.postgresqlDatabase
|
||||
default: "passbook"
|
||||
description: "Database name to create"
|
||||
type: string
|
||||
label: PostgreSQL Database
|
||||
- variable: postgresql.postgresqlUsername
|
||||
default: "passbook"
|
||||
description: "Database user to create"
|
||||
type: string
|
||||
label: PostgreSQL User
|
||||
- variable: postgresql.postgresqlPassword
|
||||
default: ""
|
||||
description: "password will be auto-generated if not specified"
|
||||
type: password
|
||||
label: PostgreSQL Password
|
||||
- variable: externalDatabase.host
|
||||
default: ""
|
||||
description: "Host of the external database"
|
||||
type: string
|
||||
label: External Database Host
|
||||
show_if: "postgresql.enabled=false"
|
||||
group: "Database Settings"
|
||||
- variable: externalDatabase.user
|
||||
default: ""
|
||||
description: "Existing username in the external DB"
|
||||
type: string
|
||||
label: External Database username
|
||||
show_if: "postgresql.enabled=false"
|
||||
group: "Database Settings"
|
||||
- variable: externalDatabase.password
|
||||
default: ""
|
||||
description: "External database password"
|
||||
type: password
|
||||
label: External Database password
|
||||
show_if: "postgresql.enabled=false"
|
||||
group: "Database Settings"
|
||||
- variable: externalDatabase.database
|
||||
default: ""
|
||||
description: "Name of the existing database"
|
||||
type: string
|
||||
label: External Database
|
||||
show_if: "postgresql.enabled=false"
|
||||
group: "Database Settings"
|
||||
- variable: externalDatabase.port
|
||||
default: "3306"
|
||||
description: "External database port number"
|
||||
type: string
|
||||
label: External Database Port
|
||||
show_if: "postgresql.enabled=false"
|
||||
group: "Database Settings"
|
||||
- variable: postgresql.persistence.enabled
|
||||
default: false
|
||||
description: "Enable persistent volume for PostgreSQL"
|
||||
type: boolean
|
||||
required: true
|
||||
label: PostgreSQL Persistent Volume Enabled
|
||||
show_if: "postgresql.enabled=true"
|
||||
show_subquestion_if: true
|
||||
group: "Database Settings"
|
||||
subquestions:
|
||||
- variable: postgresql.master.persistence.size
|
||||
default: "8Gi"
|
||||
description: "PostgreSQL Persistent Volume Size"
|
||||
type: string
|
||||
label: PostgreSQL Volume Size
|
||||
- variable: postgresql.master.persistence.storageClass
|
||||
default: ""
|
||||
description: "If undefined or null, uses the default StorageClass. Default to null"
|
||||
type: storageclass
|
||||
label: Default StorageClass for PostgreSQL
|
||||
- variable: postgresql.master.persistence.existingClaim
|
||||
default: ""
|
||||
description: "If not empty, uses the specified existing PVC instead of creating new one"
|
||||
type: string
|
||||
label: Existing Persistent Volume Claim for PostgreSQL
|
@ -1,12 +1,9 @@
|
||||
dependencies:
|
||||
- name: rabbitmq
|
||||
repository: https://kubernetes-charts.storage.googleapis.com/
|
||||
version: 4.3.2
|
||||
- name: postgresql
|
||||
repository: https://kubernetes-charts.storage.googleapis.com/
|
||||
version: 3.10.1
|
||||
version: 4.2.2
|
||||
- name: redis
|
||||
repository: https://kubernetes-charts.storage.googleapis.com/
|
||||
version: 5.1.0
|
||||
digest: sha256:8bf68bc928a2e3c0f05139635be05fa0840554c7bde4cecd624fac78fb5fa5a3
|
||||
generated: 2019-03-21T11:06:51.553379+01:00
|
||||
version: 9.2.1
|
||||
digest: sha256:8782e974a1094eaeecf1d68f093ca4fb84977217b2bd38b09790a05ec289aec2
|
||||
generated: "2019-10-02T21:03:25.90491153Z"
|
||||
|
@ -1,10 +1,7 @@
|
||||
dependencies:
|
||||
- name: rabbitmq
|
||||
version: 4.3.2
|
||||
repository: https://kubernetes-charts.storage.googleapis.com/
|
||||
- name: postgresql
|
||||
version: 3.10.1
|
||||
version: 4.2.2
|
||||
repository: https://kubernetes-charts.storage.googleapis.com/
|
||||
- name: redis
|
||||
version: 5.1.0
|
||||
version: 9.2.1
|
||||
repository: https://kubernetes-charts.storage.googleapis.com/
|
||||
|
@ -1,62 +0,0 @@
|
||||
apiVersion: apps/v1beta2
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ include "passbook.fullname" . }}-appgw
|
||||
labels:
|
||||
app.kubernetes.io/name: {{ include "passbook.name" . }}
|
||||
helm.sh/chart: {{ include "passbook.chart" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
spec:
|
||||
replicas: {{ .Values.replicaCount }}
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/name: {{ include "passbook.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/name: {{ include "passbook.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
passbook.io/component: appgw
|
||||
spec:
|
||||
volumes:
|
||||
- name: config-volume
|
||||
configMap:
|
||||
name: {{ include "passbook.fullname" . }}-config
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
image: "docker.beryju.org/passbook/server:{{ .Values.image.tag }}"
|
||||
imagePullPolicy: IfNotPresent
|
||||
command:
|
||||
- ./manage.py
|
||||
args:
|
||||
- app_gw_web
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 8000
|
||||
protocol: TCP
|
||||
volumeMounts:
|
||||
- mountPath: /etc/passbook
|
||||
name: config-volume
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
httpHeaders:
|
||||
- name: Host
|
||||
value: kubernetes-healthcheck-host
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
httpHeaders:
|
||||
- name: Host
|
||||
value: kubernetes-healthcheck-host
|
||||
resources:
|
||||
requests:
|
||||
cpu: 150m
|
||||
memory: 300M
|
||||
limits:
|
||||
cpu: 500m
|
||||
memory: 500M
|
@ -1,20 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: {{ include "passbook.fullname" . }}-appgw
|
||||
labels:
|
||||
app.kubernetes.io/name: {{ include "passbook.name" . }}
|
||||
helm.sh/chart: {{ include "passbook.chart" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
spec:
|
||||
type: {{ .Values.service.type }}
|
||||
ports:
|
||||
- port: {{ .Values.service.port }}
|
||||
targetPort: http
|
||||
protocol: TCP
|
||||
name: http
|
||||
selector:
|
||||
app.kubernetes.io/name: {{ include "passbook.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
passbook.io/component: appgw
|
@ -4,134 +4,13 @@ metadata:
|
||||
name: {{ include "passbook.fullname" . }}-config
|
||||
data:
|
||||
config.yml: |
|
||||
# Env for Docker images
|
||||
databases:
|
||||
default:
|
||||
engine: django.db.backends.postgresql
|
||||
name: {{ .Values.postgresql.postgresqlDatabase }}
|
||||
user: postgres
|
||||
password: {{ .Values.postgresql.postgresqlPassword }}
|
||||
host: {{ .Release.Name }}-postgresql
|
||||
port: ''
|
||||
log:
|
||||
level:
|
||||
console: WARNING
|
||||
file: WARNING
|
||||
file: /dev/null
|
||||
syslog:
|
||||
host: 127.0.0.1
|
||||
port: 514
|
||||
email:
|
||||
host: {{ .Values.config.email.host }}
|
||||
port: 25
|
||||
user: ''
|
||||
password: ''
|
||||
use_tls: false
|
||||
use_ssl: false
|
||||
from: passbook <passbook@domain.tld>
|
||||
web:
|
||||
listen: 0.0.0.0
|
||||
port: 8000
|
||||
threads: 30
|
||||
debug: false
|
||||
secure_proxy_header:
|
||||
HTTP_X_FORWARDED_PROTO: https
|
||||
rabbitmq: "user:{{ .Values.rabbitmq.rabbitmq.password }}@{{ .Release.Name }}-rabbitmq"
|
||||
redis: ":{{ .Values.redis.password }}@{{ .Release.Name }}-redis-master/0"
|
||||
# Error reporting, sends stacktrace to sentry.services.beryju.org
|
||||
postgresql:
|
||||
host: "{{ .Release.Name }}-postgresql"
|
||||
name: "{{ .Values.postgresql.postgresqlDatabase }}"
|
||||
user: postgres
|
||||
redis:
|
||||
host: "{{ .Release.Name }}-redis-master"
|
||||
cache_db: 0
|
||||
message_queue_db: 1
|
||||
error_report_enabled: {{ .Values.config.error_reporting }}
|
||||
|
||||
{{- if .Values.config.secret_key }}
|
||||
secret_key: {{ .Values.config.secret_key }}
|
||||
{{- else }}
|
||||
secret_key: {{ randAlphaNum 50 }}
|
||||
{{- end }}
|
||||
|
||||
primary_domain: {{ .Values.primary_domain }}
|
||||
domains:
|
||||
{{- range .Values.ingress.hosts }}
|
||||
- {{ . | quote }}
|
||||
{{- end }}
|
||||
- kubernetes-healthcheck-host
|
||||
|
||||
passbook:
|
||||
sign_up:
|
||||
# Enables signup, created users are stored in internal Database and created in LDAP if ldap.create_users is true
|
||||
enabled: true
|
||||
password_reset:
|
||||
# Enable password reset, passwords are reset in internal Database and in LDAP if ldap.reset_password is true
|
||||
enabled: true
|
||||
# Verification the user has to provide in order to be able to reset passwords. Can be any combination of `email`, `2fa`, `security_questions`
|
||||
verification:
|
||||
- email
|
||||
# Text used in title, on login page and multiple other places
|
||||
branding: passbook
|
||||
login:
|
||||
# Override URL used for logo
|
||||
logo_url: null
|
||||
# Override URL used for Background on Login page
|
||||
bg_url: null
|
||||
# Optionally add a subtext, placed below logo on the login page
|
||||
subtext: null
|
||||
footer:
|
||||
links:
|
||||
# Optionally add links to the footer on the login page
|
||||
# - name: test
|
||||
# href: https://test
|
||||
# Specify which fields can be used to authenticate. Can be any combination of `username` and `email`
|
||||
uid_fields:
|
||||
- username
|
||||
- email
|
||||
session:
|
||||
remember_age: 2592000 # 60 * 60 * 24 * 30, one month
|
||||
# Provider-specific settings
|
||||
ldap:
|
||||
# # Completely enable or disable LDAP provider
|
||||
# enabled: false
|
||||
# # AD Domain, used to generate `userPrincipalName`
|
||||
# domain: corp.contoso.com
|
||||
# # Base DN in which passbook should look for users
|
||||
# base_dn: dn=corp,dn=contoso,dn=com
|
||||
# # LDAP field which is used to set the django username
|
||||
# username_field: sAMAccountName
|
||||
# # LDAP server to connect to, can be set to `<domain_name>`
|
||||
# server:
|
||||
# name: corp.contoso.com
|
||||
# use_tls: false
|
||||
# # Bind credentials, used for account creation
|
||||
# bind:
|
||||
# username: Administrator@corp.contoso.com
|
||||
# password: VerySecurePassword!
|
||||
# Which field from `uid_fields` maps to which LDAP Attribute
|
||||
login_field_map:
|
||||
username: sAMAccountName
|
||||
email: mail # or userPrincipalName
|
||||
user_attribute_map:
|
||||
active_directory:
|
||||
username: "%(sAMAccountName)s"
|
||||
email: "%(mail)s"
|
||||
name: "%(displayName)"
|
||||
# # Create new users in LDAP upon sign-up
|
||||
# create_users: true
|
||||
# # Reset LDAP password when user reset their password
|
||||
# reset_password: true
|
||||
oauth_client:
|
||||
# List of python packages with sources types to load.
|
||||
types:
|
||||
- passbook.oauth_client.source_types.discord
|
||||
- passbook.oauth_client.source_types.facebook
|
||||
- passbook.oauth_client.source_types.github
|
||||
- passbook.oauth_client.source_types.google
|
||||
- passbook.oauth_client.source_types.reddit
|
||||
- passbook.oauth_client.source_types.supervisr
|
||||
- passbook.oauth_client.source_types.twitter
|
||||
- passbook.oauth_client.source_types.azure_ad
|
||||
saml_idp:
|
||||
signing: true
|
||||
autosubmit: false
|
||||
issuer: passbook
|
||||
assertion_valid_for: 86400
|
||||
# List of python packages with provider types to load.
|
||||
types:
|
||||
- passbook.saml_idp.processors.generic
|
||||
- passbook.saml_idp.processors.salesforce
|
||||
domain: ".{{ .Values.ingress.hosts[0] }}"
|
||||
|
@ -37,14 +37,9 @@ spec:
|
||||
backend:
|
||||
serviceName: {{ $fullName }}-static
|
||||
servicePort: http
|
||||
{{- end }}
|
||||
{{- range .Values.ingress.app_gw_hosts }}
|
||||
- host: {{ . | quote }}
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
- path: /robots.txt
|
||||
backend:
|
||||
serviceName: {{ $fullName }}-appgw
|
||||
serviceName: {{ $fullName }}-static
|
||||
servicePort: http
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
11 helm/passbook/templates/secret.yaml (new file)
@@ -0,0 +1,11 @@
apiVersion: v1
kind: Secret
type: Opaque
metadata:
  name: {{ include "passbook.fullname" . }}-secret-key
data:
  {{- if .Values.config.secret_key }}
  secret_key: {{ .Values.config.secret_key | b64enc | quote }}
  {{- else }}
  secret_key: {{ randAlphaNum 50 | b64enc | quote}}
  {{- end }}
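For comparison only (this is not part of the chart), a minimal Python sketch of what `randAlphaNum 50 | b64enc` produces on the Helm side: a random 50-character alphanumeric key, base64-encoded for the Secret's `data` field:

```python
import base64
import secrets
import string

ALPHABET = string.ascii_letters + string.digits

# 50-character random alphanumeric key, then base64 for the Secret's data field.
secret_key = "".join(secrets.choice(ALPHABET) for _ in range(50))
encoded = base64.b64encode(secret_key.encode()).decode()
print(encoded)
```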
@ -8,7 +8,7 @@ metadata:
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
spec:
|
||||
replicas: {{ .Values.replicaCount }}
|
||||
replicas: 2
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/name: {{ include "passbook.name" . }}
|
||||
@ -34,21 +34,61 @@ spec:
|
||||
volumeMounts:
|
||||
- mountPath: /etc/passbook
|
||||
name: config-volume
|
||||
envFrom:
|
||||
- configMapRef:
|
||||
name: {{ include "passbook.fullname" . }}-config
|
||||
prefix: PASSBOOK_
|
||||
env:
|
||||
- name: PASSBOOK_SECRET_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ include "passbook.fullname" . }}-secret-key
|
||||
key: secret_key
|
||||
- name: PASSBOOK_REDIS__PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Release.Name }}-redis"
|
||||
key: redis-password
|
||||
- name: PASSBOOK_POSTGRESQL__PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Release.Name }}-postgresql"
|
||||
key: postgresql-password
|
||||
containers:
|
||||
- name: {{ .Chart.Name }}
|
||||
image: "docker.beryju.org/passbook/server:{{ .Values.image.tag }}"
|
||||
imagePullPolicy: IfNotPresent
|
||||
command:
|
||||
- ./manage.py
|
||||
- uwsgi
|
||||
args:
|
||||
- web
|
||||
- uwsgi.ini
|
||||
volumeMounts:
|
||||
- mountPath: /etc/passbook
|
||||
name: config-volume
|
||||
envFrom:
|
||||
- configMapRef:
|
||||
name: {{ include "passbook.fullname" . }}-config
|
||||
prefix: PASSBOOK_
|
||||
env:
|
||||
- name: PASSBOOK_SECRET_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ include "passbook.fullname" . }}-secret-key
|
||||
key: secret_key
|
||||
- name: PASSBOOK_REDIS__PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Release.Name }}-redis"
|
||||
key: redis-password
|
||||
- name: PASSBOOK_POSTGRESQL__PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Release.Name }}-postgresql"
|
||||
key: postgresql-password
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 8000
|
||||
protocol: TCP
|
||||
volumeMounts:
|
||||
- mountPath: /etc/passbook
|
||||
name: config-volume
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
@ -65,8 +105,8 @@ spec:
|
||||
value: kubernetes-healthcheck-host
|
||||
resources:
|
||||
requests:
|
||||
cpu: 50m
|
||||
memory: 150M
|
||||
cpu: 100m
|
||||
memory: 200M
|
||||
limits:
|
||||
cpu: 200m
|
||||
memory: 300M
|
||||
cpu: 300m
|
||||
memory: 350M
|
||||
|
@ -8,7 +8,7 @@ metadata:
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
spec:
|
||||
replicas: {{ .Values.replicaCount }}
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/name: {{ include "passbook.name" . }}
|
||||
@ -29,16 +29,37 @@ spec:
|
||||
image: "docker.beryju.org/passbook/server:{{ .Values.image.tag }}"
|
||||
imagePullPolicy: IfNotPresent
|
||||
command:
|
||||
- ./manage.py
|
||||
- celery
|
||||
args:
|
||||
- worker
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 8000
|
||||
protocol: TCP
|
||||
- --autoscale=10,3
|
||||
- -E
|
||||
- -B
|
||||
- -A=passbook.root.celery
|
||||
- -s=/tmp/celerybeat-schedule
|
||||
volumeMounts:
|
||||
- mountPath: /etc/passbook
|
||||
name: config-volume
|
||||
envFrom:
|
||||
- configMapRef:
|
||||
name: {{ include "passbook.fullname" . }}-config
|
||||
prefix: PASSBOOK_
|
||||
env:
|
||||
- name: PASSBOOK_SECRET_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ include "passbook.fullname" . }}-secret-key
|
||||
key: secret_key
|
||||
- name: PASSBOOK_REDIS__PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Release.Name }}-redis"
|
||||
key: redis-password
|
||||
- name: PASSBOOK_POSTGRESQL__PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: "{{ .Release.Name }}-postgresql"
|
||||
key: postgresql-password
|
||||
resources:
|
||||
requests:
|
||||
cpu: 150m
|
||||
|
@ -1,11 +1,8 @@
|
||||
# Default values for passbook.
|
||||
# This is a YAML-formatted file.
|
||||
# Declare variables to be passed into your templates.
|
||||
|
||||
replicaCount: 1
|
||||
|
||||
image:
|
||||
tag: 0.2.8-beta
|
||||
tag: 0.6.6-beta
|
||||
|
||||
nameOverride: ""
|
||||
|
||||
@ -19,11 +16,13 @@ config:
|
||||
|
||||
postgresql:
|
||||
postgresqlDatabase: passbook
|
||||
postgresqlPassword: foo
|
||||
|
||||
rabbitmq:
|
||||
rabbitmq:
|
||||
password: foo
|
||||
redis:
|
||||
cluster:
|
||||
enabled: false
|
||||
master:
|
||||
persistence:
|
||||
enabled: false
|
||||
|
||||
service:
|
||||
type: ClusterIP
|
||||
@ -37,28 +36,7 @@ ingress:
|
||||
path: /
|
||||
hosts:
|
||||
- passbook.k8s.local
|
||||
app_gw_hosts:
|
||||
- '*.passbook.k8s.local'
|
||||
defaultHost: passbook.k8s.local
|
||||
tls: []
|
||||
# - secretName: chart-example-tls
|
||||
# hosts:
|
||||
# - passbook.k8s.local
|
||||
|
||||
resources: {}
|
||||
# We usually recommend not to specify default resources and to leave this as a conscious
|
||||
# choice for the user. This also increases chances charts run on environments with little
|
||||
# resources, such as Minikube. If you do want to specify resources, uncomment the following
|
||||
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
|
||||
# limits:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
# requests:
|
||||
# cpu: 100m
|
||||
# memory: 128Mi
|
||||
|
||||
nodeSelector: {}
|
||||
|
||||
tolerations: []
|
||||
|
||||
affinity: {}
|
||||
|
@@ -1,2 +1,2 @@
"""passbook"""
__version__ = '0.2.8-beta'
__version__ = '0.6.6-beta'
@@ -11,7 +11,7 @@ class UserForm(forms.ModelForm):
    class Meta:

        model = User
        fields = ['username', 'name', 'email', 'is_staff', 'is_active']
        fields = ['username', 'name', 'email', 'is_staff', 'is_active', 'attributes']
        widgets = {
            'name': forms.TextInput
        }
@ -179,8 +179,8 @@
|
||||
<span class="card-pf-aggregate-status-notification">
|
||||
<a href="#">
|
||||
{% if worker_count < 1%}
|
||||
<span class="pficon-error-circle-o" data-toggle="tooltip" data-placement="right"
|
||||
title="{% trans 'No workers connected. Policies will not work and you may expect other issues.' %}"></span> {{ worker_count }}
|
||||
<span class="pficon-warning-triangle-o" data-toggle="tooltip" data-placement="right"
|
||||
title="{% trans 'No workers connected.' %}"></span> {{ worker_count }}
|
||||
{% else %}
|
||||
<span class="pficon pficon-ok"></span>{{ worker_count }}
|
||||
{% endif %}
|
||||
|
@ -7,6 +7,10 @@
|
||||
<div class="container">
|
||||
<h1><span class="pficon-users"></span> {% trans "Users" %}</h1>
|
||||
<hr>
|
||||
<a href="{% url 'passbook_admin:user-create' %}?back={{ request.get_full_path }}" class="btn btn-primary">
|
||||
{% trans 'Create...' %}
|
||||
</a>
|
||||
<hr>
|
||||
<table class="table table-striped table-bordered">
|
||||
<thead>
|
||||
<tr>
|
||||
|
@@ -1,14 +1,14 @@
"""passbook admin templatetags"""
import inspect
from logging import getLogger

from django import template
from django.db.models import Model
from structlog import get_logger

from passbook.lib.utils.template import render_to_string

register = template.Library()
LOGGER = getLogger(__name__)
LOGGER = get_logger()

@register.simple_tag()
def get_links(model_instance):
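The hunk above swaps the standard-library `getLogger(__name__)` for structlog's `get_logger()`. A minimal sketch of the difference in call style, assuming structlog's default configuration (illustrative, not code from this repository):

```python
import structlog

LOGGER = structlog.get_logger()

# structlog loggers take key-value context alongside the event name
# instead of a pre-formatted message string.
LOGGER.info("user created", username="admin", source="admin-ui")
```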
@ -61,6 +61,7 @@ urlpatterns = [
|
||||
# Users
|
||||
path('users/', users.UserListView.as_view(),
|
||||
name='users'),
|
||||
path('users/create/', users.UserCreateView.as_view(), name='user-create'),
|
||||
path('users/<int:pk>/update/',
|
||||
users.UserUpdateView.as_view(), name='user-update'),
|
||||
path('users/<int:pk>/delete/',
|
||||
|
@ -3,8 +3,8 @@ from django.core.cache import cache
|
||||
from django.shortcuts import redirect, reverse
|
||||
from django.views.generic import TemplateView
|
||||
|
||||
from passbook import __version__
|
||||
from passbook.admin.mixins import AdminRequiredMixin
|
||||
from passbook.core import __version__
|
||||
from passbook.core.models import (Application, Factor, Invitation, Policy,
|
||||
Provider, Source, User)
|
||||
from passbook.root.celery import CELERY_APP
|
||||
|
@ -11,8 +11,8 @@ from django.views.generic.detail import DetailView
|
||||
from passbook.admin.forms.policies import PolicyTestForm
|
||||
from passbook.admin.mixins import AdminRequiredMixin
|
||||
from passbook.core.models import Policy
|
||||
from passbook.core.policies import PolicyEngine
|
||||
from passbook.lib.utils.reflection import path_to_class
|
||||
from passbook.policies.engine import PolicyEngine
|
||||
|
||||
|
||||
class PolicyListView(AdminRequiredMixin, ListView):
|
||||
|
@ -5,7 +5,7 @@ from django.shortcuts import get_object_or_404, redirect
|
||||
from django.urls import reverse, reverse_lazy
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.views import View
|
||||
from django.views.generic import DeleteView, ListView, UpdateView
|
||||
from django.views.generic import CreateView, DeleteView, ListView, UpdateView
|
||||
|
||||
from passbook.admin.forms.users import UserForm
|
||||
from passbook.admin.mixins import AdminRequiredMixin
|
||||
@ -19,6 +19,17 @@ class UserListView(AdminRequiredMixin, ListView):
|
||||
template_name = 'administration/user/list.html'
|
||||
|
||||
|
||||
class UserCreateView(SuccessMessageMixin, AdminRequiredMixin, CreateView):
|
||||
"""Create user"""
|
||||
|
||||
model = User
|
||||
form_class = UserForm
|
||||
|
||||
template_name = 'generic/create.html'
|
||||
success_url = reverse_lazy('passbook_admin:users')
|
||||
success_message = _('Successfully created User')
|
||||
|
||||
|
||||
class UserUpdateView(SuccessMessageMixin, AdminRequiredMixin, UpdateView):
|
||||
"""Update user"""
|
||||
|
||||
|
BIN passbook/app_gw/.DS_Store (vendored)
Binary file not shown.
@ -1,16 +0,0 @@
|
||||
"""passbook Application Security Gateway app"""
|
||||
from importlib import import_module
|
||||
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class PassbookApplicationApplicationGatewayConfig(AppConfig):
|
||||
"""passbook app_gw app"""
|
||||
|
||||
name = 'passbook.app_gw'
|
||||
label = 'passbook_app_gw'
|
||||
verbose_name = 'passbook Application Security Gateway'
|
||||
mountpoint = 'app_gw/'
|
||||
|
||||
def ready(self):
|
||||
import_module('passbook.app_gw.signals')
|
@ -1,13 +0,0 @@
|
||||
"""
|
||||
ASGI entrypoint. Configures Django and then runs the application
|
||||
defined in the ASGI_APPLICATION setting.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import django
|
||||
from channels.routing import get_default_application
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.root.settings")
|
||||
django.setup()
|
||||
application = get_default_application()
|
@ -1,30 +0,0 @@
|
||||
"""passbook app_gw webserver management command"""
|
||||
|
||||
from logging import getLogger
|
||||
|
||||
from daphne.cli import CommandLineInterface
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import autoreload
|
||||
|
||||
from passbook.lib.config import CONFIG
|
||||
|
||||
LOGGER = getLogger(__name__)
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Run Daphne Webserver for app_gw"""
|
||||
|
||||
def handle(self, *args, **options):
|
||||
"""passbook daphne server"""
|
||||
autoreload.run_with_reloader(self.daphne_server)
|
||||
|
||||
def daphne_server(self):
|
||||
"""Run daphne server within autoreload"""
|
||||
autoreload.raise_last_exception()
|
||||
CommandLineInterface().run([
|
||||
'-p', str(CONFIG.y('app_gw.port', 8000)),
|
||||
'-b', CONFIG.y('app_gw.listen', '0.0.0.0'), # nosec
|
||||
'--access-log', '/dev/null',
|
||||
'--application-close-timeout', '500',
|
||||
'passbook.app_gw.asgi:application'
|
||||
])
|
@ -1,33 +0,0 @@
|
||||
"""passbook app_gw middleware"""
|
||||
from django.views.generic import RedirectView
|
||||
|
||||
from passbook.app_gw.proxy.handler import RequestHandler
|
||||
from passbook.lib.config import CONFIG
|
||||
|
||||
|
||||
class ApplicationGatewayMiddleware:
|
||||
"""Check if request should be proxied or handeled normally"""
|
||||
|
||||
_app_gw_cache = {}
|
||||
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
# Rudimentary cache
|
||||
host_header = request.META.get('HTTP_HOST')
|
||||
if host_header not in self._app_gw_cache:
|
||||
self._app_gw_cache[host_header] = RequestHandler.find_app_gw_for_request(request)
|
||||
if self._app_gw_cache[host_header]:
|
||||
return self.dispatch(request, self._app_gw_cache[host_header])
|
||||
return self.get_response(request)
|
||||
|
||||
def dispatch(self, request, app_gw):
|
||||
"""Build proxied request and pass to upstream"""
|
||||
handler = RequestHandler(app_gw, request)
|
||||
|
||||
if not handler.check_permission():
|
||||
to_url = 'https://%s/?next=%s' % (CONFIG.get('domains')[0], request.get_full_path())
|
||||
return RedirectView.as_view(url=to_url)(request)
|
||||
|
||||
return handler.get_response()
|
BIN passbook/app_gw/migrations/.DS_Store (vendored)
Binary file not shown.
@ -1,18 +0,0 @@
|
||||
# Generated by Django 2.1.7 on 2019-03-21 15:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_app_gw', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='rewriterule',
|
||||
name='conditions',
|
||||
field=models.ManyToManyField(blank=True, to='passbook_core.Policy'),
|
||||
),
|
||||
]
|
@ -1,18 +0,0 @@
|
||||
# Generated by Django 2.2 on 2019-04-11 13:14
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_app_gw', '0002_auto_20190321_1521'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='applicationgatewayprovider',
|
||||
name='authentication_header',
|
||||
field=models.TextField(blank=True, default='X-Remote-User'),
|
||||
),
|
||||
]
|
@ -1,8 +0,0 @@
|
||||
"""Exception classes"""
|
||||
|
||||
class ReverseProxyException(Exception):
|
||||
"""Base for revproxy exception"""
|
||||
|
||||
|
||||
class InvalidUpstream(ReverseProxyException):
|
||||
"""Invalid upstream set"""
|
@ -1,233 +0,0 @@
|
||||
"""passbook app_gw request handler"""
|
||||
import mimetypes
|
||||
from logging import getLogger
|
||||
from random import SystemRandom
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import certifi
|
||||
import urllib3
|
||||
from django.core.cache import cache
|
||||
from django.utils.http import urlencode
|
||||
|
||||
from passbook.app_gw.models import ApplicationGatewayProvider
|
||||
from passbook.app_gw.proxy.exceptions import InvalidUpstream
|
||||
from passbook.app_gw.proxy.response import get_django_response
|
||||
from passbook.app_gw.proxy.rewrite import Rewriter
|
||||
from passbook.app_gw.proxy.utils import encode_items, normalize_request_headers
|
||||
from passbook.core.models import Application
|
||||
from passbook.core.policies import PolicyEngine
|
||||
|
||||
SESSION_UPSTREAM_KEY = 'passbook_app_gw_upstream'
|
||||
IGNORED_HOSTNAMES_KEY = 'passbook_app_gw_ignored'
|
||||
LOGGER = getLogger(__name__)
|
||||
QUOTE_SAFE = r'<.;>\(}*+|~=-$/_:^@)[{]&\'!,"`'
|
||||
ERRORS_MESSAGES = {
|
||||
'upstream-no-scheme': ("Upstream URL scheme must be either "
|
||||
"'http' or 'https' (%s).")
|
||||
}
|
||||
HTTP_NO_VERIFY = urllib3.PoolManager()
|
||||
HTTP = urllib3.PoolManager(
|
||||
cert_reqs='CERT_REQUIRED',
|
||||
ca_certs=certifi.where())
|
||||
IGNORED_HOSTS = cache.get(IGNORED_HOSTNAMES_KEY, [])
|
||||
POLICY_CACHE = {}
|
||||
|
||||
class RequestHandler:
|
||||
"""Forward requests"""
|
||||
|
||||
_parsed_url = None
|
||||
_request_headers = None
|
||||
|
||||
def __init__(self, app_gw, request):
|
||||
self.app_gw = app_gw
|
||||
self.request = request
|
||||
if self.app_gw.pk not in POLICY_CACHE:
|
||||
POLICY_CACHE[self.app_gw.pk] = self.app_gw.application.policies.all()
|
||||
|
||||
@staticmethod
|
||||
def find_app_gw_for_request(request):
|
||||
"""Check if a request should be proxied or forwarded to passbook"""
|
||||
# Check if hostname is in cached list of ignored hostnames
|
||||
# This saves us having to query the database on each request
|
||||
host_header = request.META.get('HTTP_HOST')
|
||||
if host_header in IGNORED_HOSTS:
|
||||
# LOGGER.debug("%s is ignored", host_header)
|
||||
return False
|
||||
# Look through all ApplicationGatewayProviders and check hostnames
|
||||
matches = ApplicationGatewayProvider.objects.filter(
|
||||
server_name__contains=[host_header],
|
||||
enabled=True)
|
||||
if not matches.exists():
|
||||
# No matching Providers found, add host header to ignored list
|
||||
IGNORED_HOSTS.append(host_header)
|
||||
cache.set(IGNORED_HOSTNAMES_KEY, IGNORED_HOSTS)
|
||||
# LOGGER.debug("Ignoring %s", host_header)
|
||||
return False
|
||||
# At this point we're certain there's a matching ApplicationGateway
|
||||
if len(matches) > 1:
|
||||
# This should never happen
|
||||
raise ValueError
|
||||
app_gw = matches.first()
|
||||
try:
|
||||
# Check if ApplicationGateway is associated with application
|
||||
getattr(app_gw, 'application')
|
||||
if app_gw:
|
||||
return app_gw
|
||||
except Application.DoesNotExist:
|
||||
pass
|
||||
# LOGGER.debug("ApplicationGateway not associated with Application")
|
||||
return True
|
||||
|
||||
def _get_upstream(self):
|
||||
"""Choose random upstream and save in session"""
|
||||
if SESSION_UPSTREAM_KEY not in self.request.session:
|
||||
self.request.session[SESSION_UPSTREAM_KEY] = {}
|
||||
if self.app_gw.pk not in self.request.session[SESSION_UPSTREAM_KEY]:
|
||||
upstream_index = int(SystemRandom().random() * len(self.app_gw.upstream))
|
||||
self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk] = upstream_index
|
||||
return self.app_gw.upstream[self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk]]
|
||||
|
||||
def get_upstream(self):
|
||||
"""Get upstream as parsed url"""
|
||||
upstream = self._get_upstream()
|
||||
|
||||
self._parsed_url = urlparse(upstream)
|
||||
|
||||
if self._parsed_url.scheme not in ('http', 'https'):
|
||||
raise InvalidUpstream(ERRORS_MESSAGES['upstream-no-scheme'] %
|
||||
upstream)
|
||||
|
||||
return upstream
|
||||
|
||||
def _format_path_to_redirect(self):
|
||||
# LOGGER.debug("Path before: %s", self.request.get_full_path())
|
||||
rewriter = Rewriter(self.app_gw, self.request)
|
||||
after = rewriter.build()
|
||||
# LOGGER.debug("Path after: %s", after)
|
||||
return after
|
||||
|
||||
def get_proxy_request_headers(self):
|
||||
"""Get normalized headers for the upstream
|
||||
Gets all headers from the original request and normalizes them.
|
||||
Normalization occurs by removing the prefix ``HTTP_`` and
|
||||
replacing and ``_`` by ``-``. Example: ``HTTP_ACCEPT_ENCODING``
|
||||
becomes ``Accept-Encoding``.
|
||||
.. versionadded:: 0.9.1
|
||||
:param request: The original HTTPRequest instance
|
||||
:returns: Normalized headers for the upstream
|
||||
"""
|
||||
return normalize_request_headers(self.request)
|
||||
|
||||
def get_request_headers(self):
|
||||
"""Return request headers that will be sent to upstream.
|
||||
The header REMOTE_USER is set to the current user
|
||||
if AuthenticationMiddleware is enabled and
|
||||
the view's add_remote_user property is True.
|
||||
.. versionadded:: 0.9.8
|
||||
"""
|
||||
request_headers = self.get_proxy_request_headers()
|
||||
if not self.app_gw.authentication_header:
|
||||
return request_headers
|
||||
request_headers[self.app_gw.authentication_header] = self.request.user.get_username()
|
||||
# LOGGER.debug("%s set", self.app_gw.authentication_header)
|
||||
|
||||
return request_headers
|
||||
|
||||
def check_permission(self):
|
||||
"""Check if user is authenticated and has permission to access app"""
|
||||
if not hasattr(self.request, 'user'):
|
||||
return False
|
||||
if not self.request.user.is_authenticated:
|
||||
return False
|
||||
policy_engine = PolicyEngine(POLICY_CACHE[self.app_gw.pk])
|
||||
policy_engine.for_user(self.request.user).with_request(self.request).build()
|
||||
passing, _messages = policy_engine.result
|
||||
|
||||
return passing
|
||||
|
||||
def get_encoded_query_params(self):
|
||||
"""Return encoded query params to be used in proxied request"""
|
||||
get_data = encode_items(self.request.GET.lists())
|
||||
return urlencode(get_data)
|
||||
|
||||
def _created_proxy_response(self, path):
|
||||
request_payload = self.request.body
|
||||
|
||||
# LOGGER.debug("Request headers: %s", self._request_headers)
|
||||
|
||||
request_url = self.get_upstream() + path
|
||||
# LOGGER.debug("Request URL: %s", request_url)
|
||||
|
||||
if self.request.GET:
|
||||
request_url += '?' + self.get_encoded_query_params()
|
||||
# LOGGER.debug("Request URL: %s", request_url)
|
||||
|
||||
http = HTTP
|
||||
if not self.app_gw.upstream_ssl_verification:
|
||||
http = HTTP_NO_VERIFY
|
||||
|
||||
try:
|
||||
proxy_response = http.urlopen(self.request.method,
|
||||
request_url,
|
||||
redirect=False,
|
||||
retries=None,
|
||||
headers=self._request_headers,
|
||||
body=request_payload,
|
||||
decode_content=False,
|
||||
preload_content=False)
|
||||
# LOGGER.debug("Proxy response header: %s",
|
||||
# proxy_response.getheaders())
|
||||
except urllib3.exceptions.HTTPError as error:
|
||||
LOGGER.exception(error)
|
||||
raise
|
||||
|
||||
return proxy_response
|
||||
|
||||
def _replace_host_on_redirect_location(self, proxy_response):
|
||||
location = proxy_response.headers.get('Location')
|
||||
if location:
|
||||
if self.request.is_secure():
|
||||
scheme = 'https://'
|
||||
else:
|
||||
scheme = 'http://'
|
||||
request_host = scheme + self.request.META.get('HTTP_HOST')
|
||||
|
||||
upstream_host_http = 'http://' + self._parsed_url.netloc
|
||||
upstream_host_https = 'https://' + self._parsed_url.netloc
|
||||
|
||||
location = location.replace(upstream_host_http, request_host)
|
||||
location = location.replace(upstream_host_https, request_host)
|
||||
proxy_response.headers['Location'] = location
|
||||
# LOGGER.debug("Proxy response LOCATION: %s",
|
||||
# proxy_response.headers['Location'])
|
||||
|
||||
def _set_content_type(self, proxy_response):
|
||||
content_type = proxy_response.headers.get('Content-Type')
|
||||
if not content_type:
|
||||
content_type = (mimetypes.guess_type(self.request.path)[0] or
|
||||
self.app_gw.default_content_type)
|
||||
proxy_response.headers['Content-Type'] = content_type
|
||||
# LOGGER.debug("Proxy response CONTENT-TYPE: %s",
|
||||
# proxy_response.headers['Content-Type'])
|
||||
|
||||
def get_response(self):
|
||||
"""Pass request to upstream and return response"""
|
||||
self._request_headers = self.get_request_headers()
|
||||
|
||||
path = self._format_path_to_redirect()
|
||||
proxy_response = self._created_proxy_response(path)
|
||||
|
||||
self._replace_host_on_redirect_location(proxy_response)
|
||||
self._set_content_type(proxy_response)
|
||||
response = get_django_response(proxy_response, strict_cookies=False)
|
||||
|
||||
# If response has a 'Location' header, we rewrite that location as well
|
||||
if 'Location' in response:
|
||||
LOGGER.debug("Rewriting Location header")
|
||||
for server_name in self.app_gw.server_name:
|
||||
response['Location'] = response['Location'].replace(
|
||||
self._parsed_url.hostname, server_name)
|
||||
LOGGER.debug(response['Location'])
|
||||
|
||||
# LOGGER.debug("RESPONSE RETURNED: %s", response)
|
||||
return response
|
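Note: the ``HTTP_``-prefix normalization described in get_proxy_request_headers above can be sketched standalone; the input dictionary and helper name below are illustrative, not passbook's actual handler.

# Minimal sketch of the header normalization used above.
def _normalize(meta):
    headers = {}
    for key, value in meta.items():
        if key.startswith('HTTP_'):
            headers[key.replace('HTTP_', '').title().replace('_', '-')] = value
    return headers

# _normalize({'HTTP_ACCEPT_ENCODING': 'gzip'}) -> {'Accept-Encoding': 'gzip'}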
@ -1,63 +0,0 @@
"""response functions from django-revproxy"""
import logging

from django.http import HttpResponse, StreamingHttpResponse

from passbook.app_gw.proxy.utils import (cookie_from_string,
                                         set_response_headers, should_stream)

#: Default number of bytes that are going to be read in a file lecture
DEFAULT_AMT = 2 ** 16

logger = logging.getLogger(__name__)


def get_django_response(proxy_response, strict_cookies=False):
    """This method is used to create an appropriate response based on the
    Content-Length of the proxy_response. If the content is bigger than
    MIN_STREAMING_LENGTH, which is found on utils.py,
    than django.http.StreamingHttpResponse will be created,
    else a django.http.HTTPResponse will be created instead

    :param proxy_response: An Instance of urllib3.response.HTTPResponse that
                           will create an appropriate response
    :param strict_cookies: Whether to only accept RFC-compliant cookies
    :returns: Returns an appropriate response based on the proxy_response
              content-length
    """
    status = proxy_response.status
    headers = proxy_response.headers

    logger.debug('Proxy response headers: %s', headers)

    content_type = headers.get('Content-Type')

    logger.debug('Content-Type: %s', content_type)

    if should_stream(proxy_response):
        logger.info('Content-Length is bigger than %s', DEFAULT_AMT)
        response = StreamingHttpResponse(proxy_response.stream(DEFAULT_AMT),
                                         status=status,
                                         content_type=content_type)
    else:
        content = proxy_response.data or b''
        response = HttpResponse(content, status=status,
                                content_type=content_type)

    logger.info('Normalizing response headers')
    set_response_headers(response, headers)

    logger.debug('Response headers: %s', getattr(response, '_headers'))

    cookies = proxy_response.headers.getlist('set-cookie')
    logger.info('Checking for invalid cookies')
    for cookie_string in cookies:
        cookie_dict = cookie_from_string(cookie_string,
                                         strict_cookies=strict_cookies)
        # if cookie is invalid cookie_dict will be None
        if cookie_dict:
            response.set_cookie(**cookie_dict)

    logger.debug('Response cookies: %s', response.cookies)

    return response
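Note: a condensed sketch of the streaming decision above, assuming a urllib3-style response object; the helper name is illustrative and the HTML content-type check is omitted.

from django.http import HttpResponse, StreamingHttpResponse

def _build_response(proxy_response, chunk=2 ** 16, threshold=4 * 1024):
    # Stream anything of unknown length or larger than the threshold,
    # mirroring should_stream() / DEFAULT_AMT above.
    length = int(proxy_response.headers.get('Content-Length', 0) or 0)
    if not length or length > threshold:
        return StreamingHttpResponse(proxy_response.stream(chunk),
                                     status=proxy_response.status)
    return HttpResponse(proxy_response.data or b'', status=proxy_response.status)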
@ -1,42 +0,0 @@
"""passbook app_gw rewriter"""

from passbook.app_gw.models import RewriteRule

RULE_CACHE = {}

class Context:
    """Empty class which we dynamically add attributes to"""

class Rewriter:
    """Apply rewrites"""

    __application = None
    __request = None

    def __init__(self, application, request):
        self.__application = application
        self.__request = request
        if self.__application.pk not in RULE_CACHE:
            RULE_CACHE[self.__application.pk] = RewriteRule.objects.filter(
                provider__in=[self.__application])

    def __build_context(self, matches):
        """Build object with .0, .1, etc as groups and give access to request"""
        context = Context()
        for index, group_match in enumerate(matches.groups()):
            setattr(context, "g%d" % (index + 1), group_match)
        setattr(context, 'request', self.__request)
        return context

    def build(self):
        """Run all rules over path and return final path"""
        path = self.__request.get_full_path()
        for rule in RULE_CACHE[self.__application.pk]:
            matches = rule.compiled_matcher.search(path)
            if not matches:
                continue
            replace_context = self.__build_context(matches)
            path = rule.replacement.format(context=replace_context)
            if rule.halt:
                return path
        return path
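Note: a small usage sketch of the Rewriter contract above; the rule object and pattern are hypothetical stand-ins for a RewriteRule row.

import re

class _FakeRule:
    # Hypothetical rule: move /legacy/<rest> to /app/<rest>.
    compiled_matcher = re.compile(r'^/legacy/(.*)$')
    replacement = '/app/{context.g1}'
    halt = True

# Applying the build() loop above to '/legacy/dashboard' yields '/app/dashboard':
# matches.groups() == ('dashboard',), so the context exposes g1 == 'dashboard'.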
@ -1,225 +0,0 @@
"""Utils from django-revproxy, slightly adjusted"""
import logging
import re
from wsgiref.util import is_hop_by_hop

try:
    from http.cookies import SimpleCookie
    COOKIE_PREFIX = ''
except ImportError:
    from Cookie import SimpleCookie
    COOKIE_PREFIX = 'Set-Cookie: '


#: List containing string constant that are used to represent headers that can
#: be ignored in the required_header function
IGNORE_HEADERS = (
    'HTTP_ACCEPT_ENCODING',  # We want content to be uncompressed so
                             # we remove the Accept-Encoding from
                             # original request
    'HTTP_HOST',
    'HTTP_REMOTE_USER',
)


# Default from HTTP RFC 2616
# See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
#: Variable that represent the default charset used
DEFAULT_CHARSET = 'latin-1'

#: List containing string constants that represents possible html content type
HTML_CONTENT_TYPES = (
    'text/html',
    'application/xhtml+xml'
)

#: Variable used to represent a minimal content size required for response
#: to be turned into stream
MIN_STREAMING_LENGTH = 4 * 1024  # 4KB

#: Regex used to find charset in a html content type
_get_charset_re = re.compile(r';\s*charset=(?P<charset>[^\s;]+)', re.I)


def is_html_content_type(content_type):
    """Function used to verify if the parameter is a proper html content type

    :param content_type: String variable that represent a content-type
    :returns: A boolean value stating if the content_type is a valid html
              content type
    """
    for html_content_type in HTML_CONTENT_TYPES:
        if content_type.startswith(html_content_type):
            return True

    return False


def should_stream(proxy_response):
    """Function to verify if the proxy_response must be converted into
    a stream.This will be done by checking the proxy_response content-length
    and verify if its length is bigger than one stipulated
    by MIN_STREAMING_LENGTH.

    :param proxy_response: An Instance of urllib3.response.HTTPResponse
    :returns: A boolean stating if the proxy_response should
              be treated as a stream
    """
    content_type = proxy_response.headers.get('Content-Type')

    if is_html_content_type(content_type):
        return False

    try:
        content_length = int(proxy_response.headers.get('Content-Length', 0))
    except ValueError:
        content_length = 0

    if not content_length or content_length > MIN_STREAMING_LENGTH:
        return True

    return False


def get_charset(content_type):
    """Function used to retrieve the charset from a content-type.If there is no
    charset in the content type then the charset defined on DEFAULT_CHARSET
    will be returned

    :param content_type: A string containing a Content-Type header
    :returns: A string containing the charset
    """
    if not content_type:
        return DEFAULT_CHARSET

    matched = _get_charset_re.search(content_type)
    if matched:
        # Extract the charset and strip its double quotes
        return matched.group('charset').replace('"', '')
    return DEFAULT_CHARSET


def required_header(header):
    """Function that verify if the header parameter is a essential header

    :param header: A string represented a header
    :returns: A boolean value that represent if the header is required
    """
    if header in IGNORE_HEADERS:
        return False

    if header.startswith('HTTP_') or header == 'CONTENT_TYPE':
        return True

    return False


def set_response_headers(response, response_headers):
    """Set response's header"""
    for header, value in response_headers.items():
        if is_hop_by_hop(header) or header.lower() == 'set-cookie':
            continue

        response[header.title()] = value

    logger.debug('Response headers: %s', getattr(response, '_headers'))


def normalize_request_headers(request):
    """Function used to transform header, replacing 'HTTP\\_' to ''
    and replace '_' to '-'

    :param request: A HttpRequest that will be transformed
    :returns: A dictionary with the normalized headers
    """
    norm_headers = {}
    for header, value in request.META.items():
        if required_header(header):
            norm_header = header.replace('HTTP_', '').title().replace('_', '-')
            norm_headers[norm_header] = value

    return norm_headers


def encode_items(items):
    """Function that encode all elements in the list of items passed as
    a parameter

    :param items: A list of tuple
    :returns: A list of tuple with all items encoded in 'utf-8'
    """
    encoded = []
    for key, values in items:
        for value in values:
            encoded.append((key.encode('utf-8'), value.encode('utf-8')))
    return encoded


logger = logging.getLogger('revproxy.cookies')


def cookie_from_string(cookie_string, strict_cookies=False):
    """Parser for HTTP header set-cookie
    The return from this function will be used as parameters for
    django's response.set_cookie method. Because set_cookie doesn't
    have parameter comment, this cookie attribute will be ignored.

    :param cookie_string: A string representing a valid cookie
    :param strict_cookies: Whether to only accept RFC-compliant cookies
    :returns: A dictionary containing the cookie_string attributes
    """

    if strict_cookies:

        cookies = SimpleCookie(COOKIE_PREFIX + cookie_string)
        if not cookies.keys():
            return None
        cookie_name, = cookies.keys()
        cookie_dict = {k: v for k, v in cookies[cookie_name].items()
                       if v and k != 'comment'}
        cookie_dict['key'] = cookie_name
        cookie_dict['value'] = cookies[cookie_name].value
        return cookie_dict
    valid_attrs = ('path', 'domain', 'comment', 'expires',
                   'max_age', 'httponly', 'secure')

    cookie_dict = {}

    cookie_parts = cookie_string.split(';')
    try:
        cookie_dict['key'], cookie_dict['value'] = \
            cookie_parts[0].split('=', 1)
        cookie_dict['value'] = cookie_dict['value'].replace('"', '')
    except ValueError:
        logger.warning('Invalid cookie: `%s`', cookie_string)
        return None

    if cookie_dict['value'].startswith('='):
        logger.warning('Invalid cookie: `%s`', cookie_string)
        return None

    for part in cookie_parts[1:]:
        if '=' in part:
            attr, value = part.split('=', 1)
            value = value.strip()
        else:
            attr = part
            value = ''

        attr = attr.strip().lower()
        if not attr:
            continue

        if attr in valid_attrs:
            if attr in ('httponly', 'secure'):
                cookie_dict[attr] = True
            elif attr in 'comment':
                # ignoring comment attr as explained in the
                # function docstring
                continue
            else:
                cookie_dict[attr] = value
        else:
            logger.warning('Unknown cookie attribute %s', attr)

    return cookie_dict
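Note: for reference, the non-strict branch above produces keyword arguments for Django's response.set_cookie, e.g.:

# Example of the parser's output shape (cookie values are illustrative):
# cookie_from_string('sessionid=abc123; Path=/; HttpOnly')
#   -> {'key': 'sessionid', 'value': 'abc123', 'path': '/', 'httponly': True}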
@ -1,5 +0,0 @@
"""Application Security Gateway settings"""
INSTALLED_APPS = [
    'channels'
]
ASGI_APPLICATION = "passbook.app_gw.websocket.routing.application"
@ -1,20 +0,0 @@
"""passbook app_gw cache clean signals"""

from logging import getLogger

from django.core.cache import cache
from django.db.models.signals import post_save
from django.dispatch import receiver

from passbook.app_gw.models import ApplicationGatewayProvider
from passbook.app_gw.proxy.handler import IGNORED_HOSTNAMES_KEY

LOGGER = getLogger(__name__)

@receiver(post_save)
# pylint: disable=unused-argument
def invalidate_app_gw_cache(sender, instance, **kwargs):
    """Invalidate Policy cache when app_gw is updated"""
    if isinstance(instance, ApplicationGatewayProvider):
        LOGGER.debug("Invalidating cache for ignored hostnames")
        cache.delete(IGNORED_HOSTNAMES_KEY)
@ -1,2 +0,0 @@
"""passbook app_gw urls"""
urlpatterns = []
@ -1,83 +0,0 @@
"""websocket proxy consumer"""
import threading
from logging import getLogger
from ssl import CERT_NONE

import websocket
from channels.generic.websocket import WebsocketConsumer

from passbook.app_gw.models import ApplicationGatewayProvider

LOGGER = getLogger(__name__)

class ProxyConsumer(WebsocketConsumer):
    """Proxy websocket connection to upstream"""

    _headers_dict = {}
    _app_gw = None
    _client = None
    _thread = None

    def _fix_headers(self, input_dict):
        """Fix headers from bytestrings to normal strings"""
        return {
            key.decode('utf-8'): value.decode('utf-8')
            for key, value in dict(input_dict).items()
        }

    def connect(self):
        """Extract host header, lookup in database and proxy connection"""
        self._headers_dict = self._fix_headers(dict(self.scope.get('headers')))
        host = self._headers_dict.pop('host')
        query_string = self.scope.get('query_string').decode('utf-8')
        matches = ApplicationGatewayProvider.objects.filter(
            server_name__contains=[host],
            enabled=True)
        if matches.exists():
            self._app_gw = matches.first()
            # TODO: Get upstream that starts with wss or
            upstream = self._app_gw.upstream[0].replace('http', 'ws') + self.scope.get('path')
            if query_string:
                upstream += '?' + query_string
            sslopt = {}
            if not self._app_gw.upstream_ssl_verification:
                sslopt = {"cert_reqs": CERT_NONE}
            self._client = websocket.WebSocketApp(
                url=upstream,
                subprotocols=self.scope.get('subprotocols'),
                header=self._headers_dict,
                on_message=self._client_on_message_handler(),
                on_error=self._client_on_error_handler(),
                on_close=self._client_on_close_handler(),
                on_open=self._client_on_open_handler())
            LOGGER.debug("Accepting connection for %s", host)
            self._thread = threading.Thread(target=lambda: self._client.run_forever(sslopt=sslopt))
            self._thread.start()

    def _client_on_open_handler(self):
        return lambda ws: self.accept(self._client.sock.handshake_response.subprotocol)

    def _client_on_message_handler(self):
        # pylint: disable=unused-argument,invalid-name
        def message_handler(ws, message):
            if isinstance(message, str):
                self.send(text_data=message)
            else:
                self.send(bytes_data=message)
        return message_handler

    def _client_on_error_handler(self):
        return lambda ws, error: print(error)

    def _client_on_close_handler(self):
        return lambda ws: self.disconnect(0)

    def disconnect(self, code):
        self._client.close()

    def receive(self, text_data=None, bytes_data=None):
        if text_data:
            opcode = websocket.ABNF.OPCODE_TEXT
        if bytes_data:
            opcode = websocket.ABNF.OPCODE_BINARY
        self._client.send(text_data or bytes_data, opcode)
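Note: the upstream URL handed to websocket.WebSocketApp in connect() above is derived by a plain string replace, for example:

# Illustrative upstream and request path, not taken from a real provider:
upstream = 'http://intranet.local'.replace('http', 'ws') + '/ws/updates'
# -> 'ws://intranet.local/ws/updates'; an https:// upstream becomes wss:// the same way.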
@ -1,17 +0,0 @@
"""app_gw websocket proxy"""
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
from django.conf.urls import url

from passbook.app_gw.websocket.consumer import ProxyConsumer

websocket_urlpatterns = [
    url(r'^(.*)$', ProxyConsumer),
]

application = ProtocolTypeRouter({
    # (http->django views is added by default)
    'websocket': AuthMiddlewareStack(
        URLRouter(websocket_urlpatterns)
    ),
})
@ -1,7 +1,8 @@
# Generated by Django 2.1.7 on 2019-02-16 09:13
# Generated by Django 2.2.6 on 2019-10-07 14:07

import uuid

import django.contrib.postgres.fields.jsonb
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
@ -23,7 +24,7 @@ class Migration(migrations.Migration):
                ('action', models.TextField(choices=[('login', 'login'), ('login_failed', 'login_failed'), ('logout', 'logout'), ('authorize_application', 'authorize_application'), ('suspicious_request', 'suspicious_request'), ('sign_up', 'sign_up'), ('password_reset', 'password_reset'), ('invitation_created', 'invitation_created'), ('invitation_used', 'invitation_used')])),
                ('date', models.DateTimeField(auto_now_add=True)),
                ('app', models.TextField()),
                ('_context', models.TextField()),
                ('context', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict)),
                ('request_ip', models.GenericIPAddressField()),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
@ -33,19 +34,4 @@ class Migration(migrations.Migration):
                'verbose_name_plural': 'Audit Entries',
            },
        ),
        migrations.CreateModel(
            name='LoginAttempt',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateField(auto_now_add=True)),
                ('last_updated', models.DateTimeField(auto_now=True)),
                ('target_uid', models.CharField(max_length=254)),
                ('request_ip', models.GenericIPAddressField()),
                ('attempts', models.IntegerField(default=1)),
            ],
        ),
        migrations.AlterUniqueTogether(
            name='loginattempt',
            unique_together={('target_uid', 'request_ip', 'created')},
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 2.1.7 on 2019-02-21 12:01

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_audit', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='loginattempt',
            name='created',
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]
@ -1,23 +0,0 @@
# Generated by Django 2.1.7 on 2019-02-21 12:40

import django.contrib.postgres.fields.jsonb
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_audit', '0002_auto_20190221_1201'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='auditentry',
            name='_context',
        ),
        migrations.AddField(
            model_name='auditentry',
            name='context',
            field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict),
        ),
    ]
@ -1,16 +0,0 @@
# Generated by Django 2.1.7 on 2019-03-08 14:53

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_audit', '0003_auto_20190221_1240'),
    ]

    operations = [
        migrations.DeleteModel(
            name='LoginAttempt',
        ),
    ]
@ -1,6 +1,4 @@
"""passbook audit models"""
from logging import getLogger

from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.contrib.postgres.fields import JSONField
@ -8,10 +6,11 @@ from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import gettext as _
from ipware import get_client_ip
from structlog import get_logger

from passbook.lib.models import UUIDModel

LOGGER = getLogger(__name__)
LOGGER = get_logger()

class AuditEntry(UUIDModel):
    """An individual audit log entry"""
@ -61,7 +60,8 @@ class AuditEntry(UUIDModel):
            # User 255.255.255.255 as fallback if IP cannot be determined
            request_ip=client_ip or '255.255.255.255',
            context=kwargs)
        LOGGER.debug("Logged %s from %s (%s)", action, user, client_ip)
        LOGGER.debug("Created Audit entry", action=action,
                     user=user, from_ip=client_ip, context=kwargs)
        return entry

    def save(self, *args, **kwargs):
@ -1,10 +0,0 @@
"""passbook captcha app"""
from django.apps import AppConfig


class PassbookCaptchaFactorConfig(AppConfig):
    """passbook captcha app"""

    name = 'passbook.captcha_factor'
    label = 'passbook_captcha_factor'
    verbose_name = 'passbook Captcha'
@ -1,2 +0,0 @@
"""passbook core"""
__version__ = '0.2.6-beta'
@ -1,12 +1,6 @@
"""passbook core app config"""
from importlib import import_module
from logging import getLogger

from django.apps import AppConfig

from passbook.lib.config import CONFIG

LOGGER = getLogger(__name__)

class PassbookCoreConfig(AppConfig):
    """passbook core app config"""
@ -15,13 +9,3 @@ class PassbookCoreConfig(AppConfig):
    label = 'passbook_core'
    verbose_name = 'passbook Core'
    mountpoint = ''

    def ready(self):
        import_module('passbook.core.policies')
        factors_to_load = CONFIG.y('passbook.factors', [])
        for factors_to_load in factors_to_load:
            try:
                import_module(factors_to_load)
                LOGGER.info("Loaded %s", factors_to_load)
            except ImportError as exc:
                LOGGER.debug(exc)
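Note: the removed ready() hook above amounts to importing each configured factor module and tolerating failures; the module paths below are illustrative, the real list comes from CONFIG.y('passbook.factors').

from importlib import import_module

for module_path in ['passbook.factors.password', 'passbook.factors.dummy']:  # illustrative names
    try:
        import_module(module_path)
    except ImportError as exc:
        print(exc)  # the removed hook logged and skipped failures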
@ -16,7 +16,7 @@ class ApplicationForm(forms.ModelForm):

        model = Application
        fields = ['name', 'slug', 'launch_url', 'icon_url',
                  'policies', 'provider', 'skip_authorization']
                  'provider', 'policies', 'skip_authorization']
        widgets = {
            'name': forms.TextInput(),
            'launch_url': forms.TextInput(),
@ -1,16 +1,15 @@
"""passbook core authentication forms"""
from logging import getLogger

from django import forms
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.utils.translation import gettext_lazy as _
from structlog import get_logger

from passbook.core.models import User
from passbook.lib.config import CONFIG
from passbook.lib.utils.ui import human_list

LOGGER = getLogger(__name__)
LOGGER = get_logger()

class LoginForm(forms.Form):
    """Allow users to login"""
@ -82,13 +81,3 @@ class SignUpForm(forms.Form):
        if password != password_repeat:
            raise ValidationError(_("Passwords don't match"))
        return self.cleaned_data.get('password_repeat')


class PasswordFactorForm(forms.Form):
    """Password authentication form"""

    password = forms.CharField(widget=forms.PasswordInput(attrs={
        'placeholder': _('Password'),
        'autofocus': 'autofocus',
        'autocomplete': 'current-password'
    }))
@ -26,7 +26,7 @@ class GroupForm(forms.ModelForm):
    class Meta:

        model = Group
        fields = ['name', 'parent', 'members', 'tags']
        fields = ['name', 'parent', 'members', 'attributes']
        widgets = {
            'name': forms.TextInput(),
        }
@ -3,40 +3,8 @@
from django import forms
from django.utils.translation import gettext as _

from passbook.core.models import (DebugPolicy, FieldMatcherPolicy,
                                  GroupMembershipPolicy, PasswordPolicy,
                                  SSOLoginPolicy, WebhookPolicy)

GENERAL_FIELDS = ['name', 'action', 'negate', 'order', 'timeout']

class FieldMatcherPolicyForm(forms.ModelForm):
    """FieldMatcherPolicy Form"""

    class Meta:

        model = FieldMatcherPolicy
        fields = GENERAL_FIELDS + ['user_field', 'match_action', 'value', ]
        widgets = {
            'name': forms.TextInput(),
            'value': forms.TextInput(),
        }


class WebhookPolicyForm(forms.ModelForm):
    """WebhookPolicyForm Form"""

    class Meta:

        model = WebhookPolicy
        fields = GENERAL_FIELDS + ['url', 'method', 'json_body', 'json_headers',
                                   'result_jsonpath', 'result_json_value', ]
        widgets = {
            'name': forms.TextInput(),
            'json_body': forms.TextInput(),
            'json_headers': forms.TextInput(),
            'result_jsonpath': forms.TextInput(),
            'result_json_value': forms.TextInput(),
        }
from passbook.core.models import DebugPolicy
from passbook.policies.forms import GENERAL_FIELDS


class DebugPolicyForm(forms.ModelForm):
@ -52,49 +20,3 @@ class DebugPolicyForm(forms.ModelForm):
        labels = {
            'result': _('Allow user')
        }


class GroupMembershipPolicyForm(forms.ModelForm):
    """GroupMembershipPolicy Form"""

    class Meta:

        model = GroupMembershipPolicy
        fields = GENERAL_FIELDS + ['group', ]
        widgets = {
            'name': forms.TextInput(),
            'order': forms.NumberInput(),
        }

class SSOLoginPolicyForm(forms.ModelForm):
    """Edit SSOLoginPolicy instances"""

    class Meta:

        model = SSOLoginPolicy
        fields = GENERAL_FIELDS
        widgets = {
            'name': forms.TextInput(),
            'order': forms.NumberInput(),
        }

class PasswordPolicyForm(forms.ModelForm):
    """PasswordPolicy Form"""

    class Meta:

        model = PasswordPolicy
        fields = GENERAL_FIELDS + ['amount_uppercase', 'amount_lowercase',
                                   'amount_symbols', 'length_min', 'symbol_charset',
                                   'error_message']
        widgets = {
            'name': forms.TextInput(),
            'symbol_charset': forms.TextInput(),
            'error_message': forms.TextInput(),
        }
        labels = {
            'amount_uppercase': _('Minimum amount of Uppercase Characters'),
            'amount_lowercase': _('Minimum amount of Lowercase Characters'),
            'amount_symbols': _('Minimum amount of Symbols Characters'),
            'length_min': _('Minimum Length'),
        }
@ -1,45 +0,0 @@
"""passbook import_users management command"""
from csv import DictReader
from logging import getLogger

from django.core.management.base import BaseCommand
from django.core.validators import EmailValidator, ValidationError

from passbook.core.models import User

LOGGER = getLogger(__name__)

class Command(BaseCommand):
    """Import users from CSV file"""

    def add_arguments(self, parser):
        # Positional arguments
        parser.add_argument('file', nargs='+', type=str)

    def handle(self, *args, **options):
        """Create Users from CSV file"""
        for file in options.get('file'):
            with open(file, 'r') as _file:
                reader = DictReader(_file)
                for user in reader:
                    LOGGER.debug('User %s', user.get('username'))
                    try:
                        # only import users with valid email addresses
                        if user.get('email'):
                            validator = EmailValidator()
                            validator(user.get('email'))
                        # use combination of username and email to check for existing user
                        if User.objects.filter(
                                username=user.get('username'),
                                email=user.get('email')).exists():
                            LOGGER.debug('User %s exists already, skipping', user.get('username'))
                        # Create user
                        User.objects.create(
                            username=user.get('username'),
                            email=user.get('email'),
                            name=user.get('name'),
                            password=user.get('password'))
                        LOGGER.debug('Created User %s', user.get('username'))
                    except ValidationError as exc:
                        LOGGER.warning('User %s caused %r, skipping', user.get('username'), exc)
                        continue
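Note: the expected input format follows directly from the user.get() keys above; the sample row is illustrative.

# The importer reads rows via csv.DictReader, so the file needs a header row
# naming the columns it looks up: username, email, name, password. For example:
#   username,email,name,password
#   jdoe,jdoe@example.org,Jane Doe,changeme-example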
@ -1,36 +0,0 @@
"""passbook Webserver management command"""

from logging import getLogger

import cherrypy
from django.conf import settings
from django.core.management.base import BaseCommand

from passbook.lib.config import CONFIG
from passbook.root.wsgi import application

LOGGER = getLogger(__name__)


class Command(BaseCommand):
    """Run CherryPy webserver"""

    def handle(self, *args, **options):
        """passbook cherrypy server"""
        cherrypy.config.update(CONFIG.get('web'))
        cherrypy.tree.graft(application, '/')
        # Mount NullObject to serve static files
        cherrypy.tree.mount(None, settings.STATIC_URL, config={
            '/': {
                'tools.staticdir.on': True,
                'tools.staticdir.dir': settings.STATIC_ROOT,
                'tools.expires.on': True,
                'tools.expires.secs': 86400,
                'tools.gzip.on': True,
            }
        })
        cherrypy.engine.start()
        for file in CONFIG.loaded_file:
            cherrypy.engine.autoreload.files.add(file)
            LOGGER.info("Added '%s' to autoreload triggers", file)
        cherrypy.engine.block()
@ -1,23 +0,0 @@
"""passbook Worker management command"""

from logging import getLogger

from django.core.management.base import BaseCommand
from django.utils import autoreload

from passbook.root.celery import CELERY_APP

LOGGER = getLogger(__name__)


class Command(BaseCommand):
    """Run Celery Worker"""

    def handle(self, *args, **options):
        """celery worker"""
        autoreload.run_with_reloader(self.celery_worker)

    def celery_worker(self):
        """Run celery worker within autoreload"""
        autoreload.raise_last_exception()
        CELERY_APP.worker_main(['worker', '--autoscale=10,3', '-E', '-B'])
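Note on the Celery invocation above:

# For reference, the worker_main() flags mean:
#   --autoscale=10,3  scale between 3 and 10 worker processes
#   -E                emit task events (for monitoring)
#   -B                run the beat scheduler inside the same process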
@ -1,21 +1,24 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-16 09:10
|
||||
# Generated by Django 2.2.6 on 2019-10-07 14:06
|
||||
|
||||
import uuid
|
||||
|
||||
import django.contrib.auth.models
|
||||
import django.contrib.auth.validators
|
||||
import django.contrib.postgres.fields.jsonb
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
import passbook.core.models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('auth', '0009_alter_user_last_name_max_length'),
|
||||
('auth', '0011_update_proxy_permissions'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
@ -34,6 +37,8 @@ class Migration(migrations.Migration):
|
||||
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
|
||||
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False)),
|
||||
('name', models.TextField()),
|
||||
('password_change_date', models.DateTimeField(auto_now_add=True)),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'user',
|
||||
@ -44,39 +49,17 @@ class Migration(migrations.Migration):
|
||||
('objects', django.contrib.auth.models.UserManager()),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Group',
|
||||
fields=[
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=80, verbose_name='name')),
|
||||
('extra_data', models.TextField(blank=True)),
|
||||
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='children', to='passbook_core.Group')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Invitation',
|
||||
fields=[
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
('expires', models.DateTimeField(blank=True, default=None, null=True)),
|
||||
('fixed_username', models.TextField(blank=True, default=None)),
|
||||
('fixed_email', models.TextField(blank=True, default=None)),
|
||||
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Invitation',
|
||||
'verbose_name_plural': 'Invitations',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Policy',
|
||||
fields=[
|
||||
('created', models.DateField(auto_now_add=True)),
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('last_updated', models.DateTimeField(auto_now=True)),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
('name', models.TextField(blank=True, null=True)),
|
||||
('action', models.CharField(choices=[('allow', 'allow'), ('deny', 'deny')], max_length=20)),
|
||||
('negate', models.BooleanField(default=False)),
|
||||
('order', models.IntegerField(default=0)),
|
||||
('timeout', models.IntegerField(default=30)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
@ -85,28 +68,136 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='PolicyModel',
|
||||
fields=[
|
||||
('created', models.DateField(auto_now_add=True)),
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('last_updated', models.DateTimeField(auto_now=True)),
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
('policies', models.ManyToManyField(blank=True, to='passbook_core.Policy')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='PropertyMapping',
|
||||
fields=[
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
('name', models.TextField()),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Property Mapping',
|
||||
'verbose_name_plural': 'Property Mappings',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='DebugPolicy',
|
||||
fields=[
|
||||
('policy_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.Policy')),
|
||||
('result', models.BooleanField(default=False)),
|
||||
('wait_min', models.IntegerField(default=5)),
|
||||
('wait_max', models.IntegerField(default=30)),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Debug Policy',
|
||||
'verbose_name_plural': 'Debug Policies',
|
||||
},
|
||||
bases=('passbook_core.policy',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Factor',
|
||||
fields=[
|
||||
('policymodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.PolicyModel')),
|
||||
('name', models.TextField()),
|
||||
('slug', models.SlugField(unique=True)),
|
||||
('order', models.IntegerField()),
|
||||
('enabled', models.BooleanField(default=True)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
bases=('passbook_core.policymodel',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Source',
|
||||
fields=[
|
||||
('policymodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.PolicyModel')),
|
||||
('name', models.TextField()),
|
||||
('slug', models.SlugField()),
|
||||
('enabled', models.BooleanField(default=True)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
bases=('passbook_core.policymodel',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Provider',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('property_mappings', models.ManyToManyField(blank=True, default=None, to='passbook_core.PropertyMapping')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Nonce',
|
||||
fields=[
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
('expires', models.DateTimeField(default=passbook.core.models.default_nonce_duration)),
|
||||
('expiring', models.BooleanField(default=True)),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Nonce',
|
||||
'verbose_name_plural': 'Nonces',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Invitation',
|
||||
fields=[
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
('expires', models.DateTimeField(blank=True, default=None, null=True)),
|
||||
('fixed_username', models.TextField(blank=True, default=None)),
|
||||
('fixed_email', models.TextField(blank=True, default=None)),
|
||||
('needs_confirmation', models.BooleanField(default=True)),
|
||||
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Invitation',
|
||||
'verbose_name_plural': 'Invitations',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Group',
|
||||
fields=[
|
||||
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=80, verbose_name='name')),
|
||||
('tags', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict)),
|
||||
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='children', to='passbook_core.Group')),
|
||||
],
|
||||
options={
|
||||
'unique_together': {('name', 'parent')},
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='groups',
|
||||
field=models.ManyToManyField(to='passbook_core.Group'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='user_permissions',
|
||||
field=models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='UserSourceConnection',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created', models.DateField(auto_now_add=True)),
|
||||
('created', models.DateTimeField(auto_now_add=True)),
|
||||
('last_updated', models.DateTimeField(auto_now=True)),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||
('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='passbook_core.Source')),
|
||||
],
|
||||
options={
|
||||
'unique_together': {('user', 'source')},
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Application',
|
||||
@ -124,131 +215,9 @@ class Migration(migrations.Migration):
|
||||
},
|
||||
bases=('passbook_core.policymodel',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='DebugPolicy',
|
||||
fields=[
|
||||
('policy_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.Policy')),
|
||||
('result', models.BooleanField(default=False)),
|
||||
('wait_min', models.IntegerField(default=5)),
|
||||
('wait_max', models.IntegerField(default=30)),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Debug Policy',
|
||||
'verbose_name_plural': 'Debug Policys',
|
||||
},
|
||||
bases=('passbook_core.policy',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Factor',
|
||||
fields=[
|
||||
('policymodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.PolicyModel')),
|
||||
('name', models.TextField()),
|
||||
('slug', models.SlugField(unique=True)),
|
||||
('order', models.IntegerField()),
|
||||
('type', models.TextField(unique=True)),
|
||||
('enabled', models.BooleanField(default=True)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
bases=('passbook_core.policymodel',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='FieldMatcherPolicy',
|
||||
fields=[
|
||||
('policy_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.Policy')),
|
||||
('user_field', models.TextField(choices=[('username', 'Username'), ('first_name', 'First Name'), ('last_name', 'Last Name'), ('email', 'E-Mail'), ('is_staff', 'Is staff'), ('is_active', 'Is active'), ('data_joined', 'Date joined')])),
|
||||
('match_action', models.CharField(choices=[('startswith', 'Starts with'), ('endswith', 'Ends with'), ('endswith', 'Contains'), ('regexp', 'Regexp'), ('exact', 'Exact')], max_length=50)),
|
||||
('value', models.TextField()),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Field matcher Policy',
|
||||
'verbose_name_plural': 'Field matcher Policys',
|
||||
},
|
||||
bases=('passbook_core.policy',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='PasswordPolicyPolicy',
|
||||
fields=[
|
||||
('policy_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.Policy')),
|
||||
('amount_uppercase', models.IntegerField(default=0)),
|
||||
('amount_lowercase', models.IntegerField(default=0)),
|
||||
('amount_symbols', models.IntegerField(default=0)),
|
||||
('length_min', models.IntegerField(default=0)),
|
||||
('symbol_charset', models.TextField(default='!\\"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~ ')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Password Policy Policy',
|
||||
'verbose_name_plural': 'Password Policy Policys',
|
||||
},
|
||||
bases=('passbook_core.policy',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Source',
|
||||
fields=[
|
||||
('policymodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.PolicyModel')),
|
||||
('name', models.TextField()),
|
||||
('slug', models.SlugField()),
|
||||
('enabled', models.BooleanField(default=True)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
bases=('passbook_core.policymodel',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='WebhookPolicy',
|
||||
fields=[
|
||||
('policy_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.Policy')),
|
||||
('url', models.URLField()),
|
||||
('method', models.CharField(choices=[('GET', 'GET'), ('POST', 'POST'), ('PATCH', 'PATCH'), ('DELETE', 'DELETE'), ('PUT', 'PUT')], max_length=10)),
|
||||
('json_body', models.TextField()),
|
||||
('json_headers', models.TextField()),
|
||||
('result_jsonpath', models.TextField()),
|
||||
('result_json_value', models.TextField()),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Webhook Policy',
|
||||
'verbose_name_plural': 'Webhook Policys',
|
||||
},
|
||||
bases=('passbook_core.policy',),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='policymodel',
|
||||
name='policies',
|
||||
field=models.ManyToManyField(blank=True, to='passbook_core.Policy'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='groups',
|
||||
field=models.ManyToManyField(to='passbook_core.Group'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='user_permissions',
|
||||
field=models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='usersourceconnection',
|
||||
name='source',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='passbook_core.Source'),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='group',
|
||||
unique_together={('name', 'parent')},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='applications',
|
||||
field=models.ManyToManyField(to='passbook_core.Application'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='sources',
|
||||
field=models.ManyToManyField(through='passbook_core.UserSourceConnection', to='passbook_core.Source'),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='usersourceconnection',
|
||||
unique_together={('user', 'source')},
|
||||
),
|
||||
]
|
||||
|
@ -1,29 +0,0 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-16 10:02
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='debugpolicy',
|
||||
options={'verbose_name': 'Debug Policy', 'verbose_name_plural': 'Debug Policies'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='fieldmatcherpolicy',
|
||||
options={'verbose_name': 'Field matcher Policy', 'verbose_name_plural': 'Field matcher Policies'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='passwordpolicypolicy',
|
||||
options={'verbose_name': 'Password Policy Policy', 'verbose_name_plural': 'Password Policy Policies'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='webhookpolicy',
|
||||
options={'verbose_name': 'Webhook Policy', 'verbose_name_plural': 'Webhook Policies'},
|
||||
),
|
||||
]
|
@ -1,4 +1,4 @@
|
||||
# Generated by Django 2.2 on 2019-04-18 09:09
|
||||
# Generated by Django 2.2.6 on 2019-10-10 11:48
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
@ -6,13 +6,13 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_saml_idp', '0002_samlpropertymapping'),
|
||||
('passbook_core', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='samlprovider',
|
||||
name='audience',
|
||||
model_name='nonce',
|
||||
name='description',
|
||||
field=models.TextField(blank=True, default=''),
|
||||
),
|
||||
]
|
@ -1,17 +0,0 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-16 10:04
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0002_auto_20190216_1002'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameModel(
|
||||
old_name='PasswordPolicyPolicy',
|
||||
new_name='PasswordPolicy',
|
||||
),
|
||||
]
|
29
passbook/core/migrations/0003_auto_20191011_0914.py
Normal file
29
passbook/core/migrations/0003_auto_20191011_0914.py
Normal file
@ -0,0 +1,29 @@
|
||||
# Generated by Django 2.2.6 on 2019-10-11 09:14
|
||||
|
||||
import django.contrib.postgres.fields.jsonb
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0002_nonce_description'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='group',
|
||||
old_name='tags',
|
||||
new_name='attributes',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='source',
|
||||
name='property_mappings',
|
||||
field=models.ManyToManyField(blank=True, default=None, to='passbook_core.PropertyMapping'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='attributes',
|
||||
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict),
|
||||
),
|
||||
]
|
@ -1,17 +0,0 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-16 10:13
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0003_auto_20190216_1004'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='passwordpolicy',
|
||||
options={'verbose_name': 'Password Policy', 'verbose_name_plural': 'Password Policies'},
|
||||
),
|
||||
]
|
@ -1,28 +0,0 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-21 12:01
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0004_auto_20190216_1013'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='policy',
|
||||
name='created',
|
||||
field=models.DateTimeField(auto_now_add=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='policymodel',
|
||||
name='created',
|
||||
field=models.DateTimeField(auto_now_add=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='usersourceconnection',
|
||||
name='created',
|
||||
field=models.DateTimeField(auto_now_add=True),
|
||||
),
|
||||
]
|
@ -1,19 +0,0 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-21 12:32
|
||||
|
||||
import django.contrib.postgres.fields.jsonb
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0005_auto_20190221_1201'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='factor',
|
||||
name='arguments',
|
||||
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
|
||||
),
|
||||
]
|
@ -1,19 +0,0 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-21 12:33
|
||||
|
||||
import django.contrib.postgres.fields.jsonb
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0006_factor_arguments'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='factor',
|
||||
name='arguments',
|
||||
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict),
|
||||
),
|
||||
]
|
@ -1,18 +0,0 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-21 15:16
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0007_auto_20190221_1233'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='fieldmatcherpolicy',
|
||||
name='match_action',
|
||||
field=models.CharField(choices=[('startswith', 'Starts with'), ('endswith', 'Ends with'), ('contains', 'Contains'), ('regexp', 'Regexp'), ('exact', 'Exact')], max_length=50),
|
||||
),
|
||||
]
|
@ -1,44 +0,0 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-24 09:50
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0008_auto_20190221_1516'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='DummyFactor',
|
||||
fields=[
|
||||
('factor_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.Factor')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
bases=('passbook_core.factor',),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='PasswordFactor',
|
||||
fields=[
|
||||
('factor_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='passbook_core.Factor')),
|
||||
('backends', django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), size=None)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
bases=('passbook_core.factor',),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='factor',
|
||||
name='arguments',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='factor',
|
||||
name='type',
|
||||
),
|
||||
]
|
@ -1,21 +0,0 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-24 10:16
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0009_auto_20190224_0950'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='dummyfactor',
|
||||
options={'verbose_name': 'Dummy Factor', 'verbose_name_plural': 'Dummy Factors'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='passwordfactor',
|
||||
options={'verbose_name': 'Password Factor', 'verbose_name_plural': 'Password Factors'},
|
||||
),
|
||||
]
|
@ -1,25 +0,0 @@
|
||||
# Generated by Django 2.1.7 on 2019-02-25 14:38
|
||||
|
||||
import django.utils.timezone
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('passbook_core', '0010_auto_20190224_1016'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='passwordfactor',
|
||||
name='password_policies',
|
||||
field=models.ManyToManyField(blank=True, to='passbook_core.Policy'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='password_change_date',
|
||||
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
|
||||
preserve_default=False,
|
||||
),
|
||||
]
|
@ -1,31 +0,0 @@
# Generated by Django 2.1.7 on 2019-02-25 19:12

import uuid

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models

import passbook.core.models


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_core', '0011_auto_20190225_1438'),
    ]

    operations = [
        migrations.CreateModel(
            name='Nonce',
            fields=[
                ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('expires', models.DateTimeField(default=passbook.core.models.default_nonce_duration)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Nonce',
                'verbose_name_plural': 'Nonces',
            },
        ),
    ]
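The expires default references passbook.core.models.default_nonce_duration, which has to be a module-level callable so the migration can serialize it. Its implementation is not part of this diff; a hypothetical version might look like:

# Hypothetical sketch of default_nonce_duration; the actual duration used by
# passbook may differ.
from datetime import timedelta

from django.utils import timezone


def default_nonce_duration():
    """Return the default expiry timestamp for a new Nonce."""
    return timezone.now() + timedelta(minutes=15)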
@ -1,18 +0,0 @@
# Generated by Django 2.1.7 on 2019-02-25 19:57

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_core', '0012_nonce'),
    ]

    operations = [
        migrations.AddField(
            model_name='invitation',
            name='needs_confirmation',
            field=models.BooleanField(default=True),
        ),
    ]
@ -1,19 +0,0 @@
# Generated by Django 2.1.7 on 2019-02-26 14:28

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_core', '0014_auto_20190226_0850'),
    ]

    operations = [
        migrations.AddField(
            model_name='passwordpolicy',
            name='error_message',
            field=models.TextField(default=''),
            preserve_default=False,
        ),
    ]
@ -1,38 +0,0 @@
# Generated by Django 2.1.7 on 2019-02-27 13:55

from django.db import migrations, models


def migrate_names(apps, schema_editor):
    """migrate first_name and last_name to name"""
    User = apps.get_model("passbook_core", "User")
    for user in User.objects.all():
        user.name = '%s %s' % (user.first_name, user.last_name)
        user.save()


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_core', '0015_passwordpolicy_error_message'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='name',
            field=models.TextField(default=''),
            preserve_default=False,
        ),
        migrations.RunPython(migrate_names),
        migrations.AlterField(
            model_name='user',
            name='name',
            field=models.TextField(),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='fieldmatcherpolicy',
            name='user_field',
            field=models.TextField(choices=[('username', 'Username'), ('name', 'Name'), ('email', 'E-Mail'), ('is_staff', 'Is staff'), ('is_active', 'Is active'), ('data_joined', 'Date joined')]),
        ),
    ]
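The migrate_names data migration above only defines a forward path, so once applied it cannot be rolled back. A sketch of the same RunPython step with a no-op reverse function (an illustration, not part of the deleted file):

# Sketch: a reversible variant of the RunPython step above. apps.get_model()
# returns the historical model state, which is why it is used instead of
# importing the User model directly.
from django.db import migrations


def migrate_names(apps, schema_editor):
    User = apps.get_model("passbook_core", "User")
    for user in User.objects.all():
        user.name = '%s %s' % (user.first_name, user.last_name)
        user.save()


class Migration(migrations.Migration):
    dependencies = [('passbook_core', '0015_passwordpolicy_error_message')]
    operations = [
        migrations.RunPython(migrate_names, migrations.RunPython.noop),
    ]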
@ -1,26 +0,0 @@
# Generated by Django 2.1.7 on 2019-03-08 10:40

import uuid

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('passbook_core', '0016_auto_20190227_1355'),
    ]

    operations = [
        migrations.CreateModel(
            name='PropertyMapping',
            fields=[
                ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('name', models.TextField()),
            ],
            options={
                'verbose_name': 'Property Mapping',
                'verbose_name_plural': 'Property Mappings',
            },
        ),
    ]
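For reference, a model matching the PropertyMapping migration above would be roughly as follows (field names taken from the migration; the real model is likely extended in later revisions):

# Sketch of a model corresponding to the CreateModel operation above.
import uuid

from django.db import models


class PropertyMapping(models.Model):
    uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    name = models.TextField()

    class Meta:
        verbose_name = 'Property Mapping'
        verbose_name_plural = 'Property Mappings'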
Some files were not shown because too many files have changed in this diff.