merged in main and resolved conflicts w/ burnettk cullerton

commit b54378a9fc

@ -0,0 +1,117 @@
name: Release Builds

on:
  push:
    tags: [ v* ]

jobs:
  create_frontend_docker_container:
    runs-on: ubuntu-latest
    env:
      REGISTRY: ghcr.io
      IMAGE_NAME: sartography/spiffworkflow-frontend
    permissions:
      contents: read
      packages: write
    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
          fetch-depth: 0
      - name: Log in to the Container registry
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
        with:
          context: spiffworkflow-frontend
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Build and push Frontend Docker image
        uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
        with:
          # this action doesn't seem to respect working-directory so set context
          context: spiffworkflow-frontend
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

  create_backend_docker_container:
    runs-on: ubuntu-latest
    env:
      REGISTRY: ghcr.io
      IMAGE_NAME: sartography/spiffworkflow-backend
    permissions:
      contents: read
      packages: write
    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
          fetch-depth: 0
      - name: Log in to the Container registry
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Build and push Backend Docker image
        uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
        with:
          # this action doesn't seem to respect working-directory so set context
          context: spiffworkflow-backend
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

  # Is this getting updated, I wonder?
  create_demo-proxy:
    runs-on: ubuntu-latest
    env:
      REGISTRY: ghcr.io
      IMAGE_NAME: sartography/connector-proxy-demo

    permissions:
      contents: read
      packages: write
    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
          fetch-depth: 0
      - name: Log in to the Container registry
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
        with:
          context: connector-proxy-demo
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Build and push the connector proxy
        uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
        with:
          # this action doesn't seem to respect working-directory so set context
          context: connector-proxy-demo
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
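With this workflow in place, a release build of all three images starts when a tag matching the v* filter above is pushed; a minimal sketch, where the tag name is only an example:

    git tag v0.1.0
    git push origin v0.1.0
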
@ -0,0 +1,27 @@
FROM ghcr.io/sartography/python:3.11

RUN pip install poetry
RUN useradd _gunicorn --no-create-home --user-group

RUN apt-get update && \
  apt-get install -y -q \
  gcc libssl-dev \
  curl gunicorn3

WORKDIR /app
COPY pyproject.toml poetry.lock /app/
RUN poetry install --without dev

RUN set -xe \
  && apt-get remove -y gcc python3-dev libssl-dev \
  && apt-get autoremove -y \
  && apt-get clean -y \
  && rm -rf /var/lib/apt/lists/*

COPY . /app/

# run poetry install again AFTER copying the app into the image
# otherwise it does not know what the main app module is
RUN poetry install --without dev

CMD ./bin/boot_server_in_docker
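As a sketch of how this image might be built locally, assuming the Dockerfile sits in the connector-proxy-demo directory that the workflow above uses as its build context:

    docker build -t connector-proxy-demo connector-proxy-demo/
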
@ -0,0 +1,19 @@
#!/usr/bin/env bash

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

port="${CONNECTOR_PROXY_PORT:-}"
if [[ -z "$port" ]]; then
  port=7004
fi

workers=3

# THIS MUST BE THE LAST COMMAND!
# default --limit-request-line is 4094. see https://stackoverflow.com/a/66688382/6090676
exec poetry run gunicorn --bind "0.0.0.0:$port" --workers="$workers" --limit-request-line 8192 --timeout 90 --capture-output --access-logfile '-' --log-level debug app:app
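The gunicorn port defaults to 7004 but can be overridden through CONNECTOR_PROXY_PORT; a minimal sketch, assuming the script is saved as bin/boot_server_in_docker as the Dockerfile's CMD suggests:

    CONNECTOR_PROXY_PORT=7005 ./bin/boot_server_in_docker
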
@ -55,7 +55,7 @@ optional = false
python-versions = ">=3.6.0"

[package.extras]
-unicode_backport = ["unicodedata2"]
+unicode-backport = ["unicodedata2"]

[[package]]
name = "click"
@ -127,6 +127,23 @@ Flask = "*"
oauthlib = ">=1.1.2,<2.0.3 || >2.0.3,<2.0.4 || >2.0.4,<2.0.5 || >2.0.5,<3.0.0"
requests-oauthlib = ">=0.6.2,<1.2.0"

+[[package]]
+name = "gunicorn"
+version = "20.1.0"
+description = "WSGI HTTP Server for UNIX"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+
+[package.dependencies]
+setuptools = ">=3.0"
+
+[package.extras]
+eventlet = ["eventlet (>=0.24.1)"]
+gevent = ["gevent (>=1.4.0)"]
+setproctitle = ["setproctitle"]
+tornado = ["tornado (>=0.2)"]
+
[[package]]
name = "idna"
version = "3.4"
@ -214,7 +231,7 @@ urllib3 = ">=1.21.1,<1.27"

[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "requests-oauthlib"
@ -245,6 +262,19 @@ botocore = ">=1.12.36,<2.0a.0"
[package.extras]
crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]

+[[package]]
+name = "setuptools"
+version = "65.6.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
[[package]]
name = "simplejson"
version = "3.17.6"
@ -310,7 +340,7 @@ watchdog = ["watchdog"]
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
-content-hash = "86cf682d49dc495c8cf6dc60a8aedc31ad32a293e6ceaf7b1428e0c232f8319e"
+content-hash = "cc395c0c1ce2b0b7ca063a17617981b2d55db39802265b36f0bc3c4383c89919"

[metadata.files]
boto3 = [
@ -350,6 +380,10 @@ Flask-OAuthlib = [
    {file = "Flask-OAuthlib-0.9.6.tar.gz", hash = "sha256:5bb79c8a8e670c2eb4cb553dfc3283b6c8d1202f674934676dc173cee94fe39c"},
    {file = "Flask_OAuthlib-0.9.6-py3-none-any.whl", hash = "sha256:a5c3b62959aa1922470a62b6ebf4273b75f1c29561a7eb4a69cde85d45a1d669"},
]
+gunicorn = [
+    {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"},
+    {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"},
+]
idna = [
    {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
    {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
@ -428,6 +462,10 @@ s3transfer = [
    {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"},
    {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"},
]
+setuptools = [
+    {file = "setuptools-65.6.0-py3-none-any.whl", hash = "sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840"},
+    {file = "setuptools-65.6.0.tar.gz", hash = "sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d"},
+]
simplejson = [
    {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"},
    {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"},
@ -5,14 +5,14 @@ description = "An example showing how to use the Spiffworkflow-proxy's Flask Blu
authors = ["Dan <dan@sartography.com>"]
license = "LGPL"
readme = "README.md"
-packages = [{include = "connector_proxy_demo", from = "src"}]
+#packages = [{include = "connector_proxy_demo", from = "."}]

[tool.poetry.dependencies]
python = "^3.10"
Flask = "^2.2.2"
spiffworkflow-proxy = {git = "https://github.com/sartography/spiffworkflow-proxy"}
connector-aws = { git = "https://github.com/sartography/connector-aws.git"}
+gunicorn = "^20.1.0"

[build-system]
requires = ["poetry-core"]

@ -20,5 +20,5 @@ build-backend = "poetry.core.masonry.api"

[tool.pytest.ini_options]
pythonpath = [
-  ".", "src",
+  "."
]
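The gunicorn dependency added above, and the matching poetry.lock entries earlier in this diff, would typically be produced by letting Poetry resolve the dependency rather than by editing the lock file by hand; a minimal sketch, assuming Poetry is installed:

    poetry add gunicorn@^20.1.0
    # or, after editing pyproject.toml manually:
    poetry lock
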
@ -0,0 +1,83 @@
version: "3.8"
services:
  spiffworkflow-db:
    container_name: spiffworkflow-db
    image: mysql:8.0.29
    platform: linux/amd64
    cap_add:
      - SYS_NICE
    restart: "no"
    environment:
      - MYSQL_DATABASE=spiffworkflow_backend_development
      - MYSQL_ROOT_PASSWORD=my-secret-pw
      - MYSQL_TCP_PORT=7003
    ports:
      - "7003"
    healthcheck:
      test: mysql --user=root --password=my-secret-pw -e 'select 1' spiffworkflow_backend_development
      interval: 10s
      timeout: 5s
      retries: 10

  spiffworkflow-backend:
    container_name: spiffworkflow-backend
    image: ghcr.io/sartography/spiffworkflow-backend:latest
    depends_on:
      spiffworkflow-db:
        condition: service_healthy
    environment:
      - APPLICATION_ROOT=/
      - SPIFFWORKFLOW_BACKEND_ENV=development
      - FLASK_DEBUG=0
      - FLASK_SESSION_SECRET_KEY=super_secret_key
      - OPEN_ID_SERVER_URL=http://localhost:7000/openid
      - SPIFFWORKFLOW_FRONTEND_URL=http://localhost:7001
      - SPIFFWORKFLOW_BACKEND_URL=http://localhost:7000
      - SPIFFWORKFLOW_BACKEND_PORT=7000
      - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true
      - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:7003/spiffworkflow_backend_development
      - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
      - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=false
      - SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=example.yml
      - RUN_BACKGROUND_SCHEDULER=true
      - OPEN_ID_CLIENT_ID=spiffworkflow-backend
      - OPEN_ID_CLIENT_SECRET_KEY=my_open_id_secret_key
    ports:
      - "7000:7000"
    volumes:
      - ./process_models:/app/process_models
      - ./log:/app/log
    healthcheck:
      test: curl localhost:7000/v1.0/status --fail
      interval: 10s
      timeout: 5s
      retries: 20

  spiffworkflow-frontend:
    container_name: spiffworkflow-frontend
    image: ghcr.io/sartography/spiffworkflow-frontend
    environment:
      - APPLICATION_ROOT=/
      - PORT0=7001
    ports:
      - "7001:7001"

  spiffworkflow-connector:
    container_name: spiffworkflow-connector
    image: ghcr.io/sartography/connector-proxy-demo
    environment:
      - FLASK_ENV=${FLASK_ENV:-development}
      - FLASK_DEBUG=0
      - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
    ports:
      - "7004:7004"
    healthcheck:
      test: curl localhost:7004/liveness --fail
      interval: 10s
      timeout: 5s
      retries: 20

volumes:
  spiffworkflow_backend:
    driver: local
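A minimal sketch of exercising this compose file locally, assuming it is saved as docker-compose.yml in the current directory; the two curl commands are the same checks the healthchecks above run:

    docker compose up -d
    curl localhost:7000/v1.0/status --fail
    curl localhost:7004/liveness --fail
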
@ -64,7 +64,6 @@ sphinx-click = "^4.3.0"
Pygments = "^2.13.0"
pyupgrade = "^3.2.2"
furo = ">=2021.11.12"
-MonkeyType = "^22.2.0"

[tool.poetry.scripts]
flask-bpmn = "flask_bpmn.__main__:main"
@ -614,7 +614,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
-resolved_reference = "5e40777f4013f71f2c1237f13f7dba1bdd5c0de3"
+resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4"

[[package]]
name = "flask-cors"
@ -884,22 +884,6 @@ category = "main"
optional = false
python-versions = ">=3.7"

-[[package]]
-name = "libcst"
-version = "0.4.7"
-description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-pyyaml = ">=5.2"
-typing-extensions = ">=3.7.4.2"
-typing-inspect = ">=0.4.0"
-
-[package.extras]
-dev = ["black (==22.3.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.0.3)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.9)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.9)", "setuptools-rust (>=0.12.1)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==1.3)", "usort (==1.0.0rc1)"]
-
[[package]]
name = "livereload"
version = "2.6.3"
@ -1005,18 +989,6 @@ category = "dev"
optional = false
python-versions = "*"

-[[package]]
-name = "monkeytype"
-version = "22.2.0"
-description = "Generating type annotations from sampled production types"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-libcst = ">=0.3.7"
-mypy-extensions = "*"
-
[[package]]
name = "mypy"
version = "0.982"
@ -1788,7 +1760,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
-resolved_reference = "580939cc8cb0b7ade1571483bd1e28f554434ac4"
+resolved_reference = "bba7ddf5478af579b891ca63c50babbfccf6b7a4"

[[package]]
name = "sqlalchemy"
@ -1998,18 +1970,6 @@ category = "main"
optional = false
python-versions = ">=3.7"

-[[package]]
-name = "typing-inspect"
-version = "0.8.0"
-description = "Runtime inspection utilities for typing module."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-mypy-extensions = ">=0.3.0"
-typing-extensions = ">=3.7.4"
-
[[package]]
name = "tzdata"
version = "2022.5"
@ -2151,7 +2111,7 @@ tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "p
[metadata]
lock-version = "1.1"
python-versions = ">=3.11,<3.12"
-content-hash = "8c37333988fdd68bc6868faf474e628a690582acd17ee3b31b18e005a864fecf"
+content-hash = "17e037a3784758eb23a5ed9889fd774913ebde97225692dcd9df159f03da8a22"

[metadata.files]
alabaster = [
@ -2484,6 +2444,7 @@ greenlet = [
    {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
    {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
    {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
+    {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"},
    {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"},
    {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"},
    {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"},

@ -2492,6 +2453,7 @@ greenlet = [
    {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"},
    {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"},
    {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"},
+    {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"},
    {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"},
    {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"},
    {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"},

@ -2500,6 +2462,7 @@ greenlet = [
    {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"},
    {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"},
    {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"},
+    {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"},
    {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"},
    {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"},
    {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"},
@ -2566,32 +2529,6 @@ lazy-object-proxy = [
    {file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"},
    {file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"},
]
-libcst = [
-    {file = "libcst-0.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dc6f8965b6ca68d47e11321772887d81fa6fd8ea86e6ef87434ca2147de10747"},
-    {file = "libcst-0.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f47d809df59fcd83058b777b86a300154ee3a1f1b0523a398a67b5f8affd4c"},
-    {file = "libcst-0.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0d19de56aa733b4ef024527e3ce4896d4b0e9806889797f409ec24caa651a44"},
-    {file = "libcst-0.4.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31da97bc986dc3f7a97f7d431fa911932aaf716d2f8bcda947fc964afd3b57cd"},
-    {file = "libcst-0.4.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71b2e2c5e33e53669c20de0853cecfac1ffb8657ee727ab8527140f39049b820"},
-    {file = "libcst-0.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:76fae68bd6b7ce069e267b3322c806b4305341cea78d161ae40e0ed641c8c660"},
-    {file = "libcst-0.4.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bac76d69980bb3254f503f52128c256ef4d1bcbaabe4a17c3a9ebcd1fc0472c0"},
-    {file = "libcst-0.4.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f86535271eaefe84a99736875566a038449f92e1a2a61ea0b588d8359fbefd"},
-    {file = "libcst-0.4.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:617f7fa2610a8c86cf22d8d03416f25391383d05bd0ad1ca8ef68023ddd6b4f6"},
-    {file = "libcst-0.4.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3637fffe476c5b4ee2225c6474b83382518f2c1b2fe4771039e06bdd7835a4a"},
-    {file = "libcst-0.4.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f56565124c2541adee0634e411b2126b3f335306d19e91ed2bfe52efa698b219"},
-    {file = "libcst-0.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0ca2771ff3cfdf1f148349f89fcae64afa365213ed5c2703a69a89319325d0c8"},
-    {file = "libcst-0.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa438131b7befc7e5a3cbadb5a7b1506305de5d62262ea0556add0152f40925e"},
-    {file = "libcst-0.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6bd66a8be2ffad7b968d90dae86c62fd4739c0e011d71f3e76544a891ae743"},
-    {file = "libcst-0.4.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:214a9c4f4f90cd5b4bfa18e17877da4dd9a896821d9af9be86fa3effdc289b9b"},
-    {file = "libcst-0.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a37f2b459a8b51a41e260bd89c24ae41ab1d658f610c91650c79b1bbf27138"},
-    {file = "libcst-0.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:2f6766391d90472f036b88a95251c87d498ab068c377724f212ab0cc20509a68"},
-    {file = "libcst-0.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:234293aa8681a3d47fef1716c5622797a81cbe85a9381fe023815468cfe20eed"},
-    {file = "libcst-0.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fa618dc359663a0a097c633452b104c1ca93365da7a811e655c6944f6b323239"},
-    {file = "libcst-0.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3569d9901c18940632414fb7a0943bffd326db9f726a9c041664926820857815"},
-    {file = "libcst-0.4.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beb5347e46b419f782589da060e9300957e71d561aa5574309883b71f93c1dfe"},
-    {file = "libcst-0.4.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e541ccfeebda1ae5f005fc120a5bf3e8ac9ccfda405ec3efd3df54fc4688ac3"},
-    {file = "libcst-0.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:3a2b7253cd2e3f0f8a3e23b5c2acb492811d865ef36e0816091c925f32b713d2"},
-    {file = "libcst-0.4.7.tar.gz", hash = "sha256:95c52c2130531f6e726a3b077442cfd486975435fecf3db8224d43fba7b85099"},
-]
livereload = [
    {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
]
@ -2729,10 +2666,6 @@ mccabe = [
    {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
    {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
-monkeytype = [
-    {file = "MonkeyType-22.2.0-py3-none-any.whl", hash = "sha256:3d0815c7e98a18e9267990a452548247f6775fd636e65df5a7d77100ea7ad282"},
-    {file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"},
-]
mypy = [
    {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"},
    {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"},
@ -3336,10 +3269,6 @@ typing-extensions = [
    {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
    {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
]
-typing-inspect = [
-    {file = "typing_inspect-0.8.0-py3-none-any.whl", hash = "sha256:5fbf9c1e65d4fa01e701fe12a5bca6c6e08a4ffd5bc60bfac028253a447c5188"},
-    {file = "typing_inspect-0.8.0.tar.gz", hash = "sha256:8b1ff0c400943b6145df8119c41c244ca8207f1f10c9c057aeed1560e4806e3d"},
-]
tzdata = [
    {file = "tzdata-2022.5-py2.py3-none-any.whl", hash = "sha256:323161b22b7802fdc78f20ca5f6073639c64f1a7227c40cd3e19fd1d0ce6650a"},
    {file = "tzdata-2022.5.tar.gz", hash = "sha256:e15b2b3005e2546108af42a0eb4ccab4d9e225e2dfbf4f77aad50c70a4b1f3ab"},
@ -99,7 +99,6 @@ sphinx-click = "^4.3.0"
Pygments = "^2.10.0"
pyupgrade = "^3.1.0"
furo = ">=2021.11.12"
-MonkeyType = "^22.2.0"

[tool.poetry.scripts]
spiffworkflow-backend = "spiffworkflow_backend.__main__:main"
@ -1251,12 +1251,17 @@
}, {
"id" : "f44558af-3601-4e54-b854-08396a247544",
"clientId" : "spiffworkflow-backend",
+"name" : "",
+"description" : "",
+"rootUrl" : "",
+"adminUrl" : "",
+"baseUrl" : "",
"surrogateAuthRequired" : false,
"enabled" : true,
"alwaysDisplayInConsole" : false,
"clientAuthenticatorType" : "client-secret",
"secret" : "JXeQExm0JhQPLumgHtIIqf52bDalHz0q",
-"redirectUris" : [ "http://localhost:7000/*", "https://api.unused-for-local-dev.spiffworkflow.org/*", "http://67.205.133.116:7000/*", "http://167.172.242.138:7000/*", "https://api.demo.spiffworkflow.org/*" ],
+"redirectUris" : [ "http://localhost:7000/*", "https://api.unused-for-local-dev.spiffworkflow.org/*", "https://api.replace-me-with-spiff-subdomain.spiffworkflow.org/*", "http://67.205.133.116:7000/*", "http://167.172.242.138:7000/*" ],
"webOrigins" : [ ],
"notBefore" : 0,
"bearerOnly" : false,
@ -1273,7 +1278,7 @@
"saml.force.post.binding" : "false",
"saml.multivalued.roles" : "false",
"frontchannel.logout.session.required" : "false",
-"post.logout.redirect.uris" : "+",
+"post.logout.redirect.uris" : "https://replace-me-with-spiff-subdomain.spiffworkflow.org/*##http://localhost:7001/*",
"oauth2.device.authorization.grant.enabled" : "false",
"backchannel.logout.revoke.offline.tokens" : "false",
"saml.server.signature.keyinfo.ext" : "false",
@ -2161,7 +2166,7 @@
"subType" : "authenticated",
"subComponents" : { },
"config" : {
-"allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ]
+"allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-address-mapper" ]
}
}, {
"id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",
@ -2179,7 +2184,7 @@
"subType" : "anonymous",
"subComponents" : { },
"config" : {
-"allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-address-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper" ]
+"allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ]
}
}, {
"id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",
@ -2269,7 +2274,7 @@
"internationalizationEnabled" : false,
"supportedLocales" : [ ],
"authenticationFlows" : [ {
-"id" : "b30ab201-b13a-405f-bc57-cb5cd934bdc3",
+"id" : "b896c673-57ab-4f24-bbb1-334bdadbecd3",
"alias" : "Account verification options",
"description" : "Method with which to verity the existing account",
"providerId" : "basic-flow",

@ -2291,7 +2296,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "7d22faa2-1da8-49ae-a2cc-74e9c9f6ed51",
+"id" : "4da99e29-371e-4f4b-a863-e5079f30a714",
"alias" : "Authentication Options",
"description" : "Authentication options.",
"providerId" : "basic-flow",

@ -2320,7 +2325,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "ae089cf3-3179-4e12-a683-7969a31be566",
+"id" : "d398c928-e201-4e8b-ab09-289bb351cd2e",
"alias" : "Browser - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",

@ -2342,7 +2347,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "27a21643-2167-4847-a6b4-b07007671d9a",
+"id" : "663b7aa3-84f6-4347-8ed4-588c2464b75d",
"alias" : "Direct Grant - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",

@ -2364,7 +2369,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "0ee33ef7-da6b-4248-81c6-9f4f11b58195",
+"id" : "98013bc1-e4dd-41f7-9849-1f898143b944",
"alias" : "First broker login - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -2386,7 +2391,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "e1d02af3-2886-42bb-95f4-bfa6f1299edc",
+"id" : "b77e7545-9e39-4d72-93f8-1b38c954c2e2",
"alias" : "Handle Existing Account",
"description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
"providerId" : "basic-flow",

@ -2408,7 +2413,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "35cfc75f-70e3-487c-acd7-0627ab1dbdf1",
+"id" : "2470e6f4-9a01-476a-9057-75d78e577182",
"alias" : "Reset - Conditional OTP",
"description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
"providerId" : "basic-flow",

@ -2430,7 +2435,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "cc2f7206-8d15-46db-b974-71e67d4d1077",
+"id" : "8e7dad0b-f4e1-4534-b618-b635b0a0e4f9",
"alias" : "User creation or linking",
"description" : "Flow for the existing/non-existing user alternatives",
"providerId" : "basic-flow",

@ -2453,7 +2458,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "d8314533-eacb-40ef-8f44-7c06321e9793",
+"id" : "97c83e43-cba8-4d92-b108-9181bca07a1e",
"alias" : "Verify Existing Account by Re-authentication",
"description" : "Reauthentication of existing account",
"providerId" : "basic-flow",

@ -2475,7 +2480,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "d58a5ff1-9a9c-45a9-9f97-1324565e9679",
+"id" : "fbabd64c-20de-4b8c-bfd2-be6822572278",
"alias" : "browser",
"description" : "browser based authentication",
"providerId" : "basic-flow",
@ -2511,7 +2516,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "3ea2aed9-12d9-4999-a104-67f5c5f7841a",
+"id" : "0628a99f-b194-495d-8e54-cc4ca8684956",
"alias" : "clients",
"description" : "Base authentication for clients",
"providerId" : "client-flow",

@ -2547,7 +2552,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "c605af3c-bede-4f8f-a5c5-94176171c82c",
+"id" : "ce6bf7af-3bff-48ce-b214-7fed08503a2a",
"alias" : "direct grant",
"description" : "OpenID Connect Resource Owner Grant",
"providerId" : "basic-flow",

@ -2576,7 +2581,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "901b4d6c-9c27-4d3d-981a-1b5281c1ea2b",
+"id" : "60ce729b-d055-4ae7-83cb-85dbcf8cfdaa",
"alias" : "docker auth",
"description" : "Used by Docker clients to authenticate against the IDP",
"providerId" : "basic-flow",

@ -2591,7 +2596,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "9d1de1bf-b170-4235-92f1-5dfd3ec31c45",
+"id" : "0bd3cf93-7f33-46b2-ad1f-85cdfb0a87f9",
"alias" : "first broker login",
"description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
"providerId" : "basic-flow",

@ -2614,7 +2619,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "8ee6b54f-4d31-4847-9ddc-36cb4c01b92b",
+"id" : "3e52f178-9b9d-4a62-97d5-f9f3f872bcd9",
"alias" : "forms",
"description" : "Username, password, otp and other auth forms.",
"providerId" : "basic-flow",
@ -2636,7 +2641,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "76d3380b-218b-443d-a3ea-bea712f4a1f4",
+"id" : "3f5fd6cc-2935-45d8-9bef-6857bba3657a",
"alias" : "http challenge",
"description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
"providerId" : "basic-flow",

@ -2658,7 +2663,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "cd756473-4606-4150-9ba5-5b96e6f39c3a",
+"id" : "2c2b32dd-57dc-45d7-9a24-b4a253cb6a03",
"alias" : "registration",
"description" : "registration flow",
"providerId" : "basic-flow",

@ -2674,7 +2679,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "574fcee6-e152-4069-b328-a7fe33aded3a",
+"id" : "dbc28b13-dba7-42a0-a8ab-faa8762979c3",
"alias" : "registration form",
"description" : "registration form",
"providerId" : "form-flow",

@ -2710,7 +2715,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "e5a890ee-140a-4ab3-8d79-87e3499385b0",
+"id" : "b4a901d5-e7b9-4eb6-9f8e-1d3305846828",
"alias" : "reset credentials",
"description" : "Reset credentials for a user if they forgot their password or something",
"providerId" : "basic-flow",

@ -2746,7 +2751,7 @@
"userSetupAllowed" : false
} ]
}, {
-"id" : "6243167c-7e2e-4cc7-b35d-bad7862dc9ef",
+"id" : "824fe757-cc5c-4e13-ab98-9a2132e10f5c",
"alias" : "saml ecp",
"description" : "SAML ECP Profile Authentication Flow",
"providerId" : "basic-flow",
@ -2762,13 +2767,13 @@
} ]
} ],
"authenticatorConfig" : [ {
-"id" : "ae605746-d169-4a81-8348-b5f52e07ae14",
+"id" : "817a93da-29df-447f-ab05-cd9557e66745",
"alias" : "create unique user config",
"config" : {
"require.password.update.after.registration" : "false"
}
}, {
-"id" : "c5feb20c-eea5-4556-b9f8-797be4d67e26",
+"id" : "4a8a9659-fa0d-4da8-907b-3b6daec1c878",
"alias" : "review profile config",
"config" : {
"update.profile.on.first.login" : "missing"
@ -1,8 +1,8 @@
"""empty message

-Revision ID: 40a2ed63cc5a
+Revision ID: 4d75421c0af0
Revises:
-Create Date: 2022-11-29 16:59:02.980181
+Create Date: 2022-12-06 17:42:56.417673

"""
from alembic import op

@ -10,7 +10,7 @@ import sqlalchemy as sa


# revision identifiers, used by Alembic.
-revision = '40a2ed63cc5a'
+revision = '4d75421c0af0'
down_revision = None
branch_labels = None
depends_on = None
@ -79,8 +79,7 @@ def upgrade():
    sa.Column('email', sa.String(length=255), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('service', 'service_id', name='service_key'),
-    sa.UniqueConstraint('uid'),
-    sa.UniqueConstraint('username')
+    sa.UniqueConstraint('uid')
    )
    op.create_table('message_correlation_property',
    sa.Column('id', sa.Integer(), nullable=False),
@ -19,6 +19,9 @@ from werkzeug.exceptions import NotFound
import spiffworkflow_backend.load_database_models # noqa: F401
from spiffworkflow_backend.config import setup_config
from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
+from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import (
+    openid_blueprint,
+)
from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint
from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.routes.user_blueprint import user_blueprint

@ -103,6 +106,7 @@ def create_app() -> flask.app.Flask:
    app.register_blueprint(process_api_blueprint)
    app.register_blueprint(api_error_blueprint)
    app.register_blueprint(admin_blueprint, url_prefix="/admin")
+    app.register_blueprint(openid_blueprint, url_prefix="/openid")

    # preflight options requests will be allowed if they meet the requirements of the url regex.
    # we will add an Access-Control-Max-Age header to the response to tell the browser it doesn't
|
@@ -14,13 +14,13 @@ class ConfigurationError(Exception):

 def setup_database_uri(app: Flask) -> None:
 """Setup_database_uri."""
-if os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
+if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
 database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
-if os.environ.get("SPIFF_DATABASE_TYPE") == "sqlite":
+if app.config.get("SPIFF_DATABASE_TYPE") == "sqlite":
 app.config[
 "SQLALCHEMY_DATABASE_URI"
 ] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
-elif os.environ.get("SPIFF_DATABASE_TYPE") == "postgres":
+elif app.config.get("SPIFF_DATABASE_TYPE") == "postgres":
 app.config[
 "SQLALCHEMY_DATABASE_URI"
 ] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
@@ -33,7 +33,7 @@ def setup_database_uri(app: Flask) -> None:
 "SQLALCHEMY_DATABASE_URI"
 ] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
 else:
-app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
+app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get(
 "SPIFFWORKFLOW_BACKEND_DATABASE_URI"
 )

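Note (illustration, not part of this commit): the hunks above switch setup_database_uri() from os.environ.get to app.config.get, so database settings provided by any Flask config module or override file are honored, not only raw shell environment variables. A minimal sketch of the difference, using made-up values:

    # Illustrative sketch only. It shows why reading app.config (which config
    # files can populate) differs from reading os.environ directly.
    import os
    from flask import Flask

    app = Flask(__name__)
    app.config["SPIFF_DATABASE_TYPE"] = "sqlite"  # set by a config module, not the shell

    print(os.environ.get("SPIFF_DATABASE_TYPE"))  # None unless exported in the shell
    print(app.config.get("SPIFF_DATABASE_TYPE"))  # "sqlite"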
@@ -52,12 +52,6 @@ def setup_config(app: Flask) -> None:
 app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
 app.config.from_object("spiffworkflow_backend.config.default")

-# This allows config/testing.py or instance/config.py to override the default config
-if "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "testing":
-app.config.from_pyfile("config/testing.py", silent=True)
-else:
-app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True)
-
 env_config_prefix = "spiffworkflow_backend.config."
 env_config_module = env_config_prefix + app.config["ENV_IDENTIFIER"]
 try:
@@ -67,14 +61,17 @@ def setup_config(app: Flask) -> None:
 os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") == "true"
 and os.environ.get("SPIFFWORKFLOW_BACKEND_ENV") is not None
 ):
-app.config.from_object("{env_config_prefix}terraform_deployed_environment")
+app.config.from_object(f"{env_config_prefix}terraform_deployed_environment")
 else:
 raise ModuleNotFoundError(
 f"Cannot find config module: {env_config_module}"
 ) from exception

-setup_database_uri(app)
-setup_logger(app)
+# This allows config/testing.py or instance/config.py to override the default config
+if "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "testing":
+app.config.from_pyfile("config/testing.py", silent=True)
+else:
+app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True)

 app.config["PERMISSIONS_FILE_FULLPATH"] = None
 if app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"]:
@@ -92,5 +89,8 @@ def setup_config(app: Flask) -> None:
 if app.config["BPMN_SPEC_ABSOLUTE_DIR"] is None:
 raise ConfigurationError("BPMN_SPEC_ABSOLUTE_DIR config must be set")

+setup_database_uri(app)
+setup_logger(app)
+
 thread_local_data = threading.local()
 app.config["THREAD_LOCAL_DATA"] = thread_local_data
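Note (illustration, not part of this commit): the setup_config() hunks above move the config/testing.py and instance/config.py overrides so they run after the environment config module, and defer setup_database_uri()/setup_logger() to the end. The resulting load order appears to be:

    # Assumed ordering implied by the diff above; later steps override earlier ones.
    # 1. app.config.from_object("spiffworkflow_backend.config.default")
    # 2. app.config.from_object(f"spiffworkflow_backend.config.{ENV_IDENTIFIER}")
    # 3. app.config.from_pyfile("config/testing.py") or from_pyfile(f"{app.instance_path}/config.py")
    # 4. setup_database_uri(app); setup_logger(app)  # derived from the merged config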
@@ -30,9 +30,12 @@ CONNECTOR_PROXY_URL = environ.get(
 GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true"

 # Open ID server
-OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7002")
+OPEN_ID_SERVER_URL = environ.get(
+"OPEN_ID_SERVER_URL", default="http://localhost:7002/realms/spiffworkflow"
+)
+# Replace above line with this to use the built-in Open ID Server.
+# OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7000/openid")
 OPEN_ID_CLIENT_ID = environ.get("OPEN_ID_CLIENT_ID", default="spiffworkflow-backend")
-OPEN_ID_REALM_NAME = environ.get("OPEN_ID_REALM_NAME", default="spiffworkflow")
 OPEN_ID_CLIENT_SECRET_KEY = environ.get(
 "OPEN_ID_CLIENT_SECRET_KEY", default="JXeQExm0JhQPLumgHtIIqf52bDalHz0q"
 )  # noqa: S105
@@ -57,6 +60,16 @@ SENTRY_TRACES_SAMPLE_RATE = environ.get(
 SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
 "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="info"
 )

 # When a user clicks on the `Publish` button, this is the default branch this server merges into.
 # I.e., dev server could have `staging` here. Staging server might have `production` here.
 GIT_MERGE_BRANCH = environ.get("GIT_MERGE_BRANCH", default="staging")

+# Datbase Configuration
+SPIFF_DATABASE_TYPE = environ.get(
+"SPIFF_DATABASE_TYPE", default="mysql"
+)  # can also be sqlite, postgres
+# Overide above with specific sqlalchymy connection string.
+SPIFFWORKFLOW_BACKEND_DATABASE_URI = environ.get(
+"SPIFFWORKFLOW_BACKEND_DATABASE_URI", default=None
+)
@@ -1,5 +1,11 @@
 default_group: everybody

+users:
+admin:
+email: admin@spiffworkflow.org
+password: admin
+preferred_username: Admin
+
 groups:
 admin:
 users:
@@ -0,0 +1,88 @@
+default_group: everybody
+
+users:
+admin:
+email: admin@spiffworkflow.org
+password: admin
+preferred_username: Admin
+nelson:
+email: nelson@spiffworkflow.org
+password: nelson
+preferred_username: Nelson
+malala:
+email: malala@spiffworkflow.org
+password: malala
+preferred_username: Malala
+
+groups:
+admin:
+users:
+[
+admin,
+]
+Education:
+users:
+[
+malala
+]
+President:
+users:
+[
+nelson
+]
+
+permissions:
+# Admins have access to everything.
+admin:
+groups: [admin]
+users: []
+allowed_permissions: [create, read, update, delete]
+uri: /*
+
+# Everybody can participate in tasks assigned to them.
+tasks-crud:
+groups: [everybody]
+users: []
+allowed_permissions: [create, read, update, delete]
+uri: /v1.0/tasks/*
+
+# Everyone can see everything (all groups, and processes are visible)
+read-all-process-groups:
+groups: [ everybody ]
+users: [ ]
+allowed_permissions: [ read ]
+uri: /v1.0/process-groups/*
+read-all-process-models:
+groups: [ everybody ]
+users: [ ]
+allowed_permissions: [ read ]
+uri: /v1.0/process-models/*
+read-all-process-instance:
+groups: [ everybody ]
+users: [ ]
+allowed_permissions: [ read ]
+uri: /v1.0/process-instances/*
+read-process-instance-reports:
+groups: [ everybody ]
+users: [ ]
+allowed_permissions: [ read ]
+uri: /v1.0/process-instances/reports/*
+processes-read:
+groups: [ everybody ]
+users: [ ]
+allowed_permissions: [ read ]
+uri: /v1.0/processes
+
+# Members of the Education group can change they processes work.
+education-admin:
+groups: ["Education", "President"]
+users: []
+allowed_permissions: [create, read, update, delete]
+uri: /v1.0/process-groups/education:*
+
+# Anyone can start an education process.
+education-everybody:
+groups: [everybody]
+users: []
+allowed_permissions: [create, read]
+uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
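Note (illustration, not part of this commit): the permissions file above maps named rules to groups, allowed_permissions, and a uri pattern. The real enforcement lives in AuthorizationService; the toy reader below (the can_access name and the matching logic are made up for illustration) just shows how such a structure can be interpreted:

    from fnmatch import fnmatch

    def can_access(config: dict, user_groups: list, uri: str, action: str) -> bool:
        """Illustration only: check a parsed permissions mapping like the YAML above."""
        for rule in config.get("permissions", {}).values():
            if not set(user_groups) & set(rule.get("groups", [])):
                continue
            if action in rule.get("allowed_permissions", []) and fnmatch(uri, rule.get("uri", "")):
                return True
        return False

    # e.g. a member of Education may update models under the education process group:
    # can_access(yaml.safe_load(permissions_text), ["Education", "everybody"],
    #            "/v1.0/process-groups/education:reading", "update")  -> True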
@@ -70,6 +70,13 @@ permissions:
 allowed_permissions: [create, read, update, delete]
 uri: /v1.0/tasks/*
+
+service-tasks:
+groups: [everybody]
+users: []
+allowed_permissions: [read]
+uri: /v1.0/service-tasks
+

 # read all for everybody
 read-all-process-groups:
 groups: [everybody]
@@ -1,9 +0,0 @@
-"""Staging."""
-from os import environ
-
-GIT_COMMIT_ON_SAVE = True
-GIT_COMMIT_USERNAME = "staging"
-GIT_COMMIT_EMAIL = "staging@example.com"
-SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
-"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="staging.yml"
-)
@@ -0,0 +1,26 @@
+"""Terraform-deployed environment."""
+from os import environ
+
+# default.py already ensured that this key existed as was not None
+environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"]
+
+GIT_COMMIT_ON_SAVE = True
+GIT_COMMIT_USERNAME = environment_identifier_for_this_config_file_only
+GIT_COMMIT_EMAIL = f"{environment_identifier_for_this_config_file_only}@example.com"
+SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
+"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME",
+default="terraform_deployed_environment.yml",
+)
+
+RUN_BACKGROUND_SCHEDULER = (
+environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
+)
+
+OPEN_ID_SERVER_URL = f"https://keycloak.{environment_identifier_for_this_config_file_only}.spiffworkflow.org/realms/spiffworkflow"
+SPIFFWORKFLOW_FRONTEND_URL = (
+f"https://{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
+)
+SPIFFWORKFLOW_BACKEND_URL = (
+f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
+)
+CONNECTOR_PROXY_URL = f"https://connector-proxy.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
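Note (illustration, not part of this commit): every external URL in the new terraform_deployed_environment config is derived from SPIFFWORKFLOW_BACKEND_ENV. For a hypothetical environment named "demo", the f-strings above evaluate to:

    env = "demo"  # stand-in for environ["SPIFFWORKFLOW_BACKEND_ENV"]
    print(f"https://keycloak.{env}.spiffworkflow.org/realms/spiffworkflow")  # OPEN_ID_SERVER_URL
    print(f"https://{env}.spiffworkflow.org")                                # SPIFFWORKFLOW_FRONTEND_URL
    print(f"https://api.{env}.spiffworkflow.org")                            # SPIFFWORKFLOW_BACKEND_URL
    print(f"https://connector-proxy.{env}.spiffworkflow.org")                # CONNECTOR_PROXY_URL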
@@ -1,16 +0,0 @@
-"""Terraform-deployed environment."""
-from os import environ
-
-# default.py already ensured that this key existed as was not None
-environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"]
-
-GIT_COMMIT_ON_SAVE = True
-GIT_COMMIT_USERNAME = environment_identifier_for_this_config_file_only
-GIT_COMMIT_EMAIL = f"{environment_identifier_for_this_config_file_only}@example.com"
-SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
-"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="terraform_deployed_environment.yml"
-)
-
-RUN_BACKGROUND_SCHEDULER = (
-environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
-)
@@ -30,7 +30,8 @@ class UserModel(SpiffworkflowBaseDBModel):
 __table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),)

 id = db.Column(db.Integer, primary_key=True)
-username = db.Column(db.String(255), nullable=False, unique=True)
+# server and service id must be unique, not username.
+username = db.Column(db.String(255), nullable=False, unique=False)
 uid = db.Column(db.String(50), unique=True)
 service = db.Column(db.String(50), nullable=False, unique=False)
 service_id = db.Column(db.String(255), nullable=False, unique=False)
@@ -83,10 +84,6 @@ class UserModel(SpiffworkflowBaseDBModel):
 algorithm="HS256",
 )

-def is_admin(self) -> bool:
-"""Is_admin."""
-return True
-
 # @classmethod
 # def from_open_id_user_info(cls, user_info: dict) -> Any:
 # """From_open_id_user_info."""
@@ -0,0 +1 @@
+"""__init__."""
@@ -0,0 +1,153 @@
+"""OpenID Implementation for demos and local development.
+
+A very insecure and partial OpenID implementation for use in demos and testing.
+Provides the bare minimum endpoints required by SpiffWorkflow to
+handle openid authentication -- definitely not a production ready system.
+This is just here to make local development, testing, and demonstration easier.
+"""
+import base64
+import time
+from typing import Any
+from urllib.parse import urlencode
+
+import jwt
+import yaml
+from flask import Blueprint
+from flask import current_app
+from flask import redirect
+from flask import render_template
+from flask import request
+from flask import url_for
+from werkzeug.wrappers import Response
+
+openid_blueprint = Blueprint(
+"openid", __name__, template_folder="templates", static_folder="static"
+)
+
+OPEN_ID_CODE = ":this_is_not_secure_do_not_use_in_production"
+
+
+@openid_blueprint.route("/.well-known/openid-configuration", methods=["GET"])
+def well_known() -> dict:
+"""Open ID Discovery endpoint.
+
+These urls can be very different from one openid impl to the next, this is just a small subset.
+"""
+host_url = request.host_url.strip("/")
+return {
+"issuer": f"{host_url}/openid",
+"authorization_endpoint": f"{host_url}{url_for('openid.auth')}",
+"token_endpoint": f"{host_url}{url_for('openid.token')}",
+"end_session_endpoint": f"{host_url}{url_for('openid.end_session')}",
+}
+
+
+@openid_blueprint.route("/auth", methods=["GET"])
+def auth() -> str:
+"""Accepts a series of parameters."""
+return render_template(
+"login.html",
+state=request.args.get("state"),
+response_type=request.args.get("response_type"),
+client_id=request.args.get("client_id"),
+scope=request.args.get("scope"),
+redirect_uri=request.args.get("redirect_uri"),
+error_message=request.args.get("error_message", ""),
+)
+
+
+@openid_blueprint.route("/form_submit", methods=["POST"])
+def form_submit() -> Any:
+"""Handles the login form submission."""
+users = get_users()
+if (
+request.values["Uname"] in users
+and request.values["Pass"] == users[request.values["Uname"]]["password"]
+):
+# Redirect back to the end user with some detailed information
+state = request.values.get("state")
+data = {
+"state": state,
+"code": request.values["Uname"] + OPEN_ID_CODE,
+"session_state": "",
+}
+url = request.values.get("redirect_uri") + "?" + urlencode(data)
+return redirect(url)
+else:
+return render_template(
+"login.html",
+state=request.values.get("state"),
+response_type=request.values.get("response_type"),
+client_id=request.values.get("client_id"),
+scope=request.values.get("scope"),
+redirect_uri=request.values.get("redirect_uri"),
+error_message="Login failed. Please try again.",
+)
+
+
+@openid_blueprint.route("/token", methods=["POST"])
+def token() -> dict:
+"""Url that will return a valid token, given the super secret sauce."""
+request.values.get("grant_type")
+code = request.values.get("code")
+request.values.get("redirect_uri")
+
+"""We just stuffed the user name on the front of the code, so grab it."""
+user_name, secret_hash = code.split(":")
+user_details = get_users()[user_name]
+
+"""Get authentication from headers."""
+authorization = request.headers.get("Authorization", "Basic ")
+authorization = authorization[6:]  # Remove "Basic"
+authorization = base64.b64decode(authorization).decode("utf-8")
+client_id, client_secret = authorization.split(":")
+
+base_url = request.host_url + "openid"
+
+id_token = jwt.encode(
+{
+"iss": base_url,
+"aud": [client_id, "account"],
+"iat": time.time(),
+"exp": time.time() + 86400,  # Expire after a day.
+"sub": user_name,
+"preferred_username": user_details.get("preferred_username", user_name),
+},
+client_secret,
+algorithm="HS256",
+)
+response = {
+"access_token": id_token,
+"id_token": id_token,
+"refresh_token": id_token,
+}
+return response
+
+
+@openid_blueprint.route("/end_session", methods=["GET"])
+def end_session() -> Response:
+"""Logout."""
+redirect_url = request.args.get("post_logout_redirect_uri", "http://localhost")
+request.args.get("id_token_hint")
+return redirect(redirect_url)
+
+
+@openid_blueprint.route("/refresh", methods=["POST"])
+def refresh() -> str:
+"""Refresh."""
+return ""
+
+
+permission_cache = None
+
+
+def get_users() -> Any:
+"""Load users from a local configuration file."""
+global permission_cache
+if not permission_cache:
+with open(current_app.config["PERMISSIONS_FILE_FULLPATH"]) as file:
+permission_cache = yaml.safe_load(file)
+if "users" in permission_cache:
+return permission_cache["users"]
+else:
+return {}
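Note (illustration, not part of this commit): the blueprint above implements just enough of the OpenID code flow for demos: /auth renders a login form, a successful submit redirects back with code "<username>:this_is_not_secure_do_not_use_in_production", and /token expects HTTP Basic auth of client_id:client_secret and returns a JWT signed with that secret. A client-side sketch, assuming the backend serves the blueprint at http://localhost:7000/openid and the demo defaults from default.py:

    import base64
    import jwt       # PyJWT
    import requests

    client_id = "spiffworkflow-backend"
    client_secret = "JXeQExm0JhQPLumgHtIIqf52bDalHz0q"  # demo default, never use in production
    basic = base64.b64encode(f"{client_id}:{client_secret}".encode()).decode()

    resp = requests.post(
        "http://localhost:7000/openid/token",
        headers={"Authorization": f"Basic {basic}"},
        data={"grant_type": "authorization_code",
              "code": "admin:this_is_not_secure_do_not_use_in_production"},
    )
    id_token = resp.json()["id_token"]
    # The mock signs with the client secret, so the same secret verifies it:
    claims = jwt.decode(id_token, client_secret, algorithms=["HS256"], audience=client_id)
    print(claims["sub"], claims["preferred_username"])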
@@ -0,0 +1,112 @@
+body{
+margin: 0;
+padding: 0;
+background-color:white;
+font-family: 'Arial';
+}
+header {
+width: 100%;
+background-color: black;
+}
+.logo_small {
+padding: 5px 20px;
+}
+.error {
+margin: 20px auto;
+color: red;
+font-weight: bold;
+text-align: center;
+}
+.login{
+width: 400px;
+overflow: hidden;
+margin: 20px auto;
+padding: 50px;
+background: #fff;
+border-radius: 15px ;
+}
+h2{
+text-align: center;
+color: #277582;
+padding: 20px;
+}
+label{
+color: #fff;
+width: 200px;
+display: inline-block;
+}
+#log {
+width: 100px;
+height: 50px;
+border: none;
+padding-left: 7px;
+background-color:#202020;
+color: #DDD;
+text-align: left;
+}
+.cds--btn--primary {
+background-color: #0f62fe;
+border: 1px solid #0000;
+color: #fff;
+}
+.cds--btn {
+align-items: center;
+border: 0;
+border-radius: 0;
+box-sizing: border-box;
+cursor: pointer;
+display: inline-flex;
+flex-shrink: 0;
+font-family: inherit;
+font-size: 100%;
+font-size: .875rem;
+font-weight: 400;
+justify-content: space-between;
+letter-spacing: .16px;
+line-height: 1.28572;
+margin: 0;
+max-width: 20rem;
+min-height: 3rem;
+outline: none;
+padding: calc(0.875rem - 3px) 63px calc(0.875rem - 3px) 15px;
+position: relative;
+text-align: left;
+text-decoration: none;
+transition: background 70ms cubic-bezier(0, 0, .38, .9), box-shadow 70ms cubic-bezier(0, 0, .38, .9), border-color 70ms cubic-bezier(0, 0, .38, .9), outline 70ms cubic-bezier(0, 0, .38, .9);
+vertical-align: initial;
+vertical-align: top;
+width: max-content;
+}
+.cds--btn:hover {
+background-color: #0145c5;
+}
+.cds--btn:focus {
+background-color: #01369a;
+}
+
+.cds--text-input {
+background-color: #eee;
+border: none;
+border-bottom: 1px solid #8d8d8d;
+color: #161616;
+font-family: inherit;
+font-size: .875rem;
+font-weight: 400;
+height: 2.5rem;
+letter-spacing: .16px;
+line-height: 1.28572;
+outline: 2px solid #0000;
+outline-offset: -2px;
+padding: 0 1rem;
+transition: background-color 70ms cubic-bezier(.2,0,.38,.9),outline 70ms cubic-bezier(.2,0,.38,.9);
+width: 100%;
+}
+
+span{
+color: white;
+font-size: 17px;
+}
+a{
+float: right;
+background-color: grey;
+}
(binary image file added, 9.9 KiB)
(binary image file added, 4.9 KiB)
@@ -0,0 +1,36 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title>Login Form</title>
+<link rel="stylesheet" type="text/css" href="{{ url_for('openid.static', filename='login.css') }}">
+</head>
+<body>
+<header>
+<img class="logo_small" src="{{ url_for('openid.static', filename='logo_small.png') }}"/>
+</header>
+
+<h2>Login</h2>
+<div class="error">{{error_message}}</div>
+<div class="login">
+<form id="login" method="post" action="{{ url_for('openid.form_submit') }}">
+<input type="text" class="cds--text-input" name="Uname" id="Uname" placeholder="Username">
+<br><br>
+<input type="Password" class="cds--text-input" name="Pass" id="Pass" placeholder="Password">
+<br><br>
+<input type="hidden" name="state" value="{{state}}"/>
+<input type="hidden" name="response_type" value="{{response_type}}"/>
+<input type="hidden" name="client_id" value="{{client_id}}"/>
+<input type="hidden" name="scope" value="{{scope}}"/>
+<input type="hidden" name="redirect_uri" value="{{redirect_uri}}"/>
+<input type="submit" name="log" class="cds--btn cds--btn--primary" value="Log In">
+<br><br>
+<!-- should maybe add this stuff in eventually, but this is just for testing.
+<input type="checkbox" id="check">
+<span>Remember me</span>
+<br><br>
+Forgot <a href="#">Password</a>
+-->
+</form>
+</div>
+</body>
+</html>
@@ -33,7 +33,7 @@ from sqlalchemy import asc
 from sqlalchemy import desc
 from sqlalchemy import func
 from sqlalchemy.orm import aliased
-from sqlalchemy.orm import joinedload
+from sqlalchemy.orm import selectinload

 from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
 ProcessEntityNotFoundError,
@@ -853,7 +853,7 @@ def process_instance_list(
 process_instance_query = ProcessInstanceModel.query
 # Always join that hot user table for good performance at serialization time.
 process_instance_query = process_instance_query.options(
-joinedload(ProcessInstanceModel.process_initiator)
+selectinload(ProcessInstanceModel.process_initiator)
 )

 if report_filter.process_model_identifier is not None:
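Note (illustration, not part of this commit): the query change above swaps joinedload for selectinload when eager-loading process_initiator. A rough sketch of the two SQLAlchemy strategies:

    # joinedload: one query with a LEFT OUTER JOIN to the user table; initiator
    # columns are duplicated onto every instance row.
    # query.options(joinedload(ProcessInstanceModel.process_initiator))
    #
    # selectinload: the instances are loaded first, then a second SELECT ... WHERE
    # user.id IN (...) fetches all initiators in one batch.
    # query.options(selectinload(ProcessInstanceModel.process_initiator))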
@@ -1,6 +1,7 @@
 """User."""
 import ast
 import base64
+import json
 from typing import Any
 from typing import Dict
 from typing import Optional
@@ -58,7 +59,6 @@ def verify_token(
 decoded_token = get_decoded_token(token)

 if decoded_token is not None:

 if "token_type" in decoded_token:
 token_type = decoded_token["token_type"]
 if token_type == "internal":  # noqa: S105
@@ -68,11 +68,11 @@ def verify_token(
 current_app.logger.error(
 f"Exception in verify_token getting user from decoded internal token. {e}"
 )

 elif "iss" in decoded_token.keys():
 try:
-user_info = AuthenticationService.get_user_info_from_open_id(token)
-except ApiError as ae:
+if AuthenticationService.validate_id_token(token):
+user_info = decoded_token
+except ApiError as ae:  # API Error is only thrown in the token is outdated.
 # Try to refresh the token
 user = UserService.get_user_by_service_and_service_id(
 "open_id", decoded_token["sub"]
@@ -86,14 +86,9 @@ def verify_token(
 )
 )
 if auth_token and "error" not in auth_token:
-# redirect to original url, with auth_token?
-user_info = (
-AuthenticationService.get_user_info_from_open_id(
-auth_token["access_token"]
-)
-)
-if not user_info:
-raise ae
+# We have the user, but this code is a bit convoluted, and will later demand
+# a user_info object so it can look up the user. Sorry to leave this crap here.
+user_info = {"sub": user.service_id}
 else:
 raise ae
 else:
@@ -203,6 +198,18 @@ def login(redirect_url: str = "/") -> Response:
 return redirect(login_redirect_url)


+def parse_id_token(token: str) -> Any:
+"""Parse the id token."""
+parts = token.split(".")
+if len(parts) != 3:
+raise Exception("Incorrect id token format")
+
+payload = parts[1]
+padded = payload + "=" * (4 - len(payload) % 4)
+decoded = base64.b64decode(padded)
+return json.loads(decoded)
+
+
 def login_return(code: str, state: str, session_state: str) -> Optional[Response]:
 """Login_return."""
 state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))
@@ -211,10 +218,9 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response
 if "id_token" in auth_token_object:
 id_token = auth_token_object["id_token"]

+user_info = parse_id_token(id_token)
+
 if AuthenticationService.validate_id_token(id_token):
-user_info = AuthenticationService.get_user_info_from_open_id(
-auth_token_object["access_token"]
-)
 if user_info and "error" not in user_info:
 user_model = AuthorizationService.create_user_from_sign_in(user_info)
 g.user = user_model.id
@@ -332,15 +338,11 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
 .filter(UserModel.service_id == service_id)
 .first()
 )
-# user: UserModel = UserModel.query.filter()
 if user:
 return user
 user = UserModel(
 username=service_id,
-uid=service_id,
 service=service,
 service_id=service_id,
-name="API User",
 )

 return user
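Note (illustration, not part of this commit): the new parse_id_token() helper splits the JWT, re-pads its base64url payload segment, and json-decodes it instead of calling the userinfo endpoint. A standalone illustration of the padding step, with a sample token body built on the spot:

    import base64
    import json

    payload = base64.urlsafe_b64encode(json.dumps({"sub": "admin"}).encode()).rstrip(b"=").decode()
    padded = payload + "=" * (-len(payload) % 4)  # restore the '=' padding a JWT strips
    print(json.loads(base64.urlsafe_b64decode(padded)))  # {'sub': 'admin'}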
@@ -26,58 +26,35 @@ class AuthenticationProviderTypes(enum.Enum):
 class AuthenticationService:
 """AuthenticationService."""

+ENDPOINT_CACHE: dict = (
+{}
+)  # We only need to find the openid endpoints once, then we can cache them.
+
 @staticmethod
-def get_open_id_args() -> tuple:
-"""Get_open_id_args."""
-open_id_server_url = current_app.config["OPEN_ID_SERVER_URL"]
-open_id_client_id = current_app.config["OPEN_ID_CLIENT_ID"]
-open_id_realm_name = current_app.config["OPEN_ID_REALM_NAME"]
-open_id_client_secret_key = current_app.config[
-"OPEN_ID_CLIENT_SECRET_KEY"
-]  # noqa: S105
-return (
-open_id_server_url,
-open_id_client_id,
-open_id_realm_name,
-open_id_client_secret_key,
-)
+def client_id() -> str:
+"""Returns the client id from the config."""
+return current_app.config.get("OPEN_ID_CLIENT_ID", "")
+
+@staticmethod
+def server_url() -> str:
+"""Returns the server url from the config."""
+return current_app.config.get("OPEN_ID_SERVER_URL", "")
+
+@staticmethod
+def secret_key() -> str:
+"""Returns the secret key from the config."""
+return current_app.config.get("OPEN_ID_CLIENT_SECRET_KEY", "")

 @classmethod
-def get_user_info_from_open_id(cls, token: str) -> dict:
-"""The token is an auth_token."""
-(
-open_id_server_url,
-open_id_client_id,
-open_id_realm_name,
-open_id_client_secret_key,
-) = cls.get_open_id_args()
-
-headers = {"Authorization": f"Bearer {token}"}
-
-request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/userinfo"
-try:
-request_response = requests.get(request_url, headers=headers)
-except Exception as e:
-current_app.logger.error(f"Exception in get_user_info_from_id_token: {e}")
-raise ApiError(
-error_code="token_error",
-message=f"Exception in get_user_info_from_id_token: {e}",
-status_code=401,
-) from e
-
-if request_response.status_code == 401:
-raise ApiError(
-error_code="invalid_token", message="Please login", status_code=401
-)
-elif request_response.status_code == 200:
-user_info: dict = json.loads(request_response.text)
-return user_info
-
-raise ApiError(
-error_code="user_info_error",
-message="Cannot get user info in get_user_info_from_id_token",
-status_code=401,
-)
-
+def open_id_endpoint_for_name(cls, name: str) -> str:
+"""All openid systems provide a mapping of static names to the full path of that endpoint."""
+if name not in AuthenticationService.ENDPOINT_CACHE:
+request_url = f"{cls.server_url()}/.well-known/openid-configuration"
+response = requests.get(request_url)
+AuthenticationService.ENDPOINT_CACHE = response.json()
+if name not in AuthenticationService.ENDPOINT_CACHE:
+raise Exception(f"Unknown OpenID Endpoint: {name}")
+return AuthenticationService.ENDPOINT_CACHE.get(name, "")
+
 @staticmethod
 def get_backend_url() -> str:
@@ -87,17 +64,10 @@ class AuthenticationService:
 def logout(self, id_token: str, redirect_url: Optional[str] = None) -> Response:
 """Logout."""
 if redirect_url is None:
-redirect_url = "/"
-return_redirect_url = f"{self.get_backend_url()}/v1.0/logout_return"
-(
-open_id_server_url,
-open_id_client_id,
-open_id_realm_name,
-open_id_client_secret_key,
-) = AuthenticationService.get_open_id_args()
+redirect_url = f"{self.get_backend_url()}/v1.0/logout_return"
 request_url = (
-f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/logout?"
-+ f"post_logout_redirect_uri={return_redirect_url}&"
+self.open_id_endpoint_for_name("end_session_endpoint")
++ f"?post_logout_redirect_uri={redirect_url}&"
 + f"id_token_hint={id_token}"
 )

@@ -113,18 +83,12 @@ class AuthenticationService:
 self, state: str, redirect_url: str = "/v1.0/login_return"
 ) -> str:
 """Get_login_redirect_url."""
-(
-open_id_server_url,
-open_id_client_id,
-open_id_realm_name,
-open_id_client_secret_key,
-) = AuthenticationService.get_open_id_args()
 return_redirect_url = f"{self.get_backend_url()}{redirect_url}"
 login_redirect_url = (
-f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/auth?"
-+ f"state={state}&"
+self.open_id_endpoint_for_name("authorization_endpoint")
++ f"?state={state}&"
 + "response_type=code&"
-+ f"client_id={open_id_client_id}&"
++ f"client_id={self.client_id()}&"
 + "scope=openid&"
 + f"redirect_uri={return_redirect_url}"
 )
@@ -134,14 +98,7 @@ class AuthenticationService:
 self, code: str, redirect_url: str = "/v1.0/login_return"
 ) -> dict:
 """Get_auth_token_object."""
-(
-open_id_server_url,
-open_id_client_id,
-open_id_realm_name,
-open_id_client_secret_key,
-) = AuthenticationService.get_open_id_args()
-
-backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}"
+backend_basic_auth_string = f"{self.client_id()}:{self.secret_key()}"
 backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii")
 backend_basic_auth = base64.b64encode(backend_basic_auth_bytes)
 headers = {
@@ -154,7 +111,7 @@ class AuthenticationService:
 "redirect_uri": f"{self.get_backend_url()}{redirect_url}",
 }

-request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token"
+request_url = self.open_id_endpoint_for_name("token_endpoint")

 response = requests.post(request_url, data=data, headers=headers)
 auth_token_object: dict = json.loads(response.text)
@@ -165,12 +122,6 @@ class AuthenticationService:
 """Https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation."""
 valid = True
 now = time.time()
-(
-open_id_server_url,
-open_id_client_id,
-open_id_realm_name,
-open_id_client_secret_key,
-) = cls.get_open_id_args()
 try:
 decoded_token = jwt.decode(id_token, options={"verify_signature": False})
 except Exception as e:
@@ -179,15 +130,15 @@ class AuthenticationService:
 message="Cannot decode id_token",
 status_code=401,
 ) from e
-if decoded_token["iss"] != f"{open_id_server_url}/realms/{open_id_realm_name}":
+if decoded_token["iss"] != cls.server_url():
 valid = False
 elif (
-open_id_client_id not in decoded_token["aud"]
+cls.client_id() not in decoded_token["aud"]
 and "account" not in decoded_token["aud"]
 ):
 valid = False
 elif "azp" in decoded_token and decoded_token["azp"] not in (
-open_id_client_id,
+cls.client_id(),
 "account",
 ):
 valid = False
@@ -241,15 +192,8 @@ class AuthenticationService:

 @classmethod
 def get_auth_token_from_refresh_token(cls, refresh_token: str) -> dict:
-"""Get a new auth_token from a refresh_token."""
-(
-open_id_server_url,
-open_id_client_id,
-open_id_realm_name,
-open_id_client_secret_key,
-) = cls.get_open_id_args()
-
-backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}"
+"""Converts a refresh token to an Auth Token by calling the openid's auth endpoint."""
+backend_basic_auth_string = f"{cls.client_id()}:{cls.secret_key()}"
 backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii")
 backend_basic_auth = base64.b64encode(backend_basic_auth_bytes)
 headers = {
@@ -260,11 +204,11 @@ class AuthenticationService:
 data = {
 "grant_type": "refresh_token",
 "refresh_token": refresh_token,
-"client_id": open_id_client_id,
-"client_secret": open_id_client_secret_key,
+"client_id": cls.client_id(),
+"client_secret": cls.secret_key(),
 }

-request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token"
+request_url = cls.open_id_endpoint_for_name("token_endpoint")

 response = requests.post(request_url, data=data, headers=headers)
 auth_token_object: dict = json.loads(response.text)
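Note (illustration, not part of this commit): AuthenticationService now resolves endpoints once from the provider's discovery document and caches them in ENDPOINT_CACHE, instead of hard-coding Keycloak realm paths. A sketch of the lookup it relies on, using the local demo realm from default.py as an assumed server URL:

    import requests

    server_url = "http://localhost:7002/realms/spiffworkflow"  # assumed OPEN_ID_SERVER_URL
    discovery = requests.get(f"{server_url}/.well-known/openid-configuration").json()
    token_endpoint = discovery["token_endpoint"]              # used by get_auth_token_object()
    end_session_endpoint = discovery["end_session_endpoint"]  # used to build the logout URL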
@@ -1,4 +1,5 @@
 """Authorization_service."""
+import inspect
 import re
 from typing import Optional
 from typing import Union
@@ -8,6 +9,7 @@ import yaml
 from flask import current_app
 from flask import g
 from flask import request
+from flask import scaffold
 from flask_bpmn.api.api_error import ApiError
 from flask_bpmn.models.db import db
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
@@ -23,6 +25,7 @@ from spiffworkflow_backend.models.principal import PrincipalModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.models.user import UserNotFoundError
 from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
+from spiffworkflow_backend.routes.openid_blueprint import openid_blueprint
 from spiffworkflow_backend.services.group_service import GroupService
 from spiffworkflow_backend.services.user_service import UserService

@@ -241,6 +244,7 @@ class AuthorizationService:
 return True

 api_view_function = current_app.view_functions[request.endpoint]
+module = inspect.getmodule(api_view_function)
 if (
 api_view_function
 and api_view_function.__name__.startswith("login")
@@ -248,6 +252,8 @@ class AuthorizationService:
 or api_view_function.__name__.startswith("console_ui_")
 or api_view_function.__name__ in authentication_exclusion_list
 or api_view_function.__name__ in swagger_functions
+or module == openid_blueprint
+or module == scaffold  # don't check permissions for static assets
 ):
 return True

@@ -100,6 +100,7 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskDeleg
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
 from spiffworkflow_backend.services.user_service import UserService


 # Sorry about all this crap. I wanted to move this thing to another file, but
 # importing a bunch of types causes circular imports.

@@ -594,20 +595,25 @@ class ProcessInstanceProcessor:
 path_segments = path.split(".")
 data_for_key = current_data
 for path_segment in path_segments:
-data_for_key = data_for_key[path_segment]
+if path_segment in data_for_key:
+data_for_key = data_for_key[path_segment]
+else:
+data_for_key = None  # type: ignore
+break

-pim = ProcessInstanceMetadataModel.query.filter_by(
-process_instance_id=self.process_instance_model.id,
-key=key,
-).first()
-if pim is None:
-pim = ProcessInstanceMetadataModel(
+if data_for_key is not None:
+pim = ProcessInstanceMetadataModel.query.filter_by(
 process_instance_id=self.process_instance_model.id,
 key=key,
-)
-pim.value = data_for_key
-db.session.add(pim)
-db.session.commit()
+).first()
+if pim is None:
+pim = ProcessInstanceMetadataModel(
+process_instance_id=self.process_instance_model.id,
+key=key,
+)
+pim.value = data_for_key
+db.session.add(pim)
+db.session.commit()

 def save(self) -> None:
 """Saves the current state of this processor to the database."""
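Note (illustration, not part of this commit): the metadata hunk above now walks the dotted extraction path defensively, yielding None (and skipping the save) when a segment is missing instead of raising a KeyError. A standalone sketch of that traversal:

    def value_at_path(data: dict, path: str):
        """Illustration of the dotted-path lookup; missing segments yield None."""
        current = data
        for segment in path.split("."):
            if isinstance(current, dict) and segment in current:
                current = current[segment]
            else:
                return None
        return current

    print(value_at_path({"invoice": {"total": 42}}, "invoice.total"))     # 42
    print(value_at_path({"invoice": {"total": 42}}, "invoice.currency"))  # None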
@@ -148,20 +148,18 @@ class ProcessModelService(FileSystemService):
 error_code="existing_instances",
 message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.",
 )
-self.get_process_model(process_model_id)
-# path = self.workflow_path(process_model)
-path = f"{FileSystemService.root_path()}/{process_model_id}"
+process_model = self.get_process_model(process_model_id)
+path = self.workflow_path(process_model)
 shutil.rmtree(path)

 def process_model_move(
 self, original_process_model_id: str, new_location: str
 ) -> ProcessModelInfo:
 """Process_model_move."""
-original_model_path = os.path.abspath(
-os.path.join(FileSystemService.root_path(), original_process_model_id)
-)
+process_model = self.get_process_model(original_process_model_id)
+original_model_path = self.workflow_path(process_model)
 _, model_id = os.path.split(original_model_path)
-new_relative_path = f"{new_location}/{model_id}"
+new_relative_path = os.path.join(new_location, model_id)
 new_model_path = os.path.abspath(
 os.path.join(FileSystemService.root_path(), new_relative_path)
 )
@@ -245,7 +243,7 @@ class ProcessModelService(FileSystemService):
 if full_group_id_path is None:
 full_group_id_path = process_group_id_segment
 else:
-full_group_id_path = f"{full_group_id_path}/{process_group_id_segment}"  # type: ignore
+full_group_id_path = os.path.join(full_group_id_path, process_group_id_segment)  # type: ignore
 parent_group = ProcessModelService.get_process_group(full_group_id_path)
 if parent_group:
 parent_group_array.append(
@@ -307,8 +305,8 @@ class ProcessModelService(FileSystemService):
 ) -> ProcessGroup:
 """Process_group_move."""
 original_group_path = self.process_group_path(original_process_group_id)
-original_root, original_group_id = os.path.split(original_group_path)
-new_root = f"{FileSystemService.root_path()}/{new_location}"
+_, original_group_id = os.path.split(original_group_path)
+new_root = os.path.join(FileSystemService.root_path(), new_location)
 new_group_path = os.path.abspath(
 os.path.join(FileSystemService.root_path(), new_root, original_group_id)
 )
@@ -0,0 +1,61 @@
+"""Test_authentication."""
+from flask import Flask
+from flask.testing import FlaskClient
+from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+
+
+class TestFlaskOpenId(BaseTest):
+"""An integrated Open ID that responds to openID requests.
+
+By referencing a build in YAML file. Useful for
+local development, testing, demos etc...
+"""
+
+def test_discovery_of_endpoints(
+self,
+app: Flask,
+client: FlaskClient,
+with_db_and_bpmn_file_cleanup: None,
+) -> None:
+"""Test discovery endpoints."""
+response = client.get("/openid/.well-known/openid-configuration")
+discovered_urls = response.json
+assert "http://localhost/openid" == discovered_urls["issuer"]
+assert (
+"http://localhost/openid/auth" == discovered_urls["authorization_endpoint"]
+)
+assert "http://localhost/openid/token" == discovered_urls["token_endpoint"]
+
+def test_get_login_page(
+self,
+app: Flask,
+client: FlaskClient,
+with_db_and_bpmn_file_cleanup: None,
+) -> None:
+"""It should be possible to get to a login page."""
+data = {"state": {"bubblegum": 1, "daydream": 2}}
+response = client.get("/openid/auth", query_string=data)
+assert b"<h2>Login</h2>" in response.data
+assert b"bubblegum" in response.data
+
+def test_get_token(
+self,
+app: Flask,
+client: FlaskClient,
+with_db_and_bpmn_file_cleanup: None,
+) -> None:
+"""It should be possible to get a token."""
+code = (
+"c3BpZmZ3b3JrZmxvdy1iYWNrZW5kOkpYZVFFeG0wSmhRUEx1bWdIdElJcWY1MmJEYWxIejBx"
+)
+headers = {
+"Content-Type": "application/x-www-form-urlencoded",
+"Authorization": f"Basic {code}",
+}
+data = {
+"grant_type": "authorization_code",
+"code": code,
+"redirect_url": "http://localhost:7000/v1.0/login_return",
+}
+response = client.post("/openid/token", data=data, headers=headers)
+assert response
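Note (illustration, not part of this commit): the Basic credential hard-coded in test_get_token appears to be the demo client_id:client_secret pair from default.py, base64-encoded:

    import base64

    print(base64.b64encode(b"spiffworkflow-backend:JXeQExm0JhQPLumgHtIIqf52bDalHz0q").decode())
    # c3BpZmZ3b3JrZmxvdy1iYWNrZW5kOkpYZVFFeG0wSmhRUEx1bWdIdElJcWY1MmJEYWxIejBx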