From da2d8c5df5242309dbb1c485a77b5d5862a454c6 Mon Sep 17 00:00:00 2001 From: Dan Date: Fri, 18 Nov 2022 12:10:59 -0500 Subject: [PATCH 01/29] create latest/vX.X whever we tag. --- .github/workflows/release_builds.yml | 78 ++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 .github/workflows/release_builds.yml diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml new file mode 100644 index 000000000..783442490 --- /dev/null +++ b/.github/workflows/release_builds.yml @@ -0,0 +1,78 @@ +name: Build docker containers on version release + +on: + push: + tags: [ v* ] + +jobs: + create_frontend_docker_container: + runs-on: ubuntu-latest + env: + REGISTRY: ghcr.io + IMAGE_NAME: sartography/spiffworkflow-frontend + permissions: + contents: read + packages: write + steps: + - name: Check out the repository + uses: actions/checkout@v3.0.2 + with: + # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud + fetch-depth: 0 + - name: Log in to the Container registry + uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + - name: Build and push Frontend Docker image + uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc + with: + # this action doesn't seem to respect working-directory so set context + context: spiffworkflow-backend + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + create_backend_docker_container: + runs-on: ubuntu-latest + env: + REGISTRY: ghcr.io + IMAGE_NAME: sartography/spiffworkflow-backend + permissions: + contents: read + packages: write + steps: + - name: Check out the repository + uses: actions/checkout@v3.0.2 + with: + # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud + fetch-depth: 0 + - name: Log in to the Container registry + uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + - name: Build and push Backend Docker image + uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc + with: + # this action doesn't seem to respect working-directory so set context + context: spiffworkflow-backend + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} From 2ec75256e21a390d52aff13962060a5ebc713b97 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:01:10 -0500 Subject: [PATCH 02/29] Adding connector proxy demo. 
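Rough smoke test for the image this should publish, assuming the demo proxy keeps its default port (7004) and liveness endpoint, and that the metadata action tags a latest image:

    docker pull ghcr.io/sartography/connector-proxy-demo:latest
    docker run --rm -p 7004:7004 ghcr.io/sartography/connector-proxy-demo:latest
    curl localhost:7004/liveness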
--- .github/workflows/release_builds.yml | 43 ++++++++++++++++++++++++++-- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index 783442490..c93c96402 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest env: REGISTRY: ghcr.io - IMAGE_NAME: sartography/spiffworkflow-frontend + IMAGE_NAME: sartography/ permissions: contents: read packages: write @@ -30,13 +30,14 @@ jobs: id: meta uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 with: + context: spiffworkflow-frontend images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - name: Build and push Frontend Docker image uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc with: # this action doesn't seem to respect working-directory so set context - context: spiffworkflow-backend + context: spiffworkflow-frontend push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} @@ -76,3 +77,41 @@ jobs: push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} + + create_demo-proxy: + runs-on: ubuntu-latest + env: + REGISTRY: ghcr.io + IMAGE_NAME: sartography/connector-proxy-demo + + permissions: + contents: read + packages: write + steps: + - name: Check out the repository + uses: actions/checkout@v3.0.2 + with: + # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud + fetch-depth: 0 + - name: Log in to the Container registry + uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 + with: + context: connector-proxy-demo + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + - name: Build and push the connector proxy + uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc + with: + # this action doesn't seem to respect working-directory so set context + context: connector-proxy-demo + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} From a5584ec2357f518d90ea97472d7447f53b3b1ea4 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:12:39 -0500 Subject: [PATCH 03/29] tweeking. --- .github/workflows/release_builds.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index c93c96402..5c6807bd3 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -1,6 +1,7 @@ name: Build docker containers on version release on: + workflow_dispatch push: tags: [ v* ] From eaa64f3ba87e995ab7b04499baf8c112e5b4a933 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:13:48 -0500 Subject: [PATCH 04/29] tweeking. 
--- .github/workflows/release_builds.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index 5c6807bd3..eb2457536 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -1,7 +1,7 @@ name: Build docker containers on version release on: - workflow_dispatch + workflow_dispatch: push: tags: [ v* ] From 8cf2aff2961c9500b8d998a16ada1146baa8affb Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:16:01 -0500 Subject: [PATCH 05/29] avoid running tests while doing builds. --- .github/workflows/backend_tests.yml | 5 +++-- .github/workflows/frontend_tests.yml | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml index deed14483..370af4afb 100644 --- a/.github/workflows/backend_tests.yml +++ b/.github/workflows/backend_tests.yml @@ -1,8 +1,9 @@ name: Backend Tests on: - - push - - pull_request + - workflow_dispatch +# - push +# - pull_request defaults: run: diff --git a/.github/workflows/frontend_tests.yml b/.github/workflows/frontend_tests.yml index 1bbfdbedc..0cd1d5e74 100644 --- a/.github/workflows/frontend_tests.yml +++ b/.github/workflows/frontend_tests.yml @@ -1,8 +1,9 @@ name: Frontend Tests on: - - push - - pull_request + - workflow_dispatch +# - push +# - pull_request defaults: run: From ef582919bdb20ad4b996ef8f1a62319cc738cc6f Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:17:43 -0500 Subject: [PATCH 06/29] twiddling. --- .github/workflows/release_builds.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index eb2457536..d4b9b319a 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -79,7 +79,7 @@ jobs: tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - create_demo-proxy: + create_demo-proxy: runs-on: ubuntu-latest env: REGISTRY: ghcr.io From fd711cb1b2d5e4b23294e93f4a209d4aca04e00a Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:24:25 -0500 Subject: [PATCH 07/29] updating the action. --- .github/workflows/release_builds.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index d4b9b319a..203f9ad72 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -1,4 +1,4 @@ -name: Build docker containers on version release +name: Release Builds on: workflow_dispatch: From cc4b1a2edd421039425c3fdc89aeec1b5213779f Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:26:28 -0500 Subject: [PATCH 08/29] can I even do this on a branch? --- .github/workflows/release_builds.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index 203f9ad72..6767895cf 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -78,7 +78,7 @@ jobs: push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - +# Is this getting updated, I wonder? create_demo-proxy: runs-on: ubuntu-latest env: From e8a6d1cf21b64161c428941bbbae9ab4167b582b Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:28:01 -0500 Subject: [PATCH 09/29] Getting the darn thing to trigger. 
--- .github/workflows/release_builds.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index 6767895cf..0e0ec655c 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -1,9 +1,10 @@ name: Release Builds on: - workflow_dispatch: - push: - tags: [ v* ] + - workflow_dispatch + - push +# push: +# tags: [ v* ] jobs: create_frontend_docker_container: From 5c3fe94375674bb0ad45a996ea1468e3bddc7317 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:29:12 -0500 Subject: [PATCH 10/29] Just do it on tags. --- .github/workflows/release_builds.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index 0e0ec655c..adc890ef4 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -1,10 +1,8 @@ name: Release Builds on: - - workflow_dispatch - - push -# push: -# tags: [ v* ] + push: + tags: [ v* ] jobs: create_frontend_docker_container: From 8c3be88e57bd6739cbb073e7d690fe74922c0931 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:32:57 -0500 Subject: [PATCH 11/29] more tweaking --- .github/workflows/release_builds.yml | 2 +- connector-proxy-demo/Dockerfile | 28 +++++++++++++++++++ .../bin/boot_server_in_docker | 19 +++++++++++++ 3 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 connector-proxy-demo/Dockerfile create mode 100755 connector-proxy-demo/bin/boot_server_in_docker diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index adc890ef4..ef1c3b992 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest env: REGISTRY: ghcr.io - IMAGE_NAME: sartography/ + IMAGE_NAME: sartography/spiffworkflow-frontend permissions: contents: read packages: write diff --git a/connector-proxy-demo/Dockerfile b/connector-proxy-demo/Dockerfile new file mode 100644 index 000000000..e2d89bebd --- /dev/null +++ b/connector-proxy-demo/Dockerfile @@ -0,0 +1,28 @@ +FROM ghcr.io/sartography/python:3.11 + +RUN pip install poetry +RUN useradd _gunicorn --no-create-home --user-group + +RUN apt-get update && \ + apt-get install -y -q \ + gcc libssl-dev \ + curl git-core libpq-dev \ + gunicorn3 default-mysql-client + +WORKDIR /app +COPY pyproject.toml poetry.lock /app/ +RUN poetry install --without dev + +RUN set -xe \ + && apt-get remove -y gcc python3-dev libssl-dev \ + && apt-get autoremove -y \ + && apt-get clean -y \ + && rm -rf /var/lib/apt/lists/* + +COPY . /app/ + +# run poetry install again AFTER copying the app into the image +# otherwise it does not know what the main app module is +RUN poetry install --without dev + +CMD ./bin/boot_server_in_docker diff --git a/connector-proxy-demo/bin/boot_server_in_docker b/connector-proxy-demo/bin/boot_server_in_docker new file mode 100755 index 000000000..d64f417bc --- /dev/null +++ b/connector-proxy-demo/bin/boot_server_in_docker @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +port="${CONNECTOR_PROXY_STATUS_IM_PORT:-}" +if [[ -z "$port" ]]; then + port=7004 +fi + +workers=3 + +# THIS MUST BE THE LAST COMMAND! +# default --limit-request-line is 4094. 
see https://stackoverflow.com/a/66688382/6090676 +exec poetry run gunicorn --bind "0.0.0.0:$port" --workers="$workers" --limit-request-line 8192 --timeout 90 --capture-output --access-logfile '-' --log-level debug app:app From e6146423bbdaa50405f7d3aae87a66d5152fd0de Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:43:15 -0500 Subject: [PATCH 12/29] Don't look for sources where there aren't any --- connector-proxy-demo/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/connector-proxy-demo/pyproject.toml b/connector-proxy-demo/pyproject.toml index 9a6f51f88..c3630780e 100644 --- a/connector-proxy-demo/pyproject.toml +++ b/connector-proxy-demo/pyproject.toml @@ -20,5 +20,5 @@ build-backend = "poetry.core.masonry.api" [tool.pytest.ini_options] pythonpath = [ - ".", "src", + "." ] \ No newline at end of file From 7a6282dc22ddda352e3bfa0669ac1285f273737f Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 14:27:43 -0500 Subject: [PATCH 13/29] fixing a few borked up things about the connector-proxy-demo's docker contaier. --- connector-proxy-demo/poetry.lock | 44 +++++++++++++++++++++++++++-- connector-proxy-demo/pyproject.toml | 4 +-- 2 files changed, 43 insertions(+), 5 deletions(-) diff --git a/connector-proxy-demo/poetry.lock b/connector-proxy-demo/poetry.lock index 9147d0315..d7798e2dc 100644 --- a/connector-proxy-demo/poetry.lock +++ b/connector-proxy-demo/poetry.lock @@ -55,7 +55,7 @@ optional = false python-versions = ">=3.6.0" [package.extras] -unicode_backport = ["unicodedata2"] +unicode-backport = ["unicodedata2"] [[package]] name = "click" @@ -127,6 +127,23 @@ Flask = "*" oauthlib = ">=1.1.2,<2.0.3 || >2.0.3,<2.0.4 || >2.0.4,<2.0.5 || >2.0.5,<3.0.0" requests-oauthlib = ">=0.6.2,<1.2.0" +[[package]] +name = "gunicorn" +version = "20.1.0" +description = "WSGI HTTP Server for UNIX" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +setuptools = ">=3.0" + +[package.extras] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +tornado = ["tornado (>=0.2)"] + [[package]] name = "idna" version = "3.4" @@ -214,7 +231,7 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-oauthlib" @@ -245,6 +262,19 @@ botocore = ">=1.12.36,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] +[[package]] +name = "setuptools" +version = "65.6.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock 
(>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "simplejson" version = "3.17.6" @@ -310,7 +340,7 @@ watchdog = ["watchdog"] [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "86cf682d49dc495c8cf6dc60a8aedc31ad32a293e6ceaf7b1428e0c232f8319e" +content-hash = "cc395c0c1ce2b0b7ca063a17617981b2d55db39802265b36f0bc3c4383c89919" [metadata.files] boto3 = [ @@ -350,6 +380,10 @@ Flask-OAuthlib = [ {file = "Flask-OAuthlib-0.9.6.tar.gz", hash = "sha256:5bb79c8a8e670c2eb4cb553dfc3283b6c8d1202f674934676dc173cee94fe39c"}, {file = "Flask_OAuthlib-0.9.6-py3-none-any.whl", hash = "sha256:a5c3b62959aa1922470a62b6ebf4273b75f1c29561a7eb4a69cde85d45a1d669"}, ] +gunicorn = [ + {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, + {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, +] idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, @@ -428,6 +462,10 @@ s3transfer = [ {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, ] +setuptools = [ + {file = "setuptools-65.6.0-py3-none-any.whl", hash = "sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840"}, + {file = "setuptools-65.6.0.tar.gz", hash = "sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d"}, +] simplejson = [ {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"}, diff --git a/connector-proxy-demo/pyproject.toml b/connector-proxy-demo/pyproject.toml index c3630780e..8acd820e6 100644 --- a/connector-proxy-demo/pyproject.toml +++ b/connector-proxy-demo/pyproject.toml @@ -5,14 +5,14 @@ description = "An example showing how to use the Spiffworkflow-proxy's Flask Blu authors = ["Dan "] license = "LGPL" readme = "README.md" -packages = [{include = "connector_proxy_demo", from = "src"}] +#packages = [{include = "connector_proxy_demo", from = "."}] [tool.poetry.dependencies] python = "^3.10" Flask = "^2.2.2" spiffworkflow-proxy = {git = "https://github.com/sartography/spiffworkflow-proxy"} connector-aws = { git = "https://github.com/sartography/connector-aws.git"} - +gunicorn = "^20.1.0" [build-system] requires = ["poetry-core"] From ce82b799dfa91f8bd550f6d957ec5c696ddc04de Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 15:20:30 -0500 Subject: [PATCH 14/29] adding a docker compose that will spin up all services. 
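Rough usage sketch, assuming docker compose v2 and a process models checkout sitting next to this repo (BPMN_SPEC_ABSOLUTE_DIR defaults to ./../sample-process-models):

    git clone https://github.com/sartography/sample-process-models.git ../sample-process-models
    docker compose pull
    docker compose up -d
    curl localhost:7000/v1.0/status   # backend
    curl localhost:7004/liveness      # connector proxy
    # frontend is served on http://localhost:7001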
--- docker-compose.yml | 97 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 97 insertions(+) create mode 100644 docker-compose.yml diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000..0625d1044 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,97 @@ +# Why we are running with network_mode: host +# +# In order for the backend server to talk to the mysql server, they need to be on the same network. +# I tried splitting it out where the mysql runs on a custom network and the backend runs on both +# the custom network AND with localhost. Nothing I tried worked and googling didn't help. They +# only ever mentioned one thing or using host.docker.internal which would cause the domains to +# be different. +# +# So instead we are running with both the mysql server and the backend server in host network mode. +# There may be a better way to do this but if it works, then it works. + +version: "3.8" +services: + db: + container_name: db + image: mysql:8.0.29 + platform: linux/amd64 + cap_add: + - SYS_NICE + restart: "${SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY:-no}" + environment: + - MYSQL_DATABASE=${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} + - MYSQL_ROOT_PASSWORD=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw} + - MYSQL_TCP_PORT=7003 + network_mode: host + ports: + - "7003" + volumes: + - spiffworkflow_backend:/var/lib/mysql + healthcheck: + test: mysql --user=root --password=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw} -e 'select 1' ${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} + interval: 10s + timeout: 5s + retries: 10 + + spiffworkflow-backend: + container_name: spiffworkflow-backend + image: ghcr.io/sartography/spiffworkflow-backend + depends_on: + db: + condition: service_healthy + environment: + - APPLICATION_ROOT=/ + - SPIFFWORKFLOW_BACKEND_ENV=${SPIFFWORKFLOW_BACKEND_ENV:-development} + - FLASK_DEBUG=0 + - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key} + - OPEN_ID_SERVER_URL=${OPEN_ID_SERVER_URL:-http://localhost:7002} + - SPIFFWORKFLOW_FRONTEND_URL=${SPIFFWORKFLOW_FRONTEND_URL:-http://localhost:7001} + - SPIFFWORKFLOW_BACKEND_URL=${SPIFFWORKFLOW_BACKEND_URL:-http://localhost:7000} + - SPIFFWORKFLOW_BACKEND_PORT=7000 + - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true + - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@localhost:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} + - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models + - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-false} + - SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=${SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME:-acceptance_tests.yml} + - RUN_BACKGROUND_SCHEDULER=true + ports: + - "7000:7000" + network_mode: host + volumes: + - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models + - ./log:/app/log + healthcheck: + test: curl localhost:7000/v1.0/status --fail + interval: 10s + timeout: 5s + retries: 20 + + spiffworkflow-frontend: + container_name: spiffworkflow-frontend + image: ghcr.io/sartography/spiffworkflow-frontend + environment: + - APPLICATION_ROOT=/ + - PORT0=7001 + ports: + - "7001:7001" + + connector-proxy-demo: &connector-proxy-demo + container_name: connector-proxy-demo + image: ghcr.io/sartography/connector-proxy-demo + environment: + - FLASK_ENV=${FLASK_ENV:-development} + - FLASK_DEBUG=0 + - 
FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key} + ports: + - "7004:7004" + network_mode: host + healthcheck: + test: curl localhost:7004/liveness --fail + interval: 10s + timeout: 5s + retries: 20 + + +volumes: + spiffworkflow_backend: + driver: local From 05fe43c8cbe3bdd3b3404a3a7edcc5bbef30c226 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 15:23:32 -0500 Subject: [PATCH 15/29] Reenable the tests. --- .github/workflows/backend_tests.yml | 5 ++--- .github/workflows/frontend_tests.yml | 5 ++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml index 370af4afb..deed14483 100644 --- a/.github/workflows/backend_tests.yml +++ b/.github/workflows/backend_tests.yml @@ -1,9 +1,8 @@ name: Backend Tests on: - - workflow_dispatch -# - push -# - pull_request + - push + - pull_request defaults: run: diff --git a/.github/workflows/frontend_tests.yml b/.github/workflows/frontend_tests.yml index 0cd1d5e74..1bbfdbedc 100644 --- a/.github/workflows/frontend_tests.yml +++ b/.github/workflows/frontend_tests.yml @@ -1,9 +1,8 @@ name: Frontend Tests on: - - workflow_dispatch -# - push -# - pull_request + - push + - pull_request defaults: run: From 934681f03c19fabeb4d942a5e1fcb274cc35758c Mon Sep 17 00:00:00 2001 From: Dan Date: Thu, 24 Nov 2022 14:58:16 -0500 Subject: [PATCH 16/29] just a bit of cleanup in the docker compose file. --- docker-compose.yml | 32 +++++++++----------------------- 1 file changed, 9 insertions(+), 23 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 0625d1044..d6a86149c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,18 +1,7 @@ -# Why we are running with network_mode: host -# -# In order for the backend server to talk to the mysql server, they need to be on the same network. -# I tried splitting it out where the mysql runs on a custom network and the backend runs on both -# the custom network AND with localhost. Nothing I tried worked and googling didn't help. They -# only ever mentioned one thing or using host.docker.internal which would cause the domains to -# be different. -# -# So instead we are running with both the mysql server and the backend server in host network mode. -# There may be a better way to do this but if it works, then it works. 
- version: "3.8" services: - db: - container_name: db + spiffworkflow-db: + container_name: spiffworkflow-db image: mysql:8.0.29 platform: linux/amd64 cap_add: @@ -22,7 +11,6 @@ services: - MYSQL_DATABASE=${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} - MYSQL_ROOT_PASSWORD=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw} - MYSQL_TCP_PORT=7003 - network_mode: host ports: - "7003" volumes: @@ -37,26 +25,25 @@ services: container_name: spiffworkflow-backend image: ghcr.io/sartography/spiffworkflow-backend depends_on: - db: + spiffworkflow-db: condition: service_healthy environment: - APPLICATION_ROOT=/ - SPIFFWORKFLOW_BACKEND_ENV=${SPIFFWORKFLOW_BACKEND_ENV:-development} - FLASK_DEBUG=0 - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key} - - OPEN_ID_SERVER_URL=${OPEN_ID_SERVER_URL:-http://localhost:7002} - - SPIFFWORKFLOW_FRONTEND_URL=${SPIFFWORKFLOW_FRONTEND_URL:-http://localhost:7001} - - SPIFFWORKFLOW_BACKEND_URL=${SPIFFWORKFLOW_BACKEND_URL:-http://localhost:7000} + - OPEN_ID_SERVER_URL=${OPEN_ID_SERVER_URL:-http://spiffworkflow-openid:7002} + - SPIFFWORKFLOW_FRONTEND_URL=${SPIFFWORKFLOW_FRONTEND_URL:-http://spiffworkflow-frontend:7001} + - SPIFFWORKFLOW_BACKEND_URL=${SPIFFWORKFLOW_BACKEND_URL:-http://spiffworkflow-backend:7000} - SPIFFWORKFLOW_BACKEND_PORT=7000 - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true - - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@localhost:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} + - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@spiffworkflow-db:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-false} - SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=${SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME:-acceptance_tests.yml} - RUN_BACKGROUND_SCHEDULER=true ports: - "7000:7000" - network_mode: host volumes: - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models - ./log:/app/log @@ -75,8 +62,8 @@ services: ports: - "7001:7001" - connector-proxy-demo: &connector-proxy-demo - container_name: connector-proxy-demo + spiffworkflow-connector: + container_name: spiffworkflow-connector image: ghcr.io/sartography/connector-proxy-demo environment: - FLASK_ENV=${FLASK_ENV:-development} @@ -84,7 +71,6 @@ services: - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key} ports: - "7004:7004" - network_mode: host healthcheck: test: curl localhost:7004/liveness --fail interval: 10s From 3ade3e5b999f9383eb8098effdadca498c2e6285 Mon Sep 17 00:00:00 2001 From: Dan Date: Wed, 30 Nov 2022 11:32:55 -0500 Subject: [PATCH 17/29] Adding a blueprint for openid - a very lightweight embedded authentication system to make it eaiser to try out SpiffWorkflow when you don't have openID set up with Google etal. Removing all calls to open id's user_info endpoint - as these are unncessiary. Adding a users section to the permission files -- so we can handle all user/group/permissions in one file when needed. There was a very confusing is_admin function on the user model that needed killin. 
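Quick way to poke at the embedded provider once the backend is running (assuming the default backend port of 7000):

    curl http://localhost:7000/openid/well-known/openid-configuration

It returns the issuer plus the authorization and token endpoints, and the credentials it accepts come from the new users: section of the permissions file (admin/admin and dan/password in development.yml).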
--- .../src/spiffworkflow_backend/__init__.py | 2 + .../config/permissions/development.yml | 10 ++ .../src/spiffworkflow_backend/models/user.py | 4 - .../routes/openid_blueprint/__init__.py | 1 + .../openid_blueprint/openid_blueprint.py | 116 ++++++++++++++++++ .../openid_blueprint/templates/login.html | 103 ++++++++++++++++ .../src/spiffworkflow_backend/routes/user.py | 42 ++++--- .../services/authentication_service.py | 37 ------ .../services/authorization_service.py | 6 + .../integration/test_openid_blueprint.py | 79 ++++++++++++ 10 files changed, 339 insertions(+), 61 deletions(-) create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/__init__.py create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html create mode 100644 spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index 5d591d847..389c93704 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -19,6 +19,7 @@ from werkzeug.exceptions import NotFound import spiffworkflow_backend.load_database_models # noqa: F401 from spiffworkflow_backend.config import setup_config from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint +from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import openid_blueprint from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint from spiffworkflow_backend.routes.user import verify_token from spiffworkflow_backend.routes.user_blueprint import user_blueprint @@ -103,6 +104,7 @@ def create_app() -> flask.app.Flask: app.register_blueprint(process_api_blueprint) app.register_blueprint(api_error_blueprint) app.register_blueprint(admin_blueprint, url_prefix="/admin") + app.register_blueprint(openid_blueprint, url_prefix="/openid") origins_re = [ r"^https?:\/\/%s(.*)" % o.replace(".", r"\.") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml index e17e3f110..1acace141 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml @@ -1,5 +1,15 @@ default_group: everybody +users: + admin: + email: admin@spiffworkflow.org + password: admin + dan: + email: dan@spiffworkflow.org + password: password + + + groups: admin: users: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py index c33a72e7a..eb88e5de9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py @@ -83,10 +83,6 @@ class UserModel(SpiffworkflowBaseDBModel): algorithm="HS256", ) - def is_admin(self) -> bool: - """Is_admin.""" - return True - # @classmethod # def from_open_id_user_info(cls, user_info: dict) -> Any: # """From_open_id_user_info.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/__init__.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/__init__.py new file mode 100644 index 000000000..f520b09de --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/__init__.py @@ -0,0 +1 @@ +"""__init__.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py new file mode 100644 index 000000000..dd8928c63 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py @@ -0,0 +1,116 @@ +""" +Provides the bare minimum endpoints required by SpiffWorkflow to +handle openid authentication -- definitely not a production system. +This is just here to make local development, testing, and +demonstration easier. +""" +import base64 +import time +import urllib +from urllib.parse import urlencode + +import jwt +import yaml +from flask import Blueprint, render_template, request, current_app, redirect, url_for, g + +openid_blueprint = Blueprint( + "openid", __name__, template_folder="templates", static_folder="static" +) + +MY_SECRET_CODE = ":this_should_be_some_crazy_code_different_all_the_time" + +@openid_blueprint.route("/well-known/openid-configuration", methods=["GET"]) +def well_known(): + """OpenID Discovery endpoint -- as these urls can be very different from system to system, + this is just a small subset.""" + host_url = request.host_url.strip('/') + return { + "issuer": f"{host_url}/openid", + "authorization_endpoint": f"{host_url}{url_for('openid.auth')}", + "token_endpoint": f"{host_url}{url_for('openid.token')}", + } + + +@openid_blueprint.route("/auth", methods=["GET"]) +def auth(): + """Accepts a series of parameters""" + return render_template('login.html', + state=request.args.get('state'), + response_type=request.args.get('response_type'), + client_id=request.args.get('client_id'), + scope=request.args.get('scope'), + redirect_uri=request.args.get('redirect_uri'), + error_message=request.args.get('error_message')) + + +@openid_blueprint.route("/form_submit", methods=["POST"]) +def form_submit(): + + users = get_users() + if request.values['Uname'] in users and request.values['Pass'] == users[request.values['Uname']]["password"]: + # Redirect back to the end user with some detailed information + state = request.values.get('state') + data = { + "state": base64.b64encode(bytes(state, 'UTF-8')), + "code": request.values['Uname'] + MY_SECRET_CODE + } + url = request.values.get('redirect_uri') + urlencode(data) + return redirect(url, code=200) + else: + return render_template('login.html', + state=request.values.get('state'), + response_type=request.values.get('response_type'), + client_id=request.values.get('client_id'), + scope=request.values.get('scope'), + redirect_uri=request.values.get('redirect_uri'), + error_message="Login failed. 
Please try agian.") + + +@openid_blueprint.route("/token", methods=["POST"]) +def token(): + """Url that will return a valid token, given the super secret sauce""" + grant_type=request.values.get('grant_type') + code=request.values.get('code') + redirect_uri=request.values.get('redirect_uri') + + """We just stuffed the user name on the front of the code, so grab it.""" + user_name, secret_hash = code.split(":") + + """Get authentication from headers.""" + authorization = request.headers.get('Authorization') + authorization = authorization[6:] # Remove "Basic" + authorization = base64.b64decode(authorization).decode('utf-8') + client_id, client_secret = authorization.split(":") + + base_url = url_for(openid_blueprint) + access_token = "..." + refresh_token = "..." + id_token = jwt.encode({ + "iss": base_url, + "aud": [client_id, "account"], + "iat": time.time(), + "exp": time.time() + 86400 # Exprire after a day. + }) + + {'exp': 1669757386, 'iat': 1669755586, 'auth_time': 1669753049, 'jti': '0ec2cc09-3498-4921-a021-c3b98427df70', + 'iss': 'http://localhost:7002/realms/spiffworkflow', 'aud': 'spiffworkflow-backend', + 'sub': '99e7e4ea-d4ae-4944-bd31-873dac7b004c', 'typ': 'ID', 'azp': 'spiffworkflow-backend', + 'session_state': '8751d5f6-2c60-4205-9be0-2b1005f5891e', 'at_hash': 'O5i-VLus6sryR0grMS2Y4w', 'acr': '0', + 'sid': '8751d5f6-2c60-4205-9be0-2b1005f5891e', 'email_verified': False, 'preferred_username': 'dan'} + + response = { + "access_token": id_token, + "id_token": id_token, + } + +@openid_blueprint.route("/refresh", methods=["POST"]) +def refresh(): + pass + +def get_users(): + with open(current_app.config["PERMISSIONS_FILE_FULLPATH"]) as file: + permission_configs = yaml.safe_load(file) + if "users" in permission_configs: + return permission_configs["users"] + else: + return {} \ No newline at end of file diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html new file mode 100644 index 000000000..1da64914d --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html @@ -0,0 +1,103 @@ + + + + Login Form + + + + +
Login to SpiffWorkflow
{{error_message}}
+ + + \ No newline at end of file diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py index 5fe10e0af..c9059427c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py @@ -1,6 +1,7 @@ """User.""" import ast import base64 +import json from typing import Any from typing import Dict from typing import Optional @@ -14,9 +15,12 @@ from flask import request from flask_bpmn.api.api_error import ApiError from werkzeug.wrappers import Response +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel +from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authentication_service import ( - AuthenticationService, + AuthenticationService, AuthenticationProviderTypes, ) from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.user_service import UserService @@ -58,7 +62,6 @@ def verify_token( decoded_token = get_decoded_token(token) if decoded_token is not None: - if "token_type" in decoded_token: token_type = decoded_token["token_type"] if token_type == "internal": # noqa: S105 @@ -68,11 +71,11 @@ def verify_token( current_app.logger.error( f"Exception in verify_token getting user from decoded internal token. {e}" ) - elif "iss" in decoded_token.keys(): try: - user_info = AuthenticationService.get_user_info_from_open_id(token) - except ApiError as ae: + if AuthenticationService.validate_id_token(token): + user_info = decoded_token + except ApiError as ae: # API Error is only thrown in the token is outdated. # Try to refresh the token user = UserService.get_user_by_service_and_service_id( "open_id", decoded_token["sub"] @@ -86,14 +89,9 @@ def verify_token( ) ) if auth_token and "error" not in auth_token: - # redirect to original url, with auth_token? - user_info = ( - AuthenticationService.get_user_info_from_open_id( - auth_token["access_token"] - ) - ) - if not user_info: - raise ae + # We have the user, but this code is a bit convoluted, and will later demand + # a user_info object so it can look up the user. Sorry to leave this crap here. 
+ user_info = {"sub": user.service_id } else: raise ae else: @@ -202,6 +200,15 @@ def login(redirect_url: str = "/") -> Response: ) return redirect(login_redirect_url) +def parse_id_token(token: str) -> dict: + parts = token.split(".") + if len(parts) != 3: + raise Exception("Incorrect id token format") + + payload = parts[1] + padded = payload + '=' * (4 - len(payload) % 4) + decoded = base64.b64decode(padded) + return json.loads(decoded) def login_return(code: str, state: str, session_state: str) -> Optional[Response]: """Login_return.""" @@ -211,10 +218,9 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response if "id_token" in auth_token_object: id_token = auth_token_object["id_token"] + user_info = parse_id_token(id_token) + if AuthenticationService.validate_id_token(id_token): - user_info = AuthenticationService.get_user_info_from_open_id( - auth_token_object["access_token"] - ) if user_info and "error" not in user_info: user_model = AuthorizationService.create_user_from_sign_in(user_info) g.user = user_model.id @@ -332,15 +338,11 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo .filter(UserModel.service_id == service_id) .first() ) - # user: UserModel = UserModel.query.filter() if user: return user user = UserModel( username=service_id, - uid=service_id, service=service, service_id=service_id, - name="API User", ) - return user diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index 18f08d0f3..a12e57fae 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -42,43 +42,6 @@ class AuthenticationService: open_id_client_secret_key, ) - @classmethod - def get_user_info_from_open_id(cls, token: str) -> dict: - """The token is an auth_token.""" - ( - open_id_server_url, - open_id_client_id, - open_id_realm_name, - open_id_client_secret_key, - ) = cls.get_open_id_args() - - headers = {"Authorization": f"Bearer {token}"} - - request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/userinfo" - try: - request_response = requests.get(request_url, headers=headers) - except Exception as e: - current_app.logger.error(f"Exception in get_user_info_from_id_token: {e}") - raise ApiError( - error_code="token_error", - message=f"Exception in get_user_info_from_id_token: {e}", - status_code=401, - ) from e - - if request_response.status_code == 401: - raise ApiError( - error_code="invalid_token", message="Please login", status_code=401 - ) - elif request_response.status_code == 200: - user_info: dict = json.loads(request_response.text) - return user_info - - raise ApiError( - error_code="user_info_error", - message="Cannot get user info in get_user_info_from_id_token", - status_code=401, - ) - @staticmethod def get_backend_url() -> str: """Get_backend_url.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index ea488f7a9..f29c09851 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -1,4 +1,5 @@ """Authorization_service.""" +import inspect import re from typing import Optional from typing 
import Union @@ -23,6 +24,7 @@ from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserNotFoundError from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel +from spiffworkflow_backend.routes.openid_blueprint import openid_blueprint from spiffworkflow_backend.services.group_service import GroupService from spiffworkflow_backend.services.user_service import UserService @@ -125,6 +127,7 @@ class AuthorizationService: db.session.add(user_group_assignemnt) db.session.commit() + @classmethod def import_permissions_from_yaml_file( cls, raise_if_missing_user: bool = False @@ -241,6 +244,7 @@ class AuthorizationService: return True api_view_function = current_app.view_functions[request.endpoint] + module = inspect.getmodule(api_view_function) if ( api_view_function and api_view_function.__name__.startswith("login") @@ -248,6 +252,7 @@ class AuthorizationService: or api_view_function.__name__.startswith("console_ui_") or api_view_function.__name__ in authentication_exclusion_list or api_view_function.__name__ in swagger_functions + or module == openid_blueprint ): return True @@ -442,6 +447,7 @@ class AuthorizationService: email=email, ) + # this may eventually get too slow. # when it does, be careful about backgrounding, because # the user will immediately need permissions to use the site. diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py new file mode 100644 index 000000000..b234d914e --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py @@ -0,0 +1,79 @@ +"""Test_authentication.""" +import ast +import base64 + +from flask import Flask +from flask.testing import FlaskClient + +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.services.authentication_service import ( + AuthenticationService, +) + + +class TestFaskOpenId(BaseTest): + """An integrated Open ID that responds to openID requests + by referencing a build in YAML file. Useful for + local development, testing, demos etc...""" + + def test_discovery_of_endpoints(self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None,) -> None: + response = client.get("/openid/well-known/openid-configuration") + discovered_urls = response.json + assert "http://localhost/openid" == discovered_urls["issuer"] + assert "http://localhost/openid/auth" == discovered_urls["authorization_endpoint"] + assert "http://localhost/openid/token" == discovered_urls["token_endpoint"] + + def test_get_login_page(self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None,) -> None: + # It should be possible to get to a login page + data = { + "state": {"bubblegum":1, "daydream":2} + } + response = client.get("/openid/auth", query_string=data) + assert b"
Login to SpiffWorkflow
" in response.data + assert b"bubblegum" in response.data + + def test_get_token(self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None,) -> None: + + code = "c3BpZmZ3b3JrZmxvdy1iYWNrZW5kOkpYZVFFeG0wSmhRUEx1bWdIdElJcWY1MmJEYWxIejBx" + headers = { + "Content-Type": "application/x-www-form-urlencoded", + "Authorization": f"Basic {code}", + } + data = { + "grant_type": 'authorization_code', + "code": code, + "redirect_url": 'http://localhost:7000/v1.0/login_return' + } + response = client.post("/openid/token", data=data, headers=headers) + assert response + + def test_refresh_token_endpoint(self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None,) -> None: + pass + # Handle a refresh with the following + # data provided + # "grant_type": "refresh_token", + # "refresh_token": refresh_token, + # "client_id": open_id_client_id, + # "client_secret": open_id_client_secret_key, + # Return an json response with: + # id - (this users' id) + + def test_logout(self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None,) -> None: + pass + # It should be possible to logout and be redirected back. From 2082c113b2493638cffe4e53a7f1ba59af4d1b33 Mon Sep 17 00:00:00 2001 From: Dan Date: Wed, 30 Nov 2022 11:51:20 -0500 Subject: [PATCH 18/29] Not all open id systems have realms like KeyCloak does -- so removing this in favor of setting just one value - which is the base url of the openid system -- which will work across all openid systems. --- .../spiffworkflow_backend/config/default.py | 3 +-- .../services/authentication_service.py | 18 +++++------------- 2 files changed, 6 insertions(+), 15 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py index 53d670c77..c32c48828 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py @@ -30,9 +30,8 @@ CONNECTOR_PROXY_URL = environ.get( GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true" # Open ID server -OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7002") +OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7002/realms/spiffworkflow") OPEN_ID_CLIENT_ID = environ.get("OPEN_ID_CLIENT_ID", default="spiffworkflow-backend") -OPEN_ID_REALM_NAME = environ.get("OPEN_ID_REALM_NAME", default="spiffworkflow") OPEN_ID_CLIENT_SECRET_KEY = environ.get( "OPEN_ID_CLIENT_SECRET_KEY", default="JXeQExm0JhQPLumgHtIIqf52bDalHz0q" ) # noqa: S105 diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index a12e57fae..f8171d88d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -15,7 +15,6 @@ from werkzeug.wrappers import Response from spiffworkflow_backend.models.refresh_token import RefreshTokenModel - class AuthenticationProviderTypes(enum.Enum): """AuthenticationServiceProviders.""" @@ -31,14 +30,12 @@ class AuthenticationService: """Get_open_id_args.""" open_id_server_url = current_app.config["OPEN_ID_SERVER_URL"] open_id_client_id = current_app.config["OPEN_ID_CLIENT_ID"] - open_id_realm_name = current_app.config["OPEN_ID_REALM_NAME"] open_id_client_secret_key = 
current_app.config[ "OPEN_ID_CLIENT_SECRET_KEY" ] # noqa: S105 return ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) @@ -55,11 +52,10 @@ class AuthenticationService: ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) = AuthenticationService.get_open_id_args() request_url = ( - f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/logout?" + f"{open_id_server_url}/protocol/openid-connect/logout?" + f"post_logout_redirect_uri={return_redirect_url}&" + f"id_token_hint={id_token}" ) @@ -79,12 +75,11 @@ class AuthenticationService: ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) = AuthenticationService.get_open_id_args() return_redirect_url = f"{self.get_backend_url()}{redirect_url}" login_redirect_url = ( - f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/auth?" + f"{open_id_server_url}/protocol/openid-connect/auth?" + f"state={state}&" + "response_type=code&" + f"client_id={open_id_client_id}&" @@ -100,7 +95,6 @@ class AuthenticationService: ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) = AuthenticationService.get_open_id_args() @@ -117,7 +111,7 @@ class AuthenticationService: "redirect_uri": f"{self.get_backend_url()}{redirect_url}", } - request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + request_url = f"{open_id_server_url}/protocol/openid-connect/token" response = requests.post(request_url, data=data, headers=headers) auth_token_object: dict = json.loads(response.text) @@ -131,7 +125,6 @@ class AuthenticationService: ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) = cls.get_open_id_args() try: @@ -142,7 +135,7 @@ class AuthenticationService: message="Cannot decode id_token", status_code=401, ) from e - if decoded_token["iss"] != f"{open_id_server_url}/realms/{open_id_realm_name}": + if decoded_token["iss"] != open_id_server_url: valid = False elif ( open_id_client_id not in decoded_token["aud"] @@ -207,7 +200,6 @@ class AuthenticationService: ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) = cls.get_open_id_args() @@ -226,7 +218,7 @@ class AuthenticationService: "client_secret": open_id_client_secret_key, } - request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + request_url = f"{open_id_server_url}/protocol/openid-connect/token" response = requests.post(request_url, data=data, headers=headers) auth_token_object: dict = json.loads(response.text) From ca339ee9334f16fc65f696db0a0eeba066450549 Mon Sep 17 00:00:00 2001 From: Dan Date: Wed, 30 Nov 2022 16:33:44 -0500 Subject: [PATCH 19/29] Use the "well-known" configuration dictionary from openid to get the url endpoints, rather than trying to configure or guess the correct endpoint urls. 
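The discovery document lives at a standard path under whatever OPEN_ID_SERVER_URL points at, so that base url is the only thing left to configure. For example, against the embedded provider (assuming the default backend port):

    curl http://localhost:7000/openid/.well-known/openid-configuration

open_id_endpoint_for_name() fetches that once, caches the JSON, and looks up names like authorization_endpoint and token_endpoint from it.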
--- .../openid_blueprint/openid_blueprint.py | 7 +- .../services/authentication_service.py | 92 ++++++++----------- 2 files changed, 43 insertions(+), 56 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py index dd8928c63..b16ba46af 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py @@ -19,7 +19,7 @@ openid_blueprint = Blueprint( MY_SECRET_CODE = ":this_should_be_some_crazy_code_different_all_the_time" -@openid_blueprint.route("/well-known/openid-configuration", methods=["GET"]) +@openid_blueprint.route("/.well-known/openid-configuration", methods=["GET"]) def well_known(): """OpenID Discovery endpoint -- as these urls can be very different from system to system, this is just a small subset.""" @@ -52,9 +52,10 @@ def form_submit(): state = request.values.get('state') data = { "state": base64.b64encode(bytes(state, 'UTF-8')), - "code": request.values['Uname'] + MY_SECRET_CODE + "code": request.values['Uname'] + MY_SECRET_CODE, + "session_state": "" } - url = request.values.get('redirect_uri') + urlencode(data) + url = request.values.get('redirect_uri') + "?" + urlencode(data) return redirect(url, code=200) else: return render_template('login.html', diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index f8171d88d..5fdedf767 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -3,6 +3,7 @@ import base64 import enum import json import time +import typing from typing import Optional import jwt @@ -15,6 +16,7 @@ from werkzeug.wrappers import Response from spiffworkflow_backend.models.refresh_token import RefreshTokenModel + class AuthenticationProviderTypes(enum.Enum): """AuthenticationServiceProviders.""" @@ -24,20 +26,31 @@ class AuthenticationProviderTypes(enum.Enum): class AuthenticationService: """AuthenticationService.""" + ENDPOINT_CACHE = {} # We only need to find the openid endpoints once, then we can cache them. 
@staticmethod - def get_open_id_args() -> tuple: - """Get_open_id_args.""" - open_id_server_url = current_app.config["OPEN_ID_SERVER_URL"] - open_id_client_id = current_app.config["OPEN_ID_CLIENT_ID"] - open_id_client_secret_key = current_app.config[ - "OPEN_ID_CLIENT_SECRET_KEY" - ] # noqa: S105 - return ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) + def client_id(): + return current_app.config["OPEN_ID_CLIENT_ID"] + + @staticmethod + def server_url(): + return current_app.config["OPEN_ID_SERVER_URL"] + + @staticmethod + def secret_key(): + return current_app.config["OPEN_ID_CLIENT_SECRET_KEY"] + + + @classmethod + def open_id_endpoint_for_name(cls, name: str) -> None: + """All openid systems provide a mapping of static names to the full path of that endpoint.""" + if name not in AuthenticationService.ENDPOINT_CACHE: + request_url = f"{cls.server_url()}/.well-known/openid-configuration" + response = requests.get(request_url) + AuthenticationService.ENDPOINT_CACHE = response.json() + if name not in AuthenticationService.ENDPOINT_CACHE: + raise Exception(f"Unknown OpenID Endpoint: {name}") + return AuthenticationService.ENDPOINT_CACHE[name] @staticmethod def get_backend_url() -> str: @@ -49,14 +62,9 @@ class AuthenticationService: if redirect_url is None: redirect_url = "/" return_redirect_url = f"{self.get_backend_url()}/v1.0/logout_return" - ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) = AuthenticationService.get_open_id_args() request_url = ( - f"{open_id_server_url}/protocol/openid-connect/logout?" - + f"post_logout_redirect_uri={return_redirect_url}&" + self.open_id_endpoint_for_name("end_session_endpoint") + + f"?post_logout_redirect_uri={return_redirect_url}&" + f"id_token_hint={id_token}" ) @@ -72,17 +80,12 @@ class AuthenticationService: self, state: str, redirect_url: str = "/v1.0/login_return" ) -> str: """Get_login_redirect_url.""" - ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) = AuthenticationService.get_open_id_args() return_redirect_url = f"{self.get_backend_url()}{redirect_url}" login_redirect_url = ( - f"{open_id_server_url}/protocol/openid-connect/auth?" 
- + f"state={state}&" + self.open_id_endpoint_for_name("authorization_endpoint") + + f"?state={state}&" + "response_type=code&" - + f"client_id={open_id_client_id}&" + + f"client_id={self.client_id()}&" + "scope=openid&" + f"redirect_uri={return_redirect_url}" ) @@ -92,13 +95,7 @@ class AuthenticationService: self, code: str, redirect_url: str = "/v1.0/login_return" ) -> dict: """Get_auth_token_object.""" - ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) = AuthenticationService.get_open_id_args() - - backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}" + backend_basic_auth_string = f"{self.client_id()}:{self.secret_key()}" backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) headers = { @@ -111,7 +108,7 @@ class AuthenticationService: "redirect_uri": f"{self.get_backend_url()}{redirect_url}", } - request_url = f"{open_id_server_url}/protocol/openid-connect/token" + request_url = self.open_id_endpoint_for_name("token_endpoint") response = requests.post(request_url, data=data, headers=headers) auth_token_object: dict = json.loads(response.text) @@ -122,11 +119,6 @@ class AuthenticationService: """Https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation.""" valid = True now = time.time() - ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) = cls.get_open_id_args() try: decoded_token = jwt.decode(id_token, options={"verify_signature": False}) except Exception as e: @@ -135,15 +127,15 @@ class AuthenticationService: message="Cannot decode id_token", status_code=401, ) from e - if decoded_token["iss"] != open_id_server_url: + if decoded_token["iss"] != cls.server_url(): valid = False elif ( - open_id_client_id not in decoded_token["aud"] + cls.client_id() not in decoded_token["aud"] and "account" not in decoded_token["aud"] ): valid = False elif "azp" in decoded_token and decoded_token["azp"] not in ( - open_id_client_id, + cls.client_id(), "account", ): valid = False @@ -196,14 +188,8 @@ class AuthenticationService: @classmethod def get_auth_token_from_refresh_token(cls, refresh_token: str) -> dict: - """Get a new auth_token from a refresh_token.""" - ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) = cls.get_open_id_args() - backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}" + backend_basic_auth_string = f"{cls.client_id()}:{cls.secret_key()}" backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) headers = { @@ -214,11 +200,11 @@ class AuthenticationService: data = { "grant_type": "refresh_token", "refresh_token": refresh_token, - "client_id": open_id_client_id, - "client_secret": open_id_client_secret_key, + "client_id": cls.client_id(), + "client_secret": cls.secret_key(), } - request_url = f"{open_id_server_url}/protocol/openid-connect/token" + request_url = cls.open_id_endpoint_for_name("token_endpoint") response = requests.post(request_url, data=data, headers=headers) auth_token_object: dict = json.loads(response.text) From e8cbe1df84b061e223a48370be83b663a72e7ab2 Mon Sep 17 00:00:00 2001 From: Dan Date: Thu, 1 Dec 2022 11:42:36 -0500 Subject: [PATCH 20/29] A little cleanup of the ui Don't check authorization on static assets Do not require unique username on user table (uniqueness check is on the service and service id composite.) 
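In model terms, the username column stays but loses its unique index, and uniqueness is
instead enforced by the existing composite constraint on (service, service_id) -- see the
user.py hunk below. A minimal plain-SQLAlchemy sketch of the resulting shape (the project
itself declares this through flask-sqlalchemy's db.Model wrappers, so names and imports
here differ from the real model):

    import sqlalchemy as sa
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = "user"
        # usernames may repeat; the pair (service, service_id) must be unique
        __table_args__ = (
            sa.UniqueConstraint("service", "service_id", name="service_key"),
        )

        id = sa.Column(sa.Integer, primary_key=True)
        username = sa.Column(sa.String(255), nullable=False)  # no longer unique
        service = sa.Column(sa.String(50), nullable=False)
        service_id = sa.Column(sa.String(255), nullable=False)

The regenerated migration below drops the old UniqueConstraint on username accordingly.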
--- .../{ff1c1628337c_.py => 3f049fa4d8ac_.py} | 9 +- .../config/permissions/development.yml | 6 +- .../src/spiffworkflow_backend/models/user.py | 2 +- .../openid_blueprint/openid_blueprint.py | 79 +++++++----- .../routes/openid_blueprint/static/login.css | 112 ++++++++++++++++++ .../routes/openid_blueprint/static/logo.png | Bin 0 -> 10138 bytes .../openid_blueprint/static/logo_small.png | Bin 0 -> 5000 bytes .../openid_blueprint/templates/login.html | 85 ++----------- .../services/authorization_service.py | 3 +- .../integration/test_openid_blueprint.py | 22 +--- 10 files changed, 177 insertions(+), 141 deletions(-) rename spiffworkflow-backend/migrations/versions/{ff1c1628337c_.py => 3f049fa4d8ac_.py} (99%) create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/logo.png create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/logo_small.png diff --git a/spiffworkflow-backend/migrations/versions/ff1c1628337c_.py b/spiffworkflow-backend/migrations/versions/3f049fa4d8ac_.py similarity index 99% rename from spiffworkflow-backend/migrations/versions/ff1c1628337c_.py rename to spiffworkflow-backend/migrations/versions/3f049fa4d8ac_.py index d8da6d3c4..dc8675a67 100644 --- a/spiffworkflow-backend/migrations/versions/ff1c1628337c_.py +++ b/spiffworkflow-backend/migrations/versions/3f049fa4d8ac_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: ff1c1628337c +Revision ID: 3f049fa4d8ac Revises: -Create Date: 2022-11-28 15:08:52.014254 +Create Date: 2022-11-30 16:49:54.805372 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. 
-revision = 'ff1c1628337c' +revision = '3f049fa4d8ac' down_revision = None branch_labels = None depends_on = None @@ -79,8 +79,7 @@ def upgrade(): sa.Column('email', sa.String(length=255), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('service', 'service_id', name='service_key'), - sa.UniqueConstraint('uid'), - sa.UniqueConstraint('username') + sa.UniqueConstraint('uid') ) op.create_table('message_correlation_property', sa.Column('id', sa.Integer(), nullable=False), diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml index 1acace141..d92daeaf3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml @@ -4,11 +4,7 @@ users: admin: email: admin@spiffworkflow.org password: admin - dan: - email: dan@spiffworkflow.org - password: password - - + preferred_username: Admin groups: admin: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py index eb88e5de9..ed6d10e6d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py @@ -30,7 +30,7 @@ class UserModel(SpiffworkflowBaseDBModel): __table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),) id = db.Column(db.Integer, primary_key=True) - username = db.Column(db.String(255), nullable=False, unique=True) + username = db.Column(db.String(255), nullable=False, unique=False) # server and service id must be unique, not username. uid = db.Column(db.String(50), unique=True) service = db.Column(db.String(50), nullable=False, unique=False) service_id = db.Column(db.String(255), nullable=False, unique=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py index b16ba46af..5c96d62b8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py @@ -1,23 +1,22 @@ """ Provides the bare minimum endpoints required by SpiffWorkflow to -handle openid authentication -- definitely not a production system. -This is just here to make local development, testing, and -demonstration easier. +handle openid authentication -- definitely not a production ready system. +This is just here to make local development, testing, and demonstration easier. 
""" import base64 import time -import urllib from urllib.parse import urlencode import jwt import yaml -from flask import Blueprint, render_template, request, current_app, redirect, url_for, g +from flask import Blueprint, render_template, request, current_app, redirect, url_for openid_blueprint = Blueprint( "openid", __name__, template_folder="templates", static_folder="static" ) -MY_SECRET_CODE = ":this_should_be_some_crazy_code_different_all_the_time" +MY_SECRET_CODE = ":this_is_not_secure_do_not_use_in_production" + @openid_blueprint.route("/.well-known/openid-configuration", methods=["GET"]) def well_known(): @@ -26,8 +25,9 @@ def well_known(): host_url = request.host_url.strip('/') return { "issuer": f"{host_url}/openid", - "authorization_endpoint": f"{host_url}{url_for('openid.auth')}", + "authorization_endpoint": f"{host_url}{url_for('openid.auth')}", "token_endpoint": f"{host_url}{url_for('openid.token')}", + "end_session_endpoint": f"{host_url}{url_for('openid.end_session')}", } @@ -40,23 +40,22 @@ def auth(): client_id=request.args.get('client_id'), scope=request.args.get('scope'), redirect_uri=request.args.get('redirect_uri'), - error_message=request.args.get('error_message')) + error_message=request.args.get('error_message', '')) @openid_blueprint.route("/form_submit", methods=["POST"]) def form_submit(): - users = get_users() if request.values['Uname'] in users and request.values['Pass'] == users[request.values['Uname']]["password"]: # Redirect back to the end user with some detailed information state = request.values.get('state') data = { - "state": base64.b64encode(bytes(state, 'UTF-8')), + "state": state, "code": request.values['Uname'] + MY_SECRET_CODE, "session_state": "" } url = request.values.get('redirect_uri') + "?" + urlencode(data) - return redirect(url, code=200) + return redirect(url) else: return render_template('login.html', state=request.values.get('state'), @@ -64,18 +63,19 @@ def form_submit(): client_id=request.values.get('client_id'), scope=request.values.get('scope'), redirect_uri=request.values.get('redirect_uri'), - error_message="Login failed. Please try agian.") + error_message="Login failed. Please try again.") @openid_blueprint.route("/token", methods=["POST"]) def token(): """Url that will return a valid token, given the super secret sauce""" - grant_type=request.values.get('grant_type') - code=request.values.get('code') - redirect_uri=request.values.get('redirect_uri') + grant_type = request.values.get('grant_type') + code = request.values.get('code') + redirect_uri = request.values.get('redirect_uri') """We just stuffed the user name on the front of the code, so grab it.""" user_name, secret_hash = code.split(":") + user_details = get_users()[user_name] """Get authentication from headers.""" authorization = request.headers.get('Authorization') @@ -83,35 +83,50 @@ def token(): authorization = base64.b64decode(authorization).decode('utf-8') client_id, client_secret = authorization.split(":") - base_url = url_for(openid_blueprint) - access_token = "..." - refresh_token = "..." + base_url = request.host_url + "openid" + access_token = user_name + ":" + "always_good_demo_access_token" + refresh_token = user_name + ":" + "always_good_demo_refresh_token" + id_token = jwt.encode({ "iss": base_url, "aud": [client_id, "account"], "iat": time.time(), - "exp": time.time() + 86400 # Exprire after a day. 
- }) - - {'exp': 1669757386, 'iat': 1669755586, 'auth_time': 1669753049, 'jti': '0ec2cc09-3498-4921-a021-c3b98427df70', - 'iss': 'http://localhost:7002/realms/spiffworkflow', 'aud': 'spiffworkflow-backend', - 'sub': '99e7e4ea-d4ae-4944-bd31-873dac7b004c', 'typ': 'ID', 'azp': 'spiffworkflow-backend', - 'session_state': '8751d5f6-2c60-4205-9be0-2b1005f5891e', 'at_hash': 'O5i-VLus6sryR0grMS2Y4w', 'acr': '0', - 'sid': '8751d5f6-2c60-4205-9be0-2b1005f5891e', 'email_verified': False, 'preferred_username': 'dan'} - + "exp": time.time() + 86400, # Expire after a day. + "sub": user_name, + "preferred_username": user_details.get('preferred_username', user_name) + }, + client_secret, + algorithm="HS256", + ) response = { "access_token": id_token, "id_token": id_token, + "refresh_token": id_token } + return response + + +@openid_blueprint.route("/end_session", methods=["GET"]) +def end_session(): + redirect_url = request.args.get('post_logout_redirect_uri') + id_token_hint = request.args.get('id_token_hint') + return redirect(redirect_url) + @openid_blueprint.route("/refresh", methods=["POST"]) def refresh(): pass + +permission_cache = None + + def get_users(): - with open(current_app.config["PERMISSIONS_FILE_FULLPATH"]) as file: - permission_configs = yaml.safe_load(file) - if "users" in permission_configs: - return permission_configs["users"] + global permission_cache + if not permission_cache: + with open(current_app.config["PERMISSIONS_FILE_FULLPATH"]) as file: + permission_cache = yaml.safe_load(file) + if "users" in permission_cache: + return permission_cache["users"] else: - return {} \ No newline at end of file + return {} diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css new file mode 100644 index 000000000..15b093f67 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css @@ -0,0 +1,112 @@ + body{ + margin: 0; + padding: 0; + background-color:white; + font-family: 'Arial'; + } + header { + width: 100%; + background-color: black; + } + .logo_small { + padding: 5px 20px; + } + .error { + margin: 20px auto; + color: red; + font-weight: bold; + text-align: center; + } + .login{ + width: 400px; + overflow: hidden; + margin: 20px auto; + padding: 50px; + background: #fff; + border-radius: 15px ; + } + h2{ + text-align: center; + color: #277582; + padding: 20px; + } + label{ + color: #fff; + width: 200px; + display: inline-block; + } + #log { + width: 100px; + height: 50px; + border: none; + padding-left: 7px; + background-color:#202020; + color: #DDD; + text-align: left; + } + .cds--btn--primary { + background-color: #0f62fe; + border: 1px solid #0000; + color: #fff; + } + .cds--btn { + align-items: center; + border: 0; + border-radius: 0; + box-sizing: border-box; + cursor: pointer; + display: inline-flex; + flex-shrink: 0; + font-family: inherit; + font-size: 100%; + font-size: .875rem; + font-weight: 400; + justify-content: space-between; + letter-spacing: .16px; + line-height: 1.28572; + margin: 0; + max-width: 20rem; + min-height: 3rem; + outline: none; + padding: calc(0.875rem - 3px) 63px calc(0.875rem - 3px) 15px; + position: relative; + text-align: left; + text-decoration: none; + transition: background 70ms cubic-bezier(0, 0, .38, .9), box-shadow 70ms cubic-bezier(0, 0, .38, .9), border-color 70ms cubic-bezier(0, 0, .38, .9), outline 70ms cubic-bezier(0, 0, .38, .9); + vertical-align: initial; + 
vertical-align: top; + width: max-content; + } + .cds--btn:hover { + background-color: #0145c5; + } + .cds--btn:focus { + background-color: #01369a; + } + + .cds--text-input { + background-color: #eee; + border: none; + border-bottom: 1px solid #8d8d8d; + color: #161616; + font-family: inherit; + font-size: .875rem; + font-weight: 400; + height: 2.5rem; + letter-spacing: .16px; + line-height: 1.28572; + outline: 2px solid #0000; + outline-offset: -2px; + padding: 0 1rem; + transition: background-color 70ms cubic-bezier(.2,0,.38,.9),outline 70ms cubic-bezier(.2,0,.38,.9); + width: 100%; + } + + span{ + color: white; + font-size: 17px; + } + a{ + float: right; + background-color: grey; + } diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/logo.png b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..4cffb07fdf112e035c669c06bd5cbdc9355f43a5 GIT binary patch literal 10138 zcmW++byyT%7hbwVSh_*F^G7Qo-MMtS(%l`qg0u)oHz**;g22)xNJw`rEJ$~Ee)!!# zX3jJB&O9?`&OPrv?|WmkHI?vjsc-=R0KST{yeM*tjV12VLpU7b(3JjJ);SY`y)gJ#7Gfetx_Tu1;QHYj+!7H&46l!`D;*00Tfp zURK{f=OEX?pJqC>dfBUG1D{%rhZ5X{o?IP8NW}0e!)lGN|CA+@n_r#b?gjdfG?&Ys znn9jgEN3-A!WtAxUt3m628c|x95w`?GKY`zIda%&=@hg6D=es;9#k{6rpSjCT61QuN`1^kUjwe&B{8N;zkqGm*=_*t@M5Qs)7Zwq*kw2wd5wk-3OuN~1 z-ow0--6^(`-z}M%kx@BXHv1W+BxmNgcg7u2f5_a;@^Q_YjQ{Yfrh`!Ofkqt;ptVh}7JFEMe&v?>S z3b$2BzQj?7o{3^g6KO&m|2B#9OH5>BvsdI28|I;>rz&Tz!wB0!vaJ13K88rtQyBWS z<%+?I%t{ry7Q+z=5+DM^K0WC?>XrhBw?V#lR&;t<2N3y-LX537s_I)luC>yov0#(^ zH>+2knTV!|-50`FfWBZWN;W5uqel+;n9oPm4Zs3syv)OwR6TF~bSW(EpQY zPZ8)O2q)eNI1OaV`9MGNq*`#d*+warztXb1$-VLx72?u8`d`%Blpfi?_bnYCM%*>i zB{oTkg&f%hjQ2g0MGHmRwpB^bNFcc$rewW@_AzJAVeI+rY^tvuYrLo$=fw&e9)0DoXw3z?5A|C3SjwuZ zNW>D)@(6_&Z?9#`HXMQd-x?aukf0$jL*|oj@TC`|3T>Hj_bsC9)HUJU6}9pV8#UpCV#N{F zI!JwPv#k2bGWOFJF}8GgL)VifucH_3FZ^}4&eiH4ruKe#!xiCd*mYwi)rWXV`L7!T z8i4jSqreQCP_mg^<$~P&m#>Lt@~LJ**zOSznIo_KWBqBwTDB=_v0atbARu&;%Xd(= zwo%FdyL*oDbH!HRbXLO^grp_W#Z6s)zPLOt7jvv3O$NbJ?;xdWMYS`G?9{@Euwgq^ zH$sLmwMNMx@)Qalrhyr3p=u1Rj@Wd|aqu2il@@Sd-(FJ#q)QC1sA%qVH zHT#Mank_4Eg#RQpS+Ng-W)J55z4MC-%io{g6vv%sx<^bnSuEv)8Me4sZRLTF*P!@i zZ=pAdT?i(!4TIkz8zyabSYnC|ZCB$(rwwt)#8-qcP-oc3gFWoJaaRi#ekU5uYW^9_ z`^--CBotdB?5-e8vDpNh-R7>2!EnjAU1Yqy8xHyjdY}JNTx3eng%)83!_3E{7MBxn z;04!Z%2D1dM(03L^DIr@rx;3MW)xj~jlLpNI?4voAJ`qo2`*+WQ+H~28IKhkajyf6$ZXKsurf4O5{|i03uj$|9x%sb8Rm+$QDw#JSGC5v*ZwX$px5uW zYoj8L+R;VR-X$g~VLPuyje{-3Gi0u%Xxoo?rJ;0QYA1dZZ>l&*N1k1vpY;1I-nOX# zO3Rpy6Ur_JN4S(V2>D5O84i#oh#_8EB{Zg5SvzQ<`oV_Lp`IZ62MZ>`l6-uGA=<^4 z=G-Ox99gUS?RvNr@#PcAF#{w)441{dEnC(q9fX?DK=k<5>PYHU&g4UC)k=iJ zmgAThb@-wKyYBA?Y%-K83{AYmCkrUs$N9g@u1BBuaL7c`ff;<JNi80PUhf^8G3j1$B;6C%V+aSB<(EDypjgU?5q*(M*?F?1} z;>Da#%!JWZ#2G*sle^lUVz+Nk;*B^|q51|Lbw>pnStZof>`KQ>cnCVrVExhwKRiRH z!`is!g%spyRbMn_<|(w}E4Ww|^=ZC1=7A>gvY}cs^Yh#%b$%i0fulPSd0{V(%}f+O zRIxP*n9+PP)hH0or=~+gZMkC$XF-n(8wL79%;f))9f7+?&)hms&_yqmqD8(v;}j=z zs)aX4f942gk>!90-~FE=jLkA@KiQKFR2DYDpB0SF|~UoyV=r#^y9SU|Cm`Fc0o%@Vj@4XewX6IHAEo ziovjEea%h1#*A5}Xz--Gy800NJN={H3Kyeu3{WP0JDj0G8CtSG(YuO|A@JOo@^rX9 z@9k#WmhbX}hcA-gt!89ojE|I&_HzbObhY^&Q%n4Vbw%`I0|qZasz4b;lPEZ_BPoPrSaT1jK|Wc}=@oEE=qsq`fGL`O514J;`q!$qoCst4t$NE zTLUdr@d4LeF#!05kv;}SyqmJkJonBJQ}iNN*MZ9*RemBBm+hVo;&YR1YL((NN8r1l z!7e{$h|59F&@1+30~@mSD_wedC{H!}%H7wA%nNkc(nFz`-#kmG5~;)lsoQmy7jbQ7 zTye;lQGOF;##T)&z{%T)kmdOu=WKnS49O&@cJd_pLsk`yp4UWA5Zuyhq}lVqX)oLM zyZ2H=jUK(u4N(CW0Ly6HQ5B4t;@F?FSz!^cO?sb|6R(#6*?m!QD;fv|^f>d;1UVJ? 
zE|lBCbKYj&XTOpplIOeUHa902ONkaOp=XwTM3Hz?uA3T`{oh^J4Ru<6dO~^PPWJA6 zoS)neu4RO#tyv#8)OmXRVJi@;yvh&wJ_$x4>&nSn@qIKW)-xci7X_(#dwZr*%lD%) z`JTRrpv0vqE%IT&Azjt(F+A;SGwLt;S!;se1F9;qvW}o1w?pP)$y^lhd{@)MHbM}& zUS3wNArELN*|9=5&&~Ak!J|UmUa}MWwnFei7&LJq{XzY{DFp01f25hDTR7Fy!e=XZj+{bzP@Slr)^p#Yo>lK6(>szuWyFDq~p~>^@HsTARCfdru^x! z=G&Q}lb4+Xg=k{?m5aV+vaT%x>skeDAAEY0tLQVZx%3;Dt$7$6_Wyoi)lKM~)%xon%&UlT~D1%&3m=rI2jj8VVmWvgO^jn zi6*8Xr*%c*+~xgsSE$2aoEqVDQId4MW*P#&vMrbL!s|Nu20wid8-cO&5b)0oaXW2* z=ya0uqZdSgnI+qY(4j7Ou!Ej$nz&DFe00ydCNvfvH4sOfq_y?{dT&>;)p3x;l|f^A5+Y>rm9MNVeRDCMAcl?im5?6KW?Qg ziYGOOi-KZIJ>R5M)GF!kZn6$ozzXHBL+hJm${p2OPl!cpE7iWdFjk*;>1(NO*0t7M{~jTOvwN@Udb?@^1_{E?Y5K& z^^W350;BOe#|e$ChKBzv1Q$~{RnnU0``uzt zeldt->bl-?)6*<)@*SQN>_c!r&<_Q`+-tG`|aIa>=3XX>I*;EltcexSZqY;CP6^~t}8byIbBq?Y@i$_S;VLv zPT+xGXgukU=dZSpDZMUlZiKb(WY*90K2gV>>~31{Yn)vc8)R@6wU@W&X+P+hR$MJ; z$4e8+|2}E~**L5Ef?lTy{Lp5hHO88RNDf?(7w$tXv?O_ z&wO}uRP;WBGHE$=@O8^(Mc$8oXF-|Wk>>Zw9ZhFz(8F?0#?J>eF{Dh3!`}U{mPg$; zvlV0-hly@}O7F4siF>y(oi5AM2fBJ(OJi>O`Xxe(h~-9sPh|o|9`OxKfQ$GKui9^4qsT~Nvw;NX7fh@m{qcT2{I5sE~;Ir$UQ(pIxxr?F%s?A7WA)<{{J^2mwcO(3EZ&8(wm z>lcx-ccf~4P#ETOXE#J8u$3$cZSpIC={N6ew_a>1!3i>xRqcudaeOvS$o60NbEH@A zwQ}`#Ov6u18FQjXm40PeNXzSE00Ajl--HOtPKz%kg1GY16RD~sa&HPe_%5ZAos5rn zJkL~wxtmc_-6W{X@{*m*{o^VEuiOIHX0!zPh#!96>aNqB?I+(@!zE3Y(fFgbF}TE|iysG*Sx+bAkO9PZz2FKA(~y`X zI-K_t5I^a1}Hk`;)wT{AuZCrzgzFt+8_ z)-ZLSDI@R?%bL9E!*VYKX-vE{Prq!-NXxs{GX|&RD58x(Hcz`L_xAjwLFd0F%E80% zg?v>}+T4LG5Uw;ZLnR=x8!)L8`uW|#neav2z@t(3QTuiJZyL-gq3`M$3%!bZ?6cZWEB7pqK3}Gk_T)yG9kx)ufSRM6{!IpV%{81- z6R<`*XhT)5YsV(;phm9xe+*5n-qSfEt9|@ z0q9}`ze9B&9hJT=er9ADFU<kwqhlns`y#nY;3>i*a^sO+Q2&+o}On4zsos@xcY~u+p}{bC&UH1 z){EJ0r4yM?6fzw;QE4Yf!n(Z=8cYX$d$m4~1IIfRWo>jDR0FK>XJR^diT^$oqa?{GlCZpAMU#u# zL_NB9o^+(!D;lwr5C;t^0 zJwlYK#cN6yp>ownvLFn3WG|gZ8pYvtGzeun9qC*R3 zef-QanJN=n5A_EYX1P}9OoF`3Cf&(} z8<}yuvL1lc_|qCNCk!jF#KNo-uz9BaEDPBvG@sWL%u`K#*WaDJiYLud%#M;|&1uv@}T=t|W0k3*E~>EgEsDFc~&8!Iz{U?n2g zH;aYO$~w7unax16G1kY{p>G*$Mxkr>)zOckunDTOSzw=O4Izo|Cb=$Z?RN zU22!tbY+qvaDywKFhQ4{ZiJ~Lj(u5LchI*grj?KrjEI+^GvS{7QZX{KKn=l%-F`hT ze3d_m4K|B7r~ zNuCHq@awnyIW0Q)?manP-aIw0?5$GYbW&6(hMXz!b$O??_-pudP4av;Bw_p;=xR;# zgPy3(Kj@M@z{X0ROJV_Zj=AITwN$l7ddycRg~qam?TH zl7n#^-dbKwlQHMNwU7_lNHK^qA9VE*w0`9|L||pUZD*P1qmt z$)dWZsGhWVcFhQjX#Jy^>L|s+nv22@9zU1`jQN9lG(Ad zi)t*nMkjUEs#lR6zKFyYhnDkw{7!I>3uL!hJPr+ipxo=q8v6F`XS9#oht$yb?LZH= z8Ew0SkDa;T_h)h>|NHK{S-{K3 zp4i9NsUzHbOeNjyGIyRxtWFc3US~p~a=8oxb$`1=vw4GchH%akf>&KkZ@(yeMLE{d z&R+>%L$F~9QWdeqM==~hUOrJGJc?8EGZe%Da-6@)ClB1Yex}4W2@J3WGZ#5VZJMPz zRM_t{5w#)41DDM;Pn*GizLM+cc+n=O3&0AO~@ahRJL;0x?nW_;}@{wLc? z)aQrx##57op^Th{!AGVP8E+{W8RkHxOyX2rH~HNuDCzforDl9>R*ebS0>*MIB~W?H zp&kD?UJxsd5>uNnp|y=iW~#h+W!-5hg%8$MgR1^|EfCTbKQ*;AsNhO1fNg6(Lb`gn z@kzq4ee!{4TmTu&V8+E|Od-<#q%8|cQ{0|BHX&^wI&nVHTP1_E2AHjyyp55fK+iN! 
zaorW79U~-eY9o`GK6e{!KZdCWrzd1AE%i-JKS*q{_cpJ+oYfy~Us=M|vB1t6B5x_h zP+!=VmXDZM21zvzsvyxL`g`c%-?HF9pT~X--`+FM=~lBt)#!!4Wc^hR_IvU2bw6R& zItr$tQN;9DO{hrod3Mf%mS)EU4$}T6(W@IYG1v06HWoLer5X3}Y8F?UC@#WrDrGhF zR~WpO~=!9{|1W>zK>?Jzy`%+|p9lWm~dE|EH8a@^nf*8J(?R zwcl+MEtTU)@e2Z(_n0^Wk0^7;Mch9EO4gw-ZxC>#Z36!X?EPf{ktXT!UjcQTRUI6> zLk4$Yzy9*U`0GWo25!(Fu!!If-j?l^pCS!gIu#b%SCJ6 zgb`ihEdDwwH5k3{J#vcXz>mA?q`fkgEN5)Bag`iVit!Gst8OS@*`p`pcz6!6w>$S& zD_@TcU+q&1w`-T@QorW=&YAkLEEuoPHk^?)CgY5Y#;bE`=mhCf=PV^A{HH7RUR7CU z9%Eqa52C3@&w0M>0zoGGdZ}zl75ns6#GBZ`!LJb1PC^GuSgoU-;?-xnE{v?VP=*a& zS&VlbX0CJqoaK8i*Q>GV`I*RWk}@UNf$7~O7q!ta+R?!~N6p`~RqxC5`DYW>FLL2( zZSYSCvH%L{@vWu-(}$uKgS>h7CUQzTnF!_D$W9C8^`qAHK!S(Cd>&3ZJ<-F2?ii6K z=l+=+7TZtnT{LaW5GklKq-_)L(tbEO1OrEfgb)67SJ!jAI6l}thfnPfU9jpT0glKW zZE9laVr)codHL(5vdjgarXN4?742}821F|I&F0e!^{6@4DVz=xwt{ z5ZhMamyCGk1Ndf3gmzXNaofb2MbgD2@tAyzqd{$^@RP*0-1j0_znPxN2OZU5f*9E>DCva5qw7(rn%81?+X`r;IFe9SaVi8(ny?U!h@Cn25aWtyjK;hdgF11WcPi z^|jk#{N-n)FEYi>pBX%5J391OISIx2M3+*_jTd@ZPA4;^RAj;}XnA{&F|7gZBDpu; ziYf?2ifsw{!#)Yl!nw(?h`Y+Z3fvE z=U~Dpy>J>uWP65V|02gRd4&;hezH9DQ?$2H+wGDCO){JUzwT_+r!Ypj&@d6Z!^^Gl zCqcT!Y0@c(#hP)L8S}~<1K2bS!jd)9NvOZV7u}P;aFRO~6PI1V>oki<8E{eyKUEGI zgqGdABSfCP5~rV&QCs9Zrff5t+DRN@<1pedtWcmst=X)?8T@ixU7t$!Dfd*h_hWlkHWJijem=c!Kc z0wpG*o4$$Asd!cm;K_rRUmV?b@K8VQY9qqr8C*Z@l{Y0t zVSk)H?6_axxtm}+@Xd$Qe#G zld+hu`HlRF=j%sEz1k?!>aRcMqvRq~CQ3&f30EA;zMQ?heZzaz&?TFTDt>=(Mfny4 zVj3nT#9kLnXWH5ORCy*{3FzVdUjV-%JnllCl2i0DkhbKx7{48Na z$${Z9f< zE!-l<(a3DXOx*cjIT7*ml5oI&fOKrr)XTf6;2vYqHz!efKH5_OC6XqA@gXU0p%E-V zt$np5w|}zHyW}u}p$FRG0^Lc#JF$6gZsz5 z&Zxb8IUKj_=lnO6AvTv?5(bmOmf*O`BsHjS;AWq9l(_j4=SVTD1LnT*%{DRD8CuWK z>4u%bD4x>Hr4`OpC?!LkP1zg4iVV+Ww{z4d4O*Y%p}lL3Iv1hBFqB_s>YPrG@k}cA zKy!3~FL!2zpy{;BnMnK(xhpdT`x?p@5VO6m0ln6L%J2&y=4dUar)K0qMVSl*+8*(r z_2?xa2{AFT5Rm}jU^t;erU$-Jvm9+NhnP6KBe&uCC*4Zg4b~^;s7p~Qea?7f==wIl z87arQ_$(y9t0xb&KRs_FlKse~s2E+GiFbSEY9d0!nsML|;@_SOpeKeuZ5 AE&u=k literal 0 HcmV?d00001 diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/logo_small.png b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/static/logo_small.png new file mode 100644 index 0000000000000000000000000000000000000000..d0ad4499a9f187c829438db52f20575863e526c1 GIT binary patch literal 5000 zcmV;36L;*1P)E{GC{1Z z){<~C!Ba7n%t9|$>)M^n$@zG+n+SV4|$~&0}ghw7Td)+_InVB;o2>}x9 zz0L3Q$tP#;z4qE`?^$O*)>``ry#%K`ezOSgHBfpdiHerqYNrqXItcX^>?iacYvFCw&9OzV; z_7VpH{W{2BFCsT~G}Ef;>mqWTstyGH2uuWK`toiB)&ds-MZjgiO(HT~RqKEUfiQ5e zh&-gK#{$0xeg(`2?)U4r0p9@*2WA5;z%?RL@5^w4pWg~x1pEm2EU;2U7OLt+z+@l; zJP+If%ma#n1tRkDevVro+Q9$}z1;m|bFp~^(Rm;ffe}EYhyFTJRDcowO#4z-D{Loas+rbA{q z@TrfW{cXTqZDITCjtW&Z3>*u*?PvYL|NlGSB%m2@cKsbN2UvnPQ+@_-M(zl2i--lb z;H@4JyxIR<;5PxfQecyaC{RXrI|G}5;eZv9W;_MO=Mw@3;mzQE=wXD@#*~XUb7iqn zktalT*PbeiA&qKNS2T*8$L=q}HlQ{gw)3~mcat4d>!+%B0Y?F~BC=Uk3!280dAOf` z9Jm3f06quo6p>7T{w-Bq1l*1{qXuz1-c0*9JO$3;KHyPRT?c#*xJp$Q0*Bzurq8SD zoxTju;LWJF`TXDNVo3V1j}RwT{Rfb-etLadKYM)Bt!{l!Fn40^U@#xz%{H)SB?UlL zhv02d8vU6KRUHIu7ZFv}e!x&*Gk}P+scNZ+G^%PbU_@kBfL{@wl6?pa5RvVw+8<~W zk+iBF1VlvS!*+5+Job|GXZc~k1`)BUYJ8rXfslwa`!t6FAGUj0`p^RigB+1h%5Qo~ zQ2|6kF<;5LrKI_KvbBS}roeM6u&!NsqyK+52ZV^UkzdBQ`+2(pbc1Lm#?AxYsyatLY*= zvQJ6-&{OC&Wlh%2dAzrH_u*d?VZde+s#5e3%Cbi|74a#eItJnShT1v5YF{3W&HWr= z&sEDPF{IV95s?>@8;|-04^PPy(5M_PqL~~`H1kVb_y1JvLL^pmwZTRJ5T0nLo%8#> zsXw%$=2IEfZwhjxfj-Q0@;tSzOeWlIJ=%f2L>Slr3;__8iA;Vd zl>K4M+`M)?Bo#Pig33(<<|b2F+-fg;UX^Hfl1wvGA=DYA!Qq#{diLPa;? 
z9h5&ZgjMwk5jh$F(G#5EZ#*Uy)l1c~b6=T?n2aLplPyEnu=ujJo)A>54k$>c%|Y4c zVc&HRZc}yXI51}kz`)q`Lxps8M>Hb3L_;>EY4d%u@e;~Yc=p>ITAO#CDkAyV0;}8c zJXtv$JoCC?X`89ZHWO0$ZTEY5fZ)1D8)N~VL!~{Ir50ba)tOO!przU)!m6QLMeL<` zUL1f)sxoPR;<3CpRdI?pC5hEspNwYa*{1T-PQc@VZy!AFVopmrGL{ zk9mm16x$fIc?wRDU8>AJgzMT=-u{ZhQ~1iNDxYsJf(T~@J7D0Hk+bIY|8V}5y8@XL z0zR_U1!eyvP{9e#MONhz9-flf?75QvX%PmfudDoZX)He8z;(cv=y-Y9XDE816N^8c zOmHVaX)J!4@gkZK(fb;jhdKJMg@PUdY!;Dw8+6Dm*;@u>Gi#2rR_k(1Gv?Cd zqS+smUg4e~8NL;0PMqOy{FYc8L{iDx$|)X=1Zi*@%yMFJfW~B^YO*iq3PG*}5cnLf z+Yi@m1sF8r`lymV-mjhlA_~R3(mN&qPX;fz5_s8`h6p{_?q3EGVKv9uQ@}7X^NgGo;NWG ztP$iV4T;L!ctm3Hguu}NsyvdatGXjw=FF)6B1V1?V7nxlsJbg(xB4rBT;Y#vea>{% zn5cRc`09vh?ttcEMj0{t93rS{LaI6e(d)_0)^Zhi%4&pRC};T{80hrdbT-Nbk{`f` zs?K$2oRDK5N|&#@=H`b?a|bjdqjTdBlBWQim^*Tth91KU zI}DCg1mrkl>~|vN)l*Y-bC!EN?IEama)8mpET!#dUXx=?Ph0Pzu4Ug{IzR=yM>pc74d^rxyDZ) zIrMUO_?AWP#zHa~k1!ti0M&O88BIQxV3LzGmnPxAJ8B>!rcK~wyrAk)fg&$#Pe=;3 z4K-cTWHXWpcbiilUxP5tE3*^vl~s(Kk&UHM)w=~Q#ygy| zpZe>9UHBv zzF1X$md)sZ1^_WUrRpiGdQ18({&Q9-n^RHyVzj}$<{if5#L_F=(#8dDqqSyYU|aqi zuG<=|s6JCgPR);r{2-O6ycxVWVjD86)mugfg}hq~)7ptN0#J}8D%=-BH(@gyc9UDA z4c86tXxRRsm}m=kkH0!MmdqW6GY1`KeL z;ju|r9VxFl4lhs$Ad}Ycw_7jfNI6@A7h3x%wl27u^1 z%fA%h^-|NqYa0q?*0LGy7;DVAB8)<{NkvvBHyr&)Z#xOX8N0SH$G#re4TVz!Lx+TS z3q$Q46cv{|*VeMD73ilhv}{K8r!*tuvhrk=*;uo}inni5I33r0j0VPM`IwNWtVS4> z3&j;)Z$Fl5_2q!6t8~ze8w#uz0HL%l+LPR>ItiGR%MfNIYiFbeO#;gHmt8$Sd(@~Z>!`^tUG!`FC@1ii;%b0?2y>Ph4O=_KD(=FfM z0s2xloiTEfTA7$ltLO@VbU5=2-u)}EGTzx@j=}c?0^1rQb`5|kkvw-Vj4=pp*VmFz zW;bQsvS?0sOiGgqQc8JOmooParZkvpdjHd@!g5h1?@Urr$NO&sjuAa}=;iJ}(9Z*i zVKsoD&-w%>oS)6wT>%8m zDs>6oMa67MuD+_ilF#R)_WN&R0(M#0u;~tWGy4R6HXXKORk_dEFSLcGeW8+KBT(f!i}LY#U^D;aw;^+5eAHpu{L<1-|iK1NK5=GkIkd zd060Dyg_!F2Fbh;)7-1pFLrlE%Bv@-@}GHrJhJan%M;t?&q;#b9*Na_Id{+!`Dxh< z_tpA^?mzaOw})h6&flEa+&_bU7C=RB7!s>_ZEK?PE#HbWFF?MbUL(0Zvk1#o`5u5l z&i0pz##DsY0g{Vmf8dnI*C8BA(Do`U&psNEp40>Eh^lc{=-yznnA|I!q_@<)+iFfaDo7K0k68L_CF}ax(fRdKX zv*LYkAV}9(zxmU-8O3+D+?`D;GQGLjG&-?(lZY$})TPFig9pm$f|wEEdH?^% z{hUlXWOLpyL(mDK6c~ghWA@t$pzx*|`=y5Yv)6S((tzY6_y;%H86rFo$SMK`1}d!+%vU-wN$oYZC$8NBatV2# z>WPk|Zt4l%4T;JpMf4K#dt>k>rmP;NMdaFK-JH7%D!Oi~c(E5bNPeH{XHXdQqxxL% zrb!s|b&&Bs@zk=4_~QE7sx`x|y7{2CmR*HwugWWkc0+-+1%zcq93`o&fGQ*<|F zRn;YeL;+NEO|UGPsQg{DqGqOAJ37hvb)-KGZ zZKf?}wP>iFb8lI>`+GH^Z>iN&5G?~q0)GsN{A^2Y<=XcAe0djwH92ZANB;3 zbnIPd&Km>XS4BQeKv98h3RfOj(fZJbLKs{(t*yo9Nc$-{kM|HWsWDOYr%uZI(1!zp zFn}Mwc#a=udY%X$M`f#uu1q@iXSh{ehNShO4+jDTu>ro>>gkV2mOk{MOZ+cn4Di%D S|CE~m0000 Login Form - - + -

[Remainder of this patch: the diff to routes/openid_blueprint/templates/login.html, whose
HTML markup is not preserved in this copy. Recoverable pieces of the template: a
"Login to SpiffWorkflow" heading, Uname and Pass inputs, the OpenID values passed in by
render_template (state, response_type, client_id, scope, redirect_uri), a Login button,
and an {{error_message}} display.]
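A note on how the pieces above fit together: AuthenticationService no longer hard-codes
Keycloak's /protocol/openid-connect/* paths; it reads them from the provider's discovery
document and caches the result in ENDPOINT_CACHE, and the stub blueprint now serves that
document itself. A minimal sketch of the lookup, assuming the stub provider is reachable
on localhost:7000 (a hypothetical address -- the real base URL comes from the
OPEN_ID_SERVER_URL config value):

    import requests

    server_url = "http://localhost:7000/openid"  # hypothetical local address
    discovery = requests.get(
        f"{server_url}/.well-known/openid-configuration"
    ).json()

    # The stub's well_known() route returns at least these keys:
    auth_endpoint = discovery["authorization_endpoint"]
    token_endpoint = discovery["token_endpoint"]
    end_session = discovery["end_session_endpoint"]  # added in this patch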