From a3b36ac022afcbedeccc9b5c3a6d228c9865d974 Mon Sep 17 00:00:00 2001 From: Dan Date: Fri, 18 Nov 2022 12:10:59 -0500 Subject: [PATCH 001/128] create latest/vX.X whever we tag. --- .github/workflows/release_builds.yml | 78 ++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 .github/workflows/release_builds.yml diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml new file mode 100644 index 00000000..78344249 --- /dev/null +++ b/.github/workflows/release_builds.yml @@ -0,0 +1,78 @@ +name: Build docker containers on version release + +on: + push: + tags: [ v* ] + +jobs: + create_frontend_docker_container: + runs-on: ubuntu-latest + env: + REGISTRY: ghcr.io + IMAGE_NAME: sartography/spiffworkflow-frontend + permissions: + contents: read + packages: write + steps: + - name: Check out the repository + uses: actions/checkout@v3.0.2 + with: + # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud + fetch-depth: 0 + - name: Log in to the Container registry + uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + - name: Build and push Frontend Docker image + uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc + with: + # this action doesn't seem to respect working-directory so set context + context: spiffworkflow-backend + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + create_backend_docker_container: + runs-on: ubuntu-latest + env: + REGISTRY: ghcr.io + IMAGE_NAME: sartography/spiffworkflow-backend + permissions: + contents: read + packages: write + steps: + - name: Check out the repository + uses: actions/checkout@v3.0.2 + with: + # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud + fetch-depth: 0 + - name: Log in to the Container registry + uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + - name: Build and push Backend Docker image + uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc + with: + # this action doesn't seem to respect working-directory so set context + context: spiffworkflow-backend + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} From 3d8d0c43623c73b83959b8e6d75e8d5f609ec267 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:01:10 -0500 Subject: [PATCH 002/128] Adding connector proxy demo. 
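Besides adding the connector proxy job, this patch fixes a copy/paste slip from the previous commit: the "Build and push Frontend Docker image" step was using context: spiffworkflow-backend, so the frontend image was actually being built from the backend directory. A minimal sketch of the corrected step, using only names already present in the workflow:

    - name: Build and push Frontend Docker image
      uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
      with:
        # this action doesn't respect working-directory, so set the build context explicitly
        context: spiffworkflow-frontend
        push: true
        tags: ${{ steps.meta.outputs.tags }}
        labels: ${{ steps.meta.outputs.labels }}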
--- .github/workflows/release_builds.yml | 43 ++++++++++++++++++++++++++-- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index 78344249..c93c9640 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest env: REGISTRY: ghcr.io - IMAGE_NAME: sartography/spiffworkflow-frontend + IMAGE_NAME: sartography/ permissions: contents: read packages: write @@ -30,13 +30,14 @@ jobs: id: meta uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 with: + context: spiffworkflow-frontend images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - name: Build and push Frontend Docker image uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc with: # this action doesn't seem to respect working-directory so set context - context: spiffworkflow-backend + context: spiffworkflow-frontend push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} @@ -76,3 +77,41 @@ jobs: push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} + + create_demo-proxy: + runs-on: ubuntu-latest + env: + REGISTRY: ghcr.io + IMAGE_NAME: sartography/connector-proxy-demo + + permissions: + contents: read + packages: write + steps: + - name: Check out the repository + uses: actions/checkout@v3.0.2 + with: + # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud + fetch-depth: 0 + - name: Log in to the Container registry + uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 + with: + context: connector-proxy-demo + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + - name: Build and push the connector proxy + uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc + with: + # this action doesn't seem to respect working-directory so set context + context: connector-proxy-demo + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} From 3b5f742cd10134188063afef12938f67b520d1d4 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:12:39 -0500 Subject: [PATCH 003/128] tweeking. --- .github/workflows/release_builds.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index c93c9640..5c6807bd 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -1,6 +1,7 @@ name: Build docker containers on version release on: + workflow_dispatch push: tags: [ v* ] From d398fb67e8438662f816b1e2c7447ace3e44924e Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:13:48 -0500 Subject: [PATCH 004/128] tweeking. 
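The previous commit added workflow_dispatch without a trailing colon, which is not valid in this position: the on: block is already a YAML mapping (it has a push: key carrying a tags: filter), so every entry in it must itself be a key ending in a colon. The trigger block as it reads once this one-character fix is applied:

    on:
      workflow_dispatch:
      push:
        tags: [ v* ]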
--- .github/workflows/release_builds.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index 5c6807bd..eb245753 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -1,7 +1,7 @@ name: Build docker containers on version release on: - workflow_dispatch + workflow_dispatch: push: tags: [ v* ] From 0d1787a313c027595827bc420586ffc5992c4007 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:16:01 -0500 Subject: [PATCH 005/128] avoid running tests while doing builds. --- .github/workflows/backend_tests.yml | 5 +++-- .github/workflows/frontend_tests.yml | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml index deed1448..370af4af 100644 --- a/.github/workflows/backend_tests.yml +++ b/.github/workflows/backend_tests.yml @@ -1,8 +1,9 @@ name: Backend Tests on: - - push - - pull_request + - workflow_dispatch +# - push +# - pull_request defaults: run: diff --git a/.github/workflows/frontend_tests.yml b/.github/workflows/frontend_tests.yml index 1bbfdbed..0cd1d5e7 100644 --- a/.github/workflows/frontend_tests.yml +++ b/.github/workflows/frontend_tests.yml @@ -1,8 +1,9 @@ name: Frontend Tests on: - - push - - pull_request + - workflow_dispatch +# - push +# - pull_request defaults: run: From 0ab3a7760445a15845cb7af15e67fc1c8d063f19 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:17:43 -0500 Subject: [PATCH 006/128] twiddling. --- .github/workflows/release_builds.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index eb245753..d4b9b319 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -79,7 +79,7 @@ jobs: tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - create_demo-proxy: + create_demo-proxy: runs-on: ubuntu-latest env: REGISTRY: ghcr.io From 110203705c328b1ae66cffd72e34680411611c32 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:24:25 -0500 Subject: [PATCH 007/128] updating the action. --- .github/workflows/release_builds.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index d4b9b319..203f9ad7 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -1,4 +1,4 @@ -name: Build docker containers on version release +name: Release Builds on: workflow_dispatch: From 41cd5efde2990ffe8f4c974adc282e8b25c3eab5 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:26:28 -0500 Subject: [PATCH 008/128] can I even do this on a branch? --- .github/workflows/release_builds.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index 203f9ad7..6767895c 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -78,7 +78,7 @@ jobs: push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - +# Is this getting updated, I wonder? create_demo-proxy: runs-on: ubuntu-latest env: From 156dd40da02a169aa4b5bce96f876c28a9d9948d Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:28:01 -0500 Subject: [PATCH 009/128] Getting the darn thing to trigger. 
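For context, the on: block can be written either as a mapping, as above, which allows per-event filters such as tags:, or as a plain list of event names, which takes no filters and so fires on every matching event. Switching to the list form makes the workflow run on any push, which is the quickest way to exercise it from a branch while debugging; the commented-out mapping keeps the tag filter around for later. Both shapes, taken from this patch and the state it replaces:

    # list form: no filters, runs on every push
    on:
      - workflow_dispatch
      - push

    # mapping form: filters allowed, runs only on v* tags
    on:
      workflow_dispatch:
      push:
        tags: [ v* ]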
--- .github/workflows/release_builds.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index 6767895c..0e0ec655 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -1,9 +1,10 @@ name: Release Builds on: - workflow_dispatch: - push: - tags: [ v* ] + - workflow_dispatch + - push +# push: +# tags: [ v* ] jobs: create_frontend_docker_container: From 5a3836b2a1f58022ba7415f48b18b70399f96192 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:29:12 -0500 Subject: [PATCH 010/128] Just do it on tags. --- .github/workflows/release_builds.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index 0e0ec655..adc890ef 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -1,10 +1,8 @@ name: Release Builds on: - - workflow_dispatch - - push -# push: -# tags: [ v* ] + push: + tags: [ v* ] jobs: create_frontend_docker_container: From 97db095d7a4299f8a533e8fa3b57918fa63d1a54 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:32:57 -0500 Subject: [PATCH 011/128] more tweaking --- .github/workflows/release_builds.yml | 2 +- connector-proxy-demo/Dockerfile | 28 +++++++++++++++++++ .../bin/boot_server_in_docker | 19 +++++++++++++ 3 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 connector-proxy-demo/Dockerfile create mode 100755 connector-proxy-demo/bin/boot_server_in_docker diff --git a/.github/workflows/release_builds.yml b/.github/workflows/release_builds.yml index adc890ef..ef1c3b99 100644 --- a/.github/workflows/release_builds.yml +++ b/.github/workflows/release_builds.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest env: REGISTRY: ghcr.io - IMAGE_NAME: sartography/ + IMAGE_NAME: sartography/spiffworkflow-frontend permissions: contents: read packages: write diff --git a/connector-proxy-demo/Dockerfile b/connector-proxy-demo/Dockerfile new file mode 100644 index 00000000..e2d89beb --- /dev/null +++ b/connector-proxy-demo/Dockerfile @@ -0,0 +1,28 @@ +FROM ghcr.io/sartography/python:3.11 + +RUN pip install poetry +RUN useradd _gunicorn --no-create-home --user-group + +RUN apt-get update && \ + apt-get install -y -q \ + gcc libssl-dev \ + curl git-core libpq-dev \ + gunicorn3 default-mysql-client + +WORKDIR /app +COPY pyproject.toml poetry.lock /app/ +RUN poetry install --without dev + +RUN set -xe \ + && apt-get remove -y gcc python3-dev libssl-dev \ + && apt-get autoremove -y \ + && apt-get clean -y \ + && rm -rf /var/lib/apt/lists/* + +COPY . /app/ + +# run poetry install again AFTER copying the app into the image +# otherwise it does not know what the main app module is +RUN poetry install --without dev + +CMD ./bin/boot_server_in_docker diff --git a/connector-proxy-demo/bin/boot_server_in_docker b/connector-proxy-demo/bin/boot_server_in_docker new file mode 100755 index 00000000..d64f417b --- /dev/null +++ b/connector-proxy-demo/bin/boot_server_in_docker @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +port="${CONNECTOR_PROXY_STATUS_IM_PORT:-}" +if [[ -z "$port" ]]; then + port=7004 +fi + +workers=3 + +# THIS MUST BE THE LAST COMMAND! +# default --limit-request-line is 4094. 
see https://stackoverflow.com/a/66688382/6090676 +exec poetry run gunicorn --bind "0.0.0.0:$port" --workers="$workers" --limit-request-line 8192 --timeout 90 --capture-output --access-logfile '-' --log-level debug app:app From d8406ae0ee9594083e5bf36e48d1c49a28ad13f5 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 13:43:15 -0500 Subject: [PATCH 012/128] Don't look for sources where there aren't any --- connector-proxy-demo/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/connector-proxy-demo/pyproject.toml b/connector-proxy-demo/pyproject.toml index 9a6f51f8..c3630780 100644 --- a/connector-proxy-demo/pyproject.toml +++ b/connector-proxy-demo/pyproject.toml @@ -20,5 +20,5 @@ build-backend = "poetry.core.masonry.api" [tool.pytest.ini_options] pythonpath = [ - ".", "src", + "." ] \ No newline at end of file From 97b58f1b5bb0796841009ebdefaf486962263969 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 14:27:43 -0500 Subject: [PATCH 013/128] fixing a few borked up things about the connector-proxy-demo's docker contaier. --- connector-proxy-demo/poetry.lock | 44 +++++++++++++++++++++++++++-- connector-proxy-demo/pyproject.toml | 4 +-- 2 files changed, 43 insertions(+), 5 deletions(-) diff --git a/connector-proxy-demo/poetry.lock b/connector-proxy-demo/poetry.lock index 9147d031..d7798e2d 100644 --- a/connector-proxy-demo/poetry.lock +++ b/connector-proxy-demo/poetry.lock @@ -55,7 +55,7 @@ optional = false python-versions = ">=3.6.0" [package.extras] -unicode_backport = ["unicodedata2"] +unicode-backport = ["unicodedata2"] [[package]] name = "click" @@ -127,6 +127,23 @@ Flask = "*" oauthlib = ">=1.1.2,<2.0.3 || >2.0.3,<2.0.4 || >2.0.4,<2.0.5 || >2.0.5,<3.0.0" requests-oauthlib = ">=0.6.2,<1.2.0" +[[package]] +name = "gunicorn" +version = "20.1.0" +description = "WSGI HTTP Server for UNIX" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +setuptools = ">=3.0" + +[package.extras] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +tornado = ["tornado (>=0.2)"] + [[package]] name = "idna" version = "3.4" @@ -214,7 +231,7 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-oauthlib" @@ -245,6 +262,19 @@ botocore = ">=1.12.36,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] +[[package]] +name = "setuptools" +version = "65.6.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock 
(>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "simplejson" version = "3.17.6" @@ -310,7 +340,7 @@ watchdog = ["watchdog"] [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "86cf682d49dc495c8cf6dc60a8aedc31ad32a293e6ceaf7b1428e0c232f8319e" +content-hash = "cc395c0c1ce2b0b7ca063a17617981b2d55db39802265b36f0bc3c4383c89919" [metadata.files] boto3 = [ @@ -350,6 +380,10 @@ Flask-OAuthlib = [ {file = "Flask-OAuthlib-0.9.6.tar.gz", hash = "sha256:5bb79c8a8e670c2eb4cb553dfc3283b6c8d1202f674934676dc173cee94fe39c"}, {file = "Flask_OAuthlib-0.9.6-py3-none-any.whl", hash = "sha256:a5c3b62959aa1922470a62b6ebf4273b75f1c29561a7eb4a69cde85d45a1d669"}, ] +gunicorn = [ + {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, + {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, +] idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, @@ -428,6 +462,10 @@ s3transfer = [ {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, ] +setuptools = [ + {file = "setuptools-65.6.0-py3-none-any.whl", hash = "sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840"}, + {file = "setuptools-65.6.0.tar.gz", hash = "sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d"}, +] simplejson = [ {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"}, diff --git a/connector-proxy-demo/pyproject.toml b/connector-proxy-demo/pyproject.toml index c3630780..8acd820e 100644 --- a/connector-proxy-demo/pyproject.toml +++ b/connector-proxy-demo/pyproject.toml @@ -5,14 +5,14 @@ description = "An example showing how to use the Spiffworkflow-proxy's Flask Blu authors = ["Dan "] license = "LGPL" readme = "README.md" -packages = [{include = "connector_proxy_demo", from = "src"}] +#packages = [{include = "connector_proxy_demo", from = "."}] [tool.poetry.dependencies] python = "^3.10" Flask = "^2.2.2" spiffworkflow-proxy = {git = "https://github.com/sartography/spiffworkflow-proxy"} connector-aws = { git = "https://github.com/sartography/connector-aws.git"} - +gunicorn = "^20.1.0" [build-system] requires = ["poetry-core"] From 322b7f7d449fc0657ee01ad63d281efbc3867691 Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 15:20:30 -0500 Subject: [PATCH 014/128] adding a docker compose that will spin up all services. 
--- docker-compose.yml | 97 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 97 insertions(+) create mode 100644 docker-compose.yml diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..0625d104 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,97 @@ +# Why we are running with network_mode: host +# +# In order for the backend server to talk to the mysql server, they need to be on the same network. +# I tried splitting it out where the mysql runs on a custom network and the backend runs on both +# the custom network AND with localhost. Nothing I tried worked and googling didn't help. They +# only ever mentioned one thing or using host.docker.internal which would cause the domains to +# be different. +# +# So instead we are running with both the mysql server and the backend server in host network mode. +# There may be a better way to do this but if it works, then it works. + +version: "3.8" +services: + db: + container_name: db + image: mysql:8.0.29 + platform: linux/amd64 + cap_add: + - SYS_NICE + restart: "${SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY:-no}" + environment: + - MYSQL_DATABASE=${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} + - MYSQL_ROOT_PASSWORD=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw} + - MYSQL_TCP_PORT=7003 + network_mode: host + ports: + - "7003" + volumes: + - spiffworkflow_backend:/var/lib/mysql + healthcheck: + test: mysql --user=root --password=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw} -e 'select 1' ${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} + interval: 10s + timeout: 5s + retries: 10 + + spiffworkflow-backend: + container_name: spiffworkflow-backend + image: ghcr.io/sartography/spiffworkflow-backend + depends_on: + db: + condition: service_healthy + environment: + - APPLICATION_ROOT=/ + - SPIFFWORKFLOW_BACKEND_ENV=${SPIFFWORKFLOW_BACKEND_ENV:-development} + - FLASK_DEBUG=0 + - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key} + - OPEN_ID_SERVER_URL=${OPEN_ID_SERVER_URL:-http://localhost:7002} + - SPIFFWORKFLOW_FRONTEND_URL=${SPIFFWORKFLOW_FRONTEND_URL:-http://localhost:7001} + - SPIFFWORKFLOW_BACKEND_URL=${SPIFFWORKFLOW_BACKEND_URL:-http://localhost:7000} + - SPIFFWORKFLOW_BACKEND_PORT=7000 + - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true + - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@localhost:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} + - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models + - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-false} + - SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=${SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME:-acceptance_tests.yml} + - RUN_BACKGROUND_SCHEDULER=true + ports: + - "7000:7000" + network_mode: host + volumes: + - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models + - ./log:/app/log + healthcheck: + test: curl localhost:7000/v1.0/status --fail + interval: 10s + timeout: 5s + retries: 20 + + spiffworkflow-frontend: + container_name: spiffworkflow-frontend + image: ghcr.io/sartography/spiffworkflow-frontend + environment: + - APPLICATION_ROOT=/ + - PORT0=7001 + ports: + - "7001:7001" + + connector-proxy-demo: &connector-proxy-demo + container_name: connector-proxy-demo + image: ghcr.io/sartography/connector-proxy-demo + environment: + - FLASK_ENV=${FLASK_ENV:-development} + - FLASK_DEBUG=0 + - 
FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key} + ports: + - "7004:7004" + network_mode: host + healthcheck: + test: curl localhost:7004/liveness --fail + interval: 10s + timeout: 5s + retries: 20 + + +volumes: + spiffworkflow_backend: + driver: local From cd1affa0ba352905730be7abf0418ee4dac9268b Mon Sep 17 00:00:00 2001 From: Dan Date: Tue, 22 Nov 2022 15:23:32 -0500 Subject: [PATCH 015/128] Reenable the tests. --- .github/workflows/backend_tests.yml | 5 ++--- .github/workflows/frontend_tests.yml | 5 ++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml index 370af4af..deed1448 100644 --- a/.github/workflows/backend_tests.yml +++ b/.github/workflows/backend_tests.yml @@ -1,9 +1,8 @@ name: Backend Tests on: - - workflow_dispatch -# - push -# - pull_request + - push + - pull_request defaults: run: diff --git a/.github/workflows/frontend_tests.yml b/.github/workflows/frontend_tests.yml index 0cd1d5e7..1bbfdbed 100644 --- a/.github/workflows/frontend_tests.yml +++ b/.github/workflows/frontend_tests.yml @@ -1,9 +1,8 @@ name: Frontend Tests on: - - workflow_dispatch -# - push -# - pull_request + - push + - pull_request defaults: run: From 8d223c52fb081b81e566c7bf75db4df260937a7c Mon Sep 17 00:00:00 2001 From: Dan Date: Thu, 24 Nov 2022 14:58:16 -0500 Subject: [PATCH 016/128] just a bit of cleanup in the docker compose file. --- docker-compose.yml | 32 +++++++++----------------------- 1 file changed, 9 insertions(+), 23 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 0625d104..d6a86149 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,18 +1,7 @@ -# Why we are running with network_mode: host -# -# In order for the backend server to talk to the mysql server, they need to be on the same network. -# I tried splitting it out where the mysql runs on a custom network and the backend runs on both -# the custom network AND with localhost. Nothing I tried worked and googling didn't help. They -# only ever mentioned one thing or using host.docker.internal which would cause the domains to -# be different. -# -# So instead we are running with both the mysql server and the backend server in host network mode. -# There may be a better way to do this but if it works, then it works. 
- version: "3.8" services: - db: - container_name: db + spiffworkflow-db: + container_name: spiffworkflow-db image: mysql:8.0.29 platform: linux/amd64 cap_add: @@ -22,7 +11,6 @@ services: - MYSQL_DATABASE=${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} - MYSQL_ROOT_PASSWORD=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw} - MYSQL_TCP_PORT=7003 - network_mode: host ports: - "7003" volumes: @@ -37,26 +25,25 @@ services: container_name: spiffworkflow-backend image: ghcr.io/sartography/spiffworkflow-backend depends_on: - db: + spiffworkflow-db: condition: service_healthy environment: - APPLICATION_ROOT=/ - SPIFFWORKFLOW_BACKEND_ENV=${SPIFFWORKFLOW_BACKEND_ENV:-development} - FLASK_DEBUG=0 - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key} - - OPEN_ID_SERVER_URL=${OPEN_ID_SERVER_URL:-http://localhost:7002} - - SPIFFWORKFLOW_FRONTEND_URL=${SPIFFWORKFLOW_FRONTEND_URL:-http://localhost:7001} - - SPIFFWORKFLOW_BACKEND_URL=${SPIFFWORKFLOW_BACKEND_URL:-http://localhost:7000} + - OPEN_ID_SERVER_URL=${OPEN_ID_SERVER_URL:-http://spiffworkflow-openid:7002} + - SPIFFWORKFLOW_FRONTEND_URL=${SPIFFWORKFLOW_FRONTEND_URL:-http://spiffworkflow-frontend:7001} + - SPIFFWORKFLOW_BACKEND_URL=${SPIFFWORKFLOW_BACKEND_URL:-http://spiffworkflow-backend:7000} - SPIFFWORKFLOW_BACKEND_PORT=7000 - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true - - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@localhost:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} + - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@spiffworkflow-db:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-false} - SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=${SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME:-acceptance_tests.yml} - RUN_BACKGROUND_SCHEDULER=true ports: - "7000:7000" - network_mode: host volumes: - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models - ./log:/app/log @@ -75,8 +62,8 @@ services: ports: - "7001:7001" - connector-proxy-demo: &connector-proxy-demo - container_name: connector-proxy-demo + spiffworkflow-connector: + container_name: spiffworkflow-connector image: ghcr.io/sartography/connector-proxy-demo environment: - FLASK_ENV=${FLASK_ENV:-development} @@ -84,7 +71,6 @@ services: - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key} ports: - "7004:7004" - network_mode: host healthcheck: test: curl localhost:7004/liveness --fail interval: 10s From 77f628426a5e2b88b71507a580b8fb19be5ad590 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 13:47:41 -0500 Subject: [PATCH 017/128] Save as report component --- .../ProcessInstanceListSaveAsReport.tsx | 86 +++++++++++++++++++ .../components/ProcessInstanceListTable.tsx | 14 +++ 2 files changed, 100 insertions(+) create mode 100644 spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx new file mode 100644 index 00000000..a642d4f9 --- /dev/null +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -0,0 +1,86 @@ +import { useState } from 'react'; +// TODO: 
carbon controls +/* +import { + Button, + Textbox, + // @ts-ignore +} from '@carbon/react'; +*/ +import HttpService from '../services/HttpService'; + +type OwnProps = { + onSuccess: (..._args: any[]) => any; + columns: string; + orderBy: string; + filterBy: string; + buttonText?: string; +}; + +export default function ProcessInstanceListSaveAsReport({ + onSuccess, + columns, + orderBy, + filterBy, + buttonText = 'Save as New Perspective', +}: OwnProps) { + const [identifier, setIdentifier] = useState(''); + + const hasIdentifier = () => { + return identifier?.length > 0; + }; + + const addProcessInstanceReport = (event: any) => { + event.preventDefault(); + + const columnArray = columns.split(',').map((column) => { + return { Header: column, accessor: column }; + }); + const orderByArray = orderBy.split(',').filter((n) => n); + + const filterByArray = filterBy + .split(',') + .map((filterByItem) => { + const [fieldName, fieldValue] = filterByItem.split('='); + if (fieldValue) { + return { + field_name: fieldName, + operator: 'equals', + field_value: fieldValue, + }; + } + return null; + }) + .filter((n) => n); + + HttpService.makeCallToBackend({ + path: `/process-instances/reports`, + successCallback: onSuccess, + httpMethod: 'POST', + postBody: { + identifier, + report_metadata: { + columns: columnArray, + order_by: orderByArray, + filter_by: filterByArray, + }, + }, + }); + }; + + return ( +
+ + +
+ ); +} diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index bd060af6..769bfea7 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -52,6 +52,7 @@ import { } from '../interfaces'; import ProcessModelSearch from './ProcessModelSearch'; import ProcessInstanceReportSearch from './ProcessInstanceReportSearch'; +import ProcessInstanceListSaveAsReport from './ProcessInstanceListSaveAsReport'; const REFRESH_INTERVAL = 5; const REFRESH_TIMEOUT = 600; @@ -764,6 +765,18 @@ export default function ProcessInstanceListTable({ return null; }; + const saveAsReportComponent = () => { + const callback = (_: any) => {}; + return ( + + ); + }; + const filterComponent = () => { if (!filtersEnabled) { return null; @@ -788,6 +801,7 @@ export default function ProcessInstanceListTable({ {filterOptions()} + {saveAsReportComponent()} ); }; From 0fb14f0e5717bbf070a827de9cdbaff1b4c0a3ae Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 13:56:45 -0500 Subject: [PATCH 018/128] WIP --- .../components/ProcessInstanceListSaveAsReport.tsx | 11 +++++------ .../src/components/ProcessInstanceListTable.tsx | 11 ++++++++--- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index a642d4f9..e225d2ff 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -11,7 +11,7 @@ import HttpService from '../services/HttpService'; type OwnProps = { onSuccess: (..._args: any[]) => any; - columns: string; + columnArray: { Header: string; accessor: string}; orderBy: string; filterBy: string; buttonText?: string; @@ -19,10 +19,10 @@ type OwnProps = { export default function ProcessInstanceListSaveAsReport({ onSuccess, - columns, + columnArray, orderBy, filterBy, - buttonText = 'Save as New Perspective', + buttonText = 'Save as Perspective', }: OwnProps) { const [identifier, setIdentifier] = useState(''); @@ -33,9 +33,8 @@ export default function ProcessInstanceListSaveAsReport({ const addProcessInstanceReport = (event: any) => { event.preventDefault(); - const columnArray = columns.split(',').map((column) => { - return { Header: column, accessor: column }; - }); + console.log(columnArray); + const orderByArray = orderBy.split(',').filter((n) => n); const filterByArray = filterBy diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 769bfea7..fcbe45ad 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -622,6 +622,10 @@ export default function ProcessInstanceListTable({ ); }; + const reportColumns = () => { + return (reportMetadata as any).columns; + }; + const buildTable = () => { const headerLabels: Record = { id: 'Id', @@ -636,7 +640,7 @@ export default function ProcessInstanceListTable({ const getHeaderLabel = (header: string) => { return headerLabels[header] ?? 
header; }; - const headers = (reportMetadata as any).columns.map((column: any) => { + const headers = reportColumns().map((column: any) => { // return {getHeaderLabel((column as any).Header)}; return getHeaderLabel((column as any).Header); }); @@ -710,7 +714,7 @@ export default function ProcessInstanceListTable({ }; const rows = processInstances.map((row: any) => { - const currentRow = (reportMetadata as any).columns.map((column: any) => { + const currentRow = reportColumns().map((column: any) => { return formattedColumn(row, column); }); return {currentRow}; @@ -766,11 +770,12 @@ export default function ProcessInstanceListTable({ }; const saveAsReportComponent = () => { + // TODO onSuccess reload/select the new report const callback = (_: any) => {}; return ( From d1e2558af1bf27f2cfb5be2396c90e1f85056931 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 13:59:38 -0500 Subject: [PATCH 019/128] Use current columns --- .../src/components/ProcessInstanceListSaveAsReport.tsx | 6 ++++-- .../src/components/ProcessInstanceListTable.tsx | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index e225d2ff..b1187e9e 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -11,7 +11,7 @@ import HttpService from '../services/HttpService'; type OwnProps = { onSuccess: (..._args: any[]) => any; - columnArray: { Header: string; accessor: string}; + columnArray: { Header: string; accessor: string }; orderBy: string; filterBy: string; buttonText?: string; @@ -79,7 +79,9 @@ export default function ProcessInstanceListSaveAsReport({ onChange={(e) => setIdentifier(e.target.value)} /> - + ); } diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index fcbe45ad..e98b5978 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -770,7 +770,7 @@ export default function ProcessInstanceListTable({ }; const saveAsReportComponent = () => { - // TODO onSuccess reload/select the new report + // TODO onSuccess reload/select the new report in the report search const callback = (_: any) => {}; return ( Date: Mon, 28 Nov 2022 14:14:35 -0500 Subject: [PATCH 020/128] Save selected process model --- .../ProcessInstanceListSaveAsReport.tsx | 29 +++++++------------ .../components/ProcessInstanceListTable.tsx | 2 +- 2 files changed, 12 insertions(+), 19 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index b1187e9e..35cb9223 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -7,13 +7,14 @@ import { // @ts-ignore } from '@carbon/react'; */ +import { ProcessModel } from '../interfaces'; import HttpService from '../services/HttpService'; type OwnProps = { onSuccess: (..._args: any[]) => any; columnArray: { Header: string; accessor: string }; orderBy: string; - filterBy: string; + processModelSelection: ProcessModel | null; buttonText?: string; }; @@ -21,7 +22,7 @@ export default function 
ProcessInstanceListSaveAsReport({ onSuccess, columnArray, orderBy, - filterBy, + processModelSelection, buttonText = 'Save as Perspective', }: OwnProps) { const [identifier, setIdentifier] = useState(''); @@ -33,24 +34,16 @@ export default function ProcessInstanceListSaveAsReport({ const addProcessInstanceReport = (event: any) => { event.preventDefault(); - console.log(columnArray); - const orderByArray = orderBy.split(',').filter((n) => n); - const filterByArray = filterBy - .split(',') - .map((filterByItem) => { - const [fieldName, fieldValue] = filterByItem.split('='); - if (fieldValue) { - return { - field_name: fieldName, - operator: 'equals', - field_value: fieldValue, - }; - } - return null; - }) - .filter((n) => n); + const filterByArray: any = []; + + if (processModelSelection) { + filterByArray.push({ + field_name: 'process_model_identifier', + field_value: processModelSelection.id, + }); + } HttpService.makeCallToBackend({ path: `/process-instances/reports`, diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index e98b5978..46bdae47 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -777,7 +777,7 @@ export default function ProcessInstanceListTable({ onSuccess={callback} columnArray={reportColumns()} orderBy="" - filterBy="" + processModelSelection={processModelSelection} /> ); }; From 5a99e9cc9ebf1c643d53053d7c7e2ecfbab3e874 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 16:46:19 -0500 Subject: [PATCH 021/128] Save first status --- .../src/components/ProcessInstanceListSaveAsReport.tsx | 9 +++++++++ .../src/components/ProcessInstanceListTable.tsx | 1 + 2 files changed, 10 insertions(+) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index 35cb9223..77e2ac37 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -15,6 +15,7 @@ type OwnProps = { columnArray: { Header: string; accessor: string }; orderBy: string; processModelSelection: ProcessModel | null; + processStatusSelection: string[]; buttonText?: string; }; @@ -23,6 +24,7 @@ export default function ProcessInstanceListSaveAsReport({ columnArray, orderBy, processModelSelection, + processStatusSelection, buttonText = 'Save as Perspective', }: OwnProps) { const [identifier, setIdentifier] = useState(''); @@ -45,6 +47,13 @@ export default function ProcessInstanceListSaveAsReport({ }); } + if (processStatusSelection.length > 0) { + filterByArray.push({ + field_name: 'process_status', + field_value: processStatusSelection[0], // TODO: support more than one status + }); + } + HttpService.makeCallToBackend({ path: `/process-instances/reports`, successCallback: onSuccess, diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 46bdae47..9ad23ac3 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -778,6 +778,7 @@ export default function ProcessInstanceListTable({ columnArray={reportColumns()} orderBy="" processModelSelection={processModelSelection} + 
processStatusSelection={processStatusSelection} /> ); }; From 0268d279e32dc4c7c52ca8c2c208b4892fa939b7 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 28 Nov 2022 17:03:00 -0500 Subject: [PATCH 022/128] some basics to add metadata to reports w/ burnettk cullerton --- .../models/process_instance_report.py | 10 ---- .../process_instance_metadata.bpmn | 40 ++++++++++++++ .../integration/test_process_api.py | 53 +++++++++++++++++++ 3 files changed, 93 insertions(+), 10 deletions(-) create mode 100644 spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py index 5cccf4a5..4f0b0f46 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py @@ -204,18 +204,8 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): user: UserModel, ) -> ProcessInstanceReportModel: """Create_with_attributes.""" - # <<<<<<< HEAD - # process_model = ProcessModelService.get_process_model( - # process_model_id=f"{process_model_identifier}" - # ) - # process_instance_report = cls( - # identifier=identifier, - # process_group_identifier="process_model.process_group_id", - # process_model_identifier=process_model.id, - # ======= process_instance_report = cls( identifier=identifier, - # >>>>>>> main created_by_id=user.id, report_metadata=report_metadata, ) diff --git a/spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn b/spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn new file mode 100644 index 00000000..f371a350 --- /dev/null +++ b/spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn @@ -0,0 +1,40 @@ + + + + + Flow_0fmt4q1 + + + + Flow_0fmt4q1 + Flow_0hhrkce + save_process_instance_metadata({"key1": "value1", "key2": "value2"}) + + + Flow_0hhrkce + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 5ee5ae9f..4c60cb8c 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -4,11 +4,13 @@ import json import os import time from typing import Any +from conftest import with_super_admin_user import pytest from flask.app import Flask from flask.testing import FlaskClient from flask_bpmn.models.db import db +from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec @@ -2544,3 +2546,54 @@ class TestProcessApi(BaseTest): # make sure the new subgroup does exist new_process_group = ProcessModelService.get_process_group(new_sub_path) assert new_process_group.id == new_sub_path + + def test_can_get_process_instance_list_with_report_metadata( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + process_model = load_test_spec( + 
process_model_id='test-process-instance-metadata-report', + bpmn_file_name='process_instance_metadata.bpmn', + process_model_source_directory='test-process-instance-metadata-report', + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=with_super_admin_user + ) + + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id + ).all() + assert len(process_instance_metadata) == 2 + + + report_metadata = { + "columns": [ + {"Header": "ID", "accessor": "id"}, + {"Header": "Status", "accessor": "status"}, + {"Header": "Key One", "accessor": "key1"}, + ], + "order_by": ["status"], + "filter_by": [], + } + process_instance_report = ProcessInstanceReportModel.create_with_attributes( + identifier="sure", + report_metadata=report_metadata, + user=with_super_admin_user, + ) + + response = client.get( + f"/v1.0/process-instances?report_identifier={process_instance_report.identifier}", + headers=self.logged_in_headers(with_super_admin_user), + ) + print(f"response.json: {response.json}") + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 1 + assert response.json["pagination"]["count"] == 1 + assert response.json["pagination"]["pages"] == 1 + assert response.json["pagination"]["total"] == 1 From b89608c528df46576b2589826641d8cd83daf4ad Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 17:07:28 -0500 Subject: [PATCH 023/128] Save dates --- .../ProcessInstanceListSaveAsReport.tsx | 36 +++++++++++ .../components/ProcessInstanceListTable.tsx | 63 +++++++++++++++---- 2 files changed, 86 insertions(+), 13 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index 77e2ac37..d23daed0 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -16,6 +16,10 @@ type OwnProps = { orderBy: string; processModelSelection: ProcessModel | null; processStatusSelection: string[]; + startFromSeconds: string | null; + startToSeconds: string | null; + endFromSeconds: string | null; + endToSeconds: string | null; buttonText?: string; }; @@ -25,6 +29,10 @@ export default function ProcessInstanceListSaveAsReport({ orderBy, processModelSelection, processStatusSelection, + startFromSeconds, + startToSeconds, + endFromSeconds, + endToSeconds, buttonText = 'Save as Perspective', }: OwnProps) { const [identifier, setIdentifier] = useState(''); @@ -54,6 +62,34 @@ export default function ProcessInstanceListSaveAsReport({ }); } + if (startFromSeconds) { + filterByArray.push({ + field_name: 'start_from', + field_value: startFromSeconds, + }); + } + + if (startToSeconds) { + filterByArray.push({ + field_name: 'start_to', + field_value: startToSeconds, + }); + } + + if (endFromSeconds) { + filterByArray.push({ + field_name: 'end_from', + field_value: endFromSeconds, + }); + } + + if (endToSeconds) { + filterByArray.push({ + field_name: 'end_to', + field_value: endToSeconds, + }); + } + HttpService.makeCallToBackend({ path: `/process-instances/reports`, successCallback: onSuccess, diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx 
index 9ad23ac3..32674a05 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -367,16 +367,7 @@ export default function ProcessInstanceListTable({ } }; - const applyFilter = (event: any) => { - event.preventDefault(); - const { page, perPage } = getPageInfoFromSearchParams( - searchParams, - undefined, - undefined, - paginationQueryParamPrefix - ); - let queryParamString = `per_page=${perPage}&page=${page}&user_filter=true`; - + const calculateStartAndEndSeconds = () => { const startFromSeconds = convertDateAndTimeStringsToSeconds( startFromDate, startFromTime || '00:00:00' @@ -393,28 +384,59 @@ export default function ProcessInstanceListTable({ endToDate, endToTime || '00:00:00' ); + let valid = true; if (isTrueComparison(startFromSeconds, '>', startToSeconds)) { setErrorMessage({ message: '"Start date from" cannot be after "start date to"', }); - return; + valid = false; } if (isTrueComparison(endFromSeconds, '>', endToSeconds)) { setErrorMessage({ message: '"End date from" cannot be after "end date to"', }); - return; + valid = false; } if (isTrueComparison(startFromSeconds, '>', endFromSeconds)) { setErrorMessage({ message: '"Start date from" cannot be after "end date from"', }); - return; + valid = false; } if (isTrueComparison(startToSeconds, '>', endToSeconds)) { setErrorMessage({ message: '"Start date to" cannot be after "end date to"', }); + valid = false; + } + + return { + valid, + startFromSeconds, + startToSeconds, + endFromSeconds, + endToSeconds, + }; + }; + + const applyFilter = (event: any) => { + event.preventDefault(); + const { page, perPage } = getPageInfoFromSearchParams( + searchParams, + undefined, + undefined, + paginationQueryParamPrefix + ); + let queryParamString = `per_page=${perPage}&page=${page}&user_filter=true`; + const { + valid, + startFromSeconds, + startToSeconds, + endFromSeconds, + endToSeconds, + } = calculateStartAndEndSeconds(); + + if (!valid) { return; } @@ -772,6 +794,17 @@ export default function ProcessInstanceListTable({ const saveAsReportComponent = () => { // TODO onSuccess reload/select the new report in the report search const callback = (_: any) => {}; + const { + valid, + startFromSeconds, + startToSeconds, + endFromSeconds, + endToSeconds, + } = calculateStartAndEndSeconds(); + + if (!valid) { + return null; + } return ( ); }; From 8a02232049df21c236f77237911296c6165de452 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 17:26:26 -0500 Subject: [PATCH 024/128] Choose new report --- .../src/components/ProcessInstanceListSaveAsReport.tsx | 8 +++++++- .../src/components/ProcessInstanceListTable.tsx | 7 ++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index d23daed0..6c8f5fb9 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -41,6 +41,12 @@ export default function ProcessInstanceListSaveAsReport({ return identifier?.length > 0; }; + const responseHandler = (result: any) => { + if (result.ok === true) { + onSuccess(identifier); + } + }; + const addProcessInstanceReport = (event: any) => { event.preventDefault(); @@ -92,7 +98,7 @@ export default function ProcessInstanceListSaveAsReport({ HttpService.makeCallToBackend({ path: 
`/process-instances/reports`, - successCallback: onSuccess, + successCallback: responseHandler, httpMethod: 'POST', postBody: { identifier, diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 32674a05..9b239502 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -367,6 +367,7 @@ export default function ProcessInstanceListTable({ } }; + // TODO: after factoring this out page hangs when invalid date ranges and applying the filter const calculateStartAndEndSeconds = () => { const startFromSeconds = convertDateAndTimeStringsToSeconds( startFromDate, @@ -793,7 +794,11 @@ export default function ProcessInstanceListTable({ const saveAsReportComponent = () => { // TODO onSuccess reload/select the new report in the report search - const callback = (_: any) => {}; + const callback = (identifier: string) => { + processInstanceReportDidChange({ + selectedItem: { id: identifier, display_name: identifier }, + }); + }; const { valid, startFromSeconds, From 385e8cea2bd3dcf7d9eeb1520bf81ef8b6b66c30 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 29 Nov 2022 15:59:46 -0500 Subject: [PATCH 025/128] WIP more metadata reporting w/ burnettk --- spiffworkflow-backend/: | 1908 +++++++++++++++++ .../routes/process_api_blueprint.py | 49 +- .../services/authentication_service.py | 5 +- .../process_instance_report_service.py | 19 + .../integration/test_process_api.py | 7 +- 5 files changed, 1977 insertions(+), 11 deletions(-) create mode 100644 spiffworkflow-backend/: diff --git a/spiffworkflow-backend/: b/spiffworkflow-backend/: new file mode 100644 index 00000000..5516fdae --- /dev/null +++ b/spiffworkflow-backend/: @@ -0,0 +1,1908 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json +import random +import string +import uuid +from typing import Any +from typing import Dict +from typing import Optional +from typing import TypedDict +from typing import Union + +import connexion # type: ignore +import flask.wrappers +import jinja2 +from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel +import werkzeug +from flask import Blueprint +from flask import current_app +from flask import g +from flask import jsonify +from flask import make_response +from flask import redirect +from flask import request +from flask.wrappers import Response +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db +from lxml import etree # type: ignore +from lxml.builder import ElementMaker # type: ignore +from SpiffWorkflow.task import Task as SpiffTask # type: ignore +from SpiffWorkflow.task import TaskState +from sqlalchemy import and_ +from sqlalchemy import asc +from sqlalchemy import desc +from sqlalchemy.orm import aliased, joinedload + +from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( + ProcessEntityNotFoundError, +) +from spiffworkflow_backend.models.active_task import ActiveTaskModel +from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel +from spiffworkflow_backend.models.file import FileSchema +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel +from spiffworkflow_backend.models.message_instance import MessageInstanceModel +from spiffworkflow_backend.models.message_model 
import MessageModel +from spiffworkflow_backend.models.message_triggerable_process_model import ( + MessageTriggerableProcessModel, +) +from spiffworkflow_backend.models.principal import PrincipalModel +from spiffworkflow_backend.models.process_group import ProcessGroup +from spiffworkflow_backend.models.process_group import ProcessGroupSchema +from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema +from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_report import ( + ProcessInstanceReportModel, +) +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema +from spiffworkflow_backend.models.secret_model import SecretModel +from spiffworkflow_backend.models.secret_model import SecretModelSchema +from spiffworkflow_backend.models.spec_reference import SpecReferenceCache +from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema +from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel +from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel +from spiffworkflow_backend.routes.user import verify_token +from spiffworkflow_backend.services.authorization_service import AuthorizationService +from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService +from spiffworkflow_backend.services.git_service import GitService +from spiffworkflow_backend.services.message_service import MessageService +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_instance_report_service import ( + ProcessInstanceReportFilter, +) +from spiffworkflow_backend.services.process_instance_report_service import ( + ProcessInstanceReportService, +) +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner +from spiffworkflow_backend.services.secret_service import SecretService +from spiffworkflow_backend.services.service_task_service import ServiceTaskService +from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.user_service import UserService + + +class TaskDataSelectOption(TypedDict): + """TaskDataSelectOption.""" + + value: str + label: str + + +class ReactJsonSchemaSelectOption(TypedDict): + """ReactJsonSchemaSelectOption.""" + + type: str + title: str + enum: list[str] + + +process_api_blueprint = Blueprint("process_api", __name__) + + +def status() -> flask.wrappers.Response: + """Status.""" + ProcessInstanceModel.query.filter().first() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.Response: + """Permissions_check.""" + if "requests_to_check" not in body: + raise ( + ApiError( + error_code="could_not_requests_to_check", + message="The key 
'requests_to_check' not found at root of request body.", + status_code=400, + ) + ) + + response_dict: dict[str, dict[str, bool]] = {} + requests_to_check = body["requests_to_check"] + + for target_uri, http_methods in requests_to_check.items(): + if target_uri not in response_dict: + response_dict[target_uri] = {} + + for http_method in http_methods: + permission_string = AuthorizationService.get_permission_from_http_method( + http_method + ) + if permission_string: + has_permission = AuthorizationService.user_has_permission( + user=g.user, + permission=permission_string, + target_uri=target_uri, + ) + response_dict[target_uri][http_method] = has_permission + + return make_response(jsonify({"results": response_dict}), 200) + + +def modify_process_model_id(process_model_id: str) -> str: + """Modify_process_model_id.""" + return process_model_id.replace("/", ":") + + +def un_modify_modified_process_model_id(modified_process_model_id: str) -> str: + """Un_modify_modified_process_model_id.""" + return modified_process_model_id.replace(":", "/") + + +def process_group_add(body: dict) -> flask.wrappers.Response: + """Add_process_group.""" + process_group = ProcessGroup(**body) + ProcessModelService.add_process_group(process_group) + return make_response(jsonify(process_group), 201) + + +def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response: + """Process_group_delete.""" + process_group_id = un_modify_modified_process_model_id(modified_process_group_id) + ProcessModelService().process_group_delete(process_group_id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_group_update( + modified_process_group_id: str, body: dict +) -> flask.wrappers.Response: + """Process Group Update.""" + body_include_list = ["display_name", "description"] + body_filtered = { + include_item: body[include_item] + for include_item in body_include_list + if include_item in body + } + + process_group_id = un_modify_modified_process_model_id(modified_process_group_id) + process_group = ProcessGroup(id=process_group_id, **body_filtered) + ProcessModelService.update_process_group(process_group) + return make_response(jsonify(process_group), 200) + + +def process_group_list( + process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Process_group_list.""" + if process_group_identifier is not None: + process_groups = ProcessModelService.get_process_groups( + process_group_identifier + ) + else: + process_groups = ProcessModelService.get_process_groups() + batch = ProcessModelService().get_batch( + items=process_groups, page=page, per_page=per_page + ) + pages = len(process_groups) // per_page + remainder = len(process_groups) % per_page + if remainder > 0: + pages += 1 + + response_json = { + "results": ProcessGroupSchema(many=True).dump(batch), + "pagination": { + "count": len(batch), + "total": len(process_groups), + "pages": pages, + }, + } + return Response(json.dumps(response_json), status=200, mimetype="application/json") + + +def process_group_show( + modified_process_group_id: str, +) -> Any: + """Process_group_show.""" + process_group_id = un_modify_modified_process_model_id(modified_process_group_id) + try: + process_group = ProcessModelService.get_process_group(process_group_id) + except ProcessEntityNotFoundError as exception: + raise ( + ApiError( + error_code="process_group_cannot_be_found", + message=f"Process group cannot be found: {process_group_id}", + 
status_code=400, + ) + ) from exception + + process_group.parent_groups = ProcessModelService.get_parent_group_array( + process_group.id + ) + return make_response(jsonify(process_group), 200) + + +def process_group_move( + modified_process_group_identifier: str, new_location: str +) -> flask.wrappers.Response: + """Process_group_move.""" + original_process_group_id = un_modify_modified_process_model_id( + modified_process_group_identifier + ) + new_process_group = ProcessModelService().process_group_move( + original_process_group_id, new_location + ) + return make_response(jsonify(new_process_group), 201) + + +def process_model_create( + modified_process_group_id: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Process_model_create.""" + process_model_info = ProcessModelInfoSchema().load(body) + if modified_process_group_id is None: + raise ApiError( + error_code="process_group_id_not_specified", + message="Process Model could not be created when process_group_id path param is unspecified", + status_code=400, + ) + if process_model_info is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=f"Process Model could not be created from given body: {body}", + status_code=400, + ) + + unmodified_process_group_id = un_modify_modified_process_model_id( + modified_process_group_id + ) + process_group = ProcessModelService.get_process_group(unmodified_process_group_id) + if process_group is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=f"Process Model could not be created from given body because Process Group could not be found: {body}", + status_code=400, + ) + + ProcessModelService.add_process_model(process_model_info) + return Response( + json.dumps(ProcessModelInfoSchema().dump(process_model_info)), + status=201, + mimetype="application/json", + ) + + +def process_model_delete( + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Process_model_delete.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + # process_model_identifier = f"{process_group_id}/{process_model_id}" + ProcessModelService().process_model_delete(process_model_identifier) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_model_update( + modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] +) -> Any: + """Process_model_update.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + body_include_list = [ + "display_name", + "primary_file_name", + "primary_process_id", + "description", + ] + body_filtered = { + include_item: body[include_item] + for include_item in body_include_list + if include_item in body + } + + # process_model_identifier = f"{process_group_id}/{process_model_id}" + process_model = get_process_model(process_model_identifier) + ProcessModelService.update_process_model(process_model, body_filtered) + return ProcessModelInfoSchema().dump(process_model) + + +def process_model_show(modified_process_model_identifier: str) -> Any: + """Process_model_show.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + # process_model_identifier = f"{process_group_id}/{process_model_id}" + process_model = get_process_model(process_model_identifier) + # TODO: Temporary. 
Should not need the next line once models have correct ids + # process_model.id = process_model_identifier + files = sorted(SpecFileService.get_files(process_model)) + process_model.files = files + for file in process_model.files: + file.references = SpecFileService.get_references_for_file(file, process_model) + + process_model.parent_groups = ProcessModelService.get_parent_group_array( + process_model.id + ) + return make_response(jsonify(process_model), 200) + + +def process_model_move( + modified_process_model_identifier: str, new_location: str +) -> flask.wrappers.Response: + """Process_model_move.""" + original_process_model_id = un_modify_modified_process_model_id( + modified_process_model_identifier + ) + new_process_model = ProcessModelService().process_model_move( + original_process_model_id, new_location + ) + return make_response(jsonify(new_process_model), 201) + + +def process_model_list( + process_group_identifier: Optional[str] = None, + recursive: Optional[bool] = False, + filter_runnable_by_user: Optional[bool] = False, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Process model list!""" + process_models = ProcessModelService.get_process_models( + process_group_id=process_group_identifier, + recursive=recursive, + filter_runnable_by_user=filter_runnable_by_user, + ) + batch = ProcessModelService().get_batch( + process_models, page=page, per_page=per_page + ) + pages = len(process_models) // per_page + remainder = len(process_models) % per_page + if remainder > 0: + pages += 1 + response_json = { + "results": ProcessModelInfoSchema(many=True).dump(batch), + "pagination": { + "count": len(batch), + "total": len(process_models), + "pages": pages, + }, + } + return Response(json.dumps(response_json), status=200, mimetype="application/json") + + +def process_list() -> Any: + """Returns a list of all known processes. + + This includes processes that are not the + primary process - helpful for finding possible call activities. 
+ """ + references = SpecReferenceCache.query.filter_by(type="process").all() + return SpecReferenceSchema(many=True).dump(references) + + +def get_file(modified_process_model_id: str, file_name: str) -> Any: + """Get_file.""" + process_model_identifier = modified_process_model_id.replace(":", "/") + process_model = get_process_model(process_model_identifier) + files = SpecFileService.get_files(process_model, file_name) + if len(files) == 0: + raise ApiError( + error_code="unknown file", + message=f"No information exists for file {file_name}" + f" it does not exist in workflow {process_model_identifier}.", + status_code=404, + ) + + file = files[0] + file_contents = SpecFileService.get_data(process_model, file.name) + file.file_contents = file_contents + file.process_model_id = process_model.id + # file.process_group_id = process_model.process_group_id + return FileSchema().dump(file) + + +def process_model_file_update( + modified_process_model_id: str, file_name: str +) -> flask.wrappers.Response: + """Process_model_file_update.""" + process_model_identifier = modified_process_model_id.replace(":", "/") + # process_model_identifier = f"{process_group_id}/{process_model_id}" + process_model = get_process_model(process_model_identifier) + + request_file = get_file_from_request() + request_file_contents = request_file.stream.read() + if not request_file_contents: + raise ApiError( + error_code="file_contents_empty", + message="Given request file does not have any content", + status_code=400, + ) + + SpecFileService.update_file(process_model, file_name, request_file_contents) + + if current_app.config["GIT_COMMIT_ON_SAVE"]: + git_output = GitService.commit( + message=f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}" + ) + current_app.logger.info(f"git output: {git_output}") + else: + current_app.logger.info("Git commit on save is disabled") + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_model_file_delete( + modified_process_model_id: str, file_name: str +) -> flask.wrappers.Response: + """Process_model_file_delete.""" + process_model_identifier = modified_process_model_id.replace(":", "/") + process_model = get_process_model(process_model_identifier) + try: + SpecFileService.delete_file(process_model, file_name) + except FileNotFoundError as exception: + raise ( + ApiError( + error_code="process_model_file_cannot_be_found", + message=f"Process model file cannot be found: {file_name}", + status_code=400, + ) + ) from exception + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def add_file(modified_process_model_id: str) -> flask.wrappers.Response: + """Add_file.""" + process_model_identifier = modified_process_model_id.replace(":", "/") + process_model = get_process_model(process_model_identifier) + request_file = get_file_from_request() + if not request_file.filename: + raise ApiError( + error_code="could_not_get_filename", + message="Could not get filename from request", + status_code=400, + ) + + file = SpecFileService.add_file( + process_model, request_file.filename, request_file.stream.read() + ) + file_contents = SpecFileService.get_data(process_model, file.name) + file.file_contents = file_contents + file.process_model_id = process_model.id + return Response( + json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json" + ) + + +def process_instance_create(modified_process_model_id: str) -> flask.wrappers.Response: + 
"""Create_process_instance.""" + process_model_identifier = un_modify_modified_process_model_id( + modified_process_model_id + ) + process_instance = ( + ProcessInstanceService.create_process_instance_from_process_model_identifier( + process_model_identifier, g.user + ) + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=201, + mimetype="application/json", + ) + + +def process_instance_run( + modified_process_model_identifier: str, + process_instance_id: int, + do_engine_steps: bool = True, +) -> flask.wrappers.Response: + """Process_instance_run.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + + if do_engine_steps: + try: + processor.do_engine_steps() + except ApiError as e: + ErrorHandlingService().handle_error(processor, e) + raise e + except Exception as e: + ErrorHandlingService().handle_error(processor, e) + task = processor.bpmn_process_instance.last_task + raise ApiError.from_task( + error_code="unknown_exception", + message=f"An unknown error occurred. Original error: {e}", + status_code=400, + task=task, + ) from e + processor.save() + + if not current_app.config["RUN_BACKGROUND_SCHEDULER"]: + MessageService.process_message_instances() + + process_instance_api = ProcessInstanceService.processor_to_process_instance_api( + processor + ) + process_instance_data = processor.get_data() + process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) + process_instance_metadata["data"] = process_instance_data + return Response( + json.dumps(process_instance_metadata), status=200, mimetype="application/json" + ) + + +def process_instance_terminate( + process_instance_id: int, +) -> flask.wrappers.Response: + """Process_instance_run.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + processor.terminate() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_suspend( + process_instance_id: int, +) -> flask.wrappers.Response: + """Process_instance_suspend.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + processor.suspend() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_resume( + process_instance_id: int, +) -> flask.wrappers.Response: + """Process_instance_resume.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + processor.resume() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_log_list( + process_instance_id: int, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Process_instance_log_list.""" + # to make sure the process instance exists + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + logs = ( + SpiffLoggingModel.query.filter( + SpiffLoggingModel.process_instance_id == process_instance.id + ) + .order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore + .join( + UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True + ) # isouter since if we don't have a user, we still want the log + .add_columns( + UserModel.username, + ) + 
.paginate(page=page, per_page=per_page, error_out=False) + ) + + response_json = { + "results": logs.items, + "pagination": { + "count": len(logs.items), + "total": logs.total, + "pages": logs.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +def message_instance_list( + process_instance_id: Optional[int] = None, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Message_instance_list.""" + # to make sure the process instance exists + message_instances_query = MessageInstanceModel.query + + if process_instance_id: + message_instances_query = message_instances_query.filter_by( + process_instance_id=process_instance_id + ) + + message_instances = ( + message_instances_query.order_by( + MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore + MessageInstanceModel.id.desc(), # type: ignore + ) + .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id) + .join(ProcessInstanceModel) + .add_columns( + MessageModel.identifier.label("message_identifier"), + ProcessInstanceModel.process_model_identifier, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + + for message_instance in message_instances: + message_correlations: dict = {} + for ( + mcmi + ) in ( + message_instance.MessageInstanceModel.message_correlations_message_instances + ): + mc = MessageCorrelationModel.query.filter_by( + id=mcmi.message_correlation_id + ).all() + for m in mc: + if m.name not in message_correlations: + message_correlations[m.name] = {} + message_correlations[m.name][ + m.message_correlation_property.identifier + ] = m.value + message_instance.MessageInstanceModel.message_correlations = ( + message_correlations + ) + + response_json = { + "results": message_instances.items, + "pagination": { + "count": len(message_instances.items), + "total": message_instances.total, + "pages": message_instances.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +# body: { +# payload: dict, +# process_instance_id: Optional[int], +# } +def message_start( + message_identifier: str, + body: Dict[str, Any], +) -> flask.wrappers.Response: + """Message_start.""" + message_model = MessageModel.query.filter_by(identifier=message_identifier).first() + if message_model is None: + raise ( + ApiError( + error_code="unknown_message", + message=f"Could not find message with identifier: {message_identifier}", + status_code=404, + ) + ) + + if "payload" not in body: + raise ( + ApiError( + error_code="missing_payload", + message="Body is missing payload.", + status_code=400, + ) + ) + + process_instance = None + if "process_instance_id" in body: + # to make sure we have a valid process_instance_id + process_instance = find_process_instance_by_id_or_raise( + body["process_instance_id"] + ) + + message_instance = MessageInstanceModel.query.filter_by( + process_instance_id=process_instance.id, + message_model_id=message_model.id, + message_type="receive", + status="ready", + ).first() + if message_instance is None: + raise ( + ApiError( + error_code="cannot_find_waiting_message", + message=f"Could not find waiting message for identifier {message_identifier} " + f"and process instance {process_instance.id}", + status_code=400, + ) + ) + MessageService.process_message_receive( + message_instance, message_model.name, body["payload"] + ) + + else: + message_triggerable_process_model = ( + MessageTriggerableProcessModel.query.filter_by( + message_model_id=message_model.id + ).first() + ) + + if message_triggerable_process_model is None: + 
raise ( + ApiError( + error_code="cannot_start_message", + message=f"Message with identifier cannot be start with message: {message_identifier}", + status_code=400, + ) + ) + + process_instance = MessageService.process_message_triggerable_process_model( + message_triggerable_process_model, + message_model.name, + body["payload"], + g.user, + ) + + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) + + +def process_instance_list( + process_model_identifier: Optional[str] = None, + page: int = 1, + per_page: int = 100, + start_from: Optional[int] = None, + start_to: Optional[int] = None, + end_from: Optional[int] = None, + end_to: Optional[int] = None, + process_status: Optional[str] = None, + initiated_by_me: Optional[bool] = None, + with_tasks_completed_by_me: Optional[bool] = None, + with_tasks_completed_by_my_group: Optional[bool] = None, + user_filter: Optional[bool] = False, + report_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """Process_instance_list.""" + process_instance_report = ProcessInstanceReportService.report_with_identifier( + g.user, report_identifier + ) + + if user_filter: + report_filter = ProcessInstanceReportFilter( + process_model_identifier, + start_from, + start_to, + end_from, + end_to, + process_status.split(",") if process_status else None, + initiated_by_me, + with_tasks_completed_by_me, + with_tasks_completed_by_my_group, + ) + else: + report_filter = ( + ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report, + process_model_identifier, + start_from, + start_to, + end_from, + end_to, + process_status, + initiated_by_me, + with_tasks_completed_by_me, + with_tasks_completed_by_my_group, + ) + ) + + # process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier) + process_instance_query = ProcessInstanceModel.query + # Always join that hot user table for good performance at serialization time. + process_instance_query = process_instance_query.options( + joinedload(ProcessInstanceModel.process_initiator) + ) + + if report_filter.process_model_identifier is not None: + process_model = get_process_model( + f"{report_filter.process_model_identifier}", + ) + + process_instance_query = process_instance_query.filter_by( + process_model_identifier=process_model.id + ) + + # this can never happen. obviously the class has the columns it defines. this is just to appease mypy. 
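The filter section below follows one pattern throughout: start from a base query and conditionally chain a single `.filter()` per optional report parameter, so that absent parameters leave the query untouched. A minimal, self-contained sketch of that pattern, assuming a simplified `Instance` stand-in rather than the real `ProcessInstanceModel`:

    from typing import Optional

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    class Instance(Base):
        """Illustrative stand-in for ProcessInstanceModel."""

        __tablename__ = "instance"
        id = Column(Integer, primary_key=True)
        status = Column(String(20))
        start_in_seconds = Column(Integer)
        end_in_seconds = Column(Integer)


    def filtered_query(
        session: Session,
        start_from: Optional[int] = None,
        end_to: Optional[int] = None,
        process_status: Optional[list] = None,
    ):
        """Chain one filter per supplied parameter; skip the rest."""
        query = session.query(Instance)
        if start_from is not None:
            query = query.filter(Instance.start_in_seconds >= start_from)
        if end_to is not None:
            query = query.filter(Instance.end_in_seconds <= end_to)
        if process_status is not None:
            query = query.filter(Instance.status.in_(process_status))
        return query


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add_all(
            [
                Instance(status="complete", start_in_seconds=100, end_in_seconds=200),
                Instance(status="error", start_in_seconds=300, end_in_seconds=400),
            ]
        )
        session.commit()
        # only the second instance starts after 250 with status "error"
        print(filtered_query(session, start_from=250, process_status=["error"]).count())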
+ if ( + ProcessInstanceModel.start_in_seconds is None + or ProcessInstanceModel.end_in_seconds is None + ): + raise ( + ApiError( + error_code="unexpected_condition", + message="Something went very wrong", + status_code=500, + ) + ) + + if report_filter.start_from is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.start_in_seconds >= report_filter.start_from + ) + if report_filter.start_to is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.start_in_seconds <= report_filter.start_to + ) + if report_filter.end_from is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.end_in_seconds >= report_filter.end_from + ) + if report_filter.end_to is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.end_in_seconds <= report_filter.end_to + ) + if report_filter.process_status is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore + ) + + if report_filter.initiated_by_me is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore + ) + process_instance_query = process_instance_query.filter_by( + process_initiator=g.user + ) + + # TODO: not sure if this is exactly what is wanted + if report_filter.with_tasks_completed_by_me is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore + ) + # process_instance_query = process_instance_query.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) + # process_instance_query = process_instance_query.add_columns(UserModel.username) + # search for process_instance.UserModel.username in this file for more details about why adding columns is annoying. 
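To expand on the comment above: the reason `add_columns` is awkward is that the query stops yielding plain model instances -- each result becomes a `Row` (a named tuple) carrying the entity plus every extra column, so serialization code has to unpack it by name. A small sketch of that unpacking, again with illustrative stand-ins for the real models:

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    class User(Base):
        """Illustrative stand-in for UserModel."""

        __tablename__ = "user"
        id = Column(Integer, primary_key=True)
        username = Column(String(50))


    class Instance(Base):
        """Illustrative stand-in for ProcessInstanceModel."""

        __tablename__ = "instance"
        id = Column(Integer, primary_key=True)
        process_initiator_id = Column(Integer, ForeignKey("user.id"))


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(User(id=1, username="dan"))
        session.add(Instance(id=7, process_initiator_id=1))
        session.commit()

        rows = (
            session.query(Instance)
            .join(User, User.id == Instance.process_initiator_id)
            .add_columns(User.username)
            .all()
        )
        for row in rows:
            # row is not an Instance: the entity and the extra column
            # ride side by side and must be addressed separately
            print(row.Instance.id, row.username)  # -> 7 dan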
+ + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.process_initiator_id != g.user.id + ) + process_instance_query = process_instance_query.join( + SpiffStepDetailsModel, + ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, + ) + process_instance_query = process_instance_query.join( + SpiffLoggingModel, + ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.message.contains("COMPLETED") # type: ignore + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step + ) + process_instance_query = process_instance_query.filter( + SpiffStepDetailsModel.completed_by_user_id == g.user.id + ) + + if report_filter.with_tasks_completed_by_my_group is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore + ) + process_instance_query = process_instance_query.join( + SpiffStepDetailsModel, + ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, + ) + process_instance_query = process_instance_query.join( + SpiffLoggingModel, + ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.message.contains("COMPLETED") # type: ignore + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step + ) + process_instance_query = process_instance_query.join( + GroupModel, + GroupModel.id == SpiffStepDetailsModel.lane_assignment_id, + ) + process_instance_query = process_instance_query.join( + UserGroupAssignmentModel, + UserGroupAssignmentModel.group_id == GroupModel.id, + ) + process_instance_query = process_instance_query.filter( + UserGroupAssignmentModel.user_id == g.user.id + ) + + # userSkillF = aliased(UserSkill) + # userSkillI = aliased(UserSkill) + + import pdb; pdb.set_trace() + for column in process_instance_report.report_metadata['columns']: + print(f"column: {column['accessor']}") + # process_instance_query = process_instance_query.outerjoin(ProcessInstanceMetadataModel, ProcessInstanceModel.id == ProcessInstanceMetadataModel.process_instance_id, ProcessInstanceMetadataModel.key == column['accessor']) + instance_metadata_alias = alias(ProcessInstanceMetadataModel) + process_instance_query = ( + process_instance_query.outerjoin(instance_metadata_alias, ProcessInstanceModel.id == instance_metadata_alias.process_instance_id) + .add_column(ProcessInstanceMetadataModel.value.label(column['accessor'])) + ) + import pdb; pdb.set_trace() + + process_instances = ( + process_instance_query.group_by(ProcessInstanceModel.id) + .order_by( + ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + import pdb; pdb.set_trace() + + # def awesome_serialize(process_instance) + # dict_thing = process_instance.serialize + # + # # add columns since we have access to columns here + # dict_thing['awesome'] = 'awesome' + # + # return dict_thing + + results = list( + map( + ProcessInstanceService.serialize_flat_with_task_data, + process_instances.items, + ) + ) + report_metadata = process_instance_report.report_metadata + + response_json = { + "report_identifier": process_instance_report.identifier, + "report_metadata": report_metadata, + "results": 
results, + "filters": report_filter.to_dict(), + "pagination": { + "count": len(results), + "total": process_instances.total, + "pages": process_instances.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +def process_instance_show( + modified_process_model_identifier: str, process_instance_id: int +) -> flask.wrappers.Response: + """Create_process_instance.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + current_version_control_revision = GitService.get_current_revision() + process_model = get_process_model(process_model_identifier) + + if process_model.primary_file_name: + if ( + process_instance.bpmn_version_control_identifier + == current_version_control_revision + ): + bpmn_xml_file_contents = SpecFileService.get_data( + process_model, process_model.primary_file_name + ) + else: + bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision( + process_model, process_instance.bpmn_version_control_identifier + ) + process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents + + return make_response(jsonify(process_instance), 200) + + +def process_instance_delete(process_instance_id: int) -> flask.wrappers.Response: + """Create_process_instance.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + # (Pdb) db.session.delete + # > + db.session.query(SpiffLoggingModel).filter_by( + process_instance_id=process_instance.id + ).delete() + db.session.query(SpiffStepDetailsModel).filter_by( + process_instance_id=process_instance.id + ).delete() + db.session.delete(process_instance) + db.session.commit() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_report_list( + page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Process_instance_report_list.""" + process_instance_reports = ProcessInstanceReportModel.query.filter_by( + created_by_id=g.user.id, + ).all() + + return make_response(jsonify(process_instance_reports), 200) + + +def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response: + """Process_instance_report_create.""" + ProcessInstanceReportModel.create_report( + identifier=body["identifier"], + user=g.user, + report_metadata=body["report_metadata"], + ) + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_report_update( + report_identifier: str, + body: Dict[str, Any], +) -> flask.wrappers.Response: + """Process_instance_report_create.""" + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=report_identifier, + created_by_id=g.user.id, + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance report", + status_code=404, + ) + + process_instance_report.report_metadata = body["report_metadata"] + db.session.commit() + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_report_delete( + report_identifier: str, +) -> flask.wrappers.Response: + """Process_instance_report_create.""" + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=report_identifier, + created_by_id=g.user.id, + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance 
report", + status_code=404, + ) + + db.session.delete(process_instance_report) + db.session.commit() + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def service_tasks_show() -> flask.wrappers.Response: + """Service_tasks_show.""" + available_connectors = ServiceTaskService.available_connectors() + print(available_connectors) + + return Response( + json.dumps(available_connectors), status=200, mimetype="application/json" + ) + + +def authentication_list() -> flask.wrappers.Response: + """Authentication_list.""" + available_authentications = ServiceTaskService.authentication_list() + response_json = { + "results": available_authentications, + "connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"], + "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback", + } + + return Response(json.dumps(response_json), status=200, mimetype="application/json") + + +def authentication_callback( + service: str, + auth_method: str, +) -> werkzeug.wrappers.Response: + """Authentication_callback.""" + verify_token(request.args.get("token"), force_run=True) + response = request.args["response"] + SecretService().update_secret( + f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True + ) + return redirect( + f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration" + ) + + +def process_instance_report_show( + report_identifier: str, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Process_instance_list.""" + process_instances = ProcessInstanceModel.query.order_by( # .filter_by(process_model_identifier=process_model.id) + ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore + ).paginate( + page=page, per_page=per_page, error_out=False + ) + + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=report_identifier, + created_by_id=g.user.id, + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance report", + status_code=404, + ) + + substitution_variables = request.args.to_dict() + result_dict = process_instance_report.generate_report( + process_instances.items, substitution_variables + ) + + # update this if we go back to a database query instead of filtering in memory + result_dict["pagination"] = { + "count": len(result_dict["results"]), + "total": len(result_dict["results"]), + "pages": 1, + } + + return Response(json.dumps(result_dict), status=200, mimetype="application/json") + + +# TODO: see comment for before_request +# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"]) +def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: + """Task_list_my_tasks.""" + principal = find_principal_or_raise() + active_tasks = ( + ActiveTaskModel.query.order_by(desc(ActiveTaskModel.id)) # type: ignore + .join(ProcessInstanceModel) + .join(ActiveTaskUserModel) + .filter_by(user_id=principal.user_id) + # just need this add_columns to add the process_model_identifier. Then add everything back that was removed. 
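One shared convention worth calling out before the task-list queries that follow (including the `.add_columns` call the comment above introduces): every list endpoint in this file returns the same `results` plus `count`/`total`/`pages` envelope that flask-sqlalchemy's `paginate` feeds. A pure-Python sketch of that envelope, using a hypothetical helper name rather than anything in the codebase:

    def pagination_envelope(items: list, page: int, per_page: int) -> dict:
        """Hypothetical helper mirroring the response shape used here."""
        total = len(items)
        pages = total // per_page + (1 if total % per_page else 0)
        batch = items[(page - 1) * per_page : page * per_page]
        return {
            "results": batch,
            "pagination": {"count": len(batch), "total": total, "pages": pages},
        }


    # 7 items, 3 per page -> page 2 holds [3, 4, 5] and pages == 3
    print(pagination_envelope(list(range(7)), page=2, per_page=3))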
+ .add_columns( + ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.process_model_display_name, + ProcessInstanceModel.status, + ActiveTaskModel.task_name, + ActiveTaskModel.task_title, + ActiveTaskModel.task_type, + ActiveTaskModel.task_status, + ActiveTaskModel.task_id, + ActiveTaskModel.id, + ActiveTaskModel.process_model_display_name, + ActiveTaskModel.process_instance_id, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + tasks = [ActiveTaskModel.to_task(active_task) for active_task in active_tasks.items] + + response_json = { + "results": tasks, + "pagination": { + "count": len(active_tasks.items), + "total": active_tasks.total, + "pages": active_tasks.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +def task_list_for_my_open_processes( + page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Task_list_for_my_open_processes.""" + return get_tasks(page=page, per_page=per_page) + + +def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: + """Task_list_for_processes_started_by_others.""" + return get_tasks( + processes_started_by_user=False, + has_lane_assignment_id=False, + page=page, + per_page=per_page, + ) + + +def task_list_for_my_groups( + page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Task_list_for_processes_started_by_others.""" + return get_tasks(processes_started_by_user=False, page=page, per_page=per_page) + + +def get_tasks( + processes_started_by_user: bool = True, + has_lane_assignment_id: bool = True, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Get_tasks.""" + user_id = g.user.id + + # use distinct to ensure we only get one row per active task otherwise + # we can get back multiple for the same active task row which throws off + # pagination later on + # https://stackoverflow.com/q/34582014/6090676 + active_tasks_query = ( + ActiveTaskModel.query.distinct() + .outerjoin(GroupModel, GroupModel.id == ActiveTaskModel.lane_assignment_id) + .join(ProcessInstanceModel) + .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) + ) + + if processes_started_by_user: + active_tasks_query = active_tasks_query.filter( + ProcessInstanceModel.process_initiator_id == user_id + ).outerjoin( + ActiveTaskUserModel, + and_( + ActiveTaskUserModel.user_id == user_id, + ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, + ), + ) + else: + active_tasks_query = active_tasks_query.filter( + ProcessInstanceModel.process_initiator_id != user_id + ).join( + ActiveTaskUserModel, + and_( + ActiveTaskUserModel.user_id == user_id, + ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, + ), + ) + if has_lane_assignment_id: + active_tasks_query = active_tasks_query.filter( + ActiveTaskModel.lane_assignment_id.is_not(None) # type: ignore + ) + else: + active_tasks_query = active_tasks_query.filter(ActiveTaskModel.lane_assignment_id.is_(None)) # type: ignore + + active_tasks = active_tasks_query.add_columns( + ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.status.label("process_instance_status"), # type: ignore + ProcessInstanceModel.updated_at_in_seconds, + ProcessInstanceModel.created_at_in_seconds, + UserModel.username, + GroupModel.identifier.label("group_identifier"), + ActiveTaskModel.task_name, + ActiveTaskModel.task_title, + ActiveTaskModel.process_model_display_name, + ActiveTaskModel.process_instance_id, + ActiveTaskUserModel.user_id.label("current_user_is_potential_owner"), + 
).paginate(page=page, per_page=per_page, error_out=False) + + response_json = { + "results": active_tasks.items, + "pagination": { + "count": len(active_tasks.items), + "total": active_tasks.total, + "pages": active_tasks.pages, + }, + } + return make_response(jsonify(response_json), 200) + + +def process_instance_task_list( + modified_process_model_id: str, + process_instance_id: int, + all_tasks: bool = False, + spiff_step: int = 0, +) -> flask.wrappers.Response: + """Process_instance_task_list.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + if spiff_step > 0: + step_detail = ( + db.session.query(SpiffStepDetailsModel) + .filter( + SpiffStepDetailsModel.process_instance_id == process_instance.id, + SpiffStepDetailsModel.spiff_step == spiff_step, + ) + .first() + ) + if step_detail is not None and process_instance.bpmn_json is not None: + bpmn_json = json.loads(process_instance.bpmn_json) + bpmn_json["tasks"] = step_detail.task_json + process_instance.bpmn_json = json.dumps(bpmn_json) + + processor = ProcessInstanceProcessor(process_instance) + + spiff_tasks = None + if all_tasks: + spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) + else: + spiff_tasks = processor.get_all_user_tasks() + + tasks = [] + for spiff_task in spiff_tasks: + task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) + task.data = spiff_task.data + tasks.append(task) + + return make_response(jsonify(tasks), 200) + + +def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: + """Task_show.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + if process_instance.status == ProcessInstanceStatus.suspended.value: + raise ApiError( + error_code="error_suspended", + message="The process instance is suspended", + status_code=400, + ) + + process_model = get_process_model( + process_instance.process_model_identifier, + ) + + form_schema_file_name = "" + form_ui_schema_file_name = "" + spiff_task = get_spiff_task_from_process_instance(task_id, process_instance) + extensions = spiff_task.task_spec.extensions + + if "properties" in extensions: + properties = extensions["properties"] + if "formJsonSchemaFilename" in properties: + form_schema_file_name = properties["formJsonSchemaFilename"] + if "formUiSchemaFilename" in properties: + form_ui_schema_file_name = properties["formUiSchemaFilename"] + task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) + task.data = spiff_task.data + task.process_model_display_name = process_model.display_name + task.process_model_identifier = process_model.id + process_model_with_form = process_model + + if task.type == "User Task": + if not form_schema_file_name: + raise ( + ApiError( + error_code="missing_form_file", + message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}", + status_code=400, + ) + ) + + form_contents = prepare_form_data( + form_schema_file_name, + task.data, + process_model_with_form, + ) + + try: + # form_contents is a str + form_dict = json.loads(form_contents) + except Exception as exception: + raise ( + ApiError( + error_code="error_loading_form", + message=f"Could not load form schema from: {form_schema_file_name}. 
Error was: {str(exception)}", + status_code=400, + ) + ) from exception + + if task.data: + _update_form_schema_with_task_data_as_needed(form_dict, task.data) + + if form_contents: + task.form_schema = form_dict + + if form_ui_schema_file_name: + ui_form_contents = prepare_form_data( + form_ui_schema_file_name, + task.data, + process_model_with_form, + ) + if ui_form_contents: + task.form_ui_schema = ui_form_contents + + if task.properties and task.data and "instructionsForEndUser" in task.properties: + print( + f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}" + ) + if task.properties["instructionsForEndUser"]: + task.properties["instructionsForEndUser"] = render_jinja_template( + task.properties["instructionsForEndUser"], task.data + ) + return make_response(jsonify(task), 200) + + +def task_submit( + process_instance_id: int, + task_id: str, + body: Dict[str, Any], + terminate_loop: bool = False, +) -> flask.wrappers.Response: + """Task_submit_user_data.""" + principal = find_principal_or_raise() + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + processor = ProcessInstanceProcessor(process_instance) + spiff_task = get_spiff_task_from_process_instance( + task_id, process_instance, processor=processor + ) + AuthorizationService.assert_user_can_complete_spiff_task( + process_instance.id, spiff_task, principal.user + ) + + if spiff_task.state != TaskState.READY: + raise ( + ApiError( + error_code="invalid_state", + message="You may not update a task unless it is in the READY state.", + status_code=400, + ) + ) + + if terminate_loop and spiff_task.is_looping(): + spiff_task.terminate_loop() + + active_task = ActiveTaskModel.query.filter_by( + process_instance_id=process_instance_id, task_id=task_id + ).first() + if active_task is None: + raise ( + ApiError( + error_code="no_active_task", + message="Cannot find an active task with task id '{task_id}' for process instance {process_instance_id}.", + status_code=500, + ) + ) + + ProcessInstanceService.complete_form_task( + processor=processor, + spiff_task=spiff_task, + data=body, + user=g.user, + active_task=active_task, + ) + + # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same + # task spec, complete that form as well. 
+ # if update_all: + # last_index = spiff_task.task_info()["mi_index"] + # next_task = processor.next_task() + # while next_task and next_task.task_info()["mi_index"] > last_index: + # __update_task(processor, next_task, form_data, user) + # last_index = next_task.task_info()["mi_index"] + # next_task = processor.next_task() + + next_active_task_assigned_to_me = ( + ActiveTaskModel.query.filter_by(process_instance_id=process_instance_id) + .order_by(asc(ActiveTaskModel.id)) # type: ignore + .join(ActiveTaskUserModel) + .filter_by(user_id=principal.user_id) + .first() + ) + if next_active_task_assigned_to_me: + return make_response( + jsonify(ActiveTaskModel.to_task(next_active_task_assigned_to_me)), 200 + ) + + return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") + + +def script_unit_test_create( + process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Script_unit_test_create.""" + bpmn_task_identifier = _get_required_parameter_or_raise( + "bpmn_task_identifier", body + ) + input_json = _get_required_parameter_or_raise("input_json", body) + expected_output_json = _get_required_parameter_or_raise( + "expected_output_json", body + ) + + process_model_identifier = f"{process_group_id}/{process_model_id}" + process_model = get_process_model(process_model_identifier) + file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0] + if file is None: + raise ApiError( + error_code="cannot_find_file", + message=f"Could not find the primary bpmn file for process_model: {process_model.id}", + status_code=404, + ) + + # TODO: move this to an xml service or something + file_contents = SpecFileService.get_data(process_model, file.name) + bpmn_etree_element = etree.fromstring(file_contents) + + nsmap = bpmn_etree_element.nsmap + spiff_element_maker = ElementMaker( + namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap + ) + + script_task_elements = bpmn_etree_element.xpath( + f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']", + namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, + ) + if len(script_task_elements) == 0: + raise ApiError( + error_code="missing_script_task", + message=f"Cannot find a script task with id: {bpmn_task_identifier}", + status_code=404, + ) + script_task_element = script_task_elements[0] + + extension_elements = None + extension_elements_array = script_task_element.xpath( + "//bpmn:extensionElements", + namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, + ) + if len(extension_elements_array) == 0: + bpmn_element_maker = ElementMaker( + namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap + ) + extension_elements = bpmn_element_maker("extensionElements") + script_task_element.append(extension_elements) + else: + extension_elements = extension_elements_array[0] + + unit_test_elements = None + unit_test_elements_array = extension_elements.xpath( + "//spiffworkflow:unitTests", + namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"}, + ) + if len(unit_test_elements_array) == 0: + unit_test_elements = spiff_element_maker("unitTests") + extension_elements.append(unit_test_elements) + else: + unit_test_elements = unit_test_elements_array[0] + + fuzz = "".join( + random.choice(string.ascii_uppercase + string.digits) # noqa: S311 + for _ in range(7) + ) + unit_test_id = f"unit_test_{fuzz}" + + input_json_element = spiff_element_maker("inputJson", json.dumps(input_json)) + 
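        # (A brief aside on lxml.builder, since the calls above and below are
        # terse: an ElementMaker instance is a callable -- E("tag", *children,
        # **attrib) returns a new element in E's namespace, which is why
        # spiff_element_maker("inputJson", ...) yields a namespaced
        # <spiffworkflow:inputJson> node. A standalone sketch with a made-up
        # namespace and id:
        #
        #     from lxml import etree
        #     from lxml.builder import ElementMaker
        #
        #     E = ElementMaker(namespace="http://example.com/ns",
        #                      nsmap={"ex": "http://example.com/ns"})
        #     node = E("unitTest", E("inputJson", "{}"), id="unit_test_demo")
        #     print(etree.tostring(node).decode())
        #     # -> <ex:unitTest xmlns:ex="http://example.com/ns"
        #     #      id="unit_test_demo"><ex:inputJson>{}</ex:inputJson></ex:unitTest>
        # )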
expected_output_json_element = spiff_element_maker( + "expectedOutputJson", json.dumps(expected_output_json) + ) + unit_test_element = spiff_element_maker("unitTest", id=unit_test_id) + unit_test_element.append(input_json_element) + unit_test_element.append(expected_output_json_element) + unit_test_elements.append(unit_test_element) + SpecFileService.update_file( + process_model, file.name, etree.tostring(bpmn_etree_element) + ) + + return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") + + +def script_unit_test_run( + process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Script_unit_test_run.""" + # FIXME: We should probably clear this somewhere else but this works + current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None + current_app.config["THREAD_LOCAL_DATA"].spiff_step = None + + python_script = _get_required_parameter_or_raise("python_script", body) + input_json = _get_required_parameter_or_raise("input_json", body) + expected_output_json = _get_required_parameter_or_raise( + "expected_output_json", body + ) + + result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts( + python_script, input_json, expected_output_json + ) + return make_response(jsonify(result), 200) + + +def get_file_from_request() -> Any: + """Get_file_from_request.""" + request_file = connexion.request.files.get("file") + if not request_file: + raise ApiError( + error_code="no_file_given", + message="Given request does not contain a file", + status_code=400, + ) + return request_file + + +def get_process_model(process_model_id: str) -> ProcessModelInfo: + """Get_process_model.""" + process_model = None + try: + process_model = ProcessModelService.get_process_model(process_model_id) + except ProcessEntityNotFoundError as exception: + raise ( + ApiError( + error_code="process_model_cannot_be_found", + message=f"Process model cannot be found: {process_model_id}", + status_code=400, + ) + ) from exception + + return process_model + + +def find_principal_or_raise() -> PrincipalModel: + """Find_principal_or_raise.""" + principal = PrincipalModel.query.filter_by(user_id=g.user.id).first() + if principal is None: + raise ( + ApiError( + error_code="principal_not_found", + message=f"Principal not found from user id: {g.user.id}", + status_code=400, + ) + ) + return principal # type: ignore + + +def find_process_instance_by_id_or_raise( + process_instance_id: int, +) -> ProcessInstanceModel: + """Find_process_instance_by_id_or_raise.""" + process_instance_query = ProcessInstanceModel.query.filter_by( + id=process_instance_id + ) + + # we had a frustrating session trying to do joins and access columns from two tables. 
here's some notes for our future selves: + # this returns an object that allows you to do: process_instance.UserModel.username + # process_instance = db.session.query(ProcessInstanceModel, UserModel).filter_by(id=process_instance_id).first() + # you can also use splat with add_columns, but it still didn't ultimately give us access to the process instance + # attributes or username like we wanted: + # process_instance_query.join(UserModel).add_columns(*ProcessInstanceModel.__table__.columns, UserModel.username) + + process_instance = process_instance_query.first() + if process_instance is None: + raise ( + ApiError( + error_code="process_instance_cannot_be_found", + message=f"Process instance cannot be found: {process_instance_id}", + status_code=400, + ) + ) + return process_instance # type: ignore + + +def get_value_from_array_with_index(array: list, index: int) -> Any: + """Get_value_from_array_with_index.""" + if index < 0: + return None + + if index >= len(array): + return None + + return array[index] + + +def prepare_form_data( + form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo +) -> str: + """Prepare_form_data.""" + if task_data is None: + return "" + + file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8") + return render_jinja_template(file_contents, task_data) + + +def render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str: + """Render_jinja_template.""" + jinja_environment = jinja2.Environment( + autoescape=True, lstrip_blocks=True, trim_blocks=True + ) + template = jinja_environment.from_string(unprocessed_template) + return template.render(**data) + + +def get_spiff_task_from_process_instance( + task_id: str, + process_instance: ProcessInstanceModel, + processor: Union[ProcessInstanceProcessor, None] = None, +) -> SpiffTask: + """Get_spiff_task_from_process_instance.""" + if processor is None: + processor = ProcessInstanceProcessor(process_instance) + task_uuid = uuid.UUID(task_id) + spiff_task = processor.bpmn_process_instance.get_task(task_uuid) + + if spiff_task is None: + raise ( + ApiError( + error_code="empty_task", + message="Processor failed to obtain task.", + status_code=500, + ) + ) + return spiff_task + + +# +# Methods for secrets CRUD - maybe move somewhere else: +# +def get_secret(key: str) -> Optional[str]: + """Get_secret.""" + return SecretService.get_secret(key) + + +def secret_list( + page: int = 1, + per_page: int = 100, +) -> Response: + """Secret_list.""" + secrets = ( + SecretModel.query.order_by(SecretModel.key) + .join(UserModel) + .add_columns( + UserModel.username, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + response_json = { + "results": secrets.items, + "pagination": { + "count": len(secrets.items), + "total": secrets.total, + "pages": secrets.pages, + }, + } + return make_response(jsonify(response_json), 200) + + +def add_secret(body: Dict) -> Response: + """Add secret.""" + secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id) + assert secret_model # noqa: S101 + return Response( + json.dumps(SecretModelSchema().dump(secret_model)), + status=201, + mimetype="application/json", + ) + + +def update_secret(key: str, body: dict) -> Response: + """Update secret.""" + SecretService().update_secret(key, body["value"], g.user.id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def delete_secret(key: str) -> Response: + """Delete secret.""" + current_user = UserService.current_user() + 
SecretService.delete_secret(key, current_user.id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: + """Get_required_parameter_or_raise.""" + return_value = None + if parameter in post_body: + return_value = post_body[parameter] + + if return_value is None or return_value == "": + raise ( + ApiError( + error_code="missing_required_parameter", + message=f"Parameter is missing from json request body: {parameter}", + status_code=400, + ) + ) + + return return_value + + +# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches +def _update_form_schema_with_task_data_as_needed( + in_dict: dict, task_data: dict +) -> None: + """Update_nested.""" + for k, value in in_dict.items(): + if "anyOf" == k: + # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"] + if isinstance(value, list): + if len(value) == 1: + first_element_in_value_list = value[0] + if isinstance(first_element_in_value_list, str): + if first_element_in_value_list.startswith( + "options_from_task_data_var:" + ): + task_data_var = first_element_in_value_list.replace( + "options_from_task_data_var:", "" + ) + + if task_data_var not in task_data: + raise ( + ApiError( + error_code="missing_task_data_var", + message=f"Task data is missing variable: {task_data_var}", + status_code=500, + ) + ) + + select_options_from_task_data = task_data.get(task_data_var) + if isinstance(select_options_from_task_data, list): + if all( + "value" in d and "label" in d + for d in select_options_from_task_data + ): + + def map_function( + task_data_select_option: TaskDataSelectOption, + ) -> ReactJsonSchemaSelectOption: + """Map_function.""" + return { + "type": "string", + "enum": [task_data_select_option["value"]], + "title": task_data_select_option["label"], + } + + options_for_react_json_schema_form = list( + map(map_function, select_options_from_task_data) + ) + + in_dict[k] = options_for_react_json_schema_form + elif isinstance(value, dict): + _update_form_schema_with_task_data_as_needed(value, task_data) + elif isinstance(value, list): + for o in value: + if isinstance(o, dict): + _update_form_schema_with_task_data_as_needed(o, task_data) + + +def update_task_data(process_instance_id: str, task_id: str, body: Dict) -> Response: + """Update task data.""" + process_instance = ProcessInstanceModel.query.filter( + ProcessInstanceModel.id == int(process_instance_id) + ).first() + if process_instance: + process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json) + if "new_task_data" in body: + new_task_data_str: str = body["new_task_data"] + new_task_data_dict = json.loads(new_task_data_str) + if task_id in process_instance_bpmn_json_dict["tasks"]: + process_instance_bpmn_json_dict["tasks"][task_id][ + "data" + ] = new_task_data_dict + process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict) + db.session.add(process_instance) + try: + db.session.commit() + except Exception as e: + db.session.rollback() + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update the Instance. 
Original error is {e}", + ) from e + else: + raise ApiError( + error_code="update_task_data_error", + message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", + ) + else: + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.", + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 739e689d..9e4c54be 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -12,6 +12,7 @@ from typing import Union import connexion # type: ignore import flask.wrappers import jinja2 +from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel import werkzeug from flask import Blueprint from flask import current_app @@ -27,10 +28,10 @@ from lxml import etree # type: ignore from lxml.builder import ElementMaker # type: ignore from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState -from sqlalchemy import and_ +from sqlalchemy import and_, func from sqlalchemy import asc from sqlalchemy import desc -from sqlalchemy.orm import joinedload +from sqlalchemy.orm import aliased, joinedload from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, @@ -928,6 +929,26 @@ def process_instance_list( UserGroupAssignmentModel.user_id == g.user.id ) + # userSkillF = aliased(UserSkill) + # userSkillI = aliased(UserSkill) + + # import pdb; pdb.set_trace() + stock_columns = ProcessInstanceReportService.get_column_names_for_model(ProcessInstanceModel) + # print(f"stock_columns: {stock_columns}") + # import pdb; pdb.set_trace() + # for column in process_instance_report.report_metadata['columns']: + # if column not in stock_columns: + # # continue + for column in [{'accessor': 'key1'}]: + # print(f"column: {column['accessor']}") + # process_instance_query = process_instance_query.outerjoin(ProcessInstanceMetadataModel, ProcessInstanceModel.id == ProcessInstanceMetadataModel.process_instance_id, ProcessInstanceMetadataModel.key == column['accessor']) + instance_metadata_alias = aliased(ProcessInstanceMetadataModel) + process_instance_query = ( + process_instance_query.options(joinedload(instance_metadata_alias, ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, innerjoin=False)).filter(instance_metadata_alias.key == column['accessor']) + .add_column(func.max(instance_metadata_alias.value).label(column['accessor'])) + ) + # import pdb; pdb.set_trace() + process_instances = ( process_instance_query.group_by(ProcessInstanceModel.id) .order_by( @@ -935,14 +956,26 @@ def process_instance_list( ) .paginate(page=page, per_page=per_page, error_out=False) ) + import pdb; pdb.set_trace() - results = list( - map( - ProcessInstanceService.serialize_flat_with_task_data, - process_instances.items, - ) - ) + # def awesome_serialize(process_instance) + # dict_thing = process_instance.serialize + # + # # add columns since we have access to columns here + # dict_thing['awesome'] = 'awesome' + # + # return dict_thing + + # results = list( + # map( + # 
ProcessInstanceService.serialize_flat_with_task_data, + # process_instances.items, + # ) + # ) + results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(process_instances.items, process_instance_report.report_metadata['columns']) report_metadata = process_instance_report.report_metadata + print(f"results: {results}") + import pdb; pdb.set_trace() response_json = { "report_identifier": process_instance_report.identifier, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index 18f08d0f..3868adf6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -235,8 +235,9 @@ class AuthenticationService: refresh_token_object: RefreshTokenModel = RefreshTokenModel.query.filter( RefreshTokenModel.user_id == user_id ).first() - assert refresh_token_object # noqa: S101 - return refresh_token_object.token + if refresh_token_object: + return refresh_token_object.token + return None @classmethod def get_auth_token_from_refresh_token(cls, refresh_token: str) -> dict: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index fc5a93da..6c579826 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -1,6 +1,8 @@ """Process_instance_report_service.""" from dataclasses import dataclass +from flask_bpmn.models.db import db from typing import Optional +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, @@ -241,3 +243,20 @@ class ProcessInstanceReportService: ) return report_filter + + @classmethod + def add_metadata_columns_to_process_instance(cls, process_instance_sqlalchemy_rows, metadata_columns: list[dict]) -> list[dict]: + stock_columns = cls.get_column_names_for_model(ProcessInstanceModel) + results = [] + for process_instance in process_instance_sqlalchemy_rows: + process_instance_dict = process_instance['ProcessInstanceModel'].serialized + for metadata_column in metadata_columns: + if metadata_column['accessor'] not in stock_columns: + process_instance_dict[metadata_column['accessor']] = process_instance[metadata_column['accessor']] + + results.append(process_instance_dict) + return results + + @classmethod + def get_column_names_for_model(cls, model: db.Model) -> list[str]: + return [i.name for i in model.__table__.columns] diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 4c60cb8c..b7fc0479 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2588,12 +2588,17 @@ class TestProcessApi(BaseTest): response = client.get( f"/v1.0/process-instances?report_identifier={process_instance_report.identifier}", + # f"/v1.0/process-instances?report_identifier=demo1", headers=self.logged_in_headers(with_super_admin_user), ) print(f"response.json: 
{response.json}")
-        assert response.status_code == 200
         assert response.json is not None
+        assert response.status_code == 200
+        assert len(response.json["results"]) == 1
+        assert response.json["results"][0]["status"] == "complete"
+        assert response.json["results"][0]["id"] == process_instance.id
+        # assert response.json["results"][0]["key1"] == "value1"
         assert response.json["pagination"]["count"] == 1
         assert response.json["pagination"]["pages"] == 1
         assert response.json["pagination"]["total"] == 1

From 17831eafa7e4982a808fab8300b799479ad25113 Mon Sep 17 00:00:00 2001
From: jasquat
Date: Tue, 29 Nov 2022 16:00:19 -0500
Subject: [PATCH 026/128] WIP more metadata reporting w/ burnettk

---
 .../spiffworkflow_backend/routes/process_api_blueprint.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
index 9e4c54be..57890fb2 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
@@ -936,10 +936,10 @@ def process_instance_list(
     stock_columns = ProcessInstanceReportService.get_column_names_for_model(ProcessInstanceModel)
     # print(f"stock_columns: {stock_columns}")
     # import pdb; pdb.set_trace()
-    # for column in process_instance_report.report_metadata['columns']:
-    #     if column not in stock_columns:
-    #         # continue
-    for column in [{'accessor': 'key1'}]:
+    for column in process_instance_report.report_metadata['columns']:
+        if column['accessor'] in stock_columns:
+            continue
+        # for column in [{'accessor': 'key1'}]:
         # print(f"column: {column['accessor']}")
         # process_instance_query = process_instance_query.outerjoin(ProcessInstanceMetadataModel, ProcessInstanceModel.id == ProcessInstanceMetadataModel.process_instance_id, ProcessInstanceMetadataModel.key == column['accessor'])
         instance_metadata_alias = aliased(ProcessInstanceMetadataModel)

From 33b9e5b943b7bfaa88506c66d56e2904731bb29f Mon Sep 17 00:00:00 2001
From: jasquat
Date: Tue, 29 Nov 2022 16:09:55 -0500
Subject: [PATCH 027/128] metadata reports work w/ burnettk

---
 .../routes/process_api_blueprint.py           | 35 +++----------------
 .../integration/test_process_api.py           |  9 ++---
 2 files changed, 9 insertions(+), 35 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
index 57890fb2..b2b07ae5 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
@@ -814,9 +814,9 @@ def process_instance_list(
     # process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier)
     process_instance_query = ProcessInstanceModel.query
     # Always join that hot user table for good performance at serialization time.
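
# A self-contained sketch of the aliased-outerjoin pattern the hunk above is
# converging on: one alias of the key/value metadata table per report column,
# so each metadata key surfaces as its own labeled column on the result row.
# Instance and InstanceMetadata are toy stand-ins for the real
# spiffworkflow-backend models, using an in-memory sqlite engine.
from sqlalchemy import Column, ForeignKey, Integer, String, and_, create_engine, func
from sqlalchemy.orm import Session, aliased, declarative_base

Base = declarative_base()

class Instance(Base):
    __tablename__ = "instance"
    id = Column(Integer, primary_key=True)

class InstanceMetadata(Base):
    __tablename__ = "instance_metadata"
    id = Column(Integer, primary_key=True)
    instance_id = Column(ForeignKey("instance.id"), nullable=False)
    key = Column(String(255), nullable=False)
    value = Column(String(255), nullable=False)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Instance(id=1))
    session.add_all([
        InstanceMetadata(instance_id=1, key="key1", value="value1"),
        InstanceMetadata(instance_id=1, key="key2", value="value2"),
    ])
    session.commit()

    query = session.query(Instance)
    for accessor in ["key1", "key2"]:
        # a fresh alias per column keeps the joins from colliding
        metadata_alias = aliased(InstanceMetadata)
        query = query.outerjoin(
            metadata_alias,
            and_(
                Instance.id == metadata_alias.instance_id,
                metadata_alias.key == accessor,
            ),
        ).add_columns(func.max(metadata_alias.value).label(accessor))

    row = query.group_by(Instance.id).one()
    print(row.key1, row.key2)  # value1 value2
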
- process_instance_query = process_instance_query.options( - joinedload(ProcessInstanceModel.process_initiator) - ) + # process_instance_query = process_instance_query.options( + # joinedload(ProcessInstanceModel.process_initiator, ProcessInstanceModel.process_initiator_id == UserModel.id) + # ) if report_filter.process_model_identifier is not None: process_model = get_process_model( @@ -929,25 +929,15 @@ def process_instance_list( UserGroupAssignmentModel.user_id == g.user.id ) - # userSkillF = aliased(UserSkill) - # userSkillI = aliased(UserSkill) - - # import pdb; pdb.set_trace() stock_columns = ProcessInstanceReportService.get_column_names_for_model(ProcessInstanceModel) - # print(f"stock_columns: {stock_columns}") - # import pdb; pdb.set_trace() for column in process_instance_report.report_metadata['columns']: if column['accessor'] in stock_columns: continue - # for column in [{'accessor': 'key1'}]: - # print(f"column: {column['accessor']}") - # process_instance_query = process_instance_query.outerjoin(ProcessInstanceMetadataModel, ProcessInstanceModel.id == ProcessInstanceMetadataModel.process_instance_id, ProcessInstanceMetadataModel.key == column['accessor']) instance_metadata_alias = aliased(ProcessInstanceMetadataModel) process_instance_query = ( process_instance_query.options(joinedload(instance_metadata_alias, ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, innerjoin=False)).filter(instance_metadata_alias.key == column['accessor']) - .add_column(func.max(instance_metadata_alias.value).label(column['accessor'])) + .add_columns(func.max(instance_metadata_alias.value).label(column['accessor'])) ) - # import pdb; pdb.set_trace() process_instances = ( process_instance_query.group_by(ProcessInstanceModel.id) @@ -956,26 +946,9 @@ def process_instance_list( ) .paginate(page=page, per_page=per_page, error_out=False) ) - import pdb; pdb.set_trace() - # def awesome_serialize(process_instance) - # dict_thing = process_instance.serialize - # - # # add columns since we have access to columns here - # dict_thing['awesome'] = 'awesome' - # - # return dict_thing - - # results = list( - # map( - # ProcessInstanceService.serialize_flat_with_task_data, - # process_instances.items, - # ) - # ) results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(process_instances.items, process_instance_report.report_metadata['columns']) report_metadata = process_instance_report.report_metadata - print(f"results: {results}") - import pdb; pdb.set_trace() response_json = { "report_identifier": process_instance_report.identifier, diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index b7fc0479..fb33d246 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2562,7 +2562,7 @@ class TestProcessApi(BaseTest): process_instance = self.create_process_instance_from_process_model( process_model=process_model, user=with_super_admin_user ) - + processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( @@ -2576,6 +2576,7 @@ class TestProcessApi(BaseTest): {"Header": "ID", "accessor": "id"}, {"Header": "Status", "accessor": "status"}, {"Header": "Key One", "accessor": "key1"}, + # {"Header": "Key Two", "accessor": 
"key2"}, ], "order_by": ["status"], "filter_by": [], @@ -2588,17 +2589,17 @@ class TestProcessApi(BaseTest): response = client.get( f"/v1.0/process-instances?report_identifier={process_instance_report.identifier}", - # f"/v1.0/process-instances?report_identifier=demo1", headers=self.logged_in_headers(with_super_admin_user), ) - print(f"response.json: {response.json}") + assert response.json is not None assert response.status_code == 200 assert len(response.json["results"]) == 1 assert response.json["results"][0]["status"] == "complete" assert response.json["results"][0]["id"] == process_instance.id - # assert response.json["results"][0]["key1"] == "value1" + assert response.json["results"][0]["key1"] == "value1" + # assert response.json["results"][0]["key2"] == "value2" assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["pages"] == 1 assert response.json["pagination"]["total"] == 1 From 88c6d625bbda2c21ab7679bfcb46e79e0fb06f33 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 29 Nov 2022 16:19:55 -0500 Subject: [PATCH 028/128] some cleanup for metadata w/ burnettk --- .../routes/process_api_blueprint.py | 38 +++++++++++++------ .../process_instance_report_service.py | 24 ++++++++---- .../integration/test_process_api.py | 13 ++++--- 3 files changed, 50 insertions(+), 25 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index b2b07ae5..753b6c3c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -12,7 +12,6 @@ from typing import Union import connexion # type: ignore import flask.wrappers import jinja2 -from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel import werkzeug from flask import Blueprint from flask import current_app @@ -28,10 +27,12 @@ from lxml import etree # type: ignore from lxml.builder import ElementMaker # type: ignore from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState -from sqlalchemy import and_, func +from sqlalchemy import and_ from sqlalchemy import asc from sqlalchemy import desc -from sqlalchemy.orm import aliased, joinedload +from sqlalchemy import func +from sqlalchemy.orm import aliased +from sqlalchemy.orm import joinedload from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, @@ -53,6 +54,9 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSche from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -814,9 +818,9 @@ def process_instance_list( # process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier) process_instance_query = ProcessInstanceModel.query # Always join that hot user table for good performance at serialization time. 
- # process_instance_query = process_instance_query.options( - # joinedload(ProcessInstanceModel.process_initiator, ProcessInstanceModel.process_initiator_id == UserModel.id) - # ) + process_instance_query = process_instance_query.options( + joinedload(ProcessInstanceModel.process_initiator) + ) if report_filter.process_model_identifier is not None: process_model = get_process_model( @@ -929,14 +933,22 @@ def process_instance_list( UserGroupAssignmentModel.user_id == g.user.id ) - stock_columns = ProcessInstanceReportService.get_column_names_for_model(ProcessInstanceModel) - for column in process_instance_report.report_metadata['columns']: - if column['accessor'] in stock_columns: + stock_columns = ProcessInstanceReportService.get_column_names_for_model( + ProcessInstanceModel + ) + for column in process_instance_report.report_metadata["columns"]: + if column["accessor"] in stock_columns: continue instance_metadata_alias = aliased(ProcessInstanceMetadataModel) process_instance_query = ( - process_instance_query.options(joinedload(instance_metadata_alias, ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, innerjoin=False)).filter(instance_metadata_alias.key == column['accessor']) - .add_columns(func.max(instance_metadata_alias.value).label(column['accessor'])) + process_instance_query.outerjoin( + instance_metadata_alias, + ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, + ) + .filter(instance_metadata_alias.key == column["accessor"]) + .add_columns( + func.max(instance_metadata_alias.value).label(column["accessor"]) + ) ) process_instances = ( @@ -947,7 +959,9 @@ def process_instance_list( .paginate(page=page, per_page=per_page, error_out=False) ) - results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(process_instances.items, process_instance_report.report_metadata['columns']) + results = ProcessInstanceReportService.add_metadata_columns_to_process_instance( + process_instances.items, process_instance_report.report_metadata["columns"] + ) report_metadata = process_instance_report.report_metadata response_json = { diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index 6c579826..20563be3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -1,9 +1,11 @@ """Process_instance_report_service.""" from dataclasses import dataclass -from flask_bpmn.models.db import db from typing import Optional -from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +import sqlalchemy +from flask_bpmn.models.db import db + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -245,18 +247,26 @@ class ProcessInstanceReportService: return report_filter @classmethod - def add_metadata_columns_to_process_instance(cls, process_instance_sqlalchemy_rows, metadata_columns: list[dict]) -> list[dict]: + def add_metadata_columns_to_process_instance( + cls, + process_instance_sqlalchemy_rows: list[sqlalchemy.engine.row.Row], # type: ignore + metadata_columns: list[dict], + ) -> list[dict]: + """Add_metadata_columns_to_process_instance.""" stock_columns = cls.get_column_names_for_model(ProcessInstanceModel) 
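
# A small sketch of the row flattening this method performs: each SQLAlchemy
# result row pairs the model entity with the extra labeled metadata columns,
# and the report wants one flat dict per row. Plain dicts stand in for Row
# objects and for .serialized here.
rows = [
    {
        "ProcessInstanceModel": {"id": 1, "status": "complete"},
        "key1": "value1",
        "key2": "value2",
    },
]
metadata_columns = [
    {"Header": "Key One", "accessor": "key1"},
    {"Header": "Key Two", "accessor": "key2"},
]

flattened = []
for row in rows:
    instance_dict = dict(row["ProcessInstanceModel"])
    for column in metadata_columns:
        accessor = column["accessor"]
        if accessor not in instance_dict:  # never clobber a stock column
            instance_dict[accessor] = row[accessor]
    flattened.append(instance_dict)

print(flattened)  # [{'id': 1, 'status': 'complete', 'key1': 'value1', 'key2': 'value2'}]
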
results = [] for process_instance in process_instance_sqlalchemy_rows: - process_instance_dict = process_instance['ProcessInstanceModel'].serialized + process_instance_dict = process_instance["ProcessInstanceModel"].serialized for metadata_column in metadata_columns: - if metadata_column['accessor'] not in stock_columns: - process_instance_dict[metadata_column['accessor']] = process_instance[metadata_column['accessor']] + if metadata_column["accessor"] not in stock_columns: + process_instance_dict[ + metadata_column["accessor"] + ] = process_instance[metadata_column["accessor"]] results.append(process_instance_dict) return results @classmethod - def get_column_names_for_model(cls, model: db.Model) -> list[str]: + def get_column_names_for_model(cls, model: db.Model) -> list[str]: # type: ignore + """Get_column_names_for_model.""" return [i.name for i in model.__table__.columns] diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index fb33d246..e22ec77b 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -4,13 +4,11 @@ import json import os import time from typing import Any -from conftest import with_super_admin_user import pytest from flask.app import Flask from flask.testing import FlaskClient from flask_bpmn.models.db import db -from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec @@ -22,6 +20,9 @@ from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_group import ProcessGroup from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -2554,10 +2555,11 @@ class TestProcessApi(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: + """Test_can_get_process_instance_list_with_report_metadata.""" process_model = load_test_spec( - process_model_id='test-process-instance-metadata-report', - bpmn_file_name='process_instance_metadata.bpmn', - process_model_source_directory='test-process-instance-metadata-report', + process_model_id="test-process-instance-metadata-report", + bpmn_file_name="process_instance_metadata.bpmn", + process_model_source_directory="test-process-instance-metadata-report", ) process_instance = self.create_process_instance_from_process_model( process_model=process_model, user=with_super_admin_user @@ -2570,7 +2572,6 @@ class TestProcessApi(BaseTest): ).all() assert len(process_instance_metadata) == 2 - report_metadata = { "columns": [ {"Header": "ID", "accessor": "id"}, From 83032deb9aa57747183d685584fc1141a6963f80 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 29 Nov 2022 16:37:19 -0500 Subject: [PATCH 029/128] finished base for metadata reporting w/ burnettk --- .../routes/process_api_blueprint.py | 16 +++++++--------- .../services/process_instance_report_service.py | 4 +--- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git 
a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 753b6c3c..b3bc1a22 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -940,19 +940,17 @@ def process_instance_list( if column["accessor"] in stock_columns: continue instance_metadata_alias = aliased(ProcessInstanceMetadataModel) - process_instance_query = ( - process_instance_query.outerjoin( - instance_metadata_alias, + process_instance_query = process_instance_query.outerjoin( + instance_metadata_alias, + and_( ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, - ) - .filter(instance_metadata_alias.key == column["accessor"]) - .add_columns( - func.max(instance_metadata_alias.value).label(column["accessor"]) - ) - ) + instance_metadata_alias.key == column["accessor"], + ), + ).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"])) process_instances = ( process_instance_query.group_by(ProcessInstanceModel.id) + .add_columns(ProcessInstanceModel.id) .order_by( ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index 20563be3..ad9dec0a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -5,7 +5,6 @@ from typing import Optional import sqlalchemy from flask_bpmn.models.db import db -from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -253,12 +252,11 @@ class ProcessInstanceReportService: metadata_columns: list[dict], ) -> list[dict]: """Add_metadata_columns_to_process_instance.""" - stock_columns = cls.get_column_names_for_model(ProcessInstanceModel) results = [] for process_instance in process_instance_sqlalchemy_rows: process_instance_dict = process_instance["ProcessInstanceModel"].serialized for metadata_column in metadata_columns: - if metadata_column["accessor"] not in stock_columns: + if metadata_column["accessor"] not in process_instance_dict: process_instance_dict[ metadata_column["accessor"] ] = process_instance[metadata_column["accessor"]] From 258b4789544a438e0dc8e3d9d8882fdfec5f6e55 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 29 Nov 2022 17:32:29 -0500 Subject: [PATCH 030/128] added api to get list of process report columns --- spiffworkflow-backend/migrations/env.py | 2 + .../{ff1c1628337c_.py => 40a2ed63cc5a_.py} | 8 ++-- .../src/spiffworkflow_backend/api.yml | 16 ++++++++ .../models/process_instance_metadata.py | 2 +- .../routes/process_api_blueprint.py | 9 +++++ .../process_instance_report_service.py | 14 +++++++ .../integration/test_process_api.py | 38 ++++++++++++++++++- 7 files changed, 83 insertions(+), 6 deletions(-) rename spiffworkflow-backend/migrations/versions/{ff1c1628337c_.py => 40a2ed63cc5a_.py} (98%) diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 630e381a..68feded2 100644 --- a/spiffworkflow-backend/migrations/env.py +++ 
b/spiffworkflow-backend/migrations/env.py @@ -1,3 +1,5 @@ +from __future__ import with_statement + import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/migrations/versions/ff1c1628337c_.py b/spiffworkflow-backend/migrations/versions/40a2ed63cc5a_.py similarity index 98% rename from spiffworkflow-backend/migrations/versions/ff1c1628337c_.py rename to spiffworkflow-backend/migrations/versions/40a2ed63cc5a_.py index d8da6d3c..6abd6b4a 100644 --- a/spiffworkflow-backend/migrations/versions/ff1c1628337c_.py +++ b/spiffworkflow-backend/migrations/versions/40a2ed63cc5a_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: ff1c1628337c +Revision ID: 40a2ed63cc5a Revises: -Create Date: 2022-11-28 15:08:52.014254 +Create Date: 2022-11-29 16:59:02.980181 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = 'ff1c1628337c' +revision = '40a2ed63cc5a' down_revision = None branch_labels = None depends_on = None @@ -249,6 +249,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('process_instance_id', 'key', name='process_instance_metadata_unique') ) + op.create_index(op.f('ix_process_instance_metadata_key'), 'process_instance_metadata', ['key'], unique=False) op.create_table('spiff_step_details', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=False), @@ -295,6 +296,7 @@ def downgrade(): op.drop_index(op.f('ix_active_task_user_active_task_id'), table_name='active_task_user') op.drop_table('active_task_user') op.drop_table('spiff_step_details') + op.drop_index(op.f('ix_process_instance_metadata_key'), table_name='process_instance_metadata') op.drop_table('process_instance_metadata') op.drop_table('permission_assignment') op.drop_table('message_instance') diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index e7dc00fe..81fa92bd 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -841,6 +841,22 @@ paths: schema: $ref: "#/components/schemas/OkTrue" + /process-instances/reports/columns: + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_column_list + summary: Returns all available columns for a process instance report. + tags: + - Process Instances + responses: + "200": + description: Workflow. 
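
# A hedged Python sketch of what the new columns endpoint assembles: the fixed
# builtin columns plus one {"Header", "accessor"} entry per distinct metadata
# key. The DISTINCT query against process_instance_metadata is stubbed with a
# plain list here.
builtin_column_options = [
    {"Header": "id", "accessor": "id"},
    {"Header": "status", "accessor": "status"},
]
distinct_metadata_keys = ["key1", "key2"]  # stand-in for the db DISTINCT query

columns = builtin_column_options + [
    {"Header": key, "accessor": key} for key in distinct_metadata_keys
]
print(columns)
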
+ content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Workflow" + /process-instances/reports/{report_identifier}: parameters: - name: report_identifier diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py index 5a4d4ca5..c9003594 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py @@ -23,7 +23,7 @@ class ProcessInstanceMetadataModel(SpiffworkflowBaseDBModel): process_instance_id: int = db.Column( ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore ) - key: str = db.Column(db.String(255), nullable=False) + key: str = db.Column(db.String(255), nullable=False, index=True) value: str = db.Column(db.String(255), nullable=False) updated_at_in_seconds: int = db.Column(db.Integer, nullable=False) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index b3bc1a22..b96cc262 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -977,6 +977,15 @@ def process_instance_list( return make_response(jsonify(response_json), 200) +def process_instance_report_column_list() -> flask.wrappers.Response: + + table_columns = ProcessInstanceReportService.builtin_column_options() + columns_for_metadata = db.session.query(ProcessInstanceMetadataModel.key).distinct().all() # type: ignore + columns_for_metadata_strings = [{ 'Header': i[0], 'accessor': i[0]} for i in columns_for_metadata] + # columns = sorted(table_columns + columns_for_metadata_strings) + return make_response(jsonify(table_columns + columns_for_metadata_strings), 200) + + def process_instance_show( modified_process_model_identifier: str, process_instance_id: int ) -> flask.wrappers.Response: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index ad9dec0a..da70f0c0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -268,3 +268,17 @@ class ProcessInstanceReportService: def get_column_names_for_model(cls, model: db.Model) -> list[str]: # type: ignore """Get_column_names_for_model.""" return [i.name for i in model.__table__.columns] + + @classmethod + def builtin_column_options(cls) -> list[dict]: + return [ + {"Header": "id", "accessor": "id"}, + { + "Header": "process_model_display_name", + "accessor": "process_model_display_name", + }, + {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, + {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, + {"Header": "username", "accessor": "username"}, + {"Header": "status", "accessor": "status"}, + ] diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index e22ec77b..beef3b74 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ 
b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2577,7 +2577,7 @@ class TestProcessApi(BaseTest): {"Header": "ID", "accessor": "id"}, {"Header": "Status", "accessor": "status"}, {"Header": "Key One", "accessor": "key1"}, - # {"Header": "Key Two", "accessor": "key2"}, + {"Header": "Key Two", "accessor": "key2"}, ], "order_by": ["status"], "filter_by": [], @@ -2600,7 +2600,41 @@ class TestProcessApi(BaseTest): assert response.json["results"][0]["status"] == "complete" assert response.json["results"][0]["id"] == process_instance.id assert response.json["results"][0]["key1"] == "value1" - # assert response.json["results"][0]["key2"] == "value2" + assert response.json["results"][0]["key2"] == "value2" assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["pages"] == 1 assert response.json["pagination"]["total"] == 1 + + def test_can_get_process_instance_report_column_list( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_can_get_process_instance_list_with_report_metadata.""" + process_model = load_test_spec( + process_model_id="test-process-instance-metadata-report", + bpmn_file_name="process_instance_metadata.bpmn", + process_model_source_directory="test-process-instance-metadata-report", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=with_super_admin_user + ) + + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id + ).all() + assert len(process_instance_metadata) == 2 + + response = client.get( + f"/v1.0/process-instances/reports/columns", + headers=self.logged_in_headers(with_super_admin_user), + ) + + assert response.json is not None + assert response.status_code == 200 + assert response.json == [{'Header': 'id', 'accessor': 'id'}, {'Header': 'process_model_display_name', 'accessor': 'process_model_display_name'}, {'Header': 'start_in_seconds', 'accessor': 'start_in_seconds'}, {'Header': 'end_in_seconds', 'accessor': 'end_in_seconds'}, {'Header': 'username', 'accessor': 'username'}, {'Header': 'status', 'accessor': 'status'}, {'Header': 'key1', 'accessor': 'key1'}, {'Header': 'key2', 'accessor': 'key2'}] + From 6a0d33aaf7ac3fa270bb89116198e0342ec159d2 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 30 Nov 2022 07:24:24 -0500 Subject: [PATCH 031/128] removed file named ':' --- spiffworkflow-backend/: | 1908 --------------------------------------- 1 file changed, 1908 deletions(-) delete mode 100644 spiffworkflow-backend/: diff --git a/spiffworkflow-backend/: b/spiffworkflow-backend/: deleted file mode 100644 index 5516fdae..00000000 --- a/spiffworkflow-backend/: +++ /dev/null @@ -1,1908 +0,0 @@ -"""APIs for dealing with process groups, process models, and process instances.""" -import json -import random -import string -import uuid -from typing import Any -from typing import Dict -from typing import Optional -from typing import TypedDict -from typing import Union - -import connexion # type: ignore -import flask.wrappers -import jinja2 -from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel -import werkzeug -from flask import Blueprint -from flask import current_app -from flask import g -from flask import jsonify -from flask import make_response -from flask import 
redirect -from flask import request -from flask.wrappers import Response -from flask_bpmn.api.api_error import ApiError -from flask_bpmn.models.db import db -from lxml import etree # type: ignore -from lxml.builder import ElementMaker # type: ignore -from SpiffWorkflow.task import Task as SpiffTask # type: ignore -from SpiffWorkflow.task import TaskState -from sqlalchemy import and_ -from sqlalchemy import asc -from sqlalchemy import desc -from sqlalchemy.orm import aliased, joinedload - -from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( - ProcessEntityNotFoundError, -) -from spiffworkflow_backend.models.active_task import ActiveTaskModel -from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel -from spiffworkflow_backend.models.file import FileSchema -from spiffworkflow_backend.models.group import GroupModel -from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel -from spiffworkflow_backend.models.message_instance import MessageInstanceModel -from spiffworkflow_backend.models.message_model import MessageModel -from spiffworkflow_backend.models.message_triggerable_process_model import ( - MessageTriggerableProcessModel, -) -from spiffworkflow_backend.models.principal import PrincipalModel -from spiffworkflow_backend.models.process_group import ProcessGroup -from spiffworkflow_backend.models.process_group import ProcessGroupSchema -from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema -from spiffworkflow_backend.models.process_instance import ProcessInstanceModel -from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema -from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus -from spiffworkflow_backend.models.process_instance_report import ( - ProcessInstanceReportModel, -) -from spiffworkflow_backend.models.process_model import ProcessModelInfo -from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema -from spiffworkflow_backend.models.secret_model import SecretModel -from spiffworkflow_backend.models.secret_model import SecretModelSchema -from spiffworkflow_backend.models.spec_reference import SpecReferenceCache -from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema -from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.user import UserModel -from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel -from spiffworkflow_backend.routes.user import verify_token -from spiffworkflow_backend.services.authorization_service import AuthorizationService -from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService -from spiffworkflow_backend.services.git_service import GitService -from spiffworkflow_backend.services.message_service import MessageService -from spiffworkflow_backend.services.process_instance_processor import ( - ProcessInstanceProcessor, -) -from spiffworkflow_backend.services.process_instance_report_service import ( - ProcessInstanceReportFilter, -) -from spiffworkflow_backend.services.process_instance_report_service import ( - ProcessInstanceReportService, -) -from spiffworkflow_backend.services.process_instance_service import ( - ProcessInstanceService, -) -from spiffworkflow_backend.services.process_model_service import ProcessModelService -from 
spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner -from spiffworkflow_backend.services.secret_service import SecretService -from spiffworkflow_backend.services.service_task_service import ServiceTaskService -from spiffworkflow_backend.services.spec_file_service import SpecFileService -from spiffworkflow_backend.services.user_service import UserService - - -class TaskDataSelectOption(TypedDict): - """TaskDataSelectOption.""" - - value: str - label: str - - -class ReactJsonSchemaSelectOption(TypedDict): - """ReactJsonSchemaSelectOption.""" - - type: str - title: str - enum: list[str] - - -process_api_blueprint = Blueprint("process_api", __name__) - - -def status() -> flask.wrappers.Response: - """Status.""" - ProcessInstanceModel.query.filter().first() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.Response: - """Permissions_check.""" - if "requests_to_check" not in body: - raise ( - ApiError( - error_code="could_not_requests_to_check", - message="The key 'requests_to_check' not found at root of request body.", - status_code=400, - ) - ) - - response_dict: dict[str, dict[str, bool]] = {} - requests_to_check = body["requests_to_check"] - - for target_uri, http_methods in requests_to_check.items(): - if target_uri not in response_dict: - response_dict[target_uri] = {} - - for http_method in http_methods: - permission_string = AuthorizationService.get_permission_from_http_method( - http_method - ) - if permission_string: - has_permission = AuthorizationService.user_has_permission( - user=g.user, - permission=permission_string, - target_uri=target_uri, - ) - response_dict[target_uri][http_method] = has_permission - - return make_response(jsonify({"results": response_dict}), 200) - - -def modify_process_model_id(process_model_id: str) -> str: - """Modify_process_model_id.""" - return process_model_id.replace("/", ":") - - -def un_modify_modified_process_model_id(modified_process_model_id: str) -> str: - """Un_modify_modified_process_model_id.""" - return modified_process_model_id.replace(":", "/") - - -def process_group_add(body: dict) -> flask.wrappers.Response: - """Add_process_group.""" - process_group = ProcessGroup(**body) - ProcessModelService.add_process_group(process_group) - return make_response(jsonify(process_group), 201) - - -def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response: - """Process_group_delete.""" - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - ProcessModelService().process_group_delete(process_group_id) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_group_update( - modified_process_group_id: str, body: dict -) -> flask.wrappers.Response: - """Process Group Update.""" - body_include_list = ["display_name", "description"] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } - - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - process_group = ProcessGroup(id=process_group_id, **body_filtered) - ProcessModelService.update_process_group(process_group) - return make_response(jsonify(process_group), 200) - - -def process_group_list( - process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Process_group_list.""" - if 
process_group_identifier is not None: - process_groups = ProcessModelService.get_process_groups( - process_group_identifier - ) - else: - process_groups = ProcessModelService.get_process_groups() - batch = ProcessModelService().get_batch( - items=process_groups, page=page, per_page=per_page - ) - pages = len(process_groups) // per_page - remainder = len(process_groups) % per_page - if remainder > 0: - pages += 1 - - response_json = { - "results": ProcessGroupSchema(many=True).dump(batch), - "pagination": { - "count": len(batch), - "total": len(process_groups), - "pages": pages, - }, - } - return Response(json.dumps(response_json), status=200, mimetype="application/json") - - -def process_group_show( - modified_process_group_id: str, -) -> Any: - """Process_group_show.""" - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - try: - process_group = ProcessModelService.get_process_group(process_group_id) - except ProcessEntityNotFoundError as exception: - raise ( - ApiError( - error_code="process_group_cannot_be_found", - message=f"Process group cannot be found: {process_group_id}", - status_code=400, - ) - ) from exception - - process_group.parent_groups = ProcessModelService.get_parent_group_array( - process_group.id - ) - return make_response(jsonify(process_group), 200) - - -def process_group_move( - modified_process_group_identifier: str, new_location: str -) -> flask.wrappers.Response: - """Process_group_move.""" - original_process_group_id = un_modify_modified_process_model_id( - modified_process_group_identifier - ) - new_process_group = ProcessModelService().process_group_move( - original_process_group_id, new_location - ) - return make_response(jsonify(new_process_group), 201) - - -def process_model_create( - modified_process_group_id: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Process_model_create.""" - process_model_info = ProcessModelInfoSchema().load(body) - if modified_process_group_id is None: - raise ApiError( - error_code="process_group_id_not_specified", - message="Process Model could not be created when process_group_id path param is unspecified", - status_code=400, - ) - if process_model_info is None: - raise ApiError( - error_code="process_model_could_not_be_created", - message=f"Process Model could not be created from given body: {body}", - status_code=400, - ) - - unmodified_process_group_id = un_modify_modified_process_model_id( - modified_process_group_id - ) - process_group = ProcessModelService.get_process_group(unmodified_process_group_id) - if process_group is None: - raise ApiError( - error_code="process_model_could_not_be_created", - message=f"Process Model could not be created from given body because Process Group could not be found: {body}", - status_code=400, - ) - - ProcessModelService.add_process_model(process_model_info) - return Response( - json.dumps(ProcessModelInfoSchema().dump(process_model_info)), - status=201, - mimetype="application/json", - ) - - -def process_model_delete( - modified_process_model_identifier: str, -) -> flask.wrappers.Response: - """Process_model_delete.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" - ProcessModelService().process_model_delete(process_model_identifier) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_model_update( - modified_process_model_identifier: str, body: Dict[str, Union[str, 
bool, int]] -) -> Any: - """Process_model_update.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - body_include_list = [ - "display_name", - "primary_file_name", - "primary_process_id", - "description", - ] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } - - # process_model_identifier = f"{process_group_id}/{process_model_id}" - process_model = get_process_model(process_model_identifier) - ProcessModelService.update_process_model(process_model, body_filtered) - return ProcessModelInfoSchema().dump(process_model) - - -def process_model_show(modified_process_model_identifier: str) -> Any: - """Process_model_show.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" - process_model = get_process_model(process_model_identifier) - # TODO: Temporary. Should not need the next line once models have correct ids - # process_model.id = process_model_identifier - files = sorted(SpecFileService.get_files(process_model)) - process_model.files = files - for file in process_model.files: - file.references = SpecFileService.get_references_for_file(file, process_model) - - process_model.parent_groups = ProcessModelService.get_parent_group_array( - process_model.id - ) - return make_response(jsonify(process_model), 200) - - -def process_model_move( - modified_process_model_identifier: str, new_location: str -) -> flask.wrappers.Response: - """Process_model_move.""" - original_process_model_id = un_modify_modified_process_model_id( - modified_process_model_identifier - ) - new_process_model = ProcessModelService().process_model_move( - original_process_model_id, new_location - ) - return make_response(jsonify(new_process_model), 201) - - -def process_model_list( - process_group_identifier: Optional[str] = None, - recursive: Optional[bool] = False, - filter_runnable_by_user: Optional[bool] = False, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Process model list!""" - process_models = ProcessModelService.get_process_models( - process_group_id=process_group_identifier, - recursive=recursive, - filter_runnable_by_user=filter_runnable_by_user, - ) - batch = ProcessModelService().get_batch( - process_models, page=page, per_page=per_page - ) - pages = len(process_models) // per_page - remainder = len(process_models) % per_page - if remainder > 0: - pages += 1 - response_json = { - "results": ProcessModelInfoSchema(many=True).dump(batch), - "pagination": { - "count": len(batch), - "total": len(process_models), - "pages": pages, - }, - } - return Response(json.dumps(response_json), status=200, mimetype="application/json") - - -def process_list() -> Any: - """Returns a list of all known processes. - - This includes processes that are not the - primary process - helpful for finding possible call activities. 
- """ - references = SpecReferenceCache.query.filter_by(type="process").all() - return SpecReferenceSchema(many=True).dump(references) - - -def get_file(modified_process_model_id: str, file_name: str) -> Any: - """Get_file.""" - process_model_identifier = modified_process_model_id.replace(":", "/") - process_model = get_process_model(process_model_identifier) - files = SpecFileService.get_files(process_model, file_name) - if len(files) == 0: - raise ApiError( - error_code="unknown file", - message=f"No information exists for file {file_name}" - f" it does not exist in workflow {process_model_identifier}.", - status_code=404, - ) - - file = files[0] - file_contents = SpecFileService.get_data(process_model, file.name) - file.file_contents = file_contents - file.process_model_id = process_model.id - # file.process_group_id = process_model.process_group_id - return FileSchema().dump(file) - - -def process_model_file_update( - modified_process_model_id: str, file_name: str -) -> flask.wrappers.Response: - """Process_model_file_update.""" - process_model_identifier = modified_process_model_id.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" - process_model = get_process_model(process_model_identifier) - - request_file = get_file_from_request() - request_file_contents = request_file.stream.read() - if not request_file_contents: - raise ApiError( - error_code="file_contents_empty", - message="Given request file does not have any content", - status_code=400, - ) - - SpecFileService.update_file(process_model, file_name, request_file_contents) - - if current_app.config["GIT_COMMIT_ON_SAVE"]: - git_output = GitService.commit( - message=f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}" - ) - current_app.logger.info(f"git output: {git_output}") - else: - current_app.logger.info("Git commit on save is disabled") - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_model_file_delete( - modified_process_model_id: str, file_name: str -) -> flask.wrappers.Response: - """Process_model_file_delete.""" - process_model_identifier = modified_process_model_id.replace(":", "/") - process_model = get_process_model(process_model_identifier) - try: - SpecFileService.delete_file(process_model, file_name) - except FileNotFoundError as exception: - raise ( - ApiError( - error_code="process_model_file_cannot_be_found", - message=f"Process model file cannot be found: {file_name}", - status_code=400, - ) - ) from exception - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def add_file(modified_process_model_id: str) -> flask.wrappers.Response: - """Add_file.""" - process_model_identifier = modified_process_model_id.replace(":", "/") - process_model = get_process_model(process_model_identifier) - request_file = get_file_from_request() - if not request_file.filename: - raise ApiError( - error_code="could_not_get_filename", - message="Could not get filename from request", - status_code=400, - ) - - file = SpecFileService.add_file( - process_model, request_file.filename, request_file.stream.read() - ) - file_contents = SpecFileService.get_data(process_model, file.name) - file.file_contents = file_contents - file.process_model_id = process_model.id - return Response( - json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json" - ) - - -def process_instance_create(modified_process_model_id: str) -> flask.wrappers.Response: - 
"""Create_process_instance.""" - process_model_identifier = un_modify_modified_process_model_id( - modified_process_model_id - ) - process_instance = ( - ProcessInstanceService.create_process_instance_from_process_model_identifier( - process_model_identifier, g.user - ) - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=201, - mimetype="application/json", - ) - - -def process_instance_run( - modified_process_model_identifier: str, - process_instance_id: int, - do_engine_steps: bool = True, -) -> flask.wrappers.Response: - """Process_instance_run.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - - if do_engine_steps: - try: - processor.do_engine_steps() - except ApiError as e: - ErrorHandlingService().handle_error(processor, e) - raise e - except Exception as e: - ErrorHandlingService().handle_error(processor, e) - task = processor.bpmn_process_instance.last_task - raise ApiError.from_task( - error_code="unknown_exception", - message=f"An unknown error occurred. Original error: {e}", - status_code=400, - task=task, - ) from e - processor.save() - - if not current_app.config["RUN_BACKGROUND_SCHEDULER"]: - MessageService.process_message_instances() - - process_instance_api = ProcessInstanceService.processor_to_process_instance_api( - processor - ) - process_instance_data = processor.get_data() - process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) - process_instance_metadata["data"] = process_instance_data - return Response( - json.dumps(process_instance_metadata), status=200, mimetype="application/json" - ) - - -def process_instance_terminate( - process_instance_id: int, -) -> flask.wrappers.Response: - """Process_instance_run.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - processor.terminate() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_suspend( - process_instance_id: int, -) -> flask.wrappers.Response: - """Process_instance_suspend.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - processor.suspend() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_resume( - process_instance_id: int, -) -> flask.wrappers.Response: - """Process_instance_resume.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - processor.resume() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_log_list( - process_instance_id: int, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Process_instance_log_list.""" - # to make sure the process instance exists - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - logs = ( - SpiffLoggingModel.query.filter( - SpiffLoggingModel.process_instance_id == process_instance.id - ) - .order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore - .join( - UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True - ) # isouter since if we don't have a user, we still want the log - .add_columns( - UserModel.username, - ) - 
.paginate(page=page, per_page=per_page, error_out=False) - ) - - response_json = { - "results": logs.items, - "pagination": { - "count": len(logs.items), - "total": logs.total, - "pages": logs.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -def message_instance_list( - process_instance_id: Optional[int] = None, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Message_instance_list.""" - # to make sure the process instance exists - message_instances_query = MessageInstanceModel.query - - if process_instance_id: - message_instances_query = message_instances_query.filter_by( - process_instance_id=process_instance_id - ) - - message_instances = ( - message_instances_query.order_by( - MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore - MessageInstanceModel.id.desc(), # type: ignore - ) - .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id) - .join(ProcessInstanceModel) - .add_columns( - MessageModel.identifier.label("message_identifier"), - ProcessInstanceModel.process_model_identifier, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - - for message_instance in message_instances: - message_correlations: dict = {} - for ( - mcmi - ) in ( - message_instance.MessageInstanceModel.message_correlations_message_instances - ): - mc = MessageCorrelationModel.query.filter_by( - id=mcmi.message_correlation_id - ).all() - for m in mc: - if m.name not in message_correlations: - message_correlations[m.name] = {} - message_correlations[m.name][ - m.message_correlation_property.identifier - ] = m.value - message_instance.MessageInstanceModel.message_correlations = ( - message_correlations - ) - - response_json = { - "results": message_instances.items, - "pagination": { - "count": len(message_instances.items), - "total": message_instances.total, - "pages": message_instances.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -# body: { -# payload: dict, -# process_instance_id: Optional[int], -# } -def message_start( - message_identifier: str, - body: Dict[str, Any], -) -> flask.wrappers.Response: - """Message_start.""" - message_model = MessageModel.query.filter_by(identifier=message_identifier).first() - if message_model is None: - raise ( - ApiError( - error_code="unknown_message", - message=f"Could not find message with identifier: {message_identifier}", - status_code=404, - ) - ) - - if "payload" not in body: - raise ( - ApiError( - error_code="missing_payload", - message="Body is missing payload.", - status_code=400, - ) - ) - - process_instance = None - if "process_instance_id" in body: - # to make sure we have a valid process_instance_id - process_instance = find_process_instance_by_id_or_raise( - body["process_instance_id"] - ) - - message_instance = MessageInstanceModel.query.filter_by( - process_instance_id=process_instance.id, - message_model_id=message_model.id, - message_type="receive", - status="ready", - ).first() - if message_instance is None: - raise ( - ApiError( - error_code="cannot_find_waiting_message", - message=f"Could not find waiting message for identifier {message_identifier} " - f"and process instance {process_instance.id}", - status_code=400, - ) - ) - MessageService.process_message_receive( - message_instance, message_model.name, body["payload"] - ) - - else: - message_triggerable_process_model = ( - MessageTriggerableProcessModel.query.filter_by( - message_model_id=message_model.id - ).first() - ) - - if message_triggerable_process_model is None: - 
raise ( - ApiError( - error_code="cannot_start_message", - message=f"Cannot start message with identifier: {message_identifier}", - status_code=400, - ) - ) - - process_instance = MessageService.process_message_triggerable_process_model( - message_triggerable_process_model, - message_model.name, - body["payload"], - g.user, - ) - - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) - - -def process_instance_list( - process_model_identifier: Optional[str] = None, - page: int = 1, - per_page: int = 100, - start_from: Optional[int] = None, - start_to: Optional[int] = None, - end_from: Optional[int] = None, - end_to: Optional[int] = None, - process_status: Optional[str] = None, - initiated_by_me: Optional[bool] = None, - with_tasks_completed_by_me: Optional[bool] = None, - with_tasks_completed_by_my_group: Optional[bool] = None, - user_filter: Optional[bool] = False, - report_identifier: Optional[str] = None, -) -> flask.wrappers.Response: - """Process_instance_list.""" - process_instance_report = ProcessInstanceReportService.report_with_identifier( - g.user, report_identifier - ) - - if user_filter: - report_filter = ProcessInstanceReportFilter( - process_model_identifier, - start_from, - start_to, - end_from, - end_to, - process_status.split(",") if process_status else None, - initiated_by_me, - with_tasks_completed_by_me, - with_tasks_completed_by_my_group, - ) - else: - report_filter = ( - ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report, - process_model_identifier, - start_from, - start_to, - end_from, - end_to, - process_status, - initiated_by_me, - with_tasks_completed_by_me, - with_tasks_completed_by_my_group, - ) - ) - - # process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier) - process_instance_query = ProcessInstanceModel.query - # Always join that hot user table for good performance at serialization time. - process_instance_query = process_instance_query.options( - joinedload(ProcessInstanceModel.process_initiator) - ) - - if report_filter.process_model_identifier is not None: - process_model = get_process_model( - f"{report_filter.process_model_identifier}", - ) - - process_instance_query = process_instance_query.filter_by( - process_model_identifier=process_model.id - ) - - # this can never happen. obviously the class has the columns it defines. this is just to appease mypy.
- if ( - ProcessInstanceModel.start_in_seconds is None - or ProcessInstanceModel.end_in_seconds is None - ): - raise ( - ApiError( - error_code="unexpected_condition", - message="Something went very wrong", - status_code=500, - ) - ) - - if report_filter.start_from is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.start_in_seconds >= report_filter.start_from - ) - if report_filter.start_to is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.start_in_seconds <= report_filter.start_to - ) - if report_filter.end_from is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.end_in_seconds >= report_filter.end_from - ) - if report_filter.end_to is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.end_in_seconds <= report_filter.end_to - ) - if report_filter.process_status is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore - ) - - if report_filter.initiated_by_me is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - process_instance_query = process_instance_query.filter_by( - process_initiator=g.user - ) - - # TODO: not sure if this is exactly what is wanted - if report_filter.with_tasks_completed_by_me is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - # process_instance_query = process_instance_query.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) - # process_instance_query = process_instance_query.add_columns(UserModel.username) - # search for process_instance.UserModel.username in this file for more details about why adding columns is annoying. 
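An aside for readers tracing this hunk: each report_filter attribute above maps one-to-one onto a query-string parameter of the process instance list endpoint. A minimal sketch of how a client might exercise these filters over HTTP, assuming a local backend at http://localhost:7000 and a /v1.0/process-instances route (both assumptions, not taken from this diff):

    # Hypothetical client call; parameter names mirror process_instance_list's
    # keyword arguments, but the host, route, and token are placeholders.
    import requests

    params = {
        "page": 1,
        "per_page": 50,
        "start_from": 1669000000,  # epoch seconds, compared against start_in_seconds
        "process_status": "complete,error",  # the handler splits this on ","
        "with_tasks_completed_by_me": "true",
    }
    response = requests.get(
        "http://localhost:7000/v1.0/process-instances",
        params=params,
        headers={"Authorization": "Bearer <access_token>"},
    )
    # Expected shape, per the response_json built below:
    # {"results": [...], "filters": {...}, "pagination": {"count": ..., "total": ..., "pages": ...}}
    print(response.json()["pagination"])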
- - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.process_initiator_id != g.user.id - ) - process_instance_query = process_instance_query.join( - SpiffStepDetailsModel, - ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, - ) - process_instance_query = process_instance_query.join( - SpiffLoggingModel, - ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.message.contains("COMPLETED") # type: ignore - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step - ) - process_instance_query = process_instance_query.filter( - SpiffStepDetailsModel.completed_by_user_id == g.user.id - ) - - if report_filter.with_tasks_completed_by_my_group is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - process_instance_query = process_instance_query.join( - SpiffStepDetailsModel, - ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, - ) - process_instance_query = process_instance_query.join( - SpiffLoggingModel, - ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.message.contains("COMPLETED") # type: ignore - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step - ) - process_instance_query = process_instance_query.join( - GroupModel, - GroupModel.id == SpiffStepDetailsModel.lane_assignment_id, - ) - process_instance_query = process_instance_query.join( - UserGroupAssignmentModel, - UserGroupAssignmentModel.group_id == GroupModel.id, - ) - process_instance_query = process_instance_query.filter( - UserGroupAssignmentModel.user_id == g.user.id - ) - - # userSkillF = aliased(UserSkill) - # userSkillI = aliased(UserSkill) - - import pdb; pdb.set_trace() - for column in process_instance_report.report_metadata['columns']: - print(f"column: {column['accessor']}") - # process_instance_query = process_instance_query.outerjoin(ProcessInstanceMetadataModel, ProcessInstanceModel.id == ProcessInstanceMetadataModel.process_instance_id, ProcessInstanceMetadataModel.key == column['accessor']) - instance_metadata_alias = alias(ProcessInstanceMetadataModel) - process_instance_query = ( - process_instance_query.outerjoin(instance_metadata_alias, ProcessInstanceModel.id == instance_metadata_alias.process_instance_id) - .add_column(ProcessInstanceMetadataModel.value.label(column['accessor'])) - ) - import pdb; pdb.set_trace() - - process_instances = ( - process_instance_query.group_by(ProcessInstanceModel.id) - .order_by( - ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - import pdb; pdb.set_trace() - - # def awesome_serialize(process_instance) - # dict_thing = process_instance.serialize - # - # # add columns since we have access to columns here - # dict_thing['awesome'] = 'awesome' - # - # return dict_thing - - results = list( - map( - ProcessInstanceService.serialize_flat_with_task_data, - process_instances.items, - ) - ) - report_metadata = process_instance_report.report_metadata - - response_json = { - "report_identifier": process_instance_report.identifier, - "report_metadata": report_metadata, - "results": 
results, - "filters": report_filter.to_dict(), - "pagination": { - "count": len(results), - "total": process_instances.total, - "pages": process_instances.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -def process_instance_show( - modified_process_model_identifier: str, process_instance_id: int -) -> flask.wrappers.Response: - """Create_process_instance.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - current_version_control_revision = GitService.get_current_revision() - process_model = get_process_model(process_model_identifier) - - if process_model.primary_file_name: - if ( - process_instance.bpmn_version_control_identifier - == current_version_control_revision - ): - bpmn_xml_file_contents = SpecFileService.get_data( - process_model, process_model.primary_file_name - ) - else: - bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision( - process_model, process_instance.bpmn_version_control_identifier - ) - process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents - - return make_response(jsonify(process_instance), 200) - - -def process_instance_delete(process_instance_id: int) -> flask.wrappers.Response: - """Create_process_instance.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - # (Pdb) db.session.delete - # > - db.session.query(SpiffLoggingModel).filter_by( - process_instance_id=process_instance.id - ).delete() - db.session.query(SpiffStepDetailsModel).filter_by( - process_instance_id=process_instance.id - ).delete() - db.session.delete(process_instance) - db.session.commit() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_report_list( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Process_instance_report_list.""" - process_instance_reports = ProcessInstanceReportModel.query.filter_by( - created_by_id=g.user.id, - ).all() - - return make_response(jsonify(process_instance_reports), 200) - - -def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response: - """Process_instance_report_create.""" - ProcessInstanceReportModel.create_report( - identifier=body["identifier"], - user=g.user, - report_metadata=body["report_metadata"], - ) - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_report_update( - report_identifier: str, - body: Dict[str, Any], -) -> flask.wrappers.Response: - """Process_instance_report_create.""" - process_instance_report = ProcessInstanceReportModel.query.filter_by( - identifier=report_identifier, - created_by_id=g.user.id, - ).first() - if process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance report", - status_code=404, - ) - - process_instance_report.report_metadata = body["report_metadata"] - db.session.commit() - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_report_delete( - report_identifier: str, -) -> flask.wrappers.Response: - """Process_instance_report_create.""" - process_instance_report = ProcessInstanceReportModel.query.filter_by( - identifier=report_identifier, - created_by_id=g.user.id, - ).first() - if process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance 
report", - status_code=404, - ) - - db.session.delete(process_instance_report) - db.session.commit() - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def service_tasks_show() -> flask.wrappers.Response: - """Service_tasks_show.""" - available_connectors = ServiceTaskService.available_connectors() - print(available_connectors) - - return Response( - json.dumps(available_connectors), status=200, mimetype="application/json" - ) - - -def authentication_list() -> flask.wrappers.Response: - """Authentication_list.""" - available_authentications = ServiceTaskService.authentication_list() - response_json = { - "results": available_authentications, - "connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"], - "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback", - } - - return Response(json.dumps(response_json), status=200, mimetype="application/json") - - -def authentication_callback( - service: str, - auth_method: str, -) -> werkzeug.wrappers.Response: - """Authentication_callback.""" - verify_token(request.args.get("token"), force_run=True) - response = request.args["response"] - SecretService().update_secret( - f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True - ) - return redirect( - f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration" - ) - - -def process_instance_report_show( - report_identifier: str, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Process_instance_list.""" - process_instances = ProcessInstanceModel.query.order_by( # .filter_by(process_model_identifier=process_model.id) - ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore - ).paginate( - page=page, per_page=per_page, error_out=False - ) - - process_instance_report = ProcessInstanceReportModel.query.filter_by( - identifier=report_identifier, - created_by_id=g.user.id, - ).first() - if process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance report", - status_code=404, - ) - - substitution_variables = request.args.to_dict() - result_dict = process_instance_report.generate_report( - process_instances.items, substitution_variables - ) - - # update this if we go back to a database query instead of filtering in memory - result_dict["pagination"] = { - "count": len(result_dict["results"]), - "total": len(result_dict["results"]), - "pages": 1, - } - - return Response(json.dumps(result_dict), status=200, mimetype="application/json") - - -# TODO: see comment for before_request -# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"]) -def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: - """Task_list_my_tasks.""" - principal = find_principal_or_raise() - active_tasks = ( - ActiveTaskModel.query.order_by(desc(ActiveTaskModel.id)) # type: ignore - .join(ProcessInstanceModel) - .join(ActiveTaskUserModel) - .filter_by(user_id=principal.user_id) - # just need this add_columns to add the process_model_identifier. Then add everything back that was removed. 
- .add_columns( - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.process_model_display_name, - ProcessInstanceModel.status, - ActiveTaskModel.task_name, - ActiveTaskModel.task_title, - ActiveTaskModel.task_type, - ActiveTaskModel.task_status, - ActiveTaskModel.task_id, - ActiveTaskModel.id, - ActiveTaskModel.process_model_display_name, - ActiveTaskModel.process_instance_id, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - tasks = [ActiveTaskModel.to_task(active_task) for active_task in active_tasks.items] - - response_json = { - "results": tasks, - "pagination": { - "count": len(active_tasks.items), - "total": active_tasks.total, - "pages": active_tasks.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -def task_list_for_my_open_processes( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Task_list_for_my_open_processes.""" - return get_tasks(page=page, per_page=per_page) - - -def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: - """Task_list_for_processes_started_by_others.""" - return get_tasks( - processes_started_by_user=False, - has_lane_assignment_id=False, - page=page, - per_page=per_page, - ) - - -def task_list_for_my_groups( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Task_list_for_processes_started_by_others.""" - return get_tasks(processes_started_by_user=False, page=page, per_page=per_page) - - -def get_tasks( - processes_started_by_user: bool = True, - has_lane_assignment_id: bool = True, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Get_tasks.""" - user_id = g.user.id - - # use distinct to ensure we only get one row per active task otherwise - # we can get back multiple for the same active task row which throws off - # pagination later on - # https://stackoverflow.com/q/34582014/6090676 - active_tasks_query = ( - ActiveTaskModel.query.distinct() - .outerjoin(GroupModel, GroupModel.id == ActiveTaskModel.lane_assignment_id) - .join(ProcessInstanceModel) - .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) - ) - - if processes_started_by_user: - active_tasks_query = active_tasks_query.filter( - ProcessInstanceModel.process_initiator_id == user_id - ).outerjoin( - ActiveTaskUserModel, - and_( - ActiveTaskUserModel.user_id == user_id, - ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, - ), - ) - else: - active_tasks_query = active_tasks_query.filter( - ProcessInstanceModel.process_initiator_id != user_id - ).join( - ActiveTaskUserModel, - and_( - ActiveTaskUserModel.user_id == user_id, - ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, - ), - ) - if has_lane_assignment_id: - active_tasks_query = active_tasks_query.filter( - ActiveTaskModel.lane_assignment_id.is_not(None) # type: ignore - ) - else: - active_tasks_query = active_tasks_query.filter(ActiveTaskModel.lane_assignment_id.is_(None)) # type: ignore - - active_tasks = active_tasks_query.add_columns( - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.status.label("process_instance_status"), # type: ignore - ProcessInstanceModel.updated_at_in_seconds, - ProcessInstanceModel.created_at_in_seconds, - UserModel.username, - GroupModel.identifier.label("group_identifier"), - ActiveTaskModel.task_name, - ActiveTaskModel.task_title, - ActiveTaskModel.process_model_display_name, - ActiveTaskModel.process_instance_id, - ActiveTaskUserModel.user_id.label("current_user_is_potential_owner"), - 
).paginate(page=page, per_page=per_page, error_out=False) - - response_json = { - "results": active_tasks.items, - "pagination": { - "count": len(active_tasks.items), - "total": active_tasks.total, - "pages": active_tasks.pages, - }, - } - return make_response(jsonify(response_json), 200) - - -def process_instance_task_list( - modified_process_model_id: str, - process_instance_id: int, - all_tasks: bool = False, - spiff_step: int = 0, -) -> flask.wrappers.Response: - """Process_instance_task_list.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - if spiff_step > 0: - step_detail = ( - db.session.query(SpiffStepDetailsModel) - .filter( - SpiffStepDetailsModel.process_instance_id == process_instance.id, - SpiffStepDetailsModel.spiff_step == spiff_step, - ) - .first() - ) - if step_detail is not None and process_instance.bpmn_json is not None: - bpmn_json = json.loads(process_instance.bpmn_json) - bpmn_json["tasks"] = step_detail.task_json - process_instance.bpmn_json = json.dumps(bpmn_json) - - processor = ProcessInstanceProcessor(process_instance) - - spiff_tasks = None - if all_tasks: - spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) - else: - spiff_tasks = processor.get_all_user_tasks() - - tasks = [] - for spiff_task in spiff_tasks: - task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) - task.data = spiff_task.data - tasks.append(task) - - return make_response(jsonify(tasks), 200) - - -def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: - """Task_show.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - if process_instance.status == ProcessInstanceStatus.suspended.value: - raise ApiError( - error_code="error_suspended", - message="The process instance is suspended", - status_code=400, - ) - - process_model = get_process_model( - process_instance.process_model_identifier, - ) - - form_schema_file_name = "" - form_ui_schema_file_name = "" - spiff_task = get_spiff_task_from_process_instance(task_id, process_instance) - extensions = spiff_task.task_spec.extensions - - if "properties" in extensions: - properties = extensions["properties"] - if "formJsonSchemaFilename" in properties: - form_schema_file_name = properties["formJsonSchemaFilename"] - if "formUiSchemaFilename" in properties: - form_ui_schema_file_name = properties["formUiSchemaFilename"] - task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) - task.data = spiff_task.data - task.process_model_display_name = process_model.display_name - task.process_model_identifier = process_model.id - process_model_with_form = process_model - - if task.type == "User Task": - if not form_schema_file_name: - raise ( - ApiError( - error_code="missing_form_file", - message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}", - status_code=400, - ) - ) - - form_contents = prepare_form_data( - form_schema_file_name, - task.data, - process_model_with_form, - ) - - try: - # form_contents is a str - form_dict = json.loads(form_contents) - except Exception as exception: - raise ( - ApiError( - error_code="error_loading_form", - message=f"Could not load form schema from: {form_schema_file_name}. 
Error was: {str(exception)}", - status_code=400, - ) - ) from exception - - if task.data: - _update_form_schema_with_task_data_as_needed(form_dict, task.data) - - if form_contents: - task.form_schema = form_dict - - if form_ui_schema_file_name: - ui_form_contents = prepare_form_data( - form_ui_schema_file_name, - task.data, - process_model_with_form, - ) - if ui_form_contents: - task.form_ui_schema = ui_form_contents - - if task.properties and task.data and "instructionsForEndUser" in task.properties: - print( - f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}" - ) - if task.properties["instructionsForEndUser"]: - task.properties["instructionsForEndUser"] = render_jinja_template( - task.properties["instructionsForEndUser"], task.data - ) - return make_response(jsonify(task), 200) - - -def task_submit( - process_instance_id: int, - task_id: str, - body: Dict[str, Any], - terminate_loop: bool = False, -) -> flask.wrappers.Response: - """Task_submit_user_data.""" - principal = find_principal_or_raise() - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - processor = ProcessInstanceProcessor(process_instance) - spiff_task = get_spiff_task_from_process_instance( - task_id, process_instance, processor=processor - ) - AuthorizationService.assert_user_can_complete_spiff_task( - process_instance.id, spiff_task, principal.user - ) - - if spiff_task.state != TaskState.READY: - raise ( - ApiError( - error_code="invalid_state", - message="You may not update a task unless it is in the READY state.", - status_code=400, - ) - ) - - if terminate_loop and spiff_task.is_looping(): - spiff_task.terminate_loop() - - active_task = ActiveTaskModel.query.filter_by( - process_instance_id=process_instance_id, task_id=task_id - ).first() - if active_task is None: - raise ( - ApiError( - error_code="no_active_task", - message="Cannot find an active task with task id '{task_id}' for process instance {process_instance_id}.", - status_code=500, - ) - ) - - ProcessInstanceService.complete_form_task( - processor=processor, - spiff_task=spiff_task, - data=body, - user=g.user, - active_task=active_task, - ) - - # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same - # task spec, complete that form as well. 
- # if update_all: - # last_index = spiff_task.task_info()["mi_index"] - # next_task = processor.next_task() - # while next_task and next_task.task_info()["mi_index"] > last_index: - # __update_task(processor, next_task, form_data, user) - # last_index = next_task.task_info()["mi_index"] - # next_task = processor.next_task() - - next_active_task_assigned_to_me = ( - ActiveTaskModel.query.filter_by(process_instance_id=process_instance_id) - .order_by(asc(ActiveTaskModel.id)) # type: ignore - .join(ActiveTaskUserModel) - .filter_by(user_id=principal.user_id) - .first() - ) - if next_active_task_assigned_to_me: - return make_response( - jsonify(ActiveTaskModel.to_task(next_active_task_assigned_to_me)), 200 - ) - - return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") - - -def script_unit_test_create( - process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Script_unit_test_create.""" - bpmn_task_identifier = _get_required_parameter_or_raise( - "bpmn_task_identifier", body - ) - input_json = _get_required_parameter_or_raise("input_json", body) - expected_output_json = _get_required_parameter_or_raise( - "expected_output_json", body - ) - - process_model_identifier = f"{process_group_id}/{process_model_id}" - process_model = get_process_model(process_model_identifier) - file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0] - if file is None: - raise ApiError( - error_code="cannot_find_file", - message=f"Could not find the primary bpmn file for process_model: {process_model.id}", - status_code=404, - ) - - # TODO: move this to an xml service or something - file_contents = SpecFileService.get_data(process_model, file.name) - bpmn_etree_element = etree.fromstring(file_contents) - - nsmap = bpmn_etree_element.nsmap - spiff_element_maker = ElementMaker( - namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap - ) - - script_task_elements = bpmn_etree_element.xpath( - f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']", - namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, - ) - if len(script_task_elements) == 0: - raise ApiError( - error_code="missing_script_task", - message=f"Cannot find a script task with id: {bpmn_task_identifier}", - status_code=404, - ) - script_task_element = script_task_elements[0] - - extension_elements = None - extension_elements_array = script_task_element.xpath( - "//bpmn:extensionElements", - namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, - ) - if len(extension_elements_array) == 0: - bpmn_element_maker = ElementMaker( - namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap - ) - extension_elements = bpmn_element_maker("extensionElements") - script_task_element.append(extension_elements) - else: - extension_elements = extension_elements_array[0] - - unit_test_elements = None - unit_test_elements_array = extension_elements.xpath( - "//spiffworkflow:unitTests", - namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"}, - ) - if len(unit_test_elements_array) == 0: - unit_test_elements = spiff_element_maker("unitTests") - extension_elements.append(unit_test_elements) - else: - unit_test_elements = unit_test_elements_array[0] - - fuzz = "".join( - random.choice(string.ascii_uppercase + string.digits) # noqa: S311 - for _ in range(7) - ) - unit_test_id = f"unit_test_{fuzz}" - - input_json_element = spiff_element_maker("inputJson", json.dumps(input_json)) - 
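For readers unfamiliar with lxml's ElementMaker, the XML construction in this hunk deserves a standalone illustration. Here is a self-contained sketch of the same pattern, building a namespaced unitTest element with JSON payload children; the identifiers and payloads are illustrative only, not taken from the diff:

    # Demonstrates the lxml ElementMaker pattern used above: a maker bound to the
    # spiffworkflow namespace produces namespaced elements, and string children
    # become text nodes.
    import json

    from lxml import etree
    from lxml.builder import ElementMaker

    SPIFF_NS = "http://spiffworkflow.org/bpmn/schema/1.0/core"
    spiff = ElementMaker(namespace=SPIFF_NS, nsmap={"spiffworkflow": SPIFF_NS})

    unit_test = spiff(
        "unitTest",
        spiff("inputJson", json.dumps({"a": 1})),
        spiff("expectedOutputJson", json.dumps({"a": 2})),
        id="unit_test_example",
    )
    print(etree.tostring(unit_test, pretty_print=True).decode())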
expected_output_json_element = spiff_element_maker( - "expectedOutputJson", json.dumps(expected_output_json) - ) - unit_test_element = spiff_element_maker("unitTest", id=unit_test_id) - unit_test_element.append(input_json_element) - unit_test_element.append(expected_output_json_element) - unit_test_elements.append(unit_test_element) - SpecFileService.update_file( - process_model, file.name, etree.tostring(bpmn_etree_element) - ) - - return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") - - -def script_unit_test_run( - process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Script_unit_test_run.""" - # FIXME: We should probably clear this somewhere else but this works - current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None - current_app.config["THREAD_LOCAL_DATA"].spiff_step = None - - python_script = _get_required_parameter_or_raise("python_script", body) - input_json = _get_required_parameter_or_raise("input_json", body) - expected_output_json = _get_required_parameter_or_raise( - "expected_output_json", body - ) - - result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts( - python_script, input_json, expected_output_json - ) - return make_response(jsonify(result), 200) - - -def get_file_from_request() -> Any: - """Get_file_from_request.""" - request_file = connexion.request.files.get("file") - if not request_file: - raise ApiError( - error_code="no_file_given", - message="Given request does not contain a file", - status_code=400, - ) - return request_file - - -def get_process_model(process_model_id: str) -> ProcessModelInfo: - """Get_process_model.""" - process_model = None - try: - process_model = ProcessModelService.get_process_model(process_model_id) - except ProcessEntityNotFoundError as exception: - raise ( - ApiError( - error_code="process_model_cannot_be_found", - message=f"Process model cannot be found: {process_model_id}", - status_code=400, - ) - ) from exception - - return process_model - - -def find_principal_or_raise() -> PrincipalModel: - """Find_principal_or_raise.""" - principal = PrincipalModel.query.filter_by(user_id=g.user.id).first() - if principal is None: - raise ( - ApiError( - error_code="principal_not_found", - message=f"Principal not found from user id: {g.user.id}", - status_code=400, - ) - ) - return principal # type: ignore - - -def find_process_instance_by_id_or_raise( - process_instance_id: int, -) -> ProcessInstanceModel: - """Find_process_instance_by_id_or_raise.""" - process_instance_query = ProcessInstanceModel.query.filter_by( - id=process_instance_id - ) - - # we had a frustrating session trying to do joins and access columns from two tables. 
here's some notes for our future selves: - # this returns an object that allows you to do: process_instance.UserModel.username - # process_instance = db.session.query(ProcessInstanceModel, UserModel).filter_by(id=process_instance_id).first() - # you can also use splat with add_columns, but it still didn't ultimately give us access to the process instance - # attributes or username like we wanted: - # process_instance_query.join(UserModel).add_columns(*ProcessInstanceModel.__table__.columns, UserModel.username) - - process_instance = process_instance_query.first() - if process_instance is None: - raise ( - ApiError( - error_code="process_instance_cannot_be_found", - message=f"Process instance cannot be found: {process_instance_id}", - status_code=400, - ) - ) - return process_instance # type: ignore - - -def get_value_from_array_with_index(array: list, index: int) -> Any: - """Get_value_from_array_with_index.""" - if index < 0: - return None - - if index >= len(array): - return None - - return array[index] - - -def prepare_form_data( - form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo -) -> str: - """Prepare_form_data.""" - if task_data is None: - return "" - - file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8") - return render_jinja_template(file_contents, task_data) - - -def render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str: - """Render_jinja_template.""" - jinja_environment = jinja2.Environment( - autoescape=True, lstrip_blocks=True, trim_blocks=True - ) - template = jinja_environment.from_string(unprocessed_template) - return template.render(**data) - - -def get_spiff_task_from_process_instance( - task_id: str, - process_instance: ProcessInstanceModel, - processor: Union[ProcessInstanceProcessor, None] = None, -) -> SpiffTask: - """Get_spiff_task_from_process_instance.""" - if processor is None: - processor = ProcessInstanceProcessor(process_instance) - task_uuid = uuid.UUID(task_id) - spiff_task = processor.bpmn_process_instance.get_task(task_uuid) - - if spiff_task is None: - raise ( - ApiError( - error_code="empty_task", - message="Processor failed to obtain task.", - status_code=500, - ) - ) - return spiff_task - - -# -# Methods for secrets CRUD - maybe move somewhere else: -# -def get_secret(key: str) -> Optional[str]: - """Get_secret.""" - return SecretService.get_secret(key) - - -def secret_list( - page: int = 1, - per_page: int = 100, -) -> Response: - """Secret_list.""" - secrets = ( - SecretModel.query.order_by(SecretModel.key) - .join(UserModel) - .add_columns( - UserModel.username, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - response_json = { - "results": secrets.items, - "pagination": { - "count": len(secrets.items), - "total": secrets.total, - "pages": secrets.pages, - }, - } - return make_response(jsonify(response_json), 200) - - -def add_secret(body: Dict) -> Response: - """Add secret.""" - secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id) - assert secret_model # noqa: S101 - return Response( - json.dumps(SecretModelSchema().dump(secret_model)), - status=201, - mimetype="application/json", - ) - - -def update_secret(key: str, body: dict) -> Response: - """Update secret.""" - SecretService().update_secret(key, body["value"], g.user.id) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def delete_secret(key: str) -> Response: - """Delete secret.""" - current_user = UserService.current_user() - 
SecretService.delete_secret(key, current_user.id) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: - """Get_required_parameter_or_raise.""" - return_value = None - if parameter in post_body: - return_value = post_body[parameter] - - if return_value is None or return_value == "": - raise ( - ApiError( - error_code="missing_required_parameter", - message=f"Parameter is missing from json request body: {parameter}", - status_code=400, - ) - ) - - return return_value - - -# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches -def _update_form_schema_with_task_data_as_needed( - in_dict: dict, task_data: dict -) -> None: - """Update_nested.""" - for k, value in in_dict.items(): - if "anyOf" == k: - # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"] - if isinstance(value, list): - if len(value) == 1: - first_element_in_value_list = value[0] - if isinstance(first_element_in_value_list, str): - if first_element_in_value_list.startswith( - "options_from_task_data_var:" - ): - task_data_var = first_element_in_value_list.replace( - "options_from_task_data_var:", "" - ) - - if task_data_var not in task_data: - raise ( - ApiError( - error_code="missing_task_data_var", - message=f"Task data is missing variable: {task_data_var}", - status_code=500, - ) - ) - - select_options_from_task_data = task_data.get(task_data_var) - if isinstance(select_options_from_task_data, list): - if all( - "value" in d and "label" in d - for d in select_options_from_task_data - ): - - def map_function( - task_data_select_option: TaskDataSelectOption, - ) -> ReactJsonSchemaSelectOption: - """Map_function.""" - return { - "type": "string", - "enum": [task_data_select_option["value"]], - "title": task_data_select_option["label"], - } - - options_for_react_json_schema_form = list( - map(map_function, select_options_from_task_data) - ) - - in_dict[k] = options_for_react_json_schema_form - elif isinstance(value, dict): - _update_form_schema_with_task_data_as_needed(value, task_data) - elif isinstance(value, list): - for o in value: - if isinstance(o, dict): - _update_form_schema_with_task_data_as_needed(o, task_data) - - -def update_task_data(process_instance_id: str, task_id: str, body: Dict) -> Response: - """Update task data.""" - process_instance = ProcessInstanceModel.query.filter( - ProcessInstanceModel.id == int(process_instance_id) - ).first() - if process_instance: - process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json) - if "new_task_data" in body: - new_task_data_str: str = body["new_task_data"] - new_task_data_dict = json.loads(new_task_data_str) - if task_id in process_instance_bpmn_json_dict["tasks"]: - process_instance_bpmn_json_dict["tasks"][task_id][ - "data" - ] = new_task_data_dict - process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict) - db.session.add(process_instance) - try: - db.session.commit() - except Exception as e: - db.session.rollback() - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update the Instance. 
Original error is {e}", - ) from e - else: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", - ) - else: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.", - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) From 975b9616322d199a67d4037c580d9fc4664cb6b0 Mon Sep 17 00:00:00 2001 From: Dan Date: Wed, 30 Nov 2022 11:32:55 -0500 Subject: [PATCH 032/128] Adding a blueprint for openid - a very lightweight embedded authentication system to make it eaiser to try out SpiffWorkflow when you don't have openID set up with Google etal. Removing all calls to open id's user_info endpoint - as these are unncessiary. Adding a users section to the permission files -- so we can handle all user/group/permissions in one file when needed. There was a very confusing is_admin function on the user model that needed killin. --- .../src/spiffworkflow_backend/__init__.py | 2 + .../config/permissions/development.yml | 10 ++ .../src/spiffworkflow_backend/models/user.py | 4 - .../routes/openid_blueprint/__init__.py | 1 + .../openid_blueprint/openid_blueprint.py | 116 ++++++++++++++++++ .../openid_blueprint/templates/login.html | 103 ++++++++++++++++ .../src/spiffworkflow_backend/routes/user.py | 42 ++++--- .../services/authentication_service.py | 37 ------ .../services/authorization_service.py | 6 + .../integration/test_openid_blueprint.py | 79 ++++++++++++ 10 files changed, 339 insertions(+), 61 deletions(-) create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/__init__.py create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py create mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html create mode 100644 spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index 5d591d84..389c9370 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -19,6 +19,7 @@ from werkzeug.exceptions import NotFound import spiffworkflow_backend.load_database_models # noqa: F401 from spiffworkflow_backend.config import setup_config from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint +from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import openid_blueprint from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint from spiffworkflow_backend.routes.user import verify_token from spiffworkflow_backend.routes.user_blueprint import user_blueprint @@ -103,6 +104,7 @@ def create_app() -> flask.app.Flask: app.register_blueprint(process_api_blueprint) app.register_blueprint(api_error_blueprint) app.register_blueprint(admin_blueprint, url_prefix="/admin") + app.register_blueprint(openid_blueprint, url_prefix="/openid") origins_re = [ r"^https?:\/\/%s(.*)" % o.replace(".", r"\.") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml index e17e3f11..1acace14 100644 --- 
a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml @@ -1,5 +1,15 @@ default_group: everybody +users: + admin: + email: admin@spiffworkflow.org + password: admin + dan: + email: dan@spiffworkflow.org + password: password + + + groups: admin: users: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py index c33a72e7..eb88e5de 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py @@ -83,10 +83,6 @@ class UserModel(SpiffworkflowBaseDBModel): algorithm="HS256", ) - def is_admin(self) -> bool: - """Is_admin.""" - return True - # @classmethod # def from_open_id_user_info(cls, user_info: dict) -> Any: # """From_open_id_user_info.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/__init__.py new file mode 100644 index 00000000..f520b09d --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/__init__.py @@ -0,0 +1 @@ +"""__init__.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py new file mode 100644 index 00000000..dd8928c6 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py @@ -0,0 +1,116 @@ +""" +Provides the bare minimum endpoints required by SpiffWorkflow to +handle openid authentication -- definitely not a production system. +This is just here to make local development, testing, and +demonstration easier. 
+""" +import base64 +import time +import urllib +from urllib.parse import urlencode + +import jwt +import yaml +from flask import Blueprint, render_template, request, current_app, redirect, url_for, g + +openid_blueprint = Blueprint( + "openid", __name__, template_folder="templates", static_folder="static" +) + +MY_SECRET_CODE = ":this_should_be_some_crazy_code_different_all_the_time" + +@openid_blueprint.route("/well-known/openid-configuration", methods=["GET"]) +def well_known(): + """OpenID Discovery endpoint -- as these urls can be very different from system to system, + this is just a small subset.""" + host_url = request.host_url.strip('/') + return { + "issuer": f"{host_url}/openid", + "authorization_endpoint": f"{host_url}{url_for('openid.auth')}", + "token_endpoint": f"{host_url}{url_for('openid.token')}", + } + + +@openid_blueprint.route("/auth", methods=["GET"]) +def auth(): + """Accepts a series of parameters""" + return render_template('login.html', + state=request.args.get('state'), + response_type=request.args.get('response_type'), + client_id=request.args.get('client_id'), + scope=request.args.get('scope'), + redirect_uri=request.args.get('redirect_uri'), + error_message=request.args.get('error_message')) + + +@openid_blueprint.route("/form_submit", methods=["POST"]) +def form_submit(): + + users = get_users() + if request.values['Uname'] in users and request.values['Pass'] == users[request.values['Uname']]["password"]: + # Redirect back to the end user with some detailed information + state = request.values.get('state') + data = { + "state": base64.b64encode(bytes(state, 'UTF-8')), + "code": request.values['Uname'] + MY_SECRET_CODE + } + url = request.values.get('redirect_uri') + urlencode(data) + return redirect(url, code=200) + else: + return render_template('login.html', + state=request.values.get('state'), + response_type=request.values.get('response_type'), + client_id=request.values.get('client_id'), + scope=request.values.get('scope'), + redirect_uri=request.values.get('redirect_uri'), + error_message="Login failed. Please try agian.") + + +@openid_blueprint.route("/token", methods=["POST"]) +def token(): + """Url that will return a valid token, given the super secret sauce""" + grant_type=request.values.get('grant_type') + code=request.values.get('code') + redirect_uri=request.values.get('redirect_uri') + + """We just stuffed the user name on the front of the code, so grab it.""" + user_name, secret_hash = code.split(":") + + """Get authentication from headers.""" + authorization = request.headers.get('Authorization') + authorization = authorization[6:] # Remove "Basic" + authorization = base64.b64decode(authorization).decode('utf-8') + client_id, client_secret = authorization.split(":") + + base_url = url_for(openid_blueprint) + access_token = "..." + refresh_token = "..." + id_token = jwt.encode({ + "iss": base_url, + "aud": [client_id, "account"], + "iat": time.time(), + "exp": time.time() + 86400 # Exprire after a day. 
+ }, client_secret, algorithm="HS256") + + # For reference, a decoded id_token from a real provider (Keycloak) looks like: + # {'exp': 1669757386, 'iat': 1669755586, 'auth_time': 1669753049, 'jti': '0ec2cc09-3498-4921-a021-c3b98427df70', + # 'iss': 'http://localhost:7002/realms/spiffworkflow', 'aud': 'spiffworkflow-backend', + # 'sub': '99e7e4ea-d4ae-4944-bd31-873dac7b004c', 'typ': 'ID', 'azp': 'spiffworkflow-backend', + # 'session_state': '8751d5f6-2c60-4205-9be0-2b1005f5891e', 'at_hash': 'O5i-VLus6sryR0grMS2Y4w', 'acr': '0', + # 'sid': '8751d5f6-2c60-4205-9be0-2b1005f5891e', 'email_verified': False, 'preferred_username': 'dan'} + + response = { + "access_token": id_token, + "id_token": id_token, + } + return response + +@openid_blueprint.route("/refresh", methods=["POST"]) +def refresh(): + pass + +def get_users(): + with open(current_app.config["PERMISSIONS_FILE_FULLPATH"]) as file: + permission_configs = yaml.safe_load(file) + if "users" in permission_configs: + return permission_configs["users"] + else: + return {} \ No newline at end of file diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html new file mode 100644 index 00000000..1da64914 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html @@ -0,0 +1,103 @@

+ [login.html markup stripped in extraction: an HTML page titled "Login Form" with a "Login to SpiffWorkflow" heading, a form posting Uname and Pass (along with the state, response_type, client_id, scope, and redirect_uri values) to form_submit, and a {{error_message}} display]
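Taken together, auth, form_submit, and token implement a bare-bones authorization-code flow: the login form redirects back to the caller with a code of the form username + MY_SECRET_CODE, and the caller exchanges that code for tokens. A sketch of that final exchange, assuming a backend at http://localhost:7000 and a placeholder client secret (real values come from the caller's configuration):

    # Hypothetical exchange against the embedded provider's /openid/token route.
    # The grant_type/code/redirect_uri field names and the code format come from
    # the blueprint above; the host, client id, and secret are assumptions.
    import base64

    import requests

    client_id = "spiffworkflow-backend"
    client_secret = "<client-secret>"
    basic = base64.b64encode(f"{client_id}:{client_secret}".encode()).decode()

    response = requests.post(
        "http://localhost:7000/openid/token",
        headers={"Authorization": f"Basic {basic}"},
        data={
            "grant_type": "authorization_code",
            "code": "admin:this_should_be_some_crazy_code_different_all_the_time",
            "redirect_uri": "http://localhost:7000/v1.0/login_return",
        },
    )
    print(response.json())  # expected to carry access_token and id_token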
+ + + \ No newline at end of file diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py index 5fe10e0a..c9059427 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py @@ -1,6 +1,7 @@ """User.""" import ast import base64 +import json from typing import Any from typing import Dict from typing import Optional @@ -14,9 +15,12 @@ from flask import request from flask_bpmn.api.api_error import ApiError from werkzeug.wrappers import Response +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel +from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authentication_service import ( - AuthenticationService, + AuthenticationService, AuthenticationProviderTypes, ) from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.user_service import UserService @@ -58,7 +62,6 @@ def verify_token( decoded_token = get_decoded_token(token) if decoded_token is not None: - if "token_type" in decoded_token: token_type = decoded_token["token_type"] if token_type == "internal": # noqa: S105 @@ -68,11 +71,11 @@ def verify_token( current_app.logger.error( f"Exception in verify_token getting user from decoded internal token. {e}" ) - elif "iss" in decoded_token.keys(): try: - user_info = AuthenticationService.get_user_info_from_open_id(token) - except ApiError as ae: + if AuthenticationService.validate_id_token(token): + user_info = decoded_token + except ApiError as ae: # API Error is only thrown in the token is outdated. # Try to refresh the token user = UserService.get_user_by_service_and_service_id( "open_id", decoded_token["sub"] @@ -86,14 +89,9 @@ def verify_token( ) ) if auth_token and "error" not in auth_token: - # redirect to original url, with auth_token? - user_info = ( - AuthenticationService.get_user_info_from_open_id( - auth_token["access_token"] - ) - ) - if not user_info: - raise ae + # We have the user, but this code is a bit convoluted, and will later demand + # a user_info object so it can look up the user. Sorry to leave this crap here. 
+ user_info = {"sub": user.service_id } else: raise ae else: @@ -202,6 +200,15 @@ def login(redirect_url: str = "/") -> Response: ) return redirect(login_redirect_url) +def parse_id_token(token: str) -> dict: + parts = token.split(".") + if len(parts) != 3: + raise Exception("Incorrect id token format") + + payload = parts[1] + padded = payload + '=' * (4 - len(payload) % 4) + decoded = base64.b64decode(padded) + return json.loads(decoded) def login_return(code: str, state: str, session_state: str) -> Optional[Response]: """Login_return.""" @@ -211,10 +218,9 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response if "id_token" in auth_token_object: id_token = auth_token_object["id_token"] + user_info = parse_id_token(id_token) + if AuthenticationService.validate_id_token(id_token): - user_info = AuthenticationService.get_user_info_from_open_id( - auth_token_object["access_token"] - ) if user_info and "error" not in user_info: user_model = AuthorizationService.create_user_from_sign_in(user_info) g.user = user_model.id @@ -332,15 +338,11 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo .filter(UserModel.service_id == service_id) .first() ) - # user: UserModel = UserModel.query.filter() if user: return user user = UserModel( username=service_id, - uid=service_id, service=service, service_id=service_id, - name="API User", ) - return user diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index 18f08d0f..a12e57fa 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -42,43 +42,6 @@ class AuthenticationService: open_id_client_secret_key, ) - @classmethod - def get_user_info_from_open_id(cls, token: str) -> dict: - """The token is an auth_token.""" - ( - open_id_server_url, - open_id_client_id, - open_id_realm_name, - open_id_client_secret_key, - ) = cls.get_open_id_args() - - headers = {"Authorization": f"Bearer {token}"} - - request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/userinfo" - try: - request_response = requests.get(request_url, headers=headers) - except Exception as e: - current_app.logger.error(f"Exception in get_user_info_from_id_token: {e}") - raise ApiError( - error_code="token_error", - message=f"Exception in get_user_info_from_id_token: {e}", - status_code=401, - ) from e - - if request_response.status_code == 401: - raise ApiError( - error_code="invalid_token", message="Please login", status_code=401 - ) - elif request_response.status_code == 200: - user_info: dict = json.loads(request_response.text) - return user_info - - raise ApiError( - error_code="user_info_error", - message="Cannot get user info in get_user_info_from_id_token", - status_code=401, - ) - @staticmethod def get_backend_url() -> str: """Get_backend_url.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index ea488f7a..f29c0985 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -1,4 +1,5 @@ """Authorization_service.""" +import inspect import re from typing import Optional from typing import 
Union @@ -23,6 +24,7 @@ from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserNotFoundError from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel +from spiffworkflow_backend.routes.openid_blueprint import openid_blueprint from spiffworkflow_backend.services.group_service import GroupService from spiffworkflow_backend.services.user_service import UserService @@ -125,6 +127,7 @@ class AuthorizationService: db.session.add(user_group_assignemnt) db.session.commit() + @classmethod def import_permissions_from_yaml_file( cls, raise_if_missing_user: bool = False @@ -241,6 +244,7 @@ class AuthorizationService: return True api_view_function = current_app.view_functions[request.endpoint] + module = inspect.getmodule(api_view_function) if ( api_view_function and api_view_function.__name__.startswith("login") @@ -248,6 +252,7 @@ class AuthorizationService: or api_view_function.__name__.startswith("console_ui_") or api_view_function.__name__ in authentication_exclusion_list or api_view_function.__name__ in swagger_functions + or module == openid_blueprint ): return True @@ -442,6 +447,7 @@ class AuthorizationService: email=email, ) + # this may eventually get too slow. # when it does, be careful about backgrounding, because # the user will immediately need permissions to use the site. diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py new file mode 100644 index 00000000..b234d914 --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py @@ -0,0 +1,79 @@ +"""Test_openid_blueprint.""" +import ast +import base64 + +from flask import Flask +from flask.testing import FlaskClient + +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.services.authentication_service import ( + AuthenticationService, +) + + +class TestFlaskOpenId(BaseTest): + """An integrated Open ID server that responds to openID requests + by referencing a built-in YAML file. Useful for + local development, testing, demos etc...""" + + def test_discovery_of_endpoints(self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None,) -> None: + response = client.get("/openid/well-known/openid-configuration") + discovered_urls = response.json + assert "http://localhost/openid" == discovered_urls["issuer"] + assert "http://localhost/openid/auth" == discovered_urls["authorization_endpoint"] + assert "http://localhost/openid/token" == discovered_urls["token_endpoint"] + + def test_get_login_page(self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None,) -> None: + # It should be possible to get to a login page + data = { + "state": {"bubblegum":1, "daydream":2} + } + response = client.get("/openid/auth", query_string=data) + assert b"

Login to SpiffWorkflow
" in response.data + assert b"bubblegum" in response.data + + def test_get_token(self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None,) -> None: + + code = "c3BpZmZ3b3JrZmxvdy1iYWNrZW5kOkpYZVFFeG0wSmhRUEx1bWdIdElJcWY1MmJEYWxIejBx" + headers = { + "Content-Type": "application/x-www-form-urlencoded", + "Authorization": f"Basic {code}", + } + data = { + "grant_type": 'authorization_code', + "code": code, + "redirect_url": 'http://localhost:7000/v1.0/login_return' + } + response = client.post("/openid/token", data=data, headers=headers) + assert response + + def test_refresh_token_endpoint(self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None,) -> None: + pass + # Handle a refresh with the following + # data provided + # "grant_type": "refresh_token", + # "refresh_token": refresh_token, + # "client_id": open_id_client_id, + # "client_secret": open_id_client_secret_key, + # Return an json response with: + # id - (this users' id) + + def test_logout(self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None,) -> None: + pass + # It should be possible to logout and be redirected back. From d63c410988d1762863c332e1422baf5d7bf96bdd Mon Sep 17 00:00:00 2001 From: Dan Date: Wed, 30 Nov 2022 11:51:20 -0500 Subject: [PATCH 033/128] Not all open id systems have realms like KeyCloak does -- so removing this in favor of setting just one value - which is the base url of the openid system -- which will work across all openid systems. --- .../spiffworkflow_backend/config/default.py | 3 +-- .../services/authentication_service.py | 18 +++++------------- 2 files changed, 6 insertions(+), 15 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py index 53d670c7..c32c4882 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py @@ -30,9 +30,8 @@ CONNECTOR_PROXY_URL = environ.get( GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true" # Open ID server -OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7002") +OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7002/realms/spiffworkflow") OPEN_ID_CLIENT_ID = environ.get("OPEN_ID_CLIENT_ID", default="spiffworkflow-backend") -OPEN_ID_REALM_NAME = environ.get("OPEN_ID_REALM_NAME", default="spiffworkflow") OPEN_ID_CLIENT_SECRET_KEY = environ.get( "OPEN_ID_CLIENT_SECRET_KEY", default="JXeQExm0JhQPLumgHtIIqf52bDalHz0q" ) # noqa: S105 diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index a12e57fa..f8171d88 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -15,7 +15,6 @@ from werkzeug.wrappers import Response from spiffworkflow_backend.models.refresh_token import RefreshTokenModel - class AuthenticationProviderTypes(enum.Enum): """AuthenticationServiceProviders.""" @@ -31,14 +30,12 @@ class AuthenticationService: """Get_open_id_args.""" open_id_server_url = current_app.config["OPEN_ID_SERVER_URL"] open_id_client_id = current_app.config["OPEN_ID_CLIENT_ID"] - open_id_realm_name = current_app.config["OPEN_ID_REALM_NAME"] open_id_client_secret_key = 
current_app.config[ "OPEN_ID_CLIENT_SECRET_KEY" ] # noqa: S105 return ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) @@ -55,11 +52,10 @@ class AuthenticationService: ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) = AuthenticationService.get_open_id_args() request_url = ( - f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/logout?" + f"{open_id_server_url}/protocol/openid-connect/logout?" + f"post_logout_redirect_uri={return_redirect_url}&" + f"id_token_hint={id_token}" ) @@ -79,12 +75,11 @@ class AuthenticationService: ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) = AuthenticationService.get_open_id_args() return_redirect_url = f"{self.get_backend_url()}{redirect_url}" login_redirect_url = ( - f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/auth?" + f"{open_id_server_url}/protocol/openid-connect/auth?" + f"state={state}&" + "response_type=code&" + f"client_id={open_id_client_id}&" @@ -100,7 +95,6 @@ class AuthenticationService: ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) = AuthenticationService.get_open_id_args() @@ -117,7 +111,7 @@ class AuthenticationService: "redirect_uri": f"{self.get_backend_url()}{redirect_url}", } - request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + request_url = f"{open_id_server_url}/protocol/openid-connect/token" response = requests.post(request_url, data=data, headers=headers) auth_token_object: dict = json.loads(response.text) @@ -131,7 +125,6 @@ class AuthenticationService: ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) = cls.get_open_id_args() try: @@ -142,7 +135,7 @@ class AuthenticationService: message="Cannot decode id_token", status_code=401, ) from e - if decoded_token["iss"] != f"{open_id_server_url}/realms/{open_id_realm_name}": + if decoded_token["iss"] != open_id_server_url: valid = False elif ( open_id_client_id not in decoded_token["aud"] @@ -207,7 +200,6 @@ class AuthenticationService: ( open_id_server_url, open_id_client_id, - open_id_realm_name, open_id_client_secret_key, ) = cls.get_open_id_args() @@ -226,7 +218,7 @@ class AuthenticationService: "client_secret": open_id_client_secret_key, } - request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + request_url = f"{open_id_server_url}/protocol/openid-connect/token" response = requests.post(request_url, data=data, headers=headers) auth_token_object: dict = json.loads(response.text) From acc33288b91f855f6613ca1019f10044513a26a3 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 30 Nov 2022 15:08:04 -0500 Subject: [PATCH 034/128] added correlations to message list table w/ burnettk --- spiffworkflow-backend/migrations/env.py | 2 - spiffworkflow-backend/poetry.lock | 19 +--- .../routes/process_api_blueprint.py | 7 +- .../process_instance_report_service.py | 1 + .../integration/test_process_api.py | 17 +++- .../src/components/MiniComponents.tsx | 22 +++++ .../components/ProcessInstanceListTable.tsx | 19 +--- spiffworkflow-frontend/src/index.css | 14 +++ spiffworkflow-frontend/src/interfaces.ts | 22 +++++ .../src/routes/MessageInstanceList.tsx | 92 ++++++++++++------- 10 files changed, 145 insertions(+), 70 deletions(-) create mode 100644 spiffworkflow-frontend/src/components/MiniComponents.tsx diff --git 
a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 68feded2..630e381a 100644 --- a/spiffworkflow-backend/migrations/env.py +++ b/spiffworkflow-backend/migrations/env.py @@ -1,5 +1,3 @@ -from __future__ import with_statement - import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index 8484cdb4..ac024241 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1851,7 +1851,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "46f410a2852baeedc8f9ac5165347ce6d4470594" +resolved_reference = "bba7ddf5478af579b891ca63c50babbfccf6b7a4" [[package]] name = "SQLAlchemy" @@ -2563,7 +2563,6 @@ greenlet = [ {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, - {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, @@ -2572,7 +2571,6 @@ greenlet = [ {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, - {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, @@ -2581,7 +2579,6 @@ greenlet = [ {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, - {file = 
"greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, @@ -2880,7 +2877,10 @@ orjson = [ {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"}, {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"}, {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"}, + {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"}, + {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"}, {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"}, + {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"}, {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"}, @@ -2989,18 +2989,7 @@ psycopg2 = [ {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"}, ] pyasn1 = [ - {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, - {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, - {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, - {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, - {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, - {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, - {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, - {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, - {file = "pyasn1-0.4.8-py3.6.egg", hash = 
"sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, - {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, ] pycodestyle = [ diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index b96cc262..46067031 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -647,6 +647,7 @@ def message_instance_list( .add_columns( MessageModel.identifier.label("message_identifier"), ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.process_model_display_name, ) .paginate(page=page, per_page=per_page, error_out=False) ) @@ -978,10 +979,12 @@ def process_instance_list( def process_instance_report_column_list() -> flask.wrappers.Response: - + """Process_instance_report_column_list.""" table_columns = ProcessInstanceReportService.builtin_column_options() columns_for_metadata = db.session.query(ProcessInstanceMetadataModel.key).distinct().all() # type: ignore - columns_for_metadata_strings = [{ 'Header': i[0], 'accessor': i[0]} for i in columns_for_metadata] + columns_for_metadata_strings = [ + {"Header": i[0], "accessor": i[0]} for i in columns_for_metadata + ] # columns = sorted(table_columns + columns_for_metadata_strings) return make_response(jsonify(table_columns + columns_for_metadata_strings), 200) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index da70f0c0..bd3a2e08 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -271,6 +271,7 @@ class ProcessInstanceReportService: @classmethod def builtin_column_options(cls) -> list[dict]: + """Builtin_column_options.""" return [ {"Header": "id", "accessor": "id"}, { diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index beef3b74..215e44d4 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2630,11 +2630,22 @@ class TestProcessApi(BaseTest): assert len(process_instance_metadata) == 2 response = client.get( - f"/v1.0/process-instances/reports/columns", + "/v1.0/process-instances/reports/columns", headers=self.logged_in_headers(with_super_admin_user), ) assert response.json is not None assert response.status_code == 200 - assert response.json == [{'Header': 'id', 'accessor': 'id'}, {'Header': 'process_model_display_name', 'accessor': 'process_model_display_name'}, {'Header': 'start_in_seconds', 'accessor': 'start_in_seconds'}, {'Header': 'end_in_seconds', 'accessor': 'end_in_seconds'}, {'Header': 'username', 'accessor': 'username'}, {'Header': 'status', 'accessor': 'status'}, {'Header': 'key1', 'accessor': 'key1'}, {'Header': 'key2', 'accessor': 'key2'}] - + assert response.json == [ + {"Header": "id", "accessor": "id"}, + { + "Header": 
"process_model_display_name", + "accessor": "process_model_display_name", + }, + {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, + {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, + {"Header": "username", "accessor": "username"}, + {"Header": "status", "accessor": "status"}, + {"Header": "key1", "accessor": "key1"}, + {"Header": "key2", "accessor": "key2"}, + ] diff --git a/spiffworkflow-frontend/src/components/MiniComponents.tsx b/spiffworkflow-frontend/src/components/MiniComponents.tsx new file mode 100644 index 00000000..6f0a1293 --- /dev/null +++ b/spiffworkflow-frontend/src/components/MiniComponents.tsx @@ -0,0 +1,22 @@ +import { Link } from 'react-router-dom'; +import { modifyProcessIdentifierForPathParam } from '../helpers'; +import { MessageInstance, ProcessInstance } from '../interfaces'; + +export function FormatProcessModelDisplayName( + instanceObject: ProcessInstance | MessageInstance +) { + const { + process_model_identifier: processModelIdentifier, + process_model_display_name: processModelDisplayName, + } = instanceObject; + return ( + + {processModelDisplayName} + + ); +} diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 9b239502..50b69c0b 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -53,6 +53,7 @@ import { import ProcessModelSearch from './ProcessModelSearch'; import ProcessInstanceReportSearch from './ProcessInstanceReportSearch'; import ProcessInstanceListSaveAsReport from './ProcessInstanceListSaveAsReport'; +import { FormatProcessModelDisplayName } from './MiniComponents'; const REFRESH_INTERVAL = 5; const REFRESH_TIMEOUT = 600; @@ -693,22 +694,6 @@ export default function ProcessInstanceListTable({ ); }; - const formatProcessModelDisplayName = ( - row: ProcessInstance, - displayName: string - ) => { - return ( - - {displayName} - - ); - }; - const formatSecondsForDisplay = (_row: any, seconds: any) => { return convertSecondsToFormattedDateTime(seconds) || '-'; }; @@ -719,7 +704,7 @@ export default function ProcessInstanceListTable({ const columnFormatters: Record = { id: formatProcessInstanceId, process_model_identifier: formatProcessModelIdentifier, - process_model_display_name: formatProcessModelDisplayName, + process_model_display_name: FormatProcessModelDisplayName, start_in_seconds: formatSecondsForDisplay, end_in_seconds: formatSecondsForDisplay, }; diff --git a/spiffworkflow-frontend/src/index.css b/spiffworkflow-frontend/src/index.css index 4723e557..53e04b78 100644 --- a/spiffworkflow-frontend/src/index.css +++ b/spiffworkflow-frontend/src/index.css @@ -69,6 +69,20 @@ h2 { color: black; } +/* match normal link colors */ +.cds--btn--ghost.button-link { + color: #0062fe; +} +.cds--btn--ghost.button-link:visited { + color: #0062fe; +} +.cds--btn--ghost.button-link:hover { + color: #0062fe; +} +.cds--btn--ghost.button-link:visited:hover { + color: #0062fe; +} + .cds--header__global .cds--btn--primary { background-color: #161616 } diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 42ba5335..66759dfe 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -39,6 +39,28 @@ export interface ProcessFile { export interface ProcessInstance { id: number; process_model_identifier: string; + process_model_display_name: string; +} + 
+export interface MessageCorrelationProperties { + [key: string]: string; +} + +export interface MessageCorrelations { + [key: string]: MessageCorrelationProperties; +} + +export interface MessageInstance { + id: number; + process_model_identifier: string; + process_model_display_name: string; + process_instance_id: number; + message_identifier: string; + message_type: string; + failure_cause: string; + status: string; + created_at_in_seconds: number; + message_correlations?: MessageCorrelations; } export interface ProcessInstanceReport { diff --git a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx b/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx index f1478058..5a2d4e1a 100644 --- a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx +++ b/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx @@ -1,15 +1,17 @@ import { useEffect, useState } from 'react'; // @ts-ignore -import { Table } from '@carbon/react'; +import { Table, Modal, Button } from '@carbon/react'; import { Link, useParams, useSearchParams } from 'react-router-dom'; import PaginationForTable from '../components/PaginationForTable'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; import { - convertSecondsToFormattedDateString, + convertSecondsToFormattedDateTime, getPageInfoFromSearchParams, modifyProcessIdentifierForPathParam, } from '../helpers'; import HttpService from '../services/HttpService'; +import { FormatProcessModelDisplayName } from '../components/MiniComponents'; +import { MessageInstance } from '../interfaces'; export default function MessageInstanceList() { const params = useParams(); @@ -17,6 +19,9 @@ export default function MessageInstanceList() { const [messageIntances, setMessageInstances] = useState([]); const [pagination, setPagination] = useState(null); + const [messageInstanceForModal, setMessageInstanceForModal] = + useState(null); + useEffect(() => { const setMessageInstanceListFromResult = (result: any) => { setMessageInstances(result.results); @@ -35,41 +40,64 @@ export default function MessageInstanceList() { }); }, [searchParams, params]); - const buildTable = () => { - // return null; - const rows = messageIntances.map((row) => { - const rowToUse = row as any; + const handleCorrelationDisplayClose = () => { + setMessageInstanceForModal(null); + }; + + const correlationsDisplayModal = () => { + if (messageInstanceForModal) { return ( - - {rowToUse.id} - - - {rowToUse.process_model_identifier} - - + +
+            {JSON.stringify(
+              messageInstanceForModal.message_correlations,
+              null,
+              2
+            )}
+          </pre>
+        </Modal>
+ ); + } + return null; + }; + + const buildTable = () => { + const rows = messageIntances.map((row: MessageInstance) => { + return ( + + {row.id} + {FormatProcessModelDisplayName(row)} - {rowToUse.process_instance_id} + {row.process_instance_id} - {rowToUse.message_identifier} - {rowToUse.message_type} - {rowToUse.failure_cause || '-'} - {rowToUse.status} + {row.message_identifier} + {row.message_type} + {row.failure_cause || '-'} - {convertSecondsToFormattedDateString( - rowToUse.created_at_in_seconds - )} + + + {row.status} + + {convertSecondsToFormattedDateTime(row.created_at_in_seconds)} ); @@ -78,12 +106,13 @@ export default function MessageInstanceList() { - - + + - + + @@ -121,6 +150,7 @@ export default function MessageInstanceList() { <> {breadcrumbElement}
      <h1>Messages</h1>
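+      {/* Modal with correlation details for the message selected below. */}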
+ {correlationsDisplayModal()} Date: Wed, 30 Nov 2022 15:35:37 -0500 Subject: [PATCH 035/128] better display for failure causes on message list w/ burnettk --- spiffworkflow-frontend/src/index.css | 12 +++++++ .../src/routes/MessageInstanceList.tsx | 34 ++++++++++++++++--- 2 files changed, 42 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-frontend/src/index.css b/spiffworkflow-frontend/src/index.css index 53e04b78..ade073f5 100644 --- a/spiffworkflow-frontend/src/index.css +++ b/spiffworkflow-frontend/src/index.css @@ -72,15 +72,19 @@ h2 { /* match normal link colors */ .cds--btn--ghost.button-link { color: #0062fe; + padding-left: 0; } .cds--btn--ghost.button-link:visited { color: #0062fe; + padding-left: 0; } .cds--btn--ghost.button-link:hover { color: #0062fe; + padding-left: 0; } .cds--btn--ghost.button-link:visited:hover { color: #0062fe; + padding-left: 0; } .cds--header__global .cds--btn--primary { @@ -311,3 +315,11 @@ td.actions-cell { text-align: right; padding-bottom: 10px; } + +.cds--btn--ghost:not([disabled]) svg { + fill: red; +} + +.failure-string { + color: red; +} diff --git a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx b/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx index 5a2d4e1a..b77b744c 100644 --- a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx +++ b/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx @@ -1,5 +1,7 @@ import { useEffect, useState } from 'react'; // @ts-ignore +import { ErrorOutline } from '@carbon/icons-react'; +// @ts-ignore import { Table, Modal, Button } from '@carbon/react'; import { Link, useParams, useSearchParams } from 'react-router-dom'; import PaginationForTable from '../components/PaginationForTable'; @@ -46,14 +48,27 @@ export default function MessageInstanceList() { const correlationsDisplayModal = () => { if (messageInstanceForModal) { + let failureCausePre = null; + if (messageInstanceForModal.failure_cause) { + failureCausePre = ( + <> +
            <p className="failure-string">
+              {messageInstanceForModal.failure_cause}
+            </p>
+            <br />
+ + ); + } return ( + {failureCausePre} +
+          Correlations:
             {JSON.stringify(
               messageInstanceForModal.message_correlations,
@@ -69,6 +84,17 @@ export default function MessageInstanceList() {
 
   const buildTable = () => {
     const rows = messageIntances.map((row: MessageInstance) => {
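+      // Failed instances get a tooltip title and a red icon (styled by the
+      // svg fill rule added to index.css) rendered next to their status.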
+      let errorIcon = null;
+      let errorTitle = null;
+      if (row.failure_cause) {
+        errorTitle = 'Instance has an error';
+        errorIcon = (
+          <>
+            &nbsp;
+            <ErrorOutline />
+          </>
+        );
+      }
       return (
        <tr key={row.id}>
@@ -85,14 +111,15 @@ export default function MessageInstanceList() { - @@ -111,8 +138,7 @@ export default function MessageInstanceList() { - - + From 899374893479d3ccf13e45fe98be705f6ec490d5 Mon Sep 17 00:00:00 2001 From: Dan Date: Wed, 30 Nov 2022 16:33:44 -0500 Subject: [PATCH 036/128] Use the "well-known" configuration dictionary from openid to get the url endpoints, rather than trying to configure or guess the correct endpoint urls. --- .../openid_blueprint/openid_blueprint.py | 7 +- .../services/authentication_service.py | 92 ++++++++----------- 2 files changed, 43 insertions(+), 56 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py index dd8928c6..b16ba46a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py @@ -19,7 +19,7 @@ openid_blueprint = Blueprint( MY_SECRET_CODE = ":this_should_be_some_crazy_code_different_all_the_time" -@openid_blueprint.route("/well-known/openid-configuration", methods=["GET"]) +@openid_blueprint.route("/.well-known/openid-configuration", methods=["GET"]) def well_known(): """OpenID Discovery endpoint -- as these urls can be very different from system to system, this is just a small subset.""" @@ -52,9 +52,10 @@ def form_submit(): state = request.values.get('state') data = { "state": base64.b64encode(bytes(state, 'UTF-8')), - "code": request.values['Uname'] + MY_SECRET_CODE + "code": request.values['Uname'] + MY_SECRET_CODE, + "session_state": "" } - url = request.values.get('redirect_uri') + urlencode(data) + url = request.values.get('redirect_uri') + "?" + urlencode(data) return redirect(url, code=200) else: return render_template('login.html', diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index f8171d88..5fdedf76 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -3,6 +3,7 @@ import base64 import enum import json import time +import typing from typing import Optional import jwt @@ -15,6 +16,7 @@ from werkzeug.wrappers import Response from spiffworkflow_backend.models.refresh_token import RefreshTokenModel + class AuthenticationProviderTypes(enum.Enum): """AuthenticationServiceProviders.""" @@ -24,20 +26,31 @@ class AuthenticationProviderTypes(enum.Enum): class AuthenticationService: """AuthenticationService.""" + ENDPOINT_CACHE = {} # We only need to find the openid endpoints once, then we can cache them. 
@staticmethod - def get_open_id_args() -> tuple: - """Get_open_id_args.""" - open_id_server_url = current_app.config["OPEN_ID_SERVER_URL"] - open_id_client_id = current_app.config["OPEN_ID_CLIENT_ID"] - open_id_client_secret_key = current_app.config[ - "OPEN_ID_CLIENT_SECRET_KEY" - ] # noqa: S105 - return ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) + def client_id() -> str: + return current_app.config["OPEN_ID_CLIENT_ID"] + + @staticmethod + def server_url() -> str: + return current_app.config["OPEN_ID_SERVER_URL"] + + @staticmethod + def secret_key() -> str: + return current_app.config["OPEN_ID_CLIENT_SECRET_KEY"] + + + @classmethod + def open_id_endpoint_for_name(cls, name: str) -> str: + """All openid systems provide a mapping of static names to the full path of that endpoint.""" + if name not in AuthenticationService.ENDPOINT_CACHE: + request_url = f"{cls.server_url()}/.well-known/openid-configuration" + response = requests.get(request_url) + AuthenticationService.ENDPOINT_CACHE = response.json() + if name not in AuthenticationService.ENDPOINT_CACHE: + raise Exception(f"Unknown OpenID Endpoint: {name}") + return AuthenticationService.ENDPOINT_CACHE[name] @staticmethod def get_backend_url() -> str: @@ -49,14 +62,9 @@ class AuthenticationService: if redirect_url is None: redirect_url = "/" return_redirect_url = f"{self.get_backend_url()}/v1.0/logout_return" - ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) = AuthenticationService.get_open_id_args() request_url = ( - f"{open_id_server_url}/protocol/openid-connect/logout?" - + f"post_logout_redirect_uri={return_redirect_url}&" + self.open_id_endpoint_for_name("end_session_endpoint") + + f"?post_logout_redirect_uri={return_redirect_url}&" + f"id_token_hint={id_token}" ) @@ -72,17 +80,12 @@ class AuthenticationService: self, state: str, redirect_url: str = "/v1.0/login_return" ) -> str: """Get_login_redirect_url.""" - ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) = AuthenticationService.get_open_id_args() return_redirect_url = f"{self.get_backend_url()}{redirect_url}" login_redirect_url = ( - f"{open_id_server_url}/protocol/openid-connect/auth?" 
- + f"state={state}&" + self.open_id_endpoint_for_name("authorization_endpoint") + + f"?state={state}&" + "response_type=code&" - + f"client_id={open_id_client_id}&" + + f"client_id={self.client_id()}&" + "scope=openid&" + f"redirect_uri={return_redirect_url}" ) @@ -92,13 +95,7 @@ class AuthenticationService: self, code: str, redirect_url: str = "/v1.0/login_return" ) -> dict: """Get_auth_token_object.""" - ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) = AuthenticationService.get_open_id_args() - - backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}" + backend_basic_auth_string = f"{self.client_id()}:{self.secret_key()}" backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) headers = { @@ -111,7 +108,7 @@ class AuthenticationService: "redirect_uri": f"{self.get_backend_url()}{redirect_url}", } - request_url = f"{open_id_server_url}/protocol/openid-connect/token" + request_url = self.open_id_endpoint_for_name("token_endpoint") response = requests.post(request_url, data=data, headers=headers) auth_token_object: dict = json.loads(response.text) @@ -122,11 +119,6 @@ class AuthenticationService: """Https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation.""" valid = True now = time.time() - ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) = cls.get_open_id_args() try: decoded_token = jwt.decode(id_token, options={"verify_signature": False}) except Exception as e: @@ -135,15 +127,15 @@ class AuthenticationService: message="Cannot decode id_token", status_code=401, ) from e - if decoded_token["iss"] != open_id_server_url: + if decoded_token["iss"] != cls.server_url(): valid = False elif ( - open_id_client_id not in decoded_token["aud"] + cls.client_id() not in decoded_token["aud"] and "account" not in decoded_token["aud"] ): valid = False elif "azp" in decoded_token and decoded_token["azp"] not in ( - open_id_client_id, + cls.client_id(), "account", ): valid = False @@ -196,14 +188,8 @@ class AuthenticationService: @classmethod def get_auth_token_from_refresh_token(cls, refresh_token: str) -> dict: - """Get a new auth_token from a refresh_token.""" - ( - open_id_server_url, - open_id_client_id, - open_id_client_secret_key, - ) = cls.get_open_id_args() - backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}" + backend_basic_auth_string = f"{cls.client_id()}:{cls.secret_key()}" backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) headers = { @@ -214,11 +200,11 @@ class AuthenticationService: data = { "grant_type": "refresh_token", "refresh_token": refresh_token, - "client_id": open_id_client_id, - "client_secret": open_id_client_secret_key, + "client_id": cls.client_id(), + "client_secret": cls.secret_key(), } - request_url = f"{open_id_server_url}/protocol/openid-connect/token" + request_url = cls.open_id_endpoint_for_name("token_endpoint") response = requests.post(request_url, data=data, headers=headers) auth_token_object: dict = json.loads(response.text) From d534cf9bfb352374347f5fc92d7c9d8f8b162535 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 30 Nov 2022 17:20:34 -0500 Subject: [PATCH 037/128] some updates for process instance reports and metadata w/ burnettk --- .../process_instance_report_service.py | 48 +----- .../src/components/ProcessGroupForm.tsx | 1 - 
.../ProcessInstanceListSaveAsReport.tsx | 32 ++-- .../components/ProcessInstanceListTable.tsx | 156 ++++++++++++------ spiffworkflow-frontend/src/index.css | 13 +- spiffworkflow-frontend/src/interfaces.ts | 5 + 6 files changed, 145 insertions(+), 110 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index bd3a2e08..6397cc20 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -76,17 +76,7 @@ class ProcessInstanceReportService: # TODO replace with system reports that are loaded on launch (or similar) temp_system_metadata_map = { "default": { - "columns": [ - {"Header": "id", "accessor": "id"}, - { - "Header": "process_model_display_name", - "accessor": "process_model_display_name", - }, - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "username", "accessor": "username"}, - {"Header": "status", "accessor": "status"}, - ], + "columns": cls.builtin_column_options() }, "system_report_instances_initiated_by_me": { "columns": [ @@ -102,33 +92,13 @@ class ProcessInstanceReportService: "filter_by": [{"field_name": "initiated_by_me", "field_value": True}], }, "system_report_instances_with_tasks_completed_by_me": { - "columns": [ - {"Header": "id", "accessor": "id"}, - { - "Header": "process_model_display_name", - "accessor": "process_model_display_name", - }, - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "username", "accessor": "username"}, - {"Header": "status", "accessor": "status"}, - ], + "columns": cls.builtin_column_options(), "filter_by": [ {"field_name": "with_tasks_completed_by_me", "field_value": True} ], }, "system_report_instances_with_tasks_completed_by_my_groups": { - "columns": [ - {"Header": "id", "accessor": "id"}, - { - "Header": "process_model_display_name", - "accessor": "process_model_display_name", - }, - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "username", "accessor": "username"}, - {"Header": "status", "accessor": "status"}, - ], + "columns": cls.builtin_column_options(), "filter_by": [ { "field_name": "with_tasks_completed_by_my_group", @@ -273,13 +243,13 @@ class ProcessInstanceReportService: def builtin_column_options(cls) -> list[dict]: """Builtin_column_options.""" return [ - {"Header": "id", "accessor": "id"}, + {"Header": "Id", "accessor": "id"}, { - "Header": "process_model_display_name", + "Header": "Process", "accessor": "process_model_display_name", }, - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "username", "accessor": "username"}, - {"Header": "status", "accessor": "status"}, + {"Header": "Start", "accessor": "start_in_seconds"}, + {"Header": "End", "accessor": "end_in_seconds"}, + {"Header": "Username", "accessor": "username"}, + {"Header": "Status", "accessor": "status"}, ] diff --git a/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx b/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx index a518e47b..79ab8253 100644 --- 
a/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx +++ b/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx @@ -115,7 +115,6 @@ export default function ProcessGroupForm({ labelText="Display Name*" value={processGroup.display_name} onChange={(event: any) => onDisplayNameChanged(event.target.value)} - onBlur={(event: any) => console.log('event', event)} />, ]; diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index 6c8f5fb9..d70aab3e 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -1,12 +1,11 @@ import { useState } from 'react'; -// TODO: carbon controls -/* import { Button, - Textbox, + TextInput, + Form, + Stack, // @ts-ignore } from '@carbon/react'; -*/ import { ProcessModel } from '../interfaces'; import HttpService from '../services/HttpService'; @@ -112,20 +111,21 @@ export default function ProcessInstanceListSaveAsReport({ }; return ( -
- - - + + + ); } diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 50b69c0b..ebf6a446 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -7,7 +7,7 @@ import { } from 'react-router-dom'; // @ts-ignore -import { Filter } from '@carbon/icons-react'; +import { Filter, Close } from '@carbon/icons-react'; import { Button, ButtonSet, @@ -21,6 +21,7 @@ import { TableHead, TableRow, TimePicker, + Tag, // @ts-ignore } from '@carbon/react'; import { PROCESS_STATUSES, DATE_FORMAT, DATE_FORMAT_CARBON } from '../config'; @@ -49,6 +50,7 @@ import { ProcessModel, ProcessInstanceReport, ProcessInstance, + ReportColumn, } from '../interfaces'; import ProcessModelSearch from './ProcessModelSearch'; import ProcessInstanceReportSearch from './ProcessInstanceReportSearch'; @@ -127,6 +129,10 @@ export default function ProcessInstanceListTable({ const [processInstanceReportSelection, setProcessInstanceReportSelection] = useState(null); + const [availableReportColumns, setAvailableReportColumns] = useState< + ReportColumn[] + >([]); + const dateParametersToAlwaysFilterBy: dateParameters = useMemo(() => { return { start_from: [setStartFromDate, setStartFromTime], @@ -554,12 +560,99 @@ export default function ProcessInstanceListTable({ setEndToTime(''); }; + const processInstanceReportDidChange = (selection: any) => { + clearFilters(); + + const selectedReport = selection.selectedItem; + setProcessInstanceReportSelection(selectedReport); + + const queryParamString = selectedReport + ? `&report_identifier=${selectedReport.id}` + : ''; + + setErrorMessage(null); + navigate(`/admin/process-instances?${queryParamString}`); + }; + + const reportColumns = () => { + return (reportMetadata as any).columns; + }; + + const saveAsReportComponent = () => { + // TODO onSuccess reload/select the new report in the report search + const callback = (identifier: string) => { + processInstanceReportDidChange({ + selectedItem: { id: identifier, display_name: identifier }, + }); + }; + const { + valid, + startFromSeconds, + startToSeconds, + endFromSeconds, + endToSeconds, + } = calculateStartAndEndSeconds(); + + if (!valid) { + return null; + } + return ( + + ); + }; + + const columnSelections = () => { + if (reportColumns()) { + const tags: any = []; + + (reportColumns() as any).forEach((reportColumn: ReportColumn) => { + tags.push( + + + diff --git a/src/spiffworkflow_backend/routes/openid_blueprint/__init__.py b/src/spiffworkflow_backend/routes/openid_blueprint/__init__.py new file mode 100644 index 00000000..f520b09d --- /dev/null +++ b/src/spiffworkflow_backend/routes/openid_blueprint/__init__.py @@ -0,0 +1 @@ +"""__init__.""" diff --git a/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py b/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py new file mode 100644 index 00000000..f812ab03 --- /dev/null +++ b/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py @@ -0,0 +1,153 @@ +"""OpenID Implementation for demos and local development. + +A very insecure and partial OpenID implementation for use in demos and testing. +Provides the bare minimum endpoints required by SpiffWorkflow to +handle openid authentication -- definitely not a production ready system. +This is just here to make local development, testing, and demonstration easier. 
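+
+A rough request walkthrough (the paths match the routes defined below, which
+are mounted under /openid):
+
+    GET  /openid/.well-known/openid-configuration  advertises the endpoint urls
+    GET  /openid/auth                              renders the login form
+    POST /openid/form_submit                       checks the user list, redirects back with a code
+    POST /openid/token                             exchanges the code for a signed JWT
+    GET  /openid/end_session                       logs out and redirects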
+""" +import base64 +import time +from typing import Any +from urllib.parse import urlencode + +import jwt +import yaml +from flask import Blueprint +from flask import current_app +from flask import redirect +from flask import render_template +from flask import request +from flask import url_for +from werkzeug.wrappers import Response + +openid_blueprint = Blueprint( + "openid", __name__, template_folder="templates", static_folder="static" +) + +OPEN_ID_CODE = ":this_is_not_secure_do_not_use_in_production" + + +@openid_blueprint.route("/.well-known/openid-configuration", methods=["GET"]) +def well_known() -> dict: + """Open ID Discovery endpoint. + + These urls can be very different from one openid impl to the next, this is just a small subset. + """ + host_url = request.host_url.strip("/") + return { + "issuer": f"{host_url}/openid", + "authorization_endpoint": f"{host_url}{url_for('openid.auth')}", + "token_endpoint": f"{host_url}{url_for('openid.token')}", + "end_session_endpoint": f"{host_url}{url_for('openid.end_session')}", + } + + +@openid_blueprint.route("/auth", methods=["GET"]) +def auth() -> str: + """Accepts a series of parameters.""" + return render_template( + "login.html", + state=request.args.get("state"), + response_type=request.args.get("response_type"), + client_id=request.args.get("client_id"), + scope=request.args.get("scope"), + redirect_uri=request.args.get("redirect_uri"), + error_message=request.args.get("error_message", ""), + ) + + +@openid_blueprint.route("/form_submit", methods=["POST"]) +def form_submit() -> Any: + """Handles the login form submission.""" + users = get_users() + if ( + request.values["Uname"] in users + and request.values["Pass"] == users[request.values["Uname"]]["password"] + ): + # Redirect back to the end user with some detailed information + state = request.values.get("state") + data = { + "state": state, + "code": request.values["Uname"] + OPEN_ID_CODE, + "session_state": "", + } + url = request.values.get("redirect_uri") + "?" + urlencode(data) + return redirect(url) + else: + return render_template( + "login.html", + state=request.values.get("state"), + response_type=request.values.get("response_type"), + client_id=request.values.get("client_id"), + scope=request.values.get("scope"), + redirect_uri=request.values.get("redirect_uri"), + error_message="Login failed. Please try again.", + ) + + +@openid_blueprint.route("/token", methods=["POST"]) +def token() -> dict: + """Url that will return a valid token, given the super secret sauce.""" + request.values.get("grant_type") + code = request.values.get("code") + request.values.get("redirect_uri") + + """We just stuffed the user name on the front of the code, so grab it.""" + user_name, secret_hash = code.split(":") + user_details = get_users()[user_name] + + """Get authentication from headers.""" + authorization = request.headers.get("Authorization", "Basic ") + authorization = authorization[6:] # Remove "Basic" + authorization = base64.b64decode(authorization).decode("utf-8") + client_id, client_secret = authorization.split(":") + + base_url = request.host_url + "openid" + + id_token = jwt.encode( + { + "iss": base_url, + "aud": [client_id, "account"], + "iat": time.time(), + "exp": time.time() + 86400, # Expire after a day. 
+ "sub": user_name, + "preferred_username": user_details.get("preferred_username", user_name), + }, + client_secret, + algorithm="HS256", + ) + response = { + "access_token": id_token, + "id_token": id_token, + "refresh_token": id_token, + } + return response + + +@openid_blueprint.route("/end_session", methods=["GET"]) +def end_session() -> Response: + """Logout.""" + redirect_url = request.args.get("post_logout_redirect_uri", "http://localhost") + request.args.get("id_token_hint") + return redirect(redirect_url) + + +@openid_blueprint.route("/refresh", methods=["POST"]) +def refresh() -> str: + """Refresh.""" + return "" + + +permission_cache = None + + +def get_users() -> Any: + """Load users from a local configuration file.""" + global permission_cache + if not permission_cache: + with open(current_app.config["PERMISSIONS_FILE_FULLPATH"]) as file: + permission_cache = yaml.safe_load(file) + if "users" in permission_cache: + return permission_cache["users"] + else: + return {} diff --git a/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css b/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css new file mode 100644 index 00000000..15b093f6 --- /dev/null +++ b/src/spiffworkflow_backend/routes/openid_blueprint/static/login.css @@ -0,0 +1,112 @@ + body{ + margin: 0; + padding: 0; + background-color:white; + font-family: 'Arial'; + } + header { + width: 100%; + background-color: black; + } + .logo_small { + padding: 5px 20px; + } + .error { + margin: 20px auto; + color: red; + font-weight: bold; + text-align: center; + } + .login{ + width: 400px; + overflow: hidden; + margin: 20px auto; + padding: 50px; + background: #fff; + border-radius: 15px ; + } + h2{ + text-align: center; + color: #277582; + padding: 20px; + } + label{ + color: #fff; + width: 200px; + display: inline-block; + } + #log { + width: 100px; + height: 50px; + border: none; + padding-left: 7px; + background-color:#202020; + color: #DDD; + text-align: left; + } + .cds--btn--primary { + background-color: #0f62fe; + border: 1px solid #0000; + color: #fff; + } + .cds--btn { + align-items: center; + border: 0; + border-radius: 0; + box-sizing: border-box; + cursor: pointer; + display: inline-flex; + flex-shrink: 0; + font-family: inherit; + font-size: 100%; + font-size: .875rem; + font-weight: 400; + justify-content: space-between; + letter-spacing: .16px; + line-height: 1.28572; + margin: 0; + max-width: 20rem; + min-height: 3rem; + outline: none; + padding: calc(0.875rem - 3px) 63px calc(0.875rem - 3px) 15px; + position: relative; + text-align: left; + text-decoration: none; + transition: background 70ms cubic-bezier(0, 0, .38, .9), box-shadow 70ms cubic-bezier(0, 0, .38, .9), border-color 70ms cubic-bezier(0, 0, .38, .9), outline 70ms cubic-bezier(0, 0, .38, .9); + vertical-align: initial; + vertical-align: top; + width: max-content; + } + .cds--btn:hover { + background-color: #0145c5; + } + .cds--btn:focus { + background-color: #01369a; + } + + .cds--text-input { + background-color: #eee; + border: none; + border-bottom: 1px solid #8d8d8d; + color: #161616; + font-family: inherit; + font-size: .875rem; + font-weight: 400; + height: 2.5rem; + letter-spacing: .16px; + line-height: 1.28572; + outline: 2px solid #0000; + outline-offset: -2px; + padding: 0 1rem; + transition: background-color 70ms cubic-bezier(.2,0,.38,.9),outline 70ms cubic-bezier(.2,0,.38,.9); + width: 100%; + } + + span{ + color: white; + font-size: 17px; + } + a{ + float: right; + background-color: grey; + } diff --git 
a/src/spiffworkflow_backend/routes/openid_blueprint/static/logo.png b/src/spiffworkflow_backend/routes/openid_blueprint/static/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..4cffb07fdf112e035c669c06bd5cbdc9355f43a5 GIT binary patch literal 10138 [binary image data omitted] literal 0 HcmV?d00001 diff --git a/src/spiffworkflow_backend/routes/openid_blueprint/static/logo_small.png b/src/spiffworkflow_backend/routes/openid_blueprint/static/logo_small.png new file mode 100644 index 0000000000000000000000000000000000000000..d0ad4499a9f187c829438db52f20575863e526c1 GIT binary patch literal 5000 [binary image data omitted] literal 0 HcmV?d00001 diff --git a/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html b/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html new file mode 100644 --- /dev/null +++ b/src/spiffworkflow_backend/routes/openid_blueprint/templates/login.html
+<!DOCTYPE html>
+<html>
+<head>
+    <title>Login Form</title>
+    <link rel="stylesheet" type="text/css" href="{{ url_for('openid.static', filename='login.css') }}">
+</head>
+<body>
+<header>
+ +
+ +

Login

+
{{error_message}}
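For orientation, a minimal sketch of how a Flask blueprint could serve a login template like the one added above. This is an illustrative assumption, not the patch's actual openid_blueprint code: the blueprint name, route, template filename, and form-field names are invented here; only the "Login Form" page and its {{error_message}} variable come from the diff.

# Hypothetical sketch: serving a login template from a Flask blueprint.
# Blueprint name, route, template filename, and form fields are assumptions;
# only the "Login Form" page and the {{error_message}} placeholder appear
# in the patch above.
from flask import Blueprint, render_template, request

openid_demo = Blueprint(
    "openid_demo", __name__, template_folder="templates", static_folder="static"
)


@openid_demo.route("/login", methods=["GET", "POST"])
def login():
    error_message = ""
    if request.method == "POST" and not request.form.get("username"):
        # A real OpenID provider would validate credentials and redirect
        # back to the client; this sketch only shows the error path that
        # feeds the template's {{error_message}} placeholder.
        error_message = "username is required"
    return render_template("login.html", error_message=error_message)

Registered with app.register_blueprint(openid_demo), such a page would be served at /login and could reference the two logo files committed above from the blueprint's static folder.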
+ + + diff --git a/src/spiffworkflow_backend/routes/process_api_blueprint.py b/src/spiffworkflow_backend/routes/process_api_blueprint.py index 42930765..3e7eed53 100644 --- a/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -1,6 +1,7 @@ """APIs for dealing with process groups, process models, and process instances.""" import json import random +import re import string import uuid from typing import Any @@ -30,6 +31,9 @@ from SpiffWorkflow.task import TaskState from sqlalchemy import and_ from sqlalchemy import asc from sqlalchemy import desc +from sqlalchemy import func +from sqlalchemy.orm import aliased +from sqlalchemy.orm import selectinload from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, @@ -51,6 +55,9 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSche from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -63,6 +70,7 @@ from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel from spiffworkflow_backend.routes.user import verify_token from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService @@ -150,16 +158,15 @@ def modify_process_model_id(process_model_id: str) -> str: return process_model_id.replace("/", ":") -def un_modify_modified_process_model_id(modified_process_model_id: str) -> str: +def un_modify_modified_process_model_id(modified_process_model_identifier: str) -> str: """Un_modify_modified_process_model_id.""" - return modified_process_model_id.replace(":", "/") + return modified_process_model_identifier.replace(":", "/") def process_group_add(body: dict) -> flask.wrappers.Response: """Add_process_group.""" - process_model_service = ProcessModelService() process_group = ProcessGroup(**body) - process_model_service.add_process_group(process_group) + ProcessModelService.add_process_group(process_group) return make_response(jsonify(process_group), 201) @@ -183,20 +190,20 @@ def process_group_update( process_group_id = un_modify_modified_process_model_id(modified_process_group_id) process_group = ProcessGroup(id=process_group_id, **body_filtered) - ProcessModelService().update_process_group(process_group) + ProcessModelService.update_process_group(process_group) return make_response(jsonify(process_group), 200) -def process_groups_list( +def process_group_list( process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 ) -> flask.wrappers.Response: - """Process_groups_list.""" + """Process_group_list.""" if process_group_identifier is not None: - process_groups = ProcessModelService().get_process_groups( + process_groups = ProcessModelService.get_process_groups( 
process_group_identifier ) else: - process_groups = ProcessModelService().get_process_groups() + process_groups = ProcessModelService.get_process_groups() batch = ProcessModelService().get_batch( items=process_groups, page=page, per_page=per_page ) @@ -222,7 +229,7 @@ def process_group_show( """Process_group_show.""" process_group_id = un_modify_modified_process_model_id(modified_process_group_id) try: - process_group = ProcessModelService().get_process_group(process_group_id) + process_group = ProcessModelService.get_process_group(process_group_id) except ProcessEntityNotFoundError as exception: raise ( ApiError( @@ -231,32 +238,55 @@ def process_group_show( status_code=400, ) ) from exception + + process_group.parent_groups = ProcessModelService.get_parent_group_array( + process_group.id + ) return make_response(jsonify(process_group), 200) +def process_group_move( + modified_process_group_identifier: str, new_location: str +) -> flask.wrappers.Response: + """Process_group_move.""" + original_process_group_id = un_modify_modified_process_model_id( + modified_process_group_identifier + ) + new_process_group = ProcessModelService().process_group_move( + original_process_group_id, new_location + ) + return make_response(jsonify(new_process_group), 201) + + def process_model_create( modified_process_group_id: str, body: Dict[str, Union[str, bool, int]] ) -> flask.wrappers.Response: """Process_model_create.""" - process_model_info = ProcessModelInfoSchema().load(body) + body_include_list = [ + "id", + "display_name", + "primary_file_name", + "primary_process_id", + "description", + "metadata_extraction_paths", + ] + body_filtered = { + include_item: body[include_item] + for include_item in body_include_list + if include_item in body + } + if modified_process_group_id is None: raise ApiError( error_code="process_group_id_not_specified", message="Process Model could not be created when process_group_id path param is unspecified", status_code=400, ) - if process_model_info is None: - raise ApiError( - error_code="process_model_could_not_be_created", - message=f"Process Model could not be created from given body: {body}", - status_code=400, - ) unmodified_process_group_id = un_modify_modified_process_model_id( modified_process_group_id ) - process_model_service = ProcessModelService() - process_group = process_model_service.get_process_group(unmodified_process_group_id) + process_group = ProcessModelService.get_process_group(unmodified_process_group_id) if process_group is None: raise ApiError( error_code="process_model_could_not_be_created", @@ -264,7 +294,15 @@ def process_model_create( status_code=400, ) - process_model_service.add_process_model(process_model_info) + process_model_info = ProcessModelInfo(**body_filtered) # type: ignore + if process_model_info is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=f"Process Model could not be created from given body: {body}", + status_code=400, + ) + + ProcessModelService.add_process_model(process_model_info) return Response( json.dumps(ProcessModelInfoSchema().dump(process_model_info)), status=201, @@ -277,7 +315,6 @@ def process_model_delete( ) -> flask.wrappers.Response: """Process_model_delete.""" process_model_identifier = modified_process_model_identifier.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" ProcessModelService().process_model_delete(process_model_identifier) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -292,6 
+329,7 @@ def process_model_update( "primary_file_name", "primary_process_id", "description", + "metadata_extraction_paths", ] body_filtered = { include_item: body[include_item] @@ -299,33 +337,68 @@ def process_model_update( if include_item in body } - # process_model_identifier = f"{process_group_id}/{process_model_id}" process_model = get_process_model(process_model_identifier) - ProcessModelService().update_process_model(process_model, body_filtered) + ProcessModelService.update_process_model(process_model, body_filtered) return ProcessModelInfoSchema().dump(process_model) def process_model_show(modified_process_model_identifier: str) -> Any: """Process_model_show.""" process_model_identifier = modified_process_model_identifier.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" process_model = get_process_model(process_model_identifier) - # TODO: Temporary. Should not need the next line once models have correct ids - # process_model.id = process_model_identifier - files = sorted(SpecFileService.get_files(process_model)) + files = sorted( + SpecFileService.get_files(process_model), + key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index, + ) process_model.files = files for file in process_model.files: file.references = SpecFileService.get_references_for_file(file, process_model) - process_model_json = ProcessModelInfoSchema().dump(process_model) - return process_model_json + + process_model.parent_groups = ProcessModelService.get_parent_group_array( + process_model.id + ) + return make_response(jsonify(process_model), 200) + + +def process_model_move( + modified_process_model_identifier: str, new_location: str +) -> flask.wrappers.Response: + """Process_model_move.""" + original_process_model_id = un_modify_modified_process_model_id( + modified_process_model_identifier + ) + new_process_model = ProcessModelService().process_model_move( + original_process_model_id, new_location + ) + return make_response(jsonify(new_process_model), 201) + + +def process_model_publish( + modified_process_model_identifier: str, branch_to_update: Optional[str] = None +) -> flask.wrappers.Response: + """Process_model_publish.""" + if branch_to_update is None: + branch_to_update = current_app.config["GIT_BRANCH_TO_PUBLISH_TO"] + process_model_identifier = un_modify_modified_process_model_id( + modified_process_model_identifier + ) + pr_url = GitService().publish(process_model_identifier, branch_to_update) + data = {"ok": True, "pr_url": pr_url} + return Response(json.dumps(data), status=200, mimetype="application/json") def process_model_list( - process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 + process_group_identifier: Optional[str] = None, + recursive: Optional[bool] = False, + filter_runnable_by_user: Optional[bool] = False, + page: int = 1, + per_page: int = 100, ) -> flask.wrappers.Response: """Process model list!""" - process_models = ProcessModelService().get_process_models( - process_group_id=process_group_identifier + process_models = ProcessModelService.get_process_models( + process_group_id=process_group_identifier, + recursive=recursive, + filter_runnable_by_user=filter_runnable_by_user, ) batch = ProcessModelService().get_batch( process_models, page=page, per_page=per_page @@ -355,9 +428,9 @@ def process_list() -> Any: return SpecReferenceSchema(many=True).dump(references) -def get_file(modified_process_model_id: str, file_name: str) -> Any: +def get_file(modified_process_model_identifier: str, 
file_name: str) -> Any: """Get_file.""" - process_model_identifier = modified_process_model_id.replace(":", "/") + process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = get_process_model(process_model_identifier) files = SpecFileService.get_files(process_model, file_name) if len(files) == 0: @@ -377,11 +450,10 @@ def get_file(modified_process_model_id: str, file_name: str) -> Any: def process_model_file_update( - modified_process_model_id: str, file_name: str + modified_process_model_identifier: str, file_name: str ) -> flask.wrappers.Response: """Process_model_file_update.""" - process_model_identifier = modified_process_model_id.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" + process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = get_process_model(process_model_identifier) request_file = get_file_from_request() @@ -407,10 +479,10 @@ def process_model_file_update( def process_model_file_delete( - modified_process_model_id: str, file_name: str + modified_process_model_identifier: str, file_name: str ) -> flask.wrappers.Response: """Process_model_file_delete.""" - process_model_identifier = modified_process_model_id.replace(":", "/") + process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = get_process_model(process_model_identifier) try: SpecFileService.delete_file(process_model, file_name) @@ -426,9 +498,9 @@ def process_model_file_delete( return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") -def add_file(modified_process_model_id: str) -> flask.wrappers.Response: +def add_file(modified_process_model_identifier: str) -> flask.wrappers.Response: """Add_file.""" - process_model_identifier = modified_process_model_id.replace(":", "/") + process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = get_process_model(process_model_identifier) request_file = get_file_from_request() if not request_file.filename: @@ -449,13 +521,17 @@ def add_file(modified_process_model_id: str) -> flask.wrappers.Response: ) -def process_instance_create(modified_process_model_id: str) -> flask.wrappers.Response: +def process_instance_create( + modified_process_model_identifier: str, +) -> flask.wrappers.Response: """Create_process_instance.""" process_model_identifier = un_modify_modified_process_model_id( - modified_process_model_id + modified_process_model_identifier ) - process_instance = ProcessInstanceService.create_process_instance( - process_model_identifier, g.user + process_instance = ( + ProcessInstanceService.create_process_instance_from_process_model_identifier( + process_model_identifier, g.user + ) ) return Response( json.dumps(ProcessInstanceModelSchema().dump(process_instance)), @@ -465,6 +541,7 @@ def process_instance_create(modified_process_model_id: str) -> flask.wrappers.Re def process_instance_run( + modified_process_model_identifier: str, process_instance_id: int, do_engine_steps: bool = True, ) -> flask.wrappers.Response: @@ -507,6 +584,7 @@ def process_instance_run( def process_instance_terminate( process_instance_id: int, + modified_process_model_identifier: str, ) -> flask.wrappers.Response: """Process_instance_run.""" process_instance = ProcessInstanceService().get_process_instance( @@ -519,6 +597,7 @@ def process_instance_terminate( def process_instance_suspend( process_instance_id: int, + modified_process_model_identifier: str, ) -> flask.wrappers.Response: 
"""Process_instance_suspend.""" process_instance = ProcessInstanceService().get_process_instance( @@ -531,6 +610,7 @@ def process_instance_suspend( def process_instance_resume( process_instance_id: int, + modified_process_model_identifier: str, ) -> flask.wrappers.Response: """Process_instance_resume.""" process_instance = ProcessInstanceService().get_process_instance( @@ -542,19 +622,24 @@ def process_instance_resume( def process_instance_log_list( + modified_process_model_identifier: str, process_instance_id: int, page: int = 1, per_page: int = 100, + detailed: bool = False, ) -> flask.wrappers.Response: """Process_instance_log_list.""" # to make sure the process instance exists process_instance = find_process_instance_by_id_or_raise(process_instance_id) + log_query = SpiffLoggingModel.query.filter( + SpiffLoggingModel.process_instance_id == process_instance.id + ) + if not detailed: + log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"])) # type: ignore + logs = ( - SpiffLoggingModel.query.filter( - SpiffLoggingModel.process_instance_id == process_instance.id - ) - .order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore + log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore .join( UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True ) # isouter since if we don't have a user, we still want the log @@ -600,6 +685,7 @@ def message_instance_list( .add_columns( MessageModel.identifier.label("message_identifier"), ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.process_model_display_name, ) .paginate(page=page, per_page=per_page, error_out=False) ) @@ -729,12 +815,16 @@ def process_instance_list( end_from: Optional[int] = None, end_to: Optional[int] = None, process_status: Optional[str] = None, + initiated_by_me: Optional[bool] = None, + with_tasks_completed_by_me: Optional[bool] = None, + with_tasks_completed_by_my_group: Optional[bool] = None, user_filter: Optional[bool] = False, report_identifier: Optional[str] = None, + report_id: Optional[int] = None, ) -> flask.wrappers.Response: """Process_instance_list.""" process_instance_report = ProcessInstanceReportService.report_with_identifier( - g.user, report_identifier + g.user, report_id, report_identifier ) if user_filter: @@ -745,6 +835,9 @@ def process_instance_list( end_from, end_to, process_status.split(",") if process_status else None, + initiated_by_me, + with_tasks_completed_by_me, + with_tasks_completed_by_my_group, ) else: report_filter = ( @@ -756,11 +849,18 @@ def process_instance_list( end_from, end_to, process_status, + initiated_by_me, + with_tasks_completed_by_me, + with_tasks_completed_by_my_group, ) ) - # process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier) process_instance_query = ProcessInstanceModel.query + # Always join that hot user table for good performance at serialization time. 
+ process_instance_query = process_instance_query.options( + selectinload(ProcessInstanceModel.process_initiator) + ) + if report_filter.process_model_identifier is not None: process_model = get_process_model( f"{report_filter.process_model_identifier}", @@ -804,20 +904,146 @@ def process_instance_list( ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore ) - process_instances = process_instance_query.order_by( - ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore - ).paginate(page=page, per_page=per_page, error_out=False) - - results = list( - map( - ProcessInstanceService.serialize_flat_with_task_data, - process_instances.items, + if report_filter.initiated_by_me is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore ) + process_instance_query = process_instance_query.filter_by( + process_initiator=g.user + ) + + # TODO: not sure if this is exactly what is wanted + if report_filter.with_tasks_completed_by_me is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore + ) + # process_instance_query = process_instance_query.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) + # process_instance_query = process_instance_query.add_columns(UserModel.username) + # search for process_instance.UserModel.username in this file for more details about why adding columns is annoying. + + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.process_initiator_id != g.user.id + ) + process_instance_query = process_instance_query.join( + SpiffStepDetailsModel, + ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, + ) + process_instance_query = process_instance_query.join( + SpiffLoggingModel, + ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.message.contains("COMPLETED") # type: ignore + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step + ) + process_instance_query = process_instance_query.filter( + SpiffStepDetailsModel.completed_by_user_id == g.user.id + ) + + if report_filter.with_tasks_completed_by_my_group is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore + ) + process_instance_query = process_instance_query.join( + SpiffStepDetailsModel, + ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, + ) + process_instance_query = process_instance_query.join( + SpiffLoggingModel, + ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.message.contains("COMPLETED") # type: ignore + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step + ) + process_instance_query = process_instance_query.join( + GroupModel, + GroupModel.id == SpiffStepDetailsModel.lane_assignment_id, + ) + process_instance_query = process_instance_query.join( + UserGroupAssignmentModel, + UserGroupAssignmentModel.group_id == GroupModel.id, + ) + process_instance_query = process_instance_query.filter( + UserGroupAssignmentModel.user_id == g.user.id + ) + + 
instance_metadata_aliases = {} + stock_columns = ProcessInstanceReportService.get_column_names_for_model( + ProcessInstanceModel + ) + for column in process_instance_report.report_metadata["columns"]: + if column["accessor"] in stock_columns: + continue + instance_metadata_alias = aliased(ProcessInstanceMetadataModel) + instance_metadata_aliases[column["accessor"]] = instance_metadata_alias + + filter_for_column = None + if "filter_by" in process_instance_report.report_metadata: + filter_for_column = next( + ( + f + for f in process_instance_report.report_metadata["filter_by"] + if f["field_name"] == column["accessor"] + ), + None, + ) + isouter = True + conditions = [ + ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, + instance_metadata_alias.key == column["accessor"], + ] + if filter_for_column: + isouter = False + conditions.append( + instance_metadata_alias.value == filter_for_column["field_value"] + ) + process_instance_query = process_instance_query.join( + instance_metadata_alias, and_(*conditions), isouter=isouter + ).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"])) + + order_by_query_array = [] + order_by_array = process_instance_report.report_metadata["order_by"] + if len(order_by_array) < 1: + order_by_array = ProcessInstanceReportModel.default_order_by() + for order_by_option in order_by_array: + attribute = re.sub("^-", "", order_by_option) + if attribute in stock_columns: + if order_by_option.startswith("-"): + order_by_query_array.append( + getattr(ProcessInstanceModel, attribute).desc() + ) + else: + order_by_query_array.append( + getattr(ProcessInstanceModel, attribute).asc() + ) + elif attribute in instance_metadata_aliases: + if order_by_option.startswith("-"): + order_by_query_array.append( + instance_metadata_aliases[attribute].value.desc() + ) + else: + order_by_query_array.append( + instance_metadata_aliases[attribute].value.asc() + ) + + process_instances = ( + process_instance_query.group_by(ProcessInstanceModel.id) + .add_columns(ProcessInstanceModel.id) + .order_by(*order_by_query_array) + .paginate(page=page, per_page=per_page, error_out=False) + ) + + results = ProcessInstanceReportService.add_metadata_columns_to_process_instance( + process_instances.items, process_instance_report.report_metadata["columns"] ) - report_metadata = process_instance_report.report_metadata response_json = { - "report_metadata": report_metadata, + "report": process_instance_report, "results": results, "filters": report_filter.to_dict(), "pagination": { @@ -830,6 +1056,22 @@ def process_instance_list( return make_response(jsonify(response_json), 200) +def process_instance_report_column_list() -> flask.wrappers.Response: + """Process_instance_report_column_list.""" + table_columns = ProcessInstanceReportService.builtin_column_options() + columns_for_metadata = ( + db.session.query(ProcessInstanceMetadataModel.key) + .order_by(ProcessInstanceMetadataModel.key) + .distinct() # type: ignore + .all() + ) + columns_for_metadata_strings = [ + {"Header": i[0], "accessor": i[0], "filterable": True} + for i in columns_for_metadata + ] + return make_response(jsonify(table_columns + columns_for_metadata_strings), 200) + + def process_instance_show( modified_process_model_identifier: str, process_instance_id: int ) -> flask.wrappers.Response: @@ -846,7 +1088,7 @@ def process_instance_show( ): bpmn_xml_file_contents = SpecFileService.get_data( process_model, process_model.primary_file_name - ) + ).decode("utf-8") else: bpmn_xml_file_contents = 
GitService.get_instance_file_contents_for_revision( process_model, process_instance.bpmn_version_control_identifier @@ -856,7 +1098,9 @@ def process_instance_show( return make_response(jsonify(process_instance), 200) -def process_instance_delete(process_instance_id: int) -> flask.wrappers.Response: +def process_instance_delete( + process_instance_id: int, modified_process_model_identifier: str +) -> flask.wrappers.Response: """Create_process_instance.""" process_instance = find_process_instance_by_id_or_raise(process_instance_id) @@ -886,22 +1130,22 @@ def process_instance_report_list( def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response: """Process_instance_report_create.""" - ProcessInstanceReportModel.create_report( + process_instance_report = ProcessInstanceReportModel.create_report( identifier=body["identifier"], user=g.user, report_metadata=body["report_metadata"], ) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + return make_response(jsonify(process_instance_report), 201) def process_instance_report_update( - report_identifier: str, + report_id: int, body: Dict[str, Any], ) -> flask.wrappers.Response: """Process_instance_report_create.""" process_instance_report = ProcessInstanceReportModel.query.filter_by( - identifier=report_identifier, + id=report_id, created_by_id=g.user.id, ).first() if process_instance_report is None: @@ -914,15 +1158,15 @@ def process_instance_report_update( process_instance_report.report_metadata = body["report_metadata"] db.session.commit() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + return make_response(jsonify(process_instance_report), 201) def process_instance_report_delete( - report_identifier: str, + report_id: int, ) -> flask.wrappers.Response: """Process_instance_report_create.""" process_instance_report = ProcessInstanceReportModel.query.filter_by( - identifier=report_identifier, + id=report_id, created_by_id=g.user.id, ).first() if process_instance_report is None: @@ -938,11 +1182,9 @@ def process_instance_report_delete( return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") -def service_tasks_show() -> flask.wrappers.Response: - """Service_tasks_show.""" +def service_task_list() -> flask.wrappers.Response: + """Service_task_list.""" available_connectors = ServiceTaskService.available_connectors() - print(available_connectors) - return Response( json.dumps(available_connectors), status=200, mimetype="application/json" ) @@ -971,24 +1213,22 @@ def authentication_callback( f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True ) return redirect( - f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/authentications" + f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration" ) def process_instance_report_show( - report_identifier: str, + report_id: int, page: int = 1, per_page: int = 100, ) -> flask.wrappers.Response: - """Process_instance_list.""" - process_instances = ProcessInstanceModel.query.order_by( # .filter_by(process_model_identifier=process_model.id) + """Process_instance_report_show.""" + process_instances = ProcessInstanceModel.query.order_by( ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore - ).paginate( - page=page, per_page=per_page, error_out=False - ) + ).paginate(page=page, per_page=per_page, error_out=False) process_instance_report = ProcessInstanceReportModel.query.filter_by( - identifier=report_identifier, 
+ id=report_id, created_by_id=g.user.id, ).first() if process_instance_report is None: @@ -1026,7 +1266,7 @@ def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Res # just need this add_columns to add the process_model_identifier. Then add everything back that was removed. .add_columns( ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.process_group_identifier, + ProcessInstanceModel.process_model_display_name, ProcessInstanceModel.status, ActiveTaskModel.task_name, ActiveTaskModel.task_title, @@ -1150,7 +1390,7 @@ def get_tasks( def process_instance_task_list( - modified_process_model_id: str, + modified_process_model_identifier: str, process_instance_id: int, all_tasks: bool = False, spiff_step: int = 0, @@ -1167,8 +1407,10 @@ def process_instance_task_list( ) .first() ) - if step_detail is not None: - process_instance.bpmn_json = json.dumps(step_detail.task_json) + if step_detail is not None and process_instance.bpmn_json is not None: + bpmn_json = json.loads(process_instance.bpmn_json) + bpmn_json["tasks"] = step_detail.task_json + process_instance.bpmn_json = json.dumps(bpmn_json) processor = ProcessInstanceProcessor(process_instance) @@ -1263,9 +1505,6 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response task.form_ui_schema = ui_form_contents if task.properties and task.data and "instructionsForEndUser" in task.properties: - print( - f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}" - ) if task.properties["instructionsForEndUser"]: task.properties["instructionsForEndUser"] = render_jinja_template( task.properties["instructionsForEndUser"], task.data @@ -1288,7 +1527,7 @@ def task_submit( task_id, process_instance, processor=processor ) AuthorizationService.assert_user_can_complete_spiff_task( - processor, spiff_task, principal.user + process_instance.id, spiff_task, principal.user ) if spiff_task.state != TaskState.READY: @@ -1473,7 +1712,7 @@ def get_process_model(process_model_id: str) -> ProcessModelInfo: """Get_process_model.""" process_model = None try: - process_model = ProcessModelService().get_process_model(process_model_id) + process_model = ProcessModelService.get_process_model(process_model_id) except ProcessEntityNotFoundError as exception: raise ( ApiError( @@ -1580,9 +1819,26 @@ def get_spiff_task_from_process_instance( return spiff_task +# sample body: +# {"ref": "refs/heads/main", "repository": {"name": "sample-process-models", +# "full_name": "sartography/sample-process-models", "private": False .... 
}} +# test with: ngrok http 7000 +# where 7000 is the port the app is running on locally +def github_webhook_receive(body: Dict) -> Response: + """Github_webhook_receive.""" + auth_header = request.headers.get("X-Hub-Signature-256") + AuthorizationService.verify_sha256_token(auth_header) + result = GitService.handle_web_hook(body) + return Response( + json.dumps({"git_pull": result}), status=200, mimetype="application/json" + ) + + # # Methods for secrets CRUD - maybe move somewhere else: # + + def get_secret(key: str) -> Optional[str]: """Get_secret.""" return SecretService.get_secret(key) @@ -1712,7 +1968,12 @@ def _update_form_schema_with_task_data_as_needed( _update_form_schema_with_task_data_as_needed(o, task_data) -def update_task_data(process_instance_id: str, task_id: str, body: Dict) -> Response: +def update_task_data( + process_instance_id: str, + modified_process_model_identifier: str, + task_id: str, + body: Dict, +) -> Response: """Update task data.""" process_instance = ProcessInstanceModel.query.filter( ProcessInstanceModel.id == int(process_instance_id) diff --git a/src/spiffworkflow_backend/routes/user.py b/src/spiffworkflow_backend/routes/user.py index 5fe10e0a..2bbbc137 100644 --- a/src/spiffworkflow_backend/routes/user.py +++ b/src/spiffworkflow_backend/routes/user.py @@ -1,6 +1,7 @@ """User.""" import ast import base64 +import json from typing import Any from typing import Dict from typing import Optional @@ -58,7 +59,6 @@ def verify_token( decoded_token = get_decoded_token(token) if decoded_token is not None: - if "token_type" in decoded_token: token_type = decoded_token["token_type"] if token_type == "internal": # noqa: S105 @@ -68,11 +68,11 @@ def verify_token( current_app.logger.error( f"Exception in verify_token getting user from decoded internal token. {e}" ) - elif "iss" in decoded_token.keys(): try: - user_info = AuthenticationService.get_user_info_from_open_id(token) - except ApiError as ae: + if AuthenticationService.validate_id_token(token): + user_info = decoded_token + except ApiError as ae: # API Error is only thrown in the token is outdated. # Try to refresh the token user = UserService.get_user_by_service_and_service_id( "open_id", decoded_token["sub"] @@ -86,14 +86,9 @@ def verify_token( ) ) if auth_token and "error" not in auth_token: - # redirect to original url, with auth_token? - user_info = ( - AuthenticationService.get_user_info_from_open_id( - auth_token["access_token"] - ) - ) - if not user_info: - raise ae + # We have the user, but this code is a bit convoluted, and will later demand + # a user_info object so it can look up the user. Sorry to leave this crap here. 
+ user_info = {"sub": user.service_id} else: raise ae else: @@ -203,6 +198,18 @@ def login(redirect_url: str = "/") -> Response: return redirect(login_redirect_url) +def parse_id_token(token: str) -> Any: + """Parse the id token.""" + parts = token.split(".") + if len(parts) != 3: + raise Exception("Incorrect id token format") + + payload = parts[1] + padded = payload + "=" * (4 - len(payload) % 4) + decoded = base64.b64decode(padded) + return json.loads(decoded) + + def login_return(code: str, state: str, session_state: str) -> Optional[Response]: """Login_return.""" state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8")) @@ -211,10 +218,9 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response if "id_token" in auth_token_object: id_token = auth_token_object["id_token"] + user_info = parse_id_token(id_token) + if AuthenticationService.validate_id_token(id_token): - user_info = AuthenticationService.get_user_info_from_open_id( - auth_token_object["access_token"] - ) if user_info and "error" not in user_info: user_model = AuthorizationService.create_user_from_sign_in(user_info) g.user = user_model.id @@ -332,15 +338,11 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo .filter(UserModel.service_id == service_id) .first() ) - # user: UserModel = UserModel.query.filter() if user: return user user = UserModel( username=service_id, - uid=service_id, service=service, service_id=service_id, - name="API User", ) - return user diff --git a/src/spiffworkflow_backend/scripts/add_user_to_group.py b/src/spiffworkflow_backend/scripts/add_user_to_group.py new file mode 100644 index 00000000..d3c77711 --- /dev/null +++ b/src/spiffworkflow_backend/scripts/add_user_to_group.py @@ -0,0 +1,43 @@ +"""Get_env.""" +from typing import Any + +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.group import GroupNotFoundError +from spiffworkflow_backend.models.script_attributes_context import ( + ScriptAttributesContext, +) +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.models.user import UserNotFoundError +from spiffworkflow_backend.scripts.script import Script +from spiffworkflow_backend.services.user_service import UserService + + +class AddUserToGroup(Script): + """AddUserToGroup.""" + + def get_description(self) -> str: + """Get_description.""" + return """Add a given user to a given group.""" + + def run( + self, + script_attributes_context: ScriptAttributesContext, + *args: Any, + **kwargs: Any, + ) -> Any: + """Run.""" + username = args[0] + group_identifier = args[1] + user = UserModel.query.filter_by(username=username).first() + if user is None: + raise UserNotFoundError( + f"Script 'add_user_to_group' could not find a user with username: {username}" + ) + + group = GroupModel.query.filter_by(identifier=group_identifier).first() + if group is None: + raise GroupNotFoundError( + f"Script 'add_user_to_group' could not find group with identifier '{group_identifier}'." 
+ ) + + UserService.add_user_to_group(user, group) diff --git a/src/spiffworkflow_backend/scripts/save_process_instance_metadata.py b/src/spiffworkflow_backend/scripts/save_process_instance_metadata.py new file mode 100644 index 00000000..d9c1959a --- /dev/null +++ b/src/spiffworkflow_backend/scripts/save_process_instance_metadata.py @@ -0,0 +1,42 @@ +"""Save process instance metadata.""" +from typing import Any + +from flask_bpmn.models.db import db + +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) +from spiffworkflow_backend.models.script_attributes_context import ( + ScriptAttributesContext, +) +from spiffworkflow_backend.scripts.script import Script + + +class SaveProcessInstanceMetadata(Script): + """SaveProcessInstanceMetadata.""" + + def get_description(self) -> str: + """Get_description.""" + return """Save a given dict as process instance metadata (useful for creating reports).""" + + def run( + self, + script_attributes_context: ScriptAttributesContext, + *args: Any, + **kwargs: Any, + ) -> Any: + """Run.""" + metadata_dict = args[0] + for key, value in metadata_dict.items(): + pim = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=script_attributes_context.process_instance_id, + key=key, + ).first() + if pim is None: + pim = ProcessInstanceMetadataModel( + process_instance_id=script_attributes_context.process_instance_id, + key=key, + ) + pim.value = value + db.session.add(pim) + db.session.commit() diff --git a/src/spiffworkflow_backend/services/acceptance_test_fixtures.py b/src/spiffworkflow_backend/services/acceptance_test_fixtures.py index cfea3148..81488910 100644 --- a/src/spiffworkflow_backend/services/acceptance_test_fixtures.py +++ b/src/spiffworkflow_backend/services/acceptance_test_fixtures.py @@ -1,5 +1,4 @@ """Acceptance_test_fixtures.""" -import json import time from flask import current_app @@ -8,13 +7,15 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]: """Load_fixtures.""" current_app.logger.debug("load_acceptance_test_fixtures() start") - test_process_group_id = "" - test_process_model_id = "acceptance-tests-group-one/acceptance-tests-model-1" + test_process_model_id = "misc/acceptance-tests-group-one/acceptance-tests-model-1" user = BaseTest.find_or_create_user() statuses = ProcessInstanceStatus.list() current_time = round(time.time()) @@ -28,16 +29,13 @@ def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]: # suspended - 6 hours ago process_instances = [] for i in range(len(statuses)): - process_instance = ProcessInstanceModel( - status=statuses[i], - process_initiator=user, - process_model_identifier=test_process_model_id, - process_group_identifier=test_process_group_id, - updated_at_in_seconds=round(time.time()), - start_in_seconds=current_time - (3600 * i), - end_in_seconds=current_time - (3600 * i - 20), - bpmn_json=json.dumps({"i": i}), + + process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier( + test_process_model_id, user ) + process_instance.status = statuses[i] + process_instance.start_in_seconds = current_time - (3600 * i) + process_instance.end_in_seconds = current_time - (3600 * 
i - 20) db.session.add(process_instance) process_instances.append(process_instance) diff --git a/src/spiffworkflow_backend/services/authentication_service.py b/src/spiffworkflow_backend/services/authentication_service.py index 18f08d0f..f4bd357b 100644 --- a/src/spiffworkflow_backend/services/authentication_service.py +++ b/src/spiffworkflow_backend/services/authentication_service.py @@ -26,58 +26,35 @@ class AuthenticationProviderTypes(enum.Enum): class AuthenticationService: """AuthenticationService.""" + ENDPOINT_CACHE: dict = ( + {} + ) # We only need to find the openid endpoints once, then we can cache them. + @staticmethod - def get_open_id_args() -> tuple: - """Get_open_id_args.""" - open_id_server_url = current_app.config["OPEN_ID_SERVER_URL"] - open_id_client_id = current_app.config["OPEN_ID_CLIENT_ID"] - open_id_realm_name = current_app.config["OPEN_ID_REALM_NAME"] - open_id_client_secret_key = current_app.config[ - "OPEN_ID_CLIENT_SECRET_KEY" - ] # noqa: S105 - return ( - open_id_server_url, - open_id_client_id, - open_id_realm_name, - open_id_client_secret_key, - ) + def client_id() -> str: + """Returns the client id from the config.""" + return current_app.config.get("OPEN_ID_CLIENT_ID", "") + + @staticmethod + def server_url() -> str: + """Returns the server url from the config.""" + return current_app.config.get("OPEN_ID_SERVER_URL", "") + + @staticmethod + def secret_key() -> str: + """Returns the secret key from the config.""" + return current_app.config.get("OPEN_ID_CLIENT_SECRET_KEY", "") @classmethod - def get_user_info_from_open_id(cls, token: str) -> dict: - """The token is an auth_token.""" - ( - open_id_server_url, - open_id_client_id, - open_id_realm_name, - open_id_client_secret_key, - ) = cls.get_open_id_args() - - headers = {"Authorization": f"Bearer {token}"} - - request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/userinfo" - try: - request_response = requests.get(request_url, headers=headers) - except Exception as e: - current_app.logger.error(f"Exception in get_user_info_from_id_token: {e}") - raise ApiError( - error_code="token_error", - message=f"Exception in get_user_info_from_id_token: {e}", - status_code=401, - ) from e - - if request_response.status_code == 401: - raise ApiError( - error_code="invalid_token", message="Please login", status_code=401 - ) - elif request_response.status_code == 200: - user_info: dict = json.loads(request_response.text) - return user_info - - raise ApiError( - error_code="user_info_error", - message="Cannot get user info in get_user_info_from_id_token", - status_code=401, - ) + def open_id_endpoint_for_name(cls, name: str) -> str: + """All openid systems provide a mapping of static names to the full path of that endpoint.""" + if name not in AuthenticationService.ENDPOINT_CACHE: + request_url = f"{cls.server_url()}/.well-known/openid-configuration" + response = requests.get(request_url) + AuthenticationService.ENDPOINT_CACHE = response.json() + if name not in AuthenticationService.ENDPOINT_CACHE: + raise Exception(f"Unknown OpenID Endpoint: {name}") + return AuthenticationService.ENDPOINT_CACHE.get(name, "") @staticmethod def get_backend_url() -> str: @@ -87,17 +64,10 @@ class AuthenticationService: def logout(self, id_token: str, redirect_url: Optional[str] = None) -> Response: """Logout.""" if redirect_url is None: - redirect_url = "/" - return_redirect_url = f"{self.get_backend_url()}/v1.0/logout_return" - ( - open_id_server_url, - open_id_client_id, - open_id_realm_name, - 
open_id_client_secret_key, - ) = AuthenticationService.get_open_id_args() + redirect_url = f"{self.get_backend_url()}/v1.0/logout_return" request_url = ( - f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/logout?" - + f"post_logout_redirect_uri={return_redirect_url}&" + self.open_id_endpoint_for_name("end_session_endpoint") + + f"?post_logout_redirect_uri={redirect_url}&" + f"id_token_hint={id_token}" ) @@ -113,18 +83,12 @@ class AuthenticationService: self, state: str, redirect_url: str = "/v1.0/login_return" ) -> str: """Get_login_redirect_url.""" - ( - open_id_server_url, - open_id_client_id, - open_id_realm_name, - open_id_client_secret_key, - ) = AuthenticationService.get_open_id_args() return_redirect_url = f"{self.get_backend_url()}{redirect_url}" login_redirect_url = ( - f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/auth?" - + f"state={state}&" + self.open_id_endpoint_for_name("authorization_endpoint") + + f"?state={state}&" + "response_type=code&" - + f"client_id={open_id_client_id}&" + + f"client_id={self.client_id()}&" + "scope=openid&" + f"redirect_uri={return_redirect_url}" ) @@ -134,14 +98,7 @@ class AuthenticationService: self, code: str, redirect_url: str = "/v1.0/login_return" ) -> dict: """Get_auth_token_object.""" - ( - open_id_server_url, - open_id_client_id, - open_id_realm_name, - open_id_client_secret_key, - ) = AuthenticationService.get_open_id_args() - - backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}" + backend_basic_auth_string = f"{self.client_id()}:{self.secret_key()}" backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) headers = { @@ -154,7 +111,7 @@ class AuthenticationService: "redirect_uri": f"{self.get_backend_url()}{redirect_url}", } - request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + request_url = self.open_id_endpoint_for_name("token_endpoint") response = requests.post(request_url, data=data, headers=headers) auth_token_object: dict = json.loads(response.text) @@ -165,12 +122,6 @@ class AuthenticationService: """Https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation.""" valid = True now = time.time() - ( - open_id_server_url, - open_id_client_id, - open_id_realm_name, - open_id_client_secret_key, - ) = cls.get_open_id_args() try: decoded_token = jwt.decode(id_token, options={"verify_signature": False}) except Exception as e: @@ -179,15 +130,15 @@ class AuthenticationService: message="Cannot decode id_token", status_code=401, ) from e - if decoded_token["iss"] != f"{open_id_server_url}/realms/{open_id_realm_name}": + if decoded_token["iss"] != cls.server_url(): valid = False elif ( - open_id_client_id not in decoded_token["aud"] + cls.client_id() not in decoded_token["aud"] and "account" not in decoded_token["aud"] ): valid = False elif "azp" in decoded_token and decoded_token["azp"] not in ( - open_id_client_id, + cls.client_id(), "account", ): valid = False @@ -235,20 +186,14 @@ class AuthenticationService: refresh_token_object: RefreshTokenModel = RefreshTokenModel.query.filter( RefreshTokenModel.user_id == user_id ).first() - assert refresh_token_object # noqa: S101 - return refresh_token_object.token + if refresh_token_object: + return refresh_token_object.token + return None @classmethod def get_auth_token_from_refresh_token(cls, refresh_token: str) -> dict: - """Get a new auth_token from a refresh_token.""" - ( 
- open_id_server_url, - open_id_client_id, - open_id_realm_name, - open_id_client_secret_key, - ) = cls.get_open_id_args() - - backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}" + """Converts a refresh token to an Auth Token by calling the openid's auth endpoint.""" + backend_basic_auth_string = f"{cls.client_id()}:{cls.secret_key()}" backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) headers = { @@ -259,11 +204,11 @@ class AuthenticationService: data = { "grant_type": "refresh_token", "refresh_token": refresh_token, - "client_id": open_id_client_id, - "client_secret": open_id_client_secret_key, + "client_id": cls.client_id(), + "client_secret": cls.secret_key(), } - request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + request_url = cls.open_id_endpoint_for_name("token_endpoint") response = requests.post(request_url, data=data, headers=headers) auth_token_object: dict = json.loads(response.text) diff --git a/src/spiffworkflow_backend/services/authorization_service.py b/src/spiffworkflow_backend/services/authorization_service.py index 29ee7884..9456f8f1 100644 --- a/src/spiffworkflow_backend/services/authorization_service.py +++ b/src/spiffworkflow_backend/services/authorization_service.py @@ -1,5 +1,9 @@ """Authorization_service.""" +import inspect import re +from hashlib import sha256 +from hmac import compare_digest +from hmac import HMAC from typing import Optional from typing import Union @@ -8,6 +12,7 @@ import yaml from flask import current_app from flask import g from flask import request +from flask import scaffold from flask_bpmn.api.api_error import ApiError from flask_bpmn.models.db import db from SpiffWorkflow.task import Task as SpiffTask # type: ignore @@ -23,10 +28,8 @@ from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserNotFoundError from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel +from spiffworkflow_backend.routes.openid_blueprint import openid_blueprint from spiffworkflow_backend.services.group_service import GroupService -from spiffworkflow_backend.services.process_instance_processor import ( - ProcessInstanceProcessor, -) from spiffworkflow_backend.services.user_service import UserService @@ -45,6 +48,27 @@ class UserDoesNotHaveAccessToTaskError(Exception): class AuthorizationService: """Determine whether a user has permission to perform their request.""" + # https://stackoverflow.com/a/71320673/6090676 + @classmethod + def verify_sha256_token(cls, auth_header: Optional[str]) -> None: + """Verify_sha256_token.""" + if auth_header is None: + raise ApiError( + error_code="unauthorized", + message="", + status_code=403, + ) + + received_sign = auth_header.split("sha256=")[-1].strip() + secret = current_app.config["GITHUB_WEBHOOK_SECRET"].encode() + expected_sign = HMAC(key=secret, msg=request.data, digestmod=sha256).hexdigest() + if not compare_digest(received_sign, expected_sign): + raise ApiError( + error_code="unauthorized", + message="", + status_code=403, + ) + @classmethod def has_permission( cls, principals: list[PrincipalModel], permission: str, target_uri: str @@ -232,7 +256,11 @@ class AuthorizationService: def should_disable_auth_for_request(cls) -> bool: """Should_disable_auth_for_request.""" swagger_functions = ["get_json_spec"] - 
authentication_exclusion_list = ["status", "authentication_callback"] + authentication_exclusion_list = [ + "status", + "authentication_callback", + "github_webhook_receive", + ] if request.method == "OPTIONS": return True @@ -244,6 +272,7 @@ class AuthorizationService: return True api_view_function = current_app.view_functions[request.endpoint] + module = inspect.getmodule(api_view_function) if ( api_view_function and api_view_function.__name__.startswith("login") @@ -251,6 +280,8 @@ class AuthorizationService: or api_view_function.__name__.startswith("console_ui_") or api_view_function.__name__ in authentication_exclusion_list or api_view_function.__name__ in swagger_functions + or module == openid_blueprint + or module == scaffold # don't check permissions for static assets ): return True @@ -393,25 +424,25 @@ class AuthorizationService: @staticmethod def assert_user_can_complete_spiff_task( - processor: ProcessInstanceProcessor, + process_instance_id: int, spiff_task: SpiffTask, user: UserModel, ) -> bool: """Assert_user_can_complete_spiff_task.""" active_task = ActiveTaskModel.query.filter_by( task_name=spiff_task.task_spec.name, - process_instance_id=processor.process_instance_model.id, + process_instance_id=process_instance_id, ).first() if active_task is None: raise ActiveTaskNotFoundError( f"Could find an active task with task name '{spiff_task.task_spec.name}'" - f" for process instance '{processor.process_instance_model.id}'" + f" for process instance '{process_instance_id}'" ) if user not in active_task.potential_owners: raise UserDoesNotHaveAccessToTaskError( f"User {user.username} does not have access to update task'{spiff_task.task_spec.name}'" - f" for process instance '{processor.process_instance_model.id}'" + f" for process instance '{process_instance_id}'" ) return True diff --git a/src/spiffworkflow_backend/services/background_processing_service.py b/src/spiffworkflow_backend/services/background_processing_service.py index 08a2b02d..1771c2c8 100644 --- a/src/spiffworkflow_backend/services/background_processing_service.py +++ b/src/spiffworkflow_backend/services/background_processing_service.py @@ -14,7 +14,7 @@ class BackgroundProcessingService: """__init__.""" self.app = app - def run(self) -> None: + def process_waiting_process_instances(self) -> None: """Since this runs in a scheduler, we need to specify the app context as well.""" with self.app.app_context(): ProcessInstanceService.do_waiting() diff --git a/src/spiffworkflow_backend/services/data_setup_service.py b/src/spiffworkflow_backend/services/data_setup_service.py index 412c4b82..c9c0647e 100644 --- a/src/spiffworkflow_backend/services/data_setup_service.py +++ b/src/spiffworkflow_backend/services/data_setup_service.py @@ -26,7 +26,7 @@ class DataSetupService: current_app.logger.debug("DataSetupService.save_all_process_models() start") failing_process_models = [] - process_models = ProcessModelService().get_process_models() + process_models = ProcessModelService.get_process_models(recursive=True) SpecFileService.clear_caches() for process_model in process_models: current_app.logger.debug(f"Process Model: {process_model.display_name}") diff --git a/src/spiffworkflow_backend/services/error_handling_service.py b/src/spiffworkflow_backend/services/error_handling_service.py index 99e4fbe8..1e8b38f2 100644 --- a/src/spiffworkflow_backend/services/error_handling_service.py +++ b/src/spiffworkflow_backend/services/error_handling_service.py @@ -34,7 +34,7 @@ class ErrorHandlingService: self, _processor: 
ProcessInstanceProcessor, _error: Union[ApiError, Exception] ) -> None: """On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception.""" - process_model = ProcessModelService().get_process_model( + process_model = ProcessModelService.get_process_model( _processor.process_model_identifier ) if process_model.fault_or_suspend_on_exception == "suspend": diff --git a/src/spiffworkflow_backend/services/file_system_service.py b/src/spiffworkflow_backend/services/file_system_service.py index cbe007d6..a2a9181d 100644 --- a/src/spiffworkflow_backend/services/file_system_service.py +++ b/src/spiffworkflow_backend/services/file_system_service.py @@ -1,6 +1,8 @@ """File_system_service.""" import os +from contextlib import contextmanager from datetime import datetime +from typing import Generator from typing import List from typing import Optional @@ -23,18 +25,40 @@ class FileSystemService: PROCESS_GROUP_JSON_FILE = "process_group.json" PROCESS_MODEL_JSON_FILE = "process_model.json" + # https://stackoverflow.com/a/24176022/6090676 + @staticmethod + @contextmanager + def cd(newdir: str) -> Generator: + """Cd.""" + prevdir = os.getcwd() + os.chdir(os.path.expanduser(newdir)) + try: + yield + finally: + os.chdir(prevdir) + @staticmethod def root_path() -> str: """Root_path.""" # fixme: allow absolute files dir_name = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] app_root = current_app.root_path - return os.path.join(app_root, "..", dir_name) + return os.path.abspath(os.path.join(app_root, "..", dir_name)) + + @staticmethod + def id_string_to_relative_path(id_string: str) -> str: + """Id_string_to_relative_path.""" + return id_string.replace("/", os.sep) @staticmethod def process_group_path(name: str) -> str: """Category_path.""" - return os.path.abspath(os.path.join(FileSystemService.root_path(), name)) + return os.path.abspath( + os.path.join( + FileSystemService.root_path(), + FileSystemService.id_string_to_relative_path(name), + ) + ) @staticmethod def full_path_from_relative_path(relative_path: str) -> str: diff --git a/src/spiffworkflow_backend/services/git_service.py b/src/spiffworkflow_backend/services/git_service.py index 815e4cad..f187a47c 100644 --- a/src/spiffworkflow_backend/services/git_service.py +++ b/src/spiffworkflow_backend/services/git_service.py @@ -1,56 +1,246 @@ """Git_service.""" import os +import shutil +import subprocess # noqa we need the subprocess module to safely run the git commands +import uuid +from typing import Optional +from typing import Union from flask import current_app +from flask import g +from spiffworkflow_backend.config import ConfigurationError from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.services.file_system_service import FileSystemService +class MissingGitConfigsError(Exception): + """MissingGitConfigsError.""" + + +class InvalidGitWebhookBodyError(Exception): + """InvalidGitWebhookBodyError.""" + + +class GitCloneUrlMismatchError(Exception): + """GitCloneUrlMismatchError.""" + + +class GitCommandError(Exception): + """GitCommandError.""" + + +# TOOD: check for the existence of git and configs on bootup if publishing is enabled class GitService: """GitService.""" - @staticmethod - def get_current_revision() -> str: + @classmethod + def get_current_revision(cls) -> str: """Get_current_revision.""" bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] # The value includes a carriage return character at the end, so we don't grab the last character - 
current_git_revision = os.popen( # noqa: S605 - f"cd {bpmn_spec_absolute_dir} && git rev-parse --short HEAD" - ).read()[ - :-1 - ] # noqa: S605 - return current_git_revision + with FileSystemService.cd(bpmn_spec_absolute_dir): + return cls.run_shell_command_to_get_stdout( + ["git", "rev-parse", "--short", "HEAD"] + ) - @staticmethod + @classmethod def get_instance_file_contents_for_revision( - process_model: ProcessModelInfo, revision: str - ) -> bytes: + cls, process_model: ProcessModelInfo, revision: str + ) -> str: """Get_instance_file_contents_for_revision.""" bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] process_model_relative_path = FileSystemService.process_model_relative_path( process_model ) - shell_cd_command = f"cd {bpmn_spec_absolute_dir}" - shell_git_command = f"git show {revision}:{process_model_relative_path}/{process_model.primary_file_name}" - shell_command = f"{shell_cd_command} && {shell_git_command}" - # git show 78ae5eb:category_number_one/script-task/script-task.bpmn - file_contents: str = os.popen(shell_command).read()[:-1] # noqa: S605 - assert file_contents # noqa: S101 - return file_contents.encode("utf-8") + with FileSystemService.cd(bpmn_spec_absolute_dir): + shell_command = [ + "git", + "show", + f"{revision}:{process_model_relative_path}/{process_model.primary_file_name}", + ] + return cls.run_shell_command_to_get_stdout(shell_command) - @staticmethod - def commit(message: str) -> str: + @classmethod + def commit(cls, message: str, repo_path: Optional[str] = None) -> str: """Commit.""" - bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] + repo_path_to_use = repo_path + if repo_path is None: + repo_path_to_use = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] + if repo_path_to_use is None: + raise ConfigurationError("BPMN_SPEC_ABSOLUTE_DIR config must be set") + git_username = "" git_email = "" - if ( - current_app.config["GIT_COMMIT_USERNAME"] - and current_app.config["GIT_COMMIT_EMAIL"] - ): - git_username = current_app.config["GIT_COMMIT_USERNAME"] - git_email = current_app.config["GIT_COMMIT_EMAIL"] - shell_command = f"./bin/git_commit_bpmn_models_repo '{bpmn_spec_absolute_dir}' '{message}' '{git_username}' '{git_email}'" - output = os.popen(shell_command).read() # noqa: S605 - return output + if current_app.config["GIT_USERNAME"] and current_app.config["GIT_USER_EMAIL"]: + git_username = current_app.config["GIT_USERNAME"] + git_email = current_app.config["GIT_USER_EMAIL"] + shell_command_path = os.path.join( + current_app.root_path, "..", "..", "bin", "git_commit_bpmn_models_repo" + ) + shell_command = [ + shell_command_path, + repo_path_to_use, + message, + git_username, + git_email, + ] + return cls.run_shell_command_to_get_stdout(shell_command) + + @classmethod + def check_for_configs(cls) -> None: + """Check_for_configs.""" + if current_app.config["GIT_BRANCH_TO_PUBLISH_TO"] is None: + raise MissingGitConfigsError( + "Missing config for GIT_BRANCH_TO_PUBLISH_TO. " + "This is required for publishing process models" + ) + if current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"] is None: + raise MissingGitConfigsError( + "Missing config for GIT_CLONE_URL_FOR_PUBLISHING. 
" + "This is required for publishing process models" + ) + + @classmethod + def run_shell_command_as_boolean(cls, command: list[str]) -> bool: + """Run_shell_command_as_boolean.""" + # we know result will be a bool here + result: bool = cls.run_shell_command(command, return_success_state=True) # type: ignore + return result + + @classmethod + def run_shell_command_to_get_stdout(cls, command: list[str]) -> str: + """Run_shell_command_to_get_stdout.""" + # we know result will be a CompletedProcess here + result: subprocess.CompletedProcess[bytes] = cls.run_shell_command( + command, return_success_state=False + ) # type: ignore + return result.stdout.decode("utf-8") + + @classmethod + def run_shell_command( + cls, command: list[str], return_success_state: bool = False + ) -> Union[subprocess.CompletedProcess[bytes], bool]: + """Run_shell_command.""" + # this is fine since we pass the commands directly + result = subprocess.run(command, check=False, capture_output=True) # noqa + if return_success_state: + return result.returncode == 0 + + if result.returncode != 0: + stdout = result.stdout.decode("utf-8") + stderr = result.stderr.decode("utf-8") + raise GitCommandError( + f"Failed to execute git command: {command} " + f"Stdout: {stdout} " + f"Stderr: {stderr} " + ) + + return result + + # only supports github right now + @classmethod + def handle_web_hook(cls, webhook: dict) -> bool: + """Handle_web_hook.""" + cls.check_for_configs() + + if "repository" not in webhook or "clone_url" not in webhook["repository"]: + raise InvalidGitWebhookBodyError( + f"Cannot find required keys of 'repository:clone_url' from webhook body: {webhook}" + ) + + clone_url = webhook["repository"]["clone_url"] + if clone_url != current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"]: + raise GitCloneUrlMismatchError( + f"Configured clone url does not match clone url from webhook: {clone_url}" + ) + + if "ref" not in webhook: + raise InvalidGitWebhookBodyError( + f"Could not find the 'ref' arg in the webhook boy: {webhook}" + ) + + if current_app.config["GIT_BRANCH"] is None: + raise MissingGitConfigsError( + "Missing config for GIT_BRANCH. 
" + "This is required for updating the repository as a result of the webhook" + ) + + ref = webhook["ref"] + git_branch = current_app.config["GIT_BRANCH"] + if ref != f"refs/heads/{git_branch}": + return False + + with FileSystemService.cd(current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]): + cls.run_shell_command(["git", "pull"]) + return True + + @classmethod + def publish(cls, process_model_id: str, branch_to_update: str) -> str: + """Publish.""" + cls.check_for_configs() + source_process_model_root = FileSystemService.root_path() + source_process_model_path = os.path.join( + source_process_model_root, process_model_id + ) + unique_hex = uuid.uuid4().hex + clone_dir = f"sample-process-models.{unique_hex}" + + # clone new instance of sample-process-models, checkout branch_to_update + # we are adding a guid to this so the flake8 issue has been mitigated + destination_process_root = f"/tmp/{clone_dir}" # noqa + + git_clone_url = current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"].replace( + "https://", + f"https://{current_app.config['GIT_USERNAME']}:{current_app.config['GIT_USER_PASSWORD']}@", + ) + cmd = ["git", "clone", git_clone_url, destination_process_root] + + cls.run_shell_command(cmd) + with FileSystemService.cd(destination_process_root): + # create publish branch from branch_to_update + cls.run_shell_command(["git", "checkout", branch_to_update]) + branch_to_pull_request = f"publish-{process_model_id}" + + # check if branch exists and checkout appropriately + command = [ + "git", + "show-ref", + "--verify", + f"refs/remotes/origin/{branch_to_pull_request}", + ] + if cls.run_shell_command_as_boolean(command): + cls.run_shell_command(["git", "checkout", branch_to_pull_request]) + else: + cls.run_shell_command(["git", "checkout", "-b", branch_to_pull_request]) + + # copy files from process model into the new publish branch + destination_process_model_path = os.path.join( + destination_process_root, process_model_id + ) + if os.path.exists(destination_process_model_path): + shutil.rmtree(destination_process_model_path) + shutil.copytree(source_process_model_path, destination_process_model_path) + + # add and commit files to branch_to_pull_request, then push + commit_message = ( + f"Request to publish changes to {process_model_id}, " + f"from {g.user.username} on {current_app.config['ENV_IDENTIFIER']}" + ) + cls.commit(commit_message, destination_process_root) + cls.run_shell_command( + ["git", "push", "--set-upstream", "origin", branch_to_pull_request] + ) + + # build url for github page to open PR + git_remote = cls.run_shell_command_to_get_stdout( + ["git", "config", "--get", "remote.origin.url"] + ) + remote_url = git_remote.strip().replace(".git", "") + pr_url = f"{remote_url}/compare/{branch_to_update}...{branch_to_pull_request}?expand=1" + + # try to clean up + if os.path.exists(destination_process_root): + shutil.rmtree(destination_process_root) + + return pr_url diff --git a/src/spiffworkflow_backend/services/logging_service.py b/src/spiffworkflow_backend/services/logging_service.py index b93e8665..dd34cb3f 100644 --- a/src/spiffworkflow_backend/services/logging_service.py +++ b/src/spiffworkflow_backend/services/logging_service.py @@ -216,7 +216,9 @@ class DBHandler(logging.Handler): bpmn_task_type = record.task_type if hasattr(record, "task_type") else None # type: ignore timestamp = record.created message = record.msg if hasattr(record, "msg") else None - current_user_id = record.current_user_id if hasattr(record, "current_user_id") else None # type: ignore + current_user_id = ( 
+ record.current_user_id if hasattr(record, "current_user_id") else None # type: ignore + ) spiff_step = ( record.spiff_step # type: ignore if hasattr(record, "spiff_step") and record.spiff_step is not None # type: ignore diff --git a/src/spiffworkflow_backend/services/message_service.py b/src/spiffworkflow_backend/services/message_service.py index 216a66a5..cfb42c83 100644 --- a/src/spiffworkflow_backend/services/message_service.py +++ b/src/spiffworkflow_backend/services/message_service.py @@ -117,7 +117,7 @@ class MessageService: user: UserModel, ) -> ProcessInstanceModel: """Process_message_triggerable_process_model.""" - process_instance_receive = ProcessInstanceService.create_process_instance( + process_instance_receive = ProcessInstanceService.create_process_instance_from_process_model_identifier( message_triggerable_process_model.process_model_identifier, user, ) diff --git a/src/spiffworkflow_backend/services/process_instance_processor.py b/src/spiffworkflow_backend/services/process_instance_processor.py index be32a2f0..ffe69fd7 100644 --- a/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/src/spiffworkflow_backend/services/process_instance_processor.py @@ -81,6 +81,9 @@ from spiffworkflow_backend.models.message_instance import MessageInstanceModel from spiffworkflow_backend.models.message_instance import MessageModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.script_attributes_context import ( ScriptAttributesContext, @@ -97,6 +100,7 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskDeleg from spiffworkflow_backend.services.spec_file_service import SpecFileService from spiffworkflow_backend.services.user_service import UserService + # Sorry about all this crap. I wanted to move this thing to another file, but # importing a bunch of types causes circular imports. 
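The next hunk threads an optional external_methods mapping through CustomBpmnScriptEngine.evaluate(), so callers can inject extra names into the expression context. A minimal sketch of that idea with stand-in names (SketchScriptEngine and bump are illustrative, not the patch's own code):

    from typing import Any, Optional

    class SketchScriptEngine:
        """Sketch only: merge externally supplied helpers into the eval context."""

        def evaluate(
            self,
            task_data: dict[str, Any],
            expression: str,
            external_methods: Optional[dict[str, Any]] = None,
        ) -> Any:
            # task data first, then any externally supplied names layered on top
            context = {**task_data, **(external_methods or {})}
            return eval(expression, {}, context)  # noqa: S307 - illustration only

    # usage: SketchScriptEngine().evaluate({"a": 1}, "a + bump(1)", {"bump": lambda x: x + 1}) == 3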
@@ -178,9 +182,14 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore ) return Script.generate_augmented_list(script_attributes_context) - def evaluate(self, task: SpiffTask, expression: str) -> Any: + def evaluate( + self, + task: SpiffTask, + expression: str, + external_methods: Optional[dict[str, Any]] = None, + ) -> Any: """Evaluate.""" - return self._evaluate(expression, task.data, task) + return self._evaluate(expression, task.data, task, external_methods) def _evaluate( self, @@ -349,7 +358,9 @@ class ProcessInstanceProcessor: check_sub_specs(test_spec, 5) self.process_model_identifier = process_instance_model.process_model_identifier - # self.process_group_identifier = process_instance_model.process_group_identifier + self.process_model_display_name = ( + process_instance_model.process_model_display_name + ) try: self.bpmn_process_instance = self.__get_bpmn_process_instance( @@ -359,21 +370,8 @@ class ProcessInstanceProcessor: subprocesses=subprocesses, ) self.bpmn_process_instance.script_engine = self._script_engine - self.add_user_info_to_process_instance(self.bpmn_process_instance) - if self.PROCESS_INSTANCE_ID_KEY not in self.bpmn_process_instance.data: - if not process_instance_model.id: - db.session.add(process_instance_model) - # If the model is new, and has no id, save it, write it into the process_instance model - # and save it again. In this way, the workflow process is always aware of the - # database model to which it is associated, and scripts running within the model - # can then load data as needed. - self.bpmn_process_instance.data[ - ProcessInstanceProcessor.PROCESS_INSTANCE_ID_KEY - ] = process_instance_model.id - self.save() - except MissingSpecError as ke: raise ApiError( error_code="unexpected_process_instance_structure", @@ -387,7 +385,7 @@ class ProcessInstanceProcessor: cls, process_model_identifier: str ) -> Tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]: """Get_process_model_and_subprocesses.""" - process_model_info = ProcessModelService().get_process_model( + process_model_info = ProcessModelService.get_process_model( process_model_identifier ) if process_model_info is None: @@ -553,13 +551,8 @@ class ProcessInstanceProcessor: """SaveSpiffStepDetails.""" bpmn_json = self.serialize() wf_json = json.loads(bpmn_json) - task_json = "{}" - if "tasks" in wf_json: - task_json = json.dumps(wf_json["tasks"]) + task_json = wf_json["tasks"] - # TODO want to just save the tasks, something wasn't immediately working - # so after the flow works with the full wf_json revisit this - task_json = wf_json return { "process_instance_id": self.process_instance_model.id, "spiff_step": self.process_instance_model.spiff_step or 1, @@ -587,6 +580,41 @@ class ProcessInstanceProcessor: db.session.add(details_model) db.session.commit() + def extract_metadata(self, process_model_info: ProcessModelInfo) -> None: + """Extract_metadata.""" + metadata_extraction_paths = process_model_info.metadata_extraction_paths + if metadata_extraction_paths is None: + return + if len(metadata_extraction_paths) <= 0: + return + + current_data = self.get_current_data() + for metadata_extraction_path in metadata_extraction_paths: + key = metadata_extraction_path["key"] + path = metadata_extraction_path["path"] + path_segments = path.split(".") + data_for_key = current_data + for path_segment in path_segments: + if path_segment in data_for_key: + data_for_key = data_for_key[path_segment] + else: + data_for_key = None # type: ignore + break + + if data_for_key is not None: + pim = 
ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=self.process_instance_model.id, + key=key, + ).first() + if pim is None: + pim = ProcessInstanceMetadataModel( + process_instance_id=self.process_instance_model.id, + key=key, + ) + pim.value = data_for_key + db.session.add(pim) + db.session.commit() + def save(self) -> None: """Saves the current state of this processor to the database.""" self.process_instance_model.bpmn_json = self.serialize() @@ -606,17 +634,22 @@ class ProcessInstanceProcessor: if self.bpmn_process_instance.is_completed(): self.process_instance_model.end_in_seconds = round(time.time()) - active_tasks = ActiveTaskModel.query.filter_by( - process_instance_id=self.process_instance_model.id - ).all() - if len(active_tasks) > 0: - for at in active_tasks: - db.session.delete(at) - db.session.add(self.process_instance_model) db.session.commit() + active_tasks = ActiveTaskModel.query.filter_by( + process_instance_id=self.process_instance_model.id + ).all() ready_or_waiting_tasks = self.get_all_ready_or_waiting_tasks() + process_model_display_name = "" + process_model_info = self.process_model_service.get_process_model( + self.process_instance_model.process_model_identifier + ) + if process_model_info is not None: + process_model_display_name = process_model_info.display_name + + self.extract_metadata(process_model_info) + for ready_or_waiting_task in ready_or_waiting_tasks: # filter out non-usertasks task_spec = ready_or_waiting_task.task_spec @@ -635,34 +668,41 @@ class ProcessInstanceProcessor: if "formUiSchemaFilename" in properties: ui_form_file_name = properties["formUiSchemaFilename"] - process_model_display_name = "" - process_model_info = self.process_model_service.get_process_model( - self.process_instance_model.process_model_identifier - ) - if process_model_info is not None: - process_model_display_name = process_model_info.display_name + active_task = None + for at in active_tasks: + if at.task_id == str(ready_or_waiting_task.id): + active_task = at + active_tasks.remove(at) - active_task = ActiveTaskModel( - process_instance_id=self.process_instance_model.id, - process_model_display_name=process_model_display_name, - form_file_name=form_file_name, - ui_form_file_name=ui_form_file_name, - task_id=str(ready_or_waiting_task.id), - task_name=ready_or_waiting_task.task_spec.name, - task_title=ready_or_waiting_task.task_spec.description, - task_type=ready_or_waiting_task.task_spec.__class__.__name__, - task_status=ready_or_waiting_task.get_state_name(), - lane_assignment_id=potential_owner_hash["lane_assignment_id"], - ) - db.session.add(active_task) - db.session.commit() - - for potential_owner_id in potential_owner_hash["potential_owner_ids"]: - active_task_user = ActiveTaskUserModel( - user_id=potential_owner_id, active_task_id=active_task.id + if active_task is None: + active_task = ActiveTaskModel( + process_instance_id=self.process_instance_model.id, + process_model_display_name=process_model_display_name, + form_file_name=form_file_name, + ui_form_file_name=ui_form_file_name, + task_id=str(ready_or_waiting_task.id), + task_name=ready_or_waiting_task.task_spec.name, + task_title=ready_or_waiting_task.task_spec.description, + task_type=ready_or_waiting_task.task_spec.__class__.__name__, + task_status=ready_or_waiting_task.get_state_name(), + lane_assignment_id=potential_owner_hash["lane_assignment_id"], ) - db.session.add(active_task_user) - db.session.commit() + db.session.add(active_task) + db.session.commit() + + for potential_owner_id in 
potential_owner_hash[ + "potential_owner_ids" + ]: + active_task_user = ActiveTaskUserModel( + user_id=potential_owner_id, active_task_id=active_task.id + ) + db.session.add(active_task_user) + db.session.commit() + + if len(active_tasks) > 0: + for at in active_tasks: + db.session.delete(at) + db.session.commit() @staticmethod def get_parser() -> MyCustomParser: @@ -675,7 +715,7 @@ class ProcessInstanceProcessor: bpmn_process_identifier: str, ) -> Optional[str]: """Backfill_missing_spec_reference_records.""" - process_models = ProcessModelService().get_process_models() + process_models = ProcessModelService.get_process_models(recursive=True) for process_model in process_models: try: refs = SpecFileService.reference_map( @@ -1152,8 +1192,8 @@ class ProcessInstanceProcessor: def get_current_data(self) -> dict[str, Any]: """Get the current data for the process. - Return either most recent task data or the process data - if the process instance is complete + Return either the most recent task data or--if the process instance is complete-- + the process data. """ if self.process_instance_model.status == "complete": return self.get_data() diff --git a/src/spiffworkflow_backend/services/process_instance_report_service.py b/src/spiffworkflow_backend/services/process_instance_report_service.py index a521c1a3..84d5d675 100644 --- a/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -2,6 +2,9 @@ from dataclasses import dataclass from typing import Optional +import sqlalchemy +from flask_bpmn.models.db import db + from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -18,6 +21,9 @@ class ProcessInstanceReportFilter: end_from: Optional[int] = None end_to: Optional[int] = None process_status: Optional[list[str]] = None + initiated_by_me: Optional[bool] = None + with_tasks_completed_by_me: Optional[bool] = None + with_tasks_completed_by_my_group: Optional[bool] = None def to_dict(self) -> dict[str, str]: """To_dict.""" @@ -35,6 +41,16 @@ class ProcessInstanceReportFilter: d["end_to"] = str(self.end_to) if self.process_status is not None: d["process_status"] = ",".join(self.process_status) + if self.initiated_by_me is not None: + d["initiated_by_me"] = str(self.initiated_by_me).lower() + if self.with_tasks_completed_by_me is not None: + d["with_tasks_completed_by_me"] = str( + self.with_tasks_completed_by_me + ).lower() + if self.with_tasks_completed_by_my_group is not None: + d["with_tasks_completed_by_my_group"] = str( + self.with_tasks_completed_by_my_group + ).lower() return d @@ -44,49 +60,65 @@ class ProcessInstanceReportService: @classmethod def report_with_identifier( - cls, user: UserModel, report_identifier: Optional[str] = None + cls, + user: UserModel, + report_id: Optional[int] = None, + report_identifier: Optional[str] = None, ) -> ProcessInstanceReportModel: """Report_with_filter.""" + if report_id is not None: + process_instance_report = ProcessInstanceReportModel.query.filter_by( + id=report_id, created_by_id=user.id + ).first() + if process_instance_report is not None: + return process_instance_report # type: ignore + if report_identifier is None: - return ProcessInstanceReportModel.default_report(user) + report_identifier = "default" + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=report_identifier, created_by_id=user.id + ).first() + + if process_instance_report is not None: + return process_instance_report 
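# A sketch of the report_metadata shape the built-in reports defined just below
# all share: "columns", "filter_by", and "order_by". The values here are
# illustrative only, not part of the patch.
example_report_metadata = {
    "columns": [{"Header": "Id", "accessor": "id"}],
    "filter_by": [{"field_name": "initiated_by_me", "field_value": True}],
    "order_by": ["-start_in_seconds", "-id"],
}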
# type: ignore # TODO replace with system reports that are loaded on launch (or similar) temp_system_metadata_map = { + "default": { + "columns": cls.builtin_column_options(), + "filter_by": [], + "order_by": ["-start_in_seconds", "-id"], + }, "system_report_instances_initiated_by_me": { "columns": [ + {"Header": "id", "accessor": "id"}, { - "Header": "process_model_identifier", - "accessor": "process_model_identifier", + "Header": "process_model_display_name", + "accessor": "process_model_display_name", }, {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "id", "accessor": "id"}, {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, {"Header": "status", "accessor": "status"}, ], + "filter_by": [{"field_name": "initiated_by_me", "field_value": True}], + "order_by": ["-start_in_seconds", "-id"], }, "system_report_instances_with_tasks_completed_by_me": { - "columns": [ - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "status", "accessor": "status"}, - {"Header": "id", "accessor": "id"}, - { - "Header": "process_model_identifier", - "accessor": "process_model_identifier", - }, + "columns": cls.builtin_column_options(), + "filter_by": [ + {"field_name": "with_tasks_completed_by_me", "field_value": True} ], + "order_by": ["-start_in_seconds", "-id"], }, "system_report_instances_with_tasks_completed_by_my_groups": { - "columns": [ + "columns": cls.builtin_column_options(), + "filter_by": [ { - "Header": "process_model_identifier", - "accessor": "process_model_identifier", - }, - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "status", "accessor": "status"}, - {"Header": "id", "accessor": "id"}, + "field_name": "with_tasks_completed_by_my_group", + "field_value": True, + } ], + "order_by": ["-start_in_seconds", "-id"], }, } @@ -96,7 +128,7 @@ class ProcessInstanceReportService: report_metadata=temp_system_metadata_map[report_identifier], ) - return process_instance_report + return process_instance_report # type: ignore @classmethod def filter_by_to_dict( @@ -119,6 +151,10 @@ class ProcessInstanceReportService: """Filter_from_metadata.""" filters = cls.filter_by_to_dict(process_instance_report) + def bool_value(key: str) -> Optional[bool]: + """Bool_value.""" + return bool(filters[key]) if key in filters else None + def int_value(key: str) -> Optional[int]: """Int_value.""" return int(filters[key]) if key in filters else None @@ -133,6 +169,11 @@ class ProcessInstanceReportService: end_from = int_value("end_from") end_to = int_value("end_to") process_status = list_value("process_status") + initiated_by_me = bool_value("initiated_by_me") + with_tasks_completed_by_me = bool_value("with_tasks_completed_by_me") + with_tasks_completed_by_my_group = bool_value( + "with_tasks_completed_by_my_group" + ) report_filter = ProcessInstanceReportFilter( process_model_identifier, @@ -141,6 +182,9 @@ class ProcessInstanceReportService: end_from, end_to, process_status, + initiated_by_me, + with_tasks_completed_by_me, + with_tasks_completed_by_my_group, ) return report_filter @@ -155,6 +199,9 @@ class ProcessInstanceReportService: end_from: Optional[int] = None, end_to: Optional[int] = None, process_status: Optional[str] = None, + initiated_by_me: Optional[bool] = None, + with_tasks_completed_by_me: Optional[bool] = None, + with_tasks_completed_by_my_group: Optional[bool] = None, ) -> 
ProcessInstanceReportFilter: """Filter_from_metadata_with_overrides.""" report_filter = cls.filter_from_metadata(process_instance_report) @@ -171,5 +218,53 @@ class ProcessInstanceReportService: report_filter.end_to = end_to if process_status is not None: report_filter.process_status = process_status.split(",") + if initiated_by_me is not None: + report_filter.initiated_by_me = initiated_by_me + if with_tasks_completed_by_me is not None: + report_filter.with_tasks_completed_by_me = with_tasks_completed_by_me + if with_tasks_completed_by_my_group is not None: + report_filter.with_tasks_completed_by_my_group = ( + with_tasks_completed_by_my_group + ) return report_filter + + @classmethod + def add_metadata_columns_to_process_instance( + cls, + process_instance_sqlalchemy_rows: list[sqlalchemy.engine.row.Row], # type: ignore + metadata_columns: list[dict], + ) -> list[dict]: + """Add_metadata_columns_to_process_instance.""" + results = [] + for process_instance in process_instance_sqlalchemy_rows: + process_instance_dict = process_instance["ProcessInstanceModel"].serialized + for metadata_column in metadata_columns: + if metadata_column["accessor"] not in process_instance_dict: + process_instance_dict[ + metadata_column["accessor"] + ] = process_instance[metadata_column["accessor"]] + + results.append(process_instance_dict) + return results + + @classmethod + def get_column_names_for_model(cls, model: db.Model) -> list[str]: # type: ignore + """Get_column_names_for_model.""" + return [i.name for i in model.__table__.columns] + + @classmethod + def builtin_column_options(cls) -> list[dict]: + """Builtin_column_options.""" + return [ + {"Header": "Id", "accessor": "id", "filterable": False}, + { + "Header": "Process", + "accessor": "process_model_display_name", + "filterable": False, + }, + {"Header": "Start", "accessor": "start_in_seconds", "filterable": False}, + {"Header": "End", "accessor": "end_in_seconds", "filterable": False}, + {"Header": "Username", "accessor": "username", "filterable": False}, + {"Header": "Status", "accessor": "status", "filterable": False}, + ] diff --git a/src/spiffworkflow_backend/services/process_instance_service.py b/src/spiffworkflow_backend/services/process_instance_service.py index 80271801..46bd252b 100644 --- a/src/spiffworkflow_backend/services/process_instance_service.py +++ b/src/spiffworkflow_backend/services/process_instance_service.py @@ -12,6 +12,7 @@ from spiffworkflow_backend.models.active_task import ActiveTaskModel from spiffworkflow_backend.models.process_instance import ProcessInstanceApi from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.task import MultiInstanceType from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.user import UserModel @@ -28,9 +29,10 @@ class ProcessInstanceService: TASK_STATE_LOCKED = "locked" - @staticmethod + @classmethod def create_process_instance( - process_model_identifier: str, + cls, + process_model: ProcessModelInfo, user: UserModel, ) -> ProcessInstanceModel: """Get_process_instance_from_spec.""" @@ -38,8 +40,8 @@ class ProcessInstanceService: process_instance_model = ProcessInstanceModel( status=ProcessInstanceStatus.not_started.value, process_initiator=user, - process_model_identifier=process_model_identifier, - process_group_identifier="", + 
process_model_identifier=process_model.id, + process_model_display_name=process_model.display_name, start_in_seconds=round(time.time()), bpmn_version_control_type="git", bpmn_version_control_identifier=current_git_revision, @@ -48,6 +50,16 @@ class ProcessInstanceService: db.session.commit() return process_instance_model + @classmethod + def create_process_instance_from_process_model_identifier( + cls, + process_model_identifier: str, + user: UserModel, + ) -> ProcessInstanceModel: + """Create_process_instance_from_process_model_identifier.""" + process_model = ProcessModelService.get_process_model(process_model_identifier) + return cls.create_process_instance(process_model, user) + @staticmethod def do_waiting() -> None: """Do_waiting.""" @@ -88,20 +100,15 @@ class ProcessInstanceService: process_model = process_model_service.get_process_model( processor.process_model_identifier ) - is_review_value = process_model.is_review if process_model else False - title_value = process_model.display_name if process_model else "" + process_model.display_name if process_model else "" process_instance_api = ProcessInstanceApi( id=processor.get_process_instance_id(), status=processor.get_status(), next_task=None, - # navigation=navigation, process_model_identifier=processor.process_model_identifier, - process_group_identifier="", - # total_tasks=len(navigation), + process_model_display_name=processor.process_model_display_name, completed_tasks=processor.process_instance_model.completed_tasks, updated_at_in_seconds=processor.process_instance_model.updated_at_in_seconds, - is_review=is_review_value, - title=title_value, ) next_task_trying_again = next_task @@ -197,7 +204,7 @@ class ProcessInstanceService: a multi-instance task. """ AuthorizationService.assert_user_can_complete_spiff_task( - processor, spiff_task, user + processor.process_instance_model.id, spiff_task, user ) dot_dct = ProcessInstanceService.create_dot_dict(data) @@ -315,22 +322,3 @@ class ProcessInstanceService: ) return task - - @staticmethod - def serialize_flat_with_task_data( - process_instance: ProcessInstanceModel, - ) -> dict[str, Any]: - """Serialize_flat_with_task_data.""" - results = {} - try: - original_status = process_instance.status - processor = ProcessInstanceProcessor(process_instance) - process_instance.data = processor.get_current_data() - results = process_instance.serialized_flat - # this process seems to mutate the status of the process_instance which - # can result in different results than expected from process_instance_list, - # so set the status back to the expected value - results["status"] = original_status - except ApiError: - results = process_instance.serialized - return results diff --git a/src/spiffworkflow_backend/services/process_model_service.py b/src/spiffworkflow_backend/services/process_model_service.py index 2431289c..d4fa5647 100644 --- a/src/spiffworkflow_backend/services/process_model_service.py +++ b/src/spiffworkflow_backend/services/process_model_service.py @@ -2,6 +2,7 @@ import json import os import shutil +from glob import glob from typing import Any from typing import List from typing import Optional @@ -17,7 +18,9 @@ from spiffworkflow_backend.models.process_group import ProcessGroupSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema +from spiffworkflow_backend.services.authorization_service import 
AuthorizationService from spiffworkflow_backend.services.file_system_service import FileSystemService +from spiffworkflow_backend.services.user_service import UserService T = TypeVar("T") @@ -34,20 +37,54 @@ class ProcessModelService(FileSystemService): GROUP_SCHEMA = ProcessGroupSchema() PROCESS_MODEL_SCHEMA = ProcessModelInfoSchema() - def is_group(self, path: str) -> bool: + @classmethod + def is_group(cls, path: str) -> bool: """Is_group.""" - group_json_path = os.path.join(path, self.PROCESS_GROUP_JSON_FILE) + group_json_path = os.path.join(path, cls.PROCESS_GROUP_JSON_FILE) if os.path.exists(group_json_path): return True return False - def is_model(self, path: str) -> bool: + @classmethod + def is_group_identifier(cls, process_group_identifier: str) -> bool: + """Is_group_identifier.""" + if os.path.exists(FileSystemService.root_path()): + process_group_path = os.path.abspath( + os.path.join( + FileSystemService.root_path(), + FileSystemService.id_string_to_relative_path( + process_group_identifier + ), + ) + ) + return cls.is_group(process_group_path) + + return False + + @classmethod + def is_model(cls, path: str) -> bool: """Is_model.""" - model_json_path = os.path.join(path, self.PROCESS_MODEL_JSON_FILE) + model_json_path = os.path.join(path, cls.PROCESS_MODEL_JSON_FILE) if os.path.exists(model_json_path): return True return False + @classmethod + def is_model_identifier(cls, process_model_identifier: str) -> bool: + """Is_model_identifier.""" + if os.path.exists(FileSystemService.root_path()): + process_model_path = os.path.abspath( + os.path.join( + FileSystemService.root_path(), + FileSystemService.id_string_to_relative_path( + process_model_identifier + ), + ) + ) + return cls.is_model(process_model_path) + + return False + @staticmethod def write_json_file( file_path: str, json_data: dict, indent: int = 4, sort_keys: bool = True @@ -67,37 +104,38 @@ class ProcessModelService(FileSystemService): end = start + per_page return items[start:end] - def add_process_model(self, process_model: ProcessModelInfo) -> None: + @classmethod + def add_process_model(cls, process_model: ProcessModelInfo) -> None: """Add_spec.""" - display_order = self.next_display_order(process_model) - process_model.display_order = display_order - self.save_process_model(process_model) + cls.save_process_model(process_model) + @classmethod def update_process_model( - self, process_model: ProcessModelInfo, attributes_to_update: dict + cls, process_model: ProcessModelInfo, attributes_to_update: dict ) -> None: """Update_spec.""" for atu_key, atu_value in attributes_to_update.items(): if hasattr(process_model, atu_key): setattr(process_model, atu_key, atu_value) - self.save_process_model(process_model) + cls.save_process_model(process_model) - def save_process_model(self, process_model: ProcessModelInfo) -> None: + @classmethod + def save_process_model(cls, process_model: ProcessModelInfo) -> None: """Save_process_model.""" process_model_path = os.path.abspath( os.path.join(FileSystemService.root_path(), process_model.id) ) os.makedirs(process_model_path, exist_ok=True) json_path = os.path.abspath( - os.path.join(process_model_path, self.PROCESS_MODEL_JSON_FILE) + os.path.join(process_model_path, cls.PROCESS_MODEL_JSON_FILE) ) process_model_id = process_model.id # we don't save id in the json file # this allows us to move models around on the filesystem # the id is determined by its location on the filesystem delattr(process_model, "id") - json_data = self.PROCESS_MODEL_SCHEMA.dump(process_model) - 
self.write_json_file(json_path, json_data) + json_data = cls.PROCESS_MODEL_SCHEMA.dump(process_model) + cls.write_json_file(json_path, json_data) process_model.id = process_model_id def process_model_delete(self, process_model_id: str) -> None: @@ -110,22 +148,36 @@ class ProcessModelService(FileSystemService): error_code="existing_instances", message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.", ) - self.get_process_model(process_model_id) - # path = self.workflow_path(process_model) - path = f"{FileSystemService.root_path()}/{process_model_id}" + process_model = self.get_process_model(process_model_id) + path = self.workflow_path(process_model) shutil.rmtree(path) + def process_model_move( + self, original_process_model_id: str, new_location: str + ) -> ProcessModelInfo: + """Process_model_move.""" + process_model = self.get_process_model(original_process_model_id) + original_model_path = self.workflow_path(process_model) + _, model_id = os.path.split(original_model_path) + new_relative_path = os.path.join(new_location, model_id) + new_model_path = os.path.abspath( + os.path.join(FileSystemService.root_path(), new_relative_path) + ) + shutil.move(original_model_path, new_model_path) + new_process_model = self.get_process_model(new_relative_path) + return new_process_model + @classmethod def get_process_model_from_relative_path( cls, relative_path: str ) -> ProcessModelInfo: """Get_process_model_from_relative_path.""" process_group_identifier, _ = os.path.split(relative_path) - process_group = cls().get_process_group(process_group_identifier) path = os.path.join(FileSystemService.root_path(), relative_path) - return cls().__scan_process_model(path, process_group=process_group) + return cls.__scan_process_model(path) - def get_process_model(self, process_model_id: str) -> ProcessModelInfo: + @classmethod + def get_process_model(cls, process_model_id: str) -> ProcessModelInfo: """Get a process model from a model and group id. process_model_id is the full path to the model--including groups. 
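The hunk that follows rewrites get_process_models() to discover models by globbing for process_model.json files instead of walking preloaded group objects. A standalone sketch of that discovery step, assuming the same one-json-file-per-model layout (the function name here is illustrative, not from the patch):

    import os
    from glob import glob

    def find_process_model_dirs(root_path: str, recursive: bool = False) -> list[str]:
        """Return model directories, relative to root, that hold a process_model.json."""
        depth = "**" if recursive else "*"
        pattern = os.path.join(root_path, depth, "process_model.json")
        return sorted(
            os.path.dirname(os.path.relpath(json_file, start=root_path))
            for json_file in glob(pattern, recursive=True)
        )

The "**" segment matches any number of nested directories, which is what lets the recursive=True path find models inside deeply nested process groups.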
@@ -136,108 +188,139 @@ class ProcessModelService(FileSystemService): model_path = os.path.abspath( os.path.join(FileSystemService.root_path(), process_model_id) ) - if self.is_model(model_path): - process_model = self.get_process_model_from_relative_path(process_model_id) - return process_model - - # group_path, model_id = os.path.split(process_model_id) - # if group_path is not None: - # process_group = self.get_process_group(group_path) - # if process_group is not None: - # for process_model in process_group.process_models: - # if process_model_id == process_model.id: - # return process_model - # with os.scandir(FileSystemService.root_path()) as process_group_dirs: - # for item in process_group_dirs: - # process_group_dir = item - # if item.is_dir(): - # with os.scandir(item.path) as spec_dirs: - # for sd in spec_dirs: - # if sd.name == process_model_id: - # # Now we have the process_group directory, and spec directory - # process_group = self.__scan_process_group( - # process_group_dir - # ) - # return self.__scan_process_model(sd.path, sd.name, process_group) + if cls.is_model(model_path): + return cls.get_process_model_from_relative_path(process_model_id) raise ProcessEntityNotFoundError("process_model_not_found") + @classmethod def get_process_models( - self, process_group_id: Optional[str] = None + cls, + process_group_id: Optional[str] = None, + recursive: Optional[bool] = False, + filter_runnable_by_user: Optional[bool] = False, ) -> List[ProcessModelInfo]: """Get process models.""" - process_groups = [] - if process_group_id is None: - process_groups = self.get_process_groups() - else: - process_group = self.get_process_group(process_group_id) - if process_group is not None: - process_groups.append(process_group) - process_models = [] - for process_group in process_groups: - process_models.extend(process_group.process_models) + root_path = FileSystemService.root_path() + if process_group_id: + awesome_id = process_group_id.replace("/", os.sep) + root_path = os.path.join(root_path, awesome_id) + + process_model_glob = os.path.join(root_path, "*", "process_model.json") + if recursive: + process_model_glob = os.path.join(root_path, "**", "process_model.json") + + for file in glob(process_model_glob, recursive=True): + process_model_relative_path = os.path.relpath( + file, start=FileSystemService.root_path() + ) + process_model = cls.get_process_model_from_relative_path( + os.path.dirname(process_model_relative_path) + ) + process_models.append(process_model) process_models.sort() + + if filter_runnable_by_user: + user = UserService.current_user() + new_process_model_list = [] + for process_model in process_models: + uri = f"/v1.0/process-models/{process_model.id.replace('/', ':')}/process-instances" + result = AuthorizationService.user_has_permission( + user=user, permission="create", target_uri=uri + ) + if result: + new_process_model_list.append(process_model) + return new_process_model_list + return process_models + @classmethod + def get_parent_group_array(cls, process_identifier: str) -> list[dict]: + """Get_parent_group_array.""" + full_group_id_path = None + parent_group_array = [] + for process_group_id_segment in process_identifier.split("/")[0:-1]: + if full_group_id_path is None: + full_group_id_path = process_group_id_segment + else: + full_group_id_path = os.path.join(full_group_id_path, process_group_id_segment) # type: ignore + parent_group = ProcessModelService.get_process_group(full_group_id_path) + if parent_group: + parent_group_array.append( + {"id": 
parent_group.id, "display_name": parent_group.display_name} + ) + return parent_group_array + + @classmethod def get_process_groups( - self, process_group_id: Optional[str] = None + cls, process_group_id: Optional[str] = None ) -> list[ProcessGroup]: - """Returns the process_groups as a list in display order.""" - process_groups = self.__scan_process_groups(process_group_id) + """Returns the process_groups.""" + process_groups = cls.__scan_process_groups(process_group_id) process_groups.sort() return process_groups - def get_process_group(self, process_group_id: str) -> ProcessGroup: + @classmethod + def get_process_group( + cls, process_group_id: str, find_direct_nested_items: bool = True + ) -> ProcessGroup: """Look for a given process_group, and return it.""" if os.path.exists(FileSystemService.root_path()): process_group_path = os.path.abspath( - os.path.join(FileSystemService.root_path(), process_group_id) + os.path.join( + FileSystemService.root_path(), + FileSystemService.id_string_to_relative_path(process_group_id), + ) ) - if self.is_group(process_group_path): - return self.__scan_process_group(process_group_path) - # nested_groups = [] - # process_group_dir = os.scandir(process_group_path) - # for item in process_group_dir: - # if self.is_group(item.path): - # nested_group = self.get_process_group(os.path.join(process_group_path, item.path)) - # nested_groups.append(nested_group) - # elif self.is_model(item.path): - # print("get_process_group: ") - # return self.__scan_process_group(process_group_path) - # with os.scandir(FileSystemService.root_path()) as directory_items: - # for item in directory_items: - # if item.is_dir() and item.name == process_group_id: - # return self.__scan_process_group(item) + if cls.is_group(process_group_path): + return cls.find_or_create_process_group( + process_group_path, + find_direct_nested_items=find_direct_nested_items, + ) raise ProcessEntityNotFoundError( "process_group_not_found", f"Process Group Id: {process_group_id}" ) - def add_process_group(self, process_group: ProcessGroup) -> ProcessGroup: + @classmethod + def add_process_group(cls, process_group: ProcessGroup) -> ProcessGroup: """Add_process_group.""" - display_order = len(self.get_process_groups()) - process_group.display_order = display_order - return self.update_process_group(process_group) + return cls.update_process_group(process_group) - def update_process_group(self, process_group: ProcessGroup) -> ProcessGroup: + @classmethod + def update_process_group(cls, process_group: ProcessGroup) -> ProcessGroup: """Update_process_group.""" - cat_path = self.process_group_path(process_group.id) + cat_path = cls.process_group_path(process_group.id) os.makedirs(cat_path, exist_ok=True) - json_path = os.path.join(cat_path, self.PROCESS_GROUP_JSON_FILE) + json_path = os.path.join(cat_path, cls.PROCESS_GROUP_JSON_FILE) serialized_process_group = process_group.serialized # we don't store `id` in the json files # this allows us to move groups around on the filesystem del serialized_process_group["id"] - self.write_json_file(json_path, serialized_process_group) + cls.write_json_file(json_path, serialized_process_group) return process_group + def process_group_move( + self, original_process_group_id: str, new_location: str + ) -> ProcessGroup: + """Process_group_move.""" + original_group_path = self.process_group_path(original_process_group_id) + _, original_group_id = os.path.split(original_group_path) + new_root = os.path.join(FileSystemService.root_path(), new_location) + new_group_path = 
os.path.abspath( + os.path.join(FileSystemService.root_path(), new_root, original_group_id) + ) + destination = shutil.move(original_group_path, new_group_path) + new_process_group = self.get_process_group(destination) + return new_process_group + def __get_all_nested_models(self, group_path: str) -> list: """__get_all_nested_models.""" all_nested_models = [] for _root, dirs, _files in os.walk(group_path): for dir in dirs: model_dir = os.path.join(group_path, dir) - if ProcessModelService().is_model(model_dir): + if ProcessModelService.is_model(model_dir): process_model = self.get_process_model(model_dir) all_nested_models.append(process_model) return all_nested_models @@ -273,8 +356,9 @@ class ProcessModelService(FileSystemService): index += 1 return process_groups + @classmethod def __scan_process_groups( - self, process_group_id: Optional[str] = None + cls, process_group_id: Optional[str] = None ) -> list[ProcessGroup]: """__scan_process_groups.""" if not os.path.exists(FileSystemService.root_path()): @@ -288,14 +372,17 @@ class ProcessModelService(FileSystemService): process_groups = [] for item in directory_items: # if item.is_dir() and not item.name[0] == ".": - if item.is_dir() and self.is_group(item): # type: ignore - scanned_process_group = self.__scan_process_group(item.path) + if item.is_dir() and cls.is_group(item): # type: ignore + scanned_process_group = cls.find_or_create_process_group(item.path) process_groups.append(scanned_process_group) return process_groups - def __scan_process_group(self, dir_path: str) -> ProcessGroup: + @classmethod + def find_or_create_process_group( + cls, dir_path: str, find_direct_nested_items: bool = True + ) -> ProcessGroup: """Reads the process_group.json file, and any nested directories.""" - cat_path = os.path.join(dir_path, self.PROCESS_GROUP_JSON_FILE) + cat_path = os.path.join(dir_path, cls.PROCESS_GROUP_JSON_FILE) if os.path.exists(cat_path): with open(cat_path) as cat_json: data = json.load(cat_json) @@ -316,40 +403,41 @@ class ProcessModelService(FileSystemService): display_order=10000, admin=False, ) - self.write_json_file(cat_path, self.GROUP_SCHEMA.dump(process_group)) + cls.write_json_file(cat_path, cls.GROUP_SCHEMA.dump(process_group)) # we don't store `id` in the json files, so we add it in here process_group.id = process_group_id - with os.scandir(dir_path) as nested_items: - process_group.process_models = [] - process_group.process_groups = [] - for nested_item in nested_items: - if nested_item.is_dir(): - # TODO: check whether this is a group or model - if self.is_group(nested_item.path): - # This is a nested group - process_group.process_groups.append( - self.__scan_process_group(nested_item.path) - ) - elif self.is_model(nested_item.path): - process_group.process_models.append( - self.__scan_process_model( - nested_item.path, - nested_item.name, - process_group=process_group, + + if find_direct_nested_items: + with os.scandir(dir_path) as nested_items: + process_group.process_models = [] + process_group.process_groups = [] + for nested_item in nested_items: + if nested_item.is_dir(): + # TODO: check whether this is a group or model + if cls.is_group(nested_item.path): + # This is a nested group + process_group.process_groups.append( + cls.find_or_create_process_group(nested_item.path) ) - ) - process_group.process_models.sort() - # process_group.process_groups.sort() + elif ProcessModelService.is_model(nested_item.path): + process_group.process_models.append( + cls.__scan_process_model( + nested_item.path, + 
nested_item.name, + ) + ) + process_group.process_models.sort() + # process_group.process_groups.sort() return process_group + @classmethod def __scan_process_model( - self, + cls, path: str, name: Optional[str] = None, - process_group: Optional[ProcessGroup] = None, ) -> ProcessModelInfo: """__scan_process_model.""" - json_file_path = os.path.join(path, self.PROCESS_MODEL_JSON_FILE) + json_file_path = os.path.join(path, cls.PROCESS_MODEL_JSON_FILE) if os.path.exists(json_file_path): with open(json_file_path) as wf_json: @@ -377,13 +465,10 @@ class ProcessModelService(FileSystemService): display_name=name, description="", display_order=0, - is_review=False, ) - self.write_json_file( - json_file_path, self.PROCESS_MODEL_SCHEMA.dump(process_model_info) + cls.write_json_file( + json_file_path, cls.PROCESS_MODEL_SCHEMA.dump(process_model_info) ) # we don't store `id` in the json files, so we add it in here process_model_info.id = name - if process_group: - process_model_info.process_group = process_group.id return process_model_info diff --git a/src/spiffworkflow_backend/services/secret_service.py b/src/spiffworkflow_backend/services/secret_service.py index 42f401c1..e4dee491 100644 --- a/src/spiffworkflow_backend/services/secret_service.py +++ b/src/spiffworkflow_backend/services/secret_service.py @@ -19,15 +19,15 @@ from spiffworkflow_backend.models.secret_model import SecretModel class SecretService: """SecretService.""" - def encrypt_key(self, plain_key: str) -> str: - """Encrypt_key.""" - # flask_secret = current_app.secret_key - # print("encrypt_key") - ... + # def encrypt_key(self, plain_key: str) -> str: + # """Encrypt_key.""" + # # flask_secret = current_app.secret_key + # # print("encrypt_key") + # ... - def decrypt_key(self, encrypted_key: str) -> str: - """Decrypt key.""" - ... + # def decrypt_key(self, encrypted_key: str) -> str: + # """Decrypt key.""" + # ... @staticmethod def add_secret( @@ -65,7 +65,7 @@ class SecretService: def update_secret( key: str, value: str, - user_id: int, + user_id: Optional[int] = None, create_if_not_exists: Optional[bool] = False, ) -> None: """Does this pass pre commit?""" @@ -79,6 +79,12 @@ class SecretService: db.session.rollback() raise e elif create_if_not_exists: + if user_id is None: + raise ApiError( + error_code="update_secret_error_no_user_id", + message=f"Cannot update secret with key: {key}. 
Missing user id.", + status_code=404, + ) SecretService.add_secret(key=key, value=value, user_id=user_id) else: raise ApiError( diff --git a/src/spiffworkflow_backend/services/service_task_service.py b/src/spiffworkflow_backend/services/service_task_service.py index 97ce1495..15e25a75 100644 --- a/src/spiffworkflow_backend/services/service_task_service.py +++ b/src/spiffworkflow_backend/services/service_task_service.py @@ -8,6 +8,7 @@ from flask import g from spiffworkflow_backend.services.file_system_service import FileSystemService from spiffworkflow_backend.services.secret_service import SecretService +from spiffworkflow_backend.services.user_service import UserService class ConnectorProxyError(Exception): @@ -65,7 +66,8 @@ class ServiceTaskDelegate: secret_key = parsed_response["auth"] refreshed_token_set = json.dumps(parsed_response["refreshed_token_set"]) - SecretService().update_secret(secret_key, refreshed_token_set, g.user.id) + user_id = g.user.id if UserService.has_user() else None + SecretService().update_secret(secret_key, refreshed_token_set, user_id) return json.dumps(parsed_response["api_response"]) diff --git a/src/spiffworkflow_backend/services/spec_file_service.py b/src/spiffworkflow_backend/services/spec_file_service.py index f02e3a6f..c69f41c3 100644 --- a/src/spiffworkflow_backend/services/spec_file_service.py +++ b/src/spiffworkflow_backend/services/spec_file_service.py @@ -171,12 +171,11 @@ class SpecFileService(FileSystemService): ref.is_primary = True if ref.is_primary: - ProcessModelService().update_process_model( + ProcessModelService.update_process_model( process_model_info, { "primary_process_id": ref.identifier, "primary_file_name": file_name, - "is_review": ref.has_lanes, }, ) SpecFileService.update_caches(ref) @@ -322,7 +321,6 @@ class SpecFileService(FileSystemService): message_triggerable_process_model = MessageTriggerableProcessModel( message_model_id=message_model.id, process_model_identifier=ref.process_model_id, - process_group_identifier="process_group_identifier", ) db.session.add(message_triggerable_process_model) db.session.commit() @@ -330,8 +328,6 @@ class SpecFileService(FileSystemService): if ( message_triggerable_process_model.process_model_identifier != ref.process_model_id - # or message_triggerable_process_model.process_group_identifier - # != process_model_info.process_group_id ): raise ValidationException( f"Message model is already used to start process model {ref.process_model_id}" diff --git a/tests/data/hello_world/hello_world.bpmn b/tests/data/hello_world/hello_world.bpmn index 1e5bc853..4be5adba 100644 --- a/tests/data/hello_world/hello_world.bpmn +++ b/tests/data/hello_world/hello_world.bpmn @@ -19,7 +19,11 @@ Flow_0bazl8x Flow_1mcaszp - a = 1 + a = 1 +b = 2 +outer = {} +outer["inner"] = 'sweet1' + Flow_1mcaszp diff --git a/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn b/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn new file mode 100644 index 00000000..7452216a --- /dev/null +++ b/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn @@ -0,0 +1,56 @@ + + + + + Flow_1ohrjz9 + + + + Flow_1flxgry + + + + Flow_1ohrjz9 + Flow_18gs4jt + outer = {} +invoice_number = 123 +outer["inner"] = 'sweet1' +outer['time'] = time.time_ns() + + + + Flow_18gs4jt + Flow_1flxgry + outer["inner"] = 'sweet2' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/save_process_instance_metadata/save_process_instance_metadata.bpmn 
b/tests/data/save_process_instance_metadata/save_process_instance_metadata.bpmn new file mode 100644 index 00000000..2c72b08d --- /dev/null +++ b/tests/data/save_process_instance_metadata/save_process_instance_metadata.bpmn @@ -0,0 +1,52 @@ + + + + + Flow_1j4jzft + + + + Flow_01xr2ac + + + Flow_1j4jzft + Flow_10xyk22 + save_process_instance_metadata({"key1": "value1"}) + + + + Flow_10xyk22 + Flow_01xr2ac + save_process_instance_metadata({"key2": "value2", "key3": "value3"}) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/spiffworkflow_backend/helpers/base_test.py b/tests/spiffworkflow_backend/helpers/base_test.py index 44c99908..48982fc6 100644 --- a/tests/spiffworkflow_backend/helpers/base_test.py +++ b/tests/spiffworkflow_backend/helpers/base_test.py @@ -140,7 +140,7 @@ class BaseTest: process_group_path = os.path.abspath( os.path.join(FileSystemService.root_path(), process_group_id) ) - if ProcessModelService().is_group(process_group_path): + if ProcessModelService.is_group(process_group_path): if exception_notification_addresses is None: exception_notification_addresses = [] @@ -149,7 +149,6 @@ class BaseTest: id=process_model_id, display_name=process_model_display_name, description=process_model_description, - is_review=False, primary_process_id=primary_process_id, primary_file_name=primary_file_name, fault_or_suspend_on_exception=fault_or_suspend_on_exception, @@ -253,9 +252,20 @@ class BaseTest: There must be an existing process model to instantiate. """ + if not ProcessModelService.is_model_identifier(test_process_model_id): + dirname = os.path.dirname(test_process_model_id) + if not ProcessModelService.is_group_identifier(dirname): + process_group = ProcessGroup(id=dirname, display_name=dirname) + ProcessModelService.add_process_group(process_group) + basename = os.path.basename(test_process_model_id) + load_test_spec( + process_model_id=test_process_model_id, + process_model_source_directory=basename, + bpmn_file_name=basename, + ) modified_process_model_id = test_process_model_id.replace("/", ":") response = client.post( - f"/v1.0/process-models/{modified_process_model_id}/process-instances", + f"/v1.0/process-instances/{modified_process_model_id}", headers=headers, ) assert response.status_code == 201 @@ -284,7 +294,7 @@ class BaseTest: status=status, process_initiator=user, process_model_identifier=process_model.id, - process_group_identifier="", + process_model_display_name=process_model.display_name, updated_at_in_seconds=round(time.time()), start_in_seconds=current_time - (3600 * 1), end_in_seconds=current_time - (3600 * 1 - 20), @@ -347,3 +357,16 @@ class BaseTest: target_uri=target_uri, ) assert has_permission is expected_result + + def modify_process_identifier_for_path_param(self, identifier: str) -> str: + """Identifier.""" + if "\\" in identifier: + raise Exception(f"Found backslash in identifier: {identifier}") + + return identifier.replace("/", ":") + + def un_modify_modified_process_identifier_for_path_param( + self, modified_identifier: str + ) -> str: + """Un_modify_modified_process_model_id.""" + return modified_identifier.replace(":", "/") diff --git a/tests/spiffworkflow_backend/helpers/example_data.py b/tests/spiffworkflow_backend/helpers/example_data.py index befd2602..4b0ee5fc 100644 --- a/tests/spiffworkflow_backend/helpers/example_data.py +++ b/tests/spiffworkflow_backend/helpers/example_data.py @@ -36,10 +36,8 @@ class ExampleDataLoader: display_name=display_name, description=description, display_order=display_order, - 
        )
-        workflow_spec_service = ProcessModelService()
-        workflow_spec_service.add_process_model(spec)
+        ProcessModelService.add_process_model(spec)
         bpmn_file_name_with_extension = bpmn_file_name
         if not bpmn_file_name_with_extension:
@@ -88,7 +86,7 @@ class ExampleDataLoader:
                 )
                 spec.primary_process_id = references[0].identifier
                 spec.primary_file_name = filename
-                ProcessModelService().save_process_model(spec)
+                ProcessModelService.save_process_model(spec)
         finally:
             if file:
                 file.close()
diff --git a/tests/spiffworkflow_backend/integration/test_logging_service.py b/tests/spiffworkflow_backend/integration/test_logging_service.py
index 97dafaf3..f9dd4452 100644
--- a/tests/spiffworkflow_backend/integration/test_logging_service.py
+++ b/tests/spiffworkflow_backend/integration/test_logging_service.py
@@ -51,13 +51,13 @@ class TestLoggingService(BaseTest):
         assert response.json is not None
         process_instance_id = response.json["id"]
         response = client.post(
-            f"/v1.0/process-instances/{process_instance_id}/run",
+            f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
             headers=headers,
         )
         assert response.status_code == 200
         log_response = client.get(
-            f"/v1.0/process-instances/{process_instance_id}/logs",
+            f"/v1.0/logs/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
             headers=headers,
         )
         assert log_response.status_code == 200
diff --git a/tests/spiffworkflow_backend/integration/test_nested_groups.py b/tests/spiffworkflow_backend/integration/test_nested_groups.py
index 3a12acf6..3983f9be 100644
--- a/tests/spiffworkflow_backend/integration/test_nested_groups.py
+++ b/tests/spiffworkflow_backend/integration/test_nested_groups.py
@@ -46,7 +46,7 @@ class TestNestedGroups(BaseTest):
         process_instance_id = response.json["id"]
         client.post(
-            f"/v1.0/process-instances/{process_instance_id}/run",
+            f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
             headers=self.logged_in_headers(with_super_admin_user),
         )
         process_instance = ProcessInstanceService().get_process_instance(
diff --git a/tests/spiffworkflow_backend/integration/test_openid_blueprint.py b/tests/spiffworkflow_backend/integration/test_openid_blueprint.py
new file mode 100644
index 00000000..20a0bb67
--- /dev/null
+++ b/tests/spiffworkflow_backend/integration/test_openid_blueprint.py
@@ -0,0 +1,61 @@
+"""Test_authentication."""
+from flask import Flask
+from flask.testing import FlaskClient
+from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+
+
+class TestFlaskOpenId(BaseTest):
+    """An integrated OpenID server that responds to OpenID requests.
+
+    It works by referencing a built-in YAML file. Useful for
+    local development, testing, demos, etc.
+ """ + + def test_discovery_of_endpoints( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test discovery endpoints.""" + response = client.get("/openid/.well-known/openid-configuration") + discovered_urls = response.json + assert "http://localhost/openid" == discovered_urls["issuer"] + assert ( + "http://localhost/openid/auth" == discovered_urls["authorization_endpoint"] + ) + assert "http://localhost/openid/token" == discovered_urls["token_endpoint"] + + def test_get_login_page( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """It should be possible to get to a login page.""" + data = {"state": {"bubblegum": 1, "daydream": 2}} + response = client.get("/openid/auth", query_string=data) + assert b"

Login

" in response.data + assert b"bubblegum" in response.data + + def test_get_token( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """It should be possible to get a token.""" + code = ( + "c3BpZmZ3b3JrZmxvdy1iYWNrZW5kOkpYZVFFeG0wSmhRUEx1bWdIdElJcWY1MmJEYWxIejBx" + ) + headers = { + "Content-Type": "application/x-www-form-urlencoded", + "Authorization": f"Basic {code}", + } + data = { + "grant_type": "authorization_code", + "code": code, + "redirect_url": "http://localhost:7000/v1.0/login_return", + } + response = client.post("/openid/token", data=data, headers=headers) + assert response diff --git a/tests/spiffworkflow_backend/integration/test_process_api.py b/tests/spiffworkflow_backend/integration/test_process_api.py index fbbf7deb..0070c5c9 100644 --- a/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/tests/spiffworkflow_backend/integration/test_process_api.py @@ -20,6 +20,9 @@ from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_group import ProcessGroup from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -133,12 +136,12 @@ class TestProcessApi(BaseTest): process_model_description=model_description, user=with_super_admin_user, ) - process_model = ProcessModelService().get_process_model( + process_model = ProcessModelService.get_process_model( process_model_identifier, ) assert model_display_name == process_model.display_name assert 0 == process_model.display_order - assert 1 == len(ProcessModelService().get_process_groups()) + assert 1 == len(ProcessModelService.get_process_groups()) # add bpmn file to the model bpmn_file_name = "sample.bpmn" @@ -155,9 +158,7 @@ class TestProcessApi(BaseTest): user=with_super_admin_user, ) # get the model, assert that primary is set - process_model = ProcessModelService().get_process_model( - process_model_identifier - ) + process_model = ProcessModelService.get_process_model(process_model_identifier) assert process_model.primary_file_name == bpmn_file_name assert process_model.primary_process_id == "sample" @@ -208,9 +209,7 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 - process_model = ProcessModelService().get_process_model( - process_model_identifier - ) + process_model = ProcessModelService.get_process_model(process_model_identifier) assert process_model.primary_file_name == bpmn_file_name assert process_model.primary_process_id == terminal_primary_process_id @@ -236,9 +235,7 @@ class TestProcessApi(BaseTest): ) # assert we have a model - process_model = ProcessModelService().get_process_model( - process_model_identifier - ) + process_model = ProcessModelService.get_process_model(process_model_identifier) assert process_model is not None assert process_model.id == process_model_identifier @@ -254,7 +251,7 @@ class TestProcessApi(BaseTest): # assert we no longer have a model with pytest.raises(ProcessEntityNotFoundError): - ProcessModelService().get_process_model(process_model_identifier) + ProcessModelService.get_process_model(process_model_identifier) def test_process_model_delete_with_instances( self, @@ -327,19 +324,18 @@ class 
TestProcessApi(BaseTest): process_model_id=process_model_identifier, user=with_super_admin_user, ) - process_model = ProcessModelService().get_process_model( - process_model_identifier - ) + process_model = ProcessModelService.get_process_model(process_model_identifier) assert process_model.id == process_model_identifier assert process_model.display_name == "Cooooookies" - assert process_model.is_review is False assert process_model.primary_file_name is None assert process_model.primary_process_id is None process_model.display_name = "Updated Display Name" process_model.primary_file_name = "superduper.bpmn" process_model.primary_process_id = "superduper" - process_model.is_review = True # not in the include list, so get ignored + process_model.metadata_extraction_paths = [ + {"key": "extraction1", "path": "path1"} + ] modified_process_model_identifier = process_model_identifier.replace("/", ":") response = client.put( @@ -353,7 +349,44 @@ class TestProcessApi(BaseTest): assert response.json["display_name"] == "Updated Display Name" assert response.json["primary_file_name"] == "superduper.bpmn" assert response.json["primary_process_id"] == "superduper" - assert response.json["is_review"] is False + assert response.json["metadata_extraction_paths"] == [ + {"key": "extraction1", "path": "path1"} + ] + + def test_process_model_list_all( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_process_model_list_all.""" + group_id = "test_group/test_sub_group" + self.create_process_group(client, with_super_admin_user, group_id) + + # add 5 models to the group + for i in range(5): + process_model_identifier = f"{group_id}/test_model_{i}" + model_display_name = f"Test Model {i}" + model_description = f"Test Model {i} Description" + self.create_process_model_with_api( + client, + process_model_id=process_model_identifier, + process_model_display_name=model_display_name, + process_model_description=model_description, + user=with_super_admin_user, + ) + + # get all models + response = client.get( + "/v1.0/process-models?per_page=1000&recursive=true", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.json is not None + assert len(response.json["results"]) == 5 + assert response.json["pagination"]["count"] == 5 + assert response.json["pagination"]["total"] == 5 + assert response.json["pagination"]["pages"] == 1 def test_process_model_list( self, @@ -515,7 +548,7 @@ class TestProcessApi(BaseTest): assert result.description == "Test Description" # Check what is persisted - persisted = ProcessModelService().get_process_group("test") + persisted = ProcessModelService.get_process_group("test") assert persisted.display_name == "Another Test Category" assert persisted.id == "test" assert persisted.description == "Test Description" @@ -537,7 +570,7 @@ class TestProcessApi(BaseTest): process_group_id, display_name=process_group_display_name, ) - persisted = ProcessModelService().get_process_group(process_group_id) + persisted = ProcessModelService.get_process_group(process_group_id) assert persisted is not None assert persisted.id == process_group_id @@ -547,7 +580,7 @@ class TestProcessApi(BaseTest): ) with pytest.raises(ProcessEntityNotFoundError): - ProcessModelService().get_process_group(process_group_id) + ProcessModelService.get_process_group(process_group_id) def test_process_group_update( self, @@ -563,7 +596,7 @@ class TestProcessApi(BaseTest): self.create_process_group( client, 
with_super_admin_user, group_id, display_name=group_display_name ) - process_group = ProcessModelService().get_process_group(group_id) + process_group = ProcessModelService.get_process_group(group_id) assert process_group.display_name == group_display_name @@ -577,7 +610,7 @@ class TestProcessApi(BaseTest): ) assert response.status_code == 200 - process_group = ProcessModelService().get_process_group(group_id) + process_group = ProcessModelService.get_process_group(group_id) assert process_group.display_name == "Modified Display Name" def test_process_group_list( @@ -879,7 +912,7 @@ class TestProcessApi(BaseTest): modified_process_model_identifier = process_model_identifier.replace("/", ":") response = client.post( - f"/v1.0/process-models/{modified_process_model_identifier}/process-instances", + f"/v1.0/process-instances/{modified_process_model_identifier}", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 201 @@ -944,6 +977,43 @@ class TestProcessApi(BaseTest): assert response.json is not None assert response.json["id"] == process_group_id assert response.json["process_models"][0]["id"] == process_model_identifier + assert response.json["parent_groups"] == [] + + def test_get_process_group_show_when_nested( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_get_process_group_show_when_nested.""" + self.create_group_and_model_with_bpmn( + client=client, + user=with_super_admin_user, + process_group_id="test_group_one", + process_model_id="simple_form", + bpmn_file_location="simple_form", + ) + + self.create_group_and_model_with_bpmn( + client=client, + user=with_super_admin_user, + process_group_id="test_group_one/test_group_two", + process_model_id="call_activity_nested", + bpmn_file_location="call_activity_nested", + ) + + response = client.get( + "/v1.0/process-groups/test_group_one:test_group_two", + headers=self.logged_in_headers(with_super_admin_user), + ) + + assert response.status_code == 200 + assert response.json is not None + assert response.json["id"] == "test_group_one/test_group_two" + assert response.json["parent_groups"] == [ + {"display_name": "test_group_one", "id": "test_group_one"} + ] def test_get_process_model_when_found( self, @@ -962,11 +1032,15 @@ class TestProcessApi(BaseTest): f"/v1.0/process-models/{modified_process_model_identifier}", headers=self.logged_in_headers(with_super_admin_user), ) + assert response.status_code == 200 assert response.json is not None assert response.json["id"] == process_model_identifier assert len(response.json["files"]) == 1 assert response.json["files"][0]["name"] == "random_fact.bpmn" + assert response.json["parent_groups"] == [ + {"display_name": "test_group", "id": "test_group"} + ] def test_get_process_model_when_not_found( self, @@ -1034,7 +1108,7 @@ class TestProcessApi(BaseTest): assert response.json is not None process_instance_id = response.json["id"] response = client.post( - f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) @@ -1066,7 +1140,9 @@ class TestProcessApi(BaseTest): process_group_id=process_group_id, process_model_id=process_model_id, ) - modified_process_model_identifier = process_model_identifier.replace("/", ":") + modified_process_model_identifier = ( + 
self.modify_process_identifier_for_path_param(process_model_identifier) + ) headers = self.logged_in_headers(with_super_admin_user) create_response = self.create_process_instance_from_process_model_id( client, process_model_identifier, headers @@ -1074,14 +1150,15 @@ class TestProcessApi(BaseTest): assert create_response.json is not None process_instance_id = create_response.json["id"] client.post( - f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) show_response = client.get( - f"/v1.0/process-models/{modified_process_model_identifier}/process-instances/{process_instance_id}", + f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}", headers=self.logged_in_headers(with_super_admin_user), ) assert show_response.json is not None + assert show_response.status_code == 200 file_system_root = FileSystemService.root_path() file_path = ( f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn" @@ -1177,7 +1254,7 @@ class TestProcessApi(BaseTest): process_instance_id = response.json["id"] response = client.post( - f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) @@ -1237,14 +1314,14 @@ class TestProcessApi(BaseTest): process_instance_id = response.json["id"] response = client.post( - f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 assert response.json is not None response = client.post( - f"/v1.0/process-instances/{process_instance_id}/terminate", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/terminate", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 @@ -1285,13 +1362,13 @@ class TestProcessApi(BaseTest): process_instance_id = response.json["id"] response = client.post( - f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) assert response.json is not None delete_response = client.delete( - f"/v1.0/process-instances/{process_instance_id}", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", headers=self.logged_in_headers(with_super_admin_user), ) assert delete_response.status_code == 200 @@ -1324,7 +1401,7 @@ class TestProcessApi(BaseTest): process_instance_id = response.json["id"] response = client.post( - f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) @@ -1481,7 +1558,7 @@ class TestProcessApi(BaseTest): status=ProcessInstanceStatus[statuses[i]].value, process_initiator=with_super_admin_user, process_model_identifier=process_model_identifier, - process_group_identifier="test_process_group_id", + 
process_model_display_name=process_model_identifier, updated_at_in_seconds=round(time.time()), start_in_seconds=(1000 * i) + 1000, end_in_seconds=(1000 * i) + 2000, @@ -1656,14 +1733,14 @@ class TestProcessApi(BaseTest): ], } - ProcessInstanceReportModel.create_with_attributes( + report = ProcessInstanceReportModel.create_with_attributes( identifier="sure", report_metadata=report_metadata, user=with_super_admin_user, ) response = client.get( - "/v1.0/process-instances/reports/sure", + f"/v1.0/process-instances/reports/{report.id}", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 @@ -1702,14 +1779,14 @@ class TestProcessApi(BaseTest): ], } - ProcessInstanceReportModel.create_with_attributes( + report = ProcessInstanceReportModel.create_with_attributes( identifier="sure", report_metadata=report_metadata, user=with_super_admin_user, ) response = client.get( - "/v1.0/process-instances/reports/sure?grade_level=1", + f"/v1.0/process-instances/reports/{report.id}?grade_level=1", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 @@ -1724,9 +1801,9 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_process_instance_report_show_with_default_list.""" + """Test_process_instance_report_show_with_bad_identifier.""" response = client.get( - "/v1.0/process-instances/reports/sure?grade_level=1", + "/v1.0/process-instances/reports/13000000?grade_level=1", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 404 @@ -1783,7 +1860,7 @@ class TestProcessApi(BaseTest): assert process.status == "not_started" response = client.post( - f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 400 @@ -1827,10 +1904,8 @@ class TestProcessApi(BaseTest): process_instance_id = self.setup_testing_instance( client, process_model_identifier, with_super_admin_user ) - process_model = ProcessModelService().get_process_model( - process_model_identifier - ) - ProcessModelService().update_process_model( + process_model = ProcessModelService.get_process_model(process_model_identifier) + ProcessModelService.update_process_model( process_model, {"fault_or_suspend_on_exception": NotificationType.suspend.value}, ) @@ -1844,7 +1919,7 @@ class TestProcessApi(BaseTest): assert process.status == "not_started" response = client.post( - f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 400 @@ -1882,10 +1957,8 @@ class TestProcessApi(BaseTest): client, process_model_identifier, with_super_admin_user ) - process_model = ProcessModelService().get_process_model( - process_model_identifier - ) - ProcessModelService().update_process_model( + process_model = ProcessModelService.get_process_model(process_model_identifier) + ProcessModelService.update_process_model( process_model, {"exception_notification_addresses": ["with_super_admin_user@example.com"]}, ) @@ -1894,7 +1967,7 @@ class TestProcessApi(BaseTest): with mail.record_messages() as outbox: response = client.post( - 
f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 400 @@ -2079,7 +2152,7 @@ class TestProcessApi(BaseTest): assert response.json is not None process_instance_id = response.json["id"] response = client.post( - f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(initiator_user), ) assert response.status_code == 200 @@ -2284,7 +2357,7 @@ class TestProcessApi(BaseTest): process_instance_id = response.json["id"] client.post( - f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) @@ -2294,7 +2367,7 @@ class TestProcessApi(BaseTest): assert process_instance.status == "user_input_required" client.post( - f"/v1.0/process-instances/{process_instance_id}/suspend", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/suspend", headers=self.logged_in_headers(with_super_admin_user), ) process_instance = ProcessInstanceService().get_process_instance( @@ -2304,7 +2377,7 @@ class TestProcessApi(BaseTest): # TODO: Why can I run a suspended process instance? response = client.post( - f"/v1.0/process-instances/{process_instance_id}/run", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) @@ -2369,3 +2442,425 @@ class TestProcessApi(BaseTest): ) print("test_script_unit_test_run") + + def setup_initial_groups_for_move_tests( + self, client: FlaskClient, with_super_admin_user: UserModel + ) -> None: + """Setup_initial_groups_for_move_tests.""" + groups = ["group_a", "group_b", "group_b/group_bb"] + # setup initial groups + for group in groups: + self.create_process_group( + client, with_super_admin_user, group, display_name=group + ) + # make sure initial groups exist + for group in groups: + persisted = ProcessModelService.get_process_group(group) + assert persisted is not None + assert persisted.id == group + + def test_move_model( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_move_model.""" + self.setup_initial_groups_for_move_tests(client, with_super_admin_user) + + process_model_id = "test_model" + original_location = "group_a" + original_process_model_path = f"{original_location}/{process_model_id}" + + # add model to `group_a` + self.create_process_model_with_api( + client, + original_process_model_path, + user=with_super_admin_user, + process_model_display_name=process_model_id, + process_model_description=process_model_id, + ) + persisted = ProcessModelService.get_process_model(original_process_model_path) + assert persisted is not None + assert persisted.id == original_process_model_path + + # move model to `group_b/group_bb` + new_location = "group_b/group_bb" + new_process_model_path = f"{new_location}/{process_model_id}" + modified_original_process_model_id = original_process_model_path.replace( + "/", ":" + ) + + response = client.put( + 
f"/v1.0/process-models/{modified_original_process_model_id}/move?new_location={new_location}", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 201 + assert response.json["id"] == new_process_model_path + + # make sure the original model does not exist + with pytest.raises(ProcessEntityNotFoundError) as e: + ProcessModelService.get_process_model(original_process_model_path) + assert e.value.args[0] == "process_model_not_found" + + # make sure the new model does exist + new_process_model = ProcessModelService.get_process_model( + new_process_model_path + ) + assert new_process_model is not None + assert new_process_model.id == new_process_model_path + + def test_move_group( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_move_group.""" + self.setup_initial_groups_for_move_tests(client, with_super_admin_user) + + # add sub group to `group_a` + sub_group_id = "sub_group" + original_location = "group_a" + original_sub_path = f"{original_location}/{sub_group_id}" + self.create_process_group( + client, with_super_admin_user, original_sub_path, display_name=sub_group_id + ) + # make sure original subgroup exists + persisted = ProcessModelService.get_process_group(original_sub_path) + assert persisted is not None + assert persisted.id == original_sub_path + + # move sub_group to `group_b/group_bb` + new_location = "group_b/group_bb" + new_sub_path = f"{new_location}/{sub_group_id}" + modified_original_process_group_id = original_sub_path.replace("/", ":") + response = client.put( + f"/v1.0/process-groups/{modified_original_process_group_id}/move?new_location={new_location}", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 201 + assert response.json["id"] == new_sub_path + + # make sure the original subgroup does not exist + with pytest.raises(ProcessEntityNotFoundError) as e: + ProcessModelService.get_process_group(original_sub_path) + + assert e.value.args[0] == "process_group_not_found" + assert e.value.args[1] == f"Process Group Id: {original_sub_path}" + + # make sure the new subgroup does exist + new_process_group = ProcessModelService.get_process_group(new_sub_path) + assert new_process_group.id == new_sub_path + + # this doesn't work in CI + # assert "Initial Commit" in output + # def test_process_model_publish( + # self, + # app: Flask, + # client: FlaskClient, + # with_db_and_bpmn_file_cleanup: None, + # with_super_admin_user: UserModel, + # ) -> None: + # """Test_process_model_publish.""" + # bpmn_root = FileSystemService.root_path() + # shell_command = ["git", "init", "--initial-branch=main", bpmn_root] + # output = GitService.run_shell_command_to_get_stdout(shell_command) + # assert output == f"Initialized empty Git repository in {bpmn_root}/.git/\n" + # with FileSystemService.cd(bpmn_root): + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # assert "On branch main" in output + # assert "No commits yet" in output + # assert ( + # 'nothing to commit (create/copy files and use "git add" to track)' + # in output + # ) + # + # process_group_id = "test_group" + # self.create_process_group( + # client, with_super_admin_user, process_group_id, process_group_id + # ) + # + # sub_process_group_id = "test_group/test_sub_group" + # process_model_id = "hello_world" + # bpmn_file_name = "hello_world.bpmn" + # bpmn_file_location = "hello_world" + # process_model_identifier = 
self.create_group_and_model_with_bpmn( + # client=client, + # user=with_super_admin_user, + # process_group_id=sub_process_group_id, + # process_model_id=process_model_id, + # bpmn_file_name=bpmn_file_name, + # bpmn_file_location=bpmn_file_location, + # ) + # process_model_absolute_dir = os.path.join( + # bpmn_root, process_model_identifier + # ) + # + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # test_string = 'Untracked files:\n (use "git add ..." to include in what will be committed)\n\ttest_group' + # assert test_string in output + # + # os.system("git add .") + # output = os.popen("git commit -m 'Initial Commit'").read() + # assert "Initial Commit" in output + # assert "4 files changed" in output + # assert "test_group/process_group.json" in output + # assert "test_group/test_sub_group/hello_world/hello_world.bpmn" in output + # assert "test_group/test_sub_group/hello_world/process_model.json" in output + # assert "test_group/test_sub_group/process_group.json" in output + # + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # assert "On branch main" in output + # assert "nothing to commit" in output + # assert "working tree clean" in output + # + # output = os.popen("git branch --list").read() # noqa: S605 + # assert output == "* main\n" + # os.system("git branch staging") + # output = os.popen("git branch --list").read() # noqa: S605 + # assert output == "* main\n staging\n" + # + # os.system("git checkout staging") + # + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # assert "On branch staging" in output + # assert "nothing to commit" in output + # assert "working tree clean" in output + # + # # process_model = ProcessModelService.get_process_model(process_model_identifier) + # + # listing = os.listdir(process_model_absolute_dir) + # assert len(listing) == 2 + # assert "hello_world.bpmn" in listing + # assert "process_model.json" in listing + # + # os.system("git checkout main") + # + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # assert "On branch main" in output + # assert "nothing to commit" in output + # assert "working tree clean" in output + # + # file_data = b"abc123" + # new_file_path = os.path.join(process_model_absolute_dir, "new_file.txt") + # with open(new_file_path, "wb") as f_open: + # f_open.write(file_data) + # + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # assert "On branch main" in output + # assert "Untracked files:" in output + # assert "test_group/test_sub_group/hello_world/new_file.txt" in output + # + # os.system( + # "git add test_group/test_sub_group/hello_world/new_file.txt" + # ) # noqa: S605 + # output = os.popen("git commit -m 'add new_file.txt'").read() # noqa: S605 + # + # assert "add new_file.txt" in output + # assert "1 file changed, 1 insertion(+)" in output + # assert "test_group/test_sub_group/hello_world/new_file.txt" in output + # + # listing = os.listdir(process_model_absolute_dir) + # assert len(listing) == 3 + # assert "hello_world.bpmn" in listing + # assert "process_model.json" in listing + # assert "new_file.txt" in listing + # + # # modified_process_model_id = process_model_identifier.replace("/", ":") + # # response = client.post( + # # f"/v1.0/process-models/{modified_process_model_id}/publish?branch_to_update=staging", + # # headers=self.logged_in_headers(with_super_admin_user), + # # ) + # + # print("test_process_model_publish") + + def 
test_can_get_process_instance_list_with_report_metadata( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_can_get_process_instance_list_with_report_metadata.""" + process_model = load_test_spec( + process_model_id="save_process_instance_metadata/save_process_instance_metadata", + bpmn_file_name="save_process_instance_metadata.bpmn", + process_model_source_directory="save_process_instance_metadata", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=with_super_admin_user + ) + + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id + ).all() + assert len(process_instance_metadata) == 3 + + report_metadata = { + "columns": [ + {"Header": "ID", "accessor": "id"}, + {"Header": "Status", "accessor": "status"}, + {"Header": "Key One", "accessor": "key1"}, + {"Header": "Key Two", "accessor": "key2"}, + ], + "order_by": ["status"], + "filter_by": [], + } + process_instance_report = ProcessInstanceReportModel.create_with_attributes( + identifier="sure", + report_metadata=report_metadata, + user=with_super_admin_user, + ) + + response = client.get( + f"/v1.0/process-instances?report_identifier={process_instance_report.identifier}", + headers=self.logged_in_headers(with_super_admin_user), + ) + + assert response.json is not None + assert response.status_code == 200 + + assert len(response.json["results"]) == 1 + assert response.json["results"][0]["status"] == "complete" + assert response.json["results"][0]["id"] == process_instance.id + assert response.json["results"][0]["key1"] == "value1" + assert response.json["results"][0]["key2"] == "value2" + assert response.json["pagination"]["count"] == 1 + assert response.json["pagination"]["pages"] == 1 + assert response.json["pagination"]["total"] == 1 + + def test_can_get_process_instance_report_column_list( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_can_get_process_instance_list_with_report_metadata.""" + process_model = load_test_spec( + process_model_id="save_process_instance_metadata/save_process_instance_metadata", + bpmn_file_name="save_process_instance_metadata.bpmn", + process_model_source_directory="save_process_instance_metadata", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=with_super_admin_user + ) + + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id + ).all() + assert len(process_instance_metadata) == 3 + + response = client.get( + "/v1.0/process-instances/reports/columns", + headers=self.logged_in_headers(with_super_admin_user), + ) + + assert response.json is not None + assert response.status_code == 200 + assert response.json == [ + {"Header": "Id", "accessor": "id", "filterable": False}, + { + "Header": "Process", + "accessor": "process_model_display_name", + "filterable": False, + }, + {"Header": "Start", "accessor": "start_in_seconds", "filterable": False}, + {"Header": "End", "accessor": "end_in_seconds", "filterable": False}, + {"Header": "Username", "accessor": "username", "filterable": False}, + {"Header": 
"Status", "accessor": "status", "filterable": False}, + {"Header": "key1", "accessor": "key1", "filterable": True}, + {"Header": "key2", "accessor": "key2", "filterable": True}, + {"Header": "key3", "accessor": "key3", "filterable": True}, + ] + + def test_process_instance_list_can_order_by_metadata( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_process_instance_list_can_order_by_metadata.""" + self.create_process_group( + client, with_super_admin_user, "test_group", "test_group" + ) + process_model = load_test_spec( + "test_group/hello_world", + process_model_source_directory="nested-task-data-structure", + ) + ProcessModelService.update_process_model( + process_model, + { + "metadata_extraction_paths": [ + {"key": "time_ns", "path": "outer.time"}, + ] + }, + ) + + process_instance_one = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance_one) + processor.do_engine_steps(save=True) + assert process_instance_one.status == "complete" + process_instance_two = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance_two) + processor.do_engine_steps(save=True) + assert process_instance_two.status == "complete" + + report_metadata = { + "columns": [ + {"Header": "id", "accessor": "id"}, + {"Header": "Time", "accessor": "time_ns"}, + ], + "order_by": ["time_ns"], + } + report_one = ProcessInstanceReportModel.create_with_attributes( + identifier="report_one", + report_metadata=report_metadata, + user=with_super_admin_user, + ) + + response = client.get( + f"/v1.0/process-instances?report_id={report_one.id}", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 2 + assert response.json["results"][0]["id"] == process_instance_one.id + assert response.json["results"][1]["id"] == process_instance_two.id + + report_metadata = { + "columns": [ + {"Header": "id", "accessor": "id"}, + {"Header": "Time", "accessor": "time_ns"}, + ], + "order_by": ["-time_ns"], + } + report_two = ProcessInstanceReportModel.create_with_attributes( + identifier="report_two", + report_metadata=report_metadata, + user=with_super_admin_user, + ) + + response = client.get( + f"/v1.0/process-instances?report_id={report_two.id}", + headers=self.logged_in_headers(with_super_admin_user), + ) + + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 2 + assert response.json["results"][1]["id"] == process_instance_one.id + assert response.json["results"][0]["id"] == process_instance_two.id diff --git a/tests/spiffworkflow_backend/integration/test_secret_service.py b/tests/spiffworkflow_backend/integration/test_secret_service.py index 071ef6cc..c71f67f2 100644 --- a/tests/spiffworkflow_backend/integration/test_secret_service.py +++ b/tests/spiffworkflow_backend/integration/test_secret_service.py @@ -52,7 +52,7 @@ class SecretServiceTestHelpers(BaseTest): process_model_description=self.test_process_model_description, user=user, ) - process_model_info = ProcessModelService().get_process_model( + process_model_info = ProcessModelService.get_process_model( process_model_identifier ) return process_model_info diff --git a/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py 
b/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py new file mode 100644 index 00000000..96eb6297 --- /dev/null +++ b/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py @@ -0,0 +1,45 @@ +"""Test_get_localtime.""" +from flask.app import Flask +from flask.testing import FlaskClient +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) + + +class TestSaveProcessInstanceMetadata(BaseTest): + """TestSaveProcessInstanceMetadata.""" + + def test_can_save_process_instance_metadata( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_can_save_process_instance_metadata.""" + initiator_user = self.find_or_create_user("initiator_user") + self.create_process_group( + client, with_super_admin_user, "test_group", "test_group" + ) + process_model = load_test_spec( + process_model_id="save_process_instance_metadata/save_process_instance_metadata", + bpmn_file_name="save_process_instance_metadata.bpmn", + process_model_source_directory="save_process_instance_metadata", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + + process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id + ).all() + assert len(process_instance_metadata) == 3 diff --git a/tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py b/tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py index 1d515712..c738c7f6 100644 --- a/tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py +++ b/tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py @@ -1,13 +1,38 @@ """Test_acceptance_test_fixtures.""" +import os + from flask.app import Flask +from spiffworkflow_backend.models.process_group import ProcessGroup +from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.services.acceptance_test_fixtures import ( load_acceptance_test_fixtures, ) +from spiffworkflow_backend.services.process_model_service import ProcessModelService def test_start_dates_are_one_hour_apart(app: Flask) -> None: """Test_start_dates_are_one_hour_apart.""" + process_model_identifier = ( + "misc/acceptance-tests-group-one/acceptance-tests-model-1" + ) + group_identifier = os.path.dirname(process_model_identifier) + parent_group_identifier = os.path.dirname(group_identifier) + if not ProcessModelService.is_group(parent_group_identifier): + process_group = ProcessGroup( + id=parent_group_identifier, display_name=parent_group_identifier + ) + ProcessModelService.add_process_group(process_group) + if not ProcessModelService.is_group(group_identifier): + process_group = ProcessGroup(id=group_identifier, display_name=group_identifier) + ProcessModelService.add_process_group(process_group) + if not ProcessModelService.is_model(process_model_identifier): + process_model = ProcessModelInfo( + id=process_model_identifier, + display_name=process_model_identifier, + description="hey", + ) + 
ProcessModelService.add_process_model(process_model) process_instances = load_acceptance_test_fixtures() assert len(process_instances) > 2 diff --git a/tests/spiffworkflow_backend/unit/test_authorization_service.py b/tests/spiffworkflow_backend/unit/test_authorization_service.py index 36f07743..00622a1f 100644 --- a/tests/spiffworkflow_backend/unit/test_authorization_service.py +++ b/tests/spiffworkflow_backend/unit/test_authorization_service.py @@ -113,7 +113,7 @@ class TestAuthorizationService(BaseTest): bpmn_file_location="model_with_lanes", ) - process_model = ProcessModelService().get_process_model( + process_model = ProcessModelService.get_process_model( process_model_id=process_model_identifier ) process_instance = self.create_process_instance_from_process_model( diff --git a/tests/spiffworkflow_backend/unit/test_message_instance.py b/tests/spiffworkflow_backend/unit/test_message_instance.py index 0292032e..2c091eeb 100644 --- a/tests/spiffworkflow_backend/unit/test_message_instance.py +++ b/tests/spiffworkflow_backend/unit/test_message_instance.py @@ -44,7 +44,7 @@ class TestMessageInstance(BaseTest): client, with_super_admin_user ) - process_model = ProcessModelService().get_process_model( + process_model = ProcessModelService.get_process_model( process_model_id=process_model_identifier ) process_instance = self.create_process_instance_from_process_model( @@ -81,7 +81,7 @@ class TestMessageInstance(BaseTest): client, with_super_admin_user ) - process_model = ProcessModelService().get_process_model( + process_model = ProcessModelService.get_process_model( process_model_id=process_model_identifier ) process_instance = self.create_process_instance_from_process_model( @@ -127,7 +127,7 @@ class TestMessageInstance(BaseTest): client, with_super_admin_user ) - process_model = ProcessModelService().get_process_model( + process_model = ProcessModelService.get_process_model( process_model_id=process_model_identifier ) process_instance = self.create_process_instance_from_process_model( @@ -174,7 +174,7 @@ class TestMessageInstance(BaseTest): client, with_super_admin_user ) - process_model = ProcessModelService().get_process_model( + process_model = ProcessModelService.get_process_model( process_model_id=process_model_identifier ) process_instance = self.create_process_instance_from_process_model( diff --git a/tests/spiffworkflow_backend/unit/test_message_service.py b/tests/spiffworkflow_backend/unit/test_message_service.py index aa1f2805..c012e287 100644 --- a/tests/spiffworkflow_backend/unit/test_message_service.py +++ b/tests/spiffworkflow_backend/unit/test_message_service.py @@ -47,7 +47,7 @@ class TestMessageService(BaseTest): bpmn_file_name="message_sender.bpmn", ) - process_instance_sender = ProcessInstanceService.create_process_instance( + process_instance_sender = ProcessInstanceService.create_process_instance_from_process_model_identifier( process_model_sender.id, with_super_admin_user, ) @@ -154,7 +154,7 @@ class TestMessageService(BaseTest): user = self.find_or_create_user() - process_instance_sender = ProcessInstanceService.create_process_instance( + process_instance_sender = ProcessInstanceService.create_process_instance_from_process_model_identifier( process_model_sender.id, user, # process_group_identifier=process_model_sender.process_group_id, diff --git a/tests/spiffworkflow_backend/unit/test_process_group.py b/tests/spiffworkflow_backend/unit/test_process_group.py index 6c3ad0ad..5cf8945f 100644 --- a/tests/spiffworkflow_backend/unit/test_process_group.py +++ 
b/tests/spiffworkflow_backend/unit/test_process_group.py @@ -9,8 +9,7 @@ def test_there_is_at_least_one_group_after_we_create_one( app: Flask, with_db_and_bpmn_file_cleanup: None ) -> None: """Test_there_is_at_least_one_group_after_we_create_one.""" - process_model_service = ProcessModelService() process_group = ProcessGroup(id="hey", display_name="sure") - process_model_service.add_process_group(process_group) - process_groups = ProcessModelService().get_process_groups() + ProcessModelService.add_process_group(process_group) + process_groups = ProcessModelService.get_process_groups() assert len(process_groups) > 0 diff --git a/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index f0de77aa..3e010795 100644 --- a/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -161,6 +161,7 @@ class TestProcessInstanceProcessor(BaseTest): ) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) + processor.save() assert len(process_instance.active_tasks) == 1 active_task = process_instance.active_tasks[0] @@ -241,3 +242,42 @@ class TestProcessInstanceProcessor(BaseTest): ) assert process_instance.status == ProcessInstanceStatus.complete.value + + def test_does_not_recreate_active_tasks_on_multiple_saves( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_sets_permission_correctly_on_active_task_when_using_dict.""" + self.create_process_group( + client, with_super_admin_user, "test_group", "test_group" + ) + initiator_user = self.find_or_create_user("initiator_user") + finance_user_three = self.find_or_create_user("testuser3") + assert initiator_user.principal is not None + assert finance_user_three.principal is not None + AuthorizationService.import_permissions_from_yaml_file() + + finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + assert finance_group is not None + + process_model = load_test_spec( + process_model_id="test_group/model_with_lanes", + bpmn_file_name="lanes_with_owner_dict.bpmn", + process_model_source_directory="model_with_lanes", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert len(process_instance.active_tasks) == 1 + initial_active_task_id = process_instance.active_tasks[0].id + + # save again to ensure we go attempt to process the active tasks again + processor.save() + + assert len(process_instance.active_tasks) == 1 + assert initial_active_task_id == process_instance.active_tasks[0].id diff --git a/tests/spiffworkflow_backend/unit/test_process_instance_report.py b/tests/spiffworkflow_backend/unit/test_process_instance_report.py index 48239507..0a5985f2 100644 --- a/tests/spiffworkflow_backend/unit/test_process_instance_report.py +++ b/tests/spiffworkflow_backend/unit/test_process_instance_report.py @@ -37,7 +37,7 @@ def test_generate_report_with_filter_by_with_variable_substitution( with_db_and_bpmn_file_cleanup: None, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_user_can_be_given_permission_to_administer_process_group.""" + """Test_generate_report_with_filter_by_with_variable_substitution.""" process_instances = 
setup_process_instances_for_reports report_metadata = { "filter_by": [ @@ -61,7 +61,7 @@ def test_generate_report_with_order_by_and_one_field( with_db_and_bpmn_file_cleanup: None, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_user_can_be_given_permission_to_administer_process_group.""" + """Test_generate_report_with_order_by_and_one_field.""" process_instances = setup_process_instances_for_reports report_metadata = {"order_by": ["test_score"]} results = do_report_with_metadata_and_instances(report_metadata, process_instances) @@ -75,7 +75,7 @@ def test_generate_report_with_order_by_and_two_fields( with_db_and_bpmn_file_cleanup: None, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_user_can_be_given_permission_to_administer_process_group.""" + """Test_generate_report_with_order_by_and_two_fields.""" process_instances = setup_process_instances_for_reports report_metadata = {"order_by": ["grade_level", "test_score"]} results = do_report_with_metadata_and_instances(report_metadata, process_instances) @@ -89,7 +89,7 @@ def test_generate_report_with_order_by_desc( with_db_and_bpmn_file_cleanup: None, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_user_can_be_given_permission_to_administer_process_group.""" + """Test_generate_report_with_order_by_desc.""" process_instances = setup_process_instances_for_reports report_metadata = {"order_by": ["grade_level", "-test_score"]} results = do_report_with_metadata_and_instances(report_metadata, process_instances) @@ -103,7 +103,7 @@ def test_generate_report_with_columns( with_db_and_bpmn_file_cleanup: None, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_user_can_be_given_permission_to_administer_process_group.""" + """Test_generate_report_with_columns.""" process_instances = setup_process_instances_for_reports report_metadata = { "columns": [ diff --git a/tests/spiffworkflow_backend/unit/test_process_model.py b/tests/spiffworkflow_backend/unit/test_process_model.py index 09421bc7..9eb6901b 100644 --- a/tests/spiffworkflow_backend/unit/test_process_model.py +++ b/tests/spiffworkflow_backend/unit/test_process_model.py @@ -5,12 +5,16 @@ from flask_bpmn.models.db import db from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) +from spiffworkflow_backend.services.process_model_service import ProcessModelService class TestProcessModel(BaseTest): @@ -122,6 +126,53 @@ class TestProcessModel(BaseTest): processor.do_engine_steps(save=True) assert process_instance.status == "complete" + def test_extract_metadata( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_can_run_process_model_with_call_activities.""" + self.create_process_group( + client, with_super_admin_user, "test_group", "test_group" + ) + process_model = load_test_spec( + "test_group/hello_world", + process_model_source_directory="nested-task-data-structure", + ) + 
ProcessModelService.update_process_model( + process_model, + { + "metadata_extraction_paths": [ + {"key": "awesome_var", "path": "outer.inner"}, + {"key": "invoice_number", "path": "invoice_number"}, + ] + }, + ) + + process_instance = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert process_instance.status == "complete" + + process_instance_metadata_awesome_var = ( + ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id, key="awesome_var" + ).first() + ) + assert process_instance_metadata_awesome_var is not None + assert process_instance_metadata_awesome_var.value == "sweet2" + process_instance_metadata_awesome_var = ( + ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id, key="invoice_number" + ).first() + ) + assert process_instance_metadata_awesome_var is not None + assert process_instance_metadata_awesome_var.value == "123" + def create_test_process_model(self, id: str, display_name: str) -> ProcessModelInfo: """Create_test_process_model.""" return ProcessModelInfo( diff --git a/tests/spiffworkflow_backend/unit/test_process_model_service.py b/tests/spiffworkflow_backend/unit/test_process_model_service.py index 7127eb41..7392bdfd 100644 --- a/tests/spiffworkflow_backend/unit/test_process_model_service.py +++ b/tests/spiffworkflow_backend/unit/test_process_model_service.py @@ -32,7 +32,7 @@ class TestProcessModelService(BaseTest): primary_process_id = process_model.primary_process_id assert primary_process_id == "Process_HelloWorld" - ProcessModelService().update_process_model( + ProcessModelService.update_process_model( process_model, {"display_name": "new_name"} ) diff --git a/tests/spiffworkflow_backend/unit/test_spec_file_service.py b/tests/spiffworkflow_backend/unit/test_spec_file_service.py index 9f5c5f8a..3cc353b5 100644 --- a/tests/spiffworkflow_backend/unit/test_spec_file_service.py +++ b/tests/spiffworkflow_backend/unit/test_spec_file_service.py @@ -188,7 +188,7 @@ class TestSpecFileService(BaseTest): # , # process_model_source_directory="call_activity_nested", # ) - process_model_info = ProcessModelService().get_process_model( + process_model_info = ProcessModelService.get_process_model( process_model_identifier ) files = SpecFileService.get_files(process_model_info) diff --git a/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py b/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py index aa91fcfd..26656143 100644 --- a/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py +++ b/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py @@ -28,7 +28,7 @@ class TestVariousBpmnConstructs(BaseTest): "timer_intermediate_catch_event", ) - process_model = ProcessModelService().get_process_model( + process_model = ProcessModelService.get_process_model( process_model_id=process_model_identifier ) From e4e00565815e02673844689fbd28cd4f6e7b79a4 Mon Sep 17 00:00:00 2001 From: burnettk Date: Sat, 10 Dec 2022 23:39:02 -0500 Subject: [PATCH 105/128] Squashed 'spiffworkflow-frontend/' changes from 326040b3c..55607af93 55607af93 fixed broken test w/ burnettk 4ea766eb4 mypy w/ burnettk cullerton fd2239b0e added git creds for pushing on publish w/ burnettk cullerton 0281bec01 added new notification component that allows links based on carbons w/ burnettk cullerton 49190128c display URL to open PR *** Need to figure out how to turn this into a link *** 
72b15c52c Return message to use on successful publish 4997375c8 Merge branch 'main' into feature/git-integration 39deda4d4 Merge branch 'main' into feature/git-integration 027dae1c6 First pass at git integration db0c8dc29 break process instance log list page into two tabs, simple and detailed d9df1104c get the columsn for the instance list table anytime filter options are displayed if empty 3792dafdb make the frontend uris match the api calls better w/ burnettk 7095e4723 more api cleanup w/ burnettk c514ac656 cleaned up more api routes for permissions w/ burnettk c758216ed updated tasks endpoint to task-data for easier permission setting w/ burnettk 7504e1857 pyl w/ burnettk b7edc501a Merge remote-tracking branch 'origin/main' into new_report 112eed7f3 some updates to fix up saving perspectives w/ burnettk 6da6ebe2d Use the identifier, not the id when locating a process model or dmn table. 51515ea21 using an array for metadata extraction paths now instead of dictionaries w/ burnettk f0b8e7185 added some support to add process model metadata. need to fix frontend w/ burnettk 0777bda31 filtering by metadata works w/ burnettk d82a00018 favor report id over identifier but support both and ui updates to allow setting a condition value on a metadata field, changing the display name, and fixes for saving and updating a report de218ba8e updated column form var w/ burnettk de38dc436 added ability to update the display name for perspective columns w/ burnettk 555360eb6 some updates for process instance reports and metadata w/ burnettk f0f4dcd89 better display for failure causes on message list w/ burnettk c4faf5d55 added correlations to message list table w/ burnettk 65feaeecf Merge remote-tracking branch 'origin/main' into new_report fe9dddc03 Choose new report f20d6ee75 Save dates b55a24a1c Save first status c47c62a6d added script to save process instance metadata and fixed permissions issue w/ burnettk cullerton a0098ebd9 Save selected process model 55ecbe565 Use current columns 6de52904e WIP bacf11bdc Save as report component 472578b99 adding the username to the report tables bef4add43 allow disabling the permission check for the Create New Instance page to improve performance. ab929fcaa Merge branch 'main' of github.com:sartography/spiff-arena into main 603db83cb "Continue" rather than "Submit" when displaying manual tasks. 
5db42f0e0 Processes you can start is now: Processes I can start c5c6c0fac lint 6f0c58da8 Auto Reload the Process Lists on the home pages' in-progress, and complete tabs 54e8a4717 update bpmn-js-spiffworkflow with better data-object handling a72daa441 Clean up css for the filter icon c755889ae update wording per harmeet: Tasks for my open processes is now My open instances bda4a6ee3 heading for instances on model show page, move instances below files, add margins 21a3eea47 display name instead of id, margin under table sections, Download xml to Download 1d83e3ac1 do not mislead user about being able to edit and clean up time in words 07380eec7 auto refresh tasks waiting for my groups on homepage 710d2340a time ago in words for in progress tab per harmeet feedback 88c4be1bd put id before process like completed tab and add title text to explain what is happening fb4136892 use process model display name rather than id for completed instances tab 8031fda3a left align files section with Start button per harmeet feedback c339d7dec add fin1, lead1, and Tasks actioned by me to Tasks completed by me 951c21f39 improve wording ed38b57e8 consistency is key e09373027 remove View label next to process instance id 38d20ceab ui feedback 3c0284633 some ui changes w/ burnettk e3711f4fd updated completed table text w/ burnettk 0688f5ec1 updated instances table descriptions w/ burnettk e9e9b8e2e added descriptions to task tables w/ burnettk 9b1d61866 updated breadcrumb to use display name w/ burnettk a9895f472 Hide perspectives link in nav bar (#59) 77390519b rename process_groups_list to process_group_list and fix lint 31bb0facd some updates to ui homepage to align more with notion doc 12e719146 fixed cypress tests 476c19f72 fix typo b266273e4 some more perm updates for core user w/ burnettk 05161fbcb Start of system report filters (#57) f0e0732ab fixed editing a process model w/ burnettk b02b5a2e4 filter process models based on user permissions on the backend if specified w/ burnettk 29093932f use tiles for process models w/ burnettk cullerton ab24c28d9 updated recently viewed table to be recently run and added run button w/ burnettk cullerton 9f894a8a9 added link to process model tile w/ burnettk cullerton b7a0743a5 moved delete and edit model and group buttons to icons on show pages w/ burnettk cullerton 21f7fc917 created new users for keycloak and fixed some permissions for core user w/ burnettk cullerton bd5a55c04 renamed modifyProcessModelPath to modifyProcessIdentifierForPathParam w/ burnettk 55a59b5ed modify process group id before submitting w/ burnettk 58bf7e38d Allow switching between user defined reports (#56) ec29be773 added recursive option to process model list to recurse or not and fix some ui components 56ae0afe3 fixed task frontend test 5f8a8dd64 the misc group is now 99-Misc 467e9643c allow longer username 83f6185f1 fix tests and add frontend tests f3b5cb7ca upgrade apscheduler and fix misspelling cfe7172de added a script to add a user to a group w/ burnettk 976ca7320 task cypress tests are passing w/ burnettk cullerton aa1a62505 process model cypress tests are passing w/ burnettk cullerton e36012401 make sure to pass the correct form of process group id when creating a process model w/ burnettk cullerton 97a840d04 process instance cypress tests pass now w/ burnettk cullerton 86fdb302a allow getting all process models, process instances should not save when they are initialized, and fixed some cypress tests w/ burnettk 2b15e66d2 iterating on cypress 1aa72f420 fix cypress tests
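One rename in the log above (bd5a55c04, modifyProcessModelPath to modifyProcessIdentifierForPathParam) recurs through nearly every file in the diff below. The helper bodies appear verbatim in the src/helpers.tsx hunk later in this patch: because openapi path parameters cannot contain slashes, nested process model identifiers are carried in URLs with colons instead. A minimal round-trip sketch:

    // From src/helpers.tsx below.
    export const modifyProcessIdentifierForPathParam = (path: string) => {
      return path.replace(/\//g, ':') || '';
    };

    export const unModifyProcessIdentifierForPathParam = (path: string) => {
      return path.replace(/:/g, '/') || '';
    };

    // A nested model id of the shape the cypress tests below build:
    modifyProcessIdentifierForPathParam(
      'misc/acceptance-tests-group-one/test-model-2'
    );
    // => 'misc:acceptance-tests-group-one:test-model-2'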
git-subtree-dir: spiffworkflow-frontend git-subtree-split: 55607af9318775fb3524cc5bb4f6a3c6188efe38 --- .gitignore | 2 +- cypress/e2e/process_groups.cy.js | 21 +- cypress/e2e/process_instances.cy.js | 61 +- cypress/e2e/process_models.cy.js | 127 ++-- cypress/e2e/tasks.cy.js | 37 +- cypress/support/commands.js | 44 +- package-lock.json | 4 +- public/index.html | 1 - src/components/ButtonWithConfirmation.tsx | 2 +- src/components/MiniComponents.tsx | 22 + src/components/MyCompletedInstances.tsx | 2 + src/components/NavigationBar.tsx | 6 +- src/components/Notification.tsx | 48 ++ src/components/PaginationForTable.tsx | 3 + src/components/ProcessBreadcrumb.test.tsx | 14 +- src/components/ProcessBreadcrumb.tsx | 217 ++++--- src/components/ProcessGroupForm.tsx | 36 +- src/components/ProcessGroupListTiles.tsx | 13 +- .../ProcessInstanceListSaveAsReport.tsx | 205 ++++++ src/components/ProcessInstanceListTable.tsx | 596 ++++++++++++++++-- .../ProcessInstanceReportSearch.tsx | 85 +++ src/components/ProcessInstanceRun.tsx | 99 ++- src/components/ProcessModelForm.tsx | 137 +++- src/components/ProcessModelListTiles.tsx | 48 +- src/components/ProcessModelSearch.tsx | 2 +- src/components/ProcessSearch.tsx | 2 +- src/components/ReactDiagramEditor.tsx | 4 +- .../TableCellWithTimeAgoInWords.tsx | 17 + src/components/TasksForMyOpenProcesses.tsx | 118 ++-- src/components/TasksWaitingForMe.tsx | 86 +-- src/components/TasksWaitingForMyGroups.tsx | 120 ++-- src/config.tsx | 1 + src/helpers.tsx | 6 +- src/helpers/timeago.js | 62 ++ src/hooks/PermissionService.tsx | 21 +- src/hooks/UriListForPermissions.tsx | 34 +- src/index.css | 127 ++++ src/interfaces.ts | 78 ++- src/routes/AdminRoutes.tsx | 6 +- src/routes/AuthenticationList.tsx | 2 +- src/routes/CompletedInstances.tsx | 47 +- src/routes/CreateNewInstance.tsx | 3 +- src/routes/GroupedTasks.tsx | 8 +- src/routes/HomePageRoutes.tsx | 16 +- src/routes/JsonSchemaFormBuilder.tsx | 4 +- src/routes/MessageInstanceList.tsx | 140 ++-- src/routes/MyTasks.tsx | 75 ++- src/routes/ProcessGroupEdit.tsx | 9 +- src/routes/ProcessGroupList.tsx | 8 +- src/routes/ProcessGroupNew.tsx | 6 +- src/routes/ProcessGroupShow.tsx | 176 ++++-- src/routes/ProcessInstanceLogList.tsx | 86 ++- src/routes/ProcessInstanceReportList.tsx | 18 +- src/routes/ProcessInstanceReportShow.tsx | 4 +- src/routes/ProcessInstanceShow.tsx | 144 +++-- src/routes/ProcessModelEdit.tsx | 9 +- src/routes/ProcessModelEditDiagram.tsx | 19 +- src/routes/ProcessModelNew.tsx | 14 +- src/routes/ProcessModelShow.tsx | 236 ++++--- src/routes/ReactFormEditor.tsx | 18 +- src/routes/SecretList.tsx | 2 +- src/routes/TaskShow.tsx | 83 ++- src/services/HttpService.ts | 4 +- 63 files changed, 2727 insertions(+), 918 deletions(-) create mode 100644 src/components/MiniComponents.tsx create mode 100644 src/components/Notification.tsx create mode 100644 src/components/ProcessInstanceListSaveAsReport.tsx create mode 100644 src/components/ProcessInstanceReportSearch.tsx create mode 100644 src/components/TableCellWithTimeAgoInWords.tsx create mode 100644 src/helpers/timeago.js diff --git a/.gitignore b/.gitignore index a694da80..8ff3e35c 100644 --- a/.gitignore +++ b/.gitignore @@ -29,4 +29,4 @@ cypress/screenshots /test*.json # Editors -.idea \ No newline at end of file +.idea diff --git a/cypress/e2e/process_groups.cy.js b/cypress/e2e/process_groups.cy.js index 629b18c6..bef0e560 100644 --- a/cypress/e2e/process_groups.cy.js +++ b/cypress/e2e/process_groups.cy.js @@ -19,25 +19,22 @@ describe('process-groups', () => { 
cy.url().should('include', `process-groups/${groupId}`); cy.contains(`Process Group: ${groupDisplayName}`); - cy.contains('Edit process group').click(); + cy.getBySel('edit-process-group-button').click(); cy.get('input[name=display_name]').clear().type(newGroupDisplayName); cy.contains('Submit').click(); cy.contains(`Process Group: ${newGroupDisplayName}`); - cy.contains('Edit process group').click(); - cy.get('input[name=display_name]').should( - 'have.value', - newGroupDisplayName - ); - - cy.contains('Delete').click(); + cy.getBySel('delete-process-group-button').click(); cy.contains('Are you sure'); - cy.getBySel('modal-confirmation-dialog').find('.cds--btn--danger').click(); + cy.getBySel('delete-process-group-button-modal-confirmation-dialog') + .find('.cds--btn--danger') + .click(); cy.url().should('include', `process-groups`); cy.contains(groupId).should('not.exist'); }); - it('can paginate items', () => { - cy.basicPaginationTest(); - }); + // process groups no longer has pagination post-tiles + // it('can paginate items', () => { + // cy.basicPaginationTest(); + // }); }); diff --git a/cypress/e2e/process_instances.cy.js b/cypress/e2e/process_instances.cy.js index 09cad298..4d33d13f 100644 --- a/cypress/e2e/process_instances.cy.js +++ b/cypress/e2e/process_instances.cy.js @@ -3,9 +3,9 @@ import { DATE_FORMAT, PROCESS_STATUSES } from '../../src/config'; const filterByDate = (fromDate) => { cy.get('#date-picker-start-from').clear().type(format(fromDate, DATE_FORMAT)); - cy.contains('Start date from').click(); + cy.contains('Start date to').click(); cy.get('#date-picker-end-from').clear().type(format(fromDate, DATE_FORMAT)); - cy.contains('End date from').click(); + cy.contains('End date to').click(); cy.getBySel('filter-button').click(); }; @@ -53,9 +53,9 @@ const updateBpmnPythonScriptWithMonaco = ( cy.get('.monaco-editor textarea:first') .click() .focused() // change subject to currently focused element - // .type('{ctrl}a') // had been doing it this way, but it turns out to be flaky relative to clear() .clear() - .type(pythonScript, { delay: 30 }); + // long delay to ensure cypress isn't competing with monaco auto complete stuff + .type(pythonScript, { delay: 120 }); cy.contains('Close').click(); // wait for a little bit for the xml to get set before saving @@ -119,28 +119,28 @@ describe('process-instances', () => { cy.runPrimaryBpmnFile(); }); - it('can create a new instance and can modify with monaco text editor', () => { - // leave off the ending double quote since manco adds it - const originalPythonScript = 'person = "Kevin'; - const newPythonScript = 'person = "Mike'; - - const bpmnFile = 'process_model_one.bpmn'; - - // Change bpmn - cy.getBySel('files-accordion').click(); - cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click(); - cy.contains(`Process Model File: ${bpmnFile}`); - updateBpmnPythonScriptWithMonaco(newPythonScript); - cy.contains('acceptance-tests-model-1').click(); - cy.runPrimaryBpmnFile(); - - cy.getBySel('files-accordion').click(); - cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click(); - cy.contains(`Process Model File: ${bpmnFile}`); - updateBpmnPythonScriptWithMonaco(originalPythonScript); - cy.contains('acceptance-tests-model-1').click(); - cy.runPrimaryBpmnFile(); - }); + // it('can create a new instance and can modify with monaco text editor', () => { + // // leave off the ending double quote since manco adds it + // const originalPythonScript = 'person = "Kevin'; + // const newPythonScript = 'person = "Mike'; + // + // const 
bpmnFile = 'process_model_one.bpmn'; + // + // // Change bpmn + // cy.getBySel('files-accordion').click(); + // cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click(); + // cy.contains(`Process Model File: ${bpmnFile}`); + // updateBpmnPythonScriptWithMonaco(newPythonScript); + // cy.contains('acceptance-tests-model-1').click(); + // cy.runPrimaryBpmnFile(); + // + // cy.getBySel('files-accordion').click(); + // cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click(); + // cy.contains(`Process Model File: ${bpmnFile}`); + // updateBpmnPythonScriptWithMonaco(originalPythonScript); + // cy.contains('acceptance-tests-model-1').click(); + // cy.runPrimaryBpmnFile(); + // }); it('can paginate items', () => { // make sure we have some process instances @@ -174,13 +174,12 @@ describe('process-instances', () => { if (!['all', 'waiting'].includes(processStatus)) { cy.get(statusSelect).click(); cy.get(statusSelect).contains(processStatus).click(); - // close the dropdown again - cy.get(statusSelect).click(); cy.getBySel('filter-button').click(); + // FIXME: wait a little bit for the useEffects to be able to fully set processInstanceFilters + cy.wait(1000); + cy.url().should('include', `status=${processStatus}`); cy.assertAtLeastOneItemInPaginatedResults(); - cy.getBySel(`process-instance-status-${processStatus}`).contains( - processStatus - ); + cy.getBySel(`process-instance-status-${processStatus}`); // there should really only be one, but in CI there are sometimes more cy.get('div[aria-label="Clear all selected items"]:first').click(); } diff --git a/cypress/e2e/process_models.cy.js b/cypress/e2e/process_models.cy.js index 705f6011..4fd1b481 100644 --- a/cypress/e2e/process_models.cy.js +++ b/cypress/e2e/process_models.cy.js @@ -1,3 +1,5 @@ +import { modifyProcessIdentifierForPathParam } from '../../src/helpers'; + describe('process-models', () => { beforeEach(() => { cy.login(); @@ -9,37 +11,48 @@ describe('process-models', () => { it('can perform crud operations', () => { const uuid = () => Cypress._.random(0, 1e6); const id = uuid(); - const groupId = 'acceptance-tests-group-one'; + const groupId = 'misc/acceptance-tests-group-one'; const groupDisplayName = 'Acceptance Tests Group One'; const modelDisplayName = `Test Model 2 ${id}`; - const newModelDisplayName = `${modelDisplayName} edited`; const modelId = `test-model-2-${id}`; + const newModelDisplayName = `${modelDisplayName} edited`; + cy.contains('99-Shared Resources').click(); + cy.wait(500); cy.contains(groupDisplayName).click(); cy.createModel(groupId, modelId, modelDisplayName); - cy.url().should('include', `process-models/${groupId}:${modelId}`); + cy.url().should( + 'include', + `process-models/${modifyProcessIdentifierForPathParam( + groupId + )}:${modelId}` + ); cy.contains(`Process Model: ${modelDisplayName}`); - cy.contains('Edit process model').click(); + cy.getBySel('edit-process-model-button').click(); cy.get('input[name=display_name]').clear().type(newModelDisplayName); cy.contains('Submit').click(); - cy.contains(`Process Model: ${groupId}/${modelId}`); - cy.contains('Submit').click(); - cy.get('input[name=display_name]').should( - 'have.value', - newModelDisplayName - ); + cy.contains(`Process Model: ${newModelDisplayName}`); - cy.contains('Delete').click(); + // go back to process model show by clicking on the breadcrumb + cy.contains(modelId).click(); + + cy.getBySel('delete-process-model-button').click(); cy.contains('Are you sure'); - 
cy.getBySel('modal-confirmation-dialog').find('.cds--btn--danger').click(); - cy.url().should('include', `process-groups/${groupId}`); + cy.getBySel('delete-process-model-button-modal-confirmation-dialog') + .find('.cds--btn--danger') + .click(); + cy.url().should( + 'include', + `process-groups/${modifyProcessIdentifierForPathParam(groupId)}` + ); cy.contains(modelId).should('not.exist'); }); it('can create new bpmn, dmn, and json files', () => { const uuid = () => Cypress._.random(0, 1e6); const id = uuid(); - const groupId = 'acceptance-tests-group-one'; + const directParentGroupId = 'acceptance-tests-group-one'; + const groupId = `misc/${directParentGroupId}`; const groupDisplayName = 'Acceptance Tests Group One'; const modelDisplayName = `Test Model 2 ${id}`; const modelId = `test-model-2-${id}`; @@ -48,13 +61,19 @@ describe('process-models', () => { const dmnFileName = `dmn_test_file_${id}`; const jsonFileName = `json_test_file_${id}`; + cy.contains('99-Shared Resources').click(); + cy.wait(500); cy.contains(groupDisplayName).click(); cy.createModel(groupId, modelId, modelDisplayName); - cy.contains(groupId).click(); - cy.contains(modelId).click(); - cy.url().should('include', `process-models/${groupId}:${modelId}`); + cy.contains(directParentGroupId).click(); + cy.contains(modelDisplayName).click(); + cy.url().should( + 'include', + `process-models/${modifyProcessIdentifierForPathParam( + groupId + )}:${modelId}` + ); cy.contains(`Process Model: ${modelDisplayName}`); - cy.getBySel('files-accordion').click(); cy.contains(`${bpmnFileName}.bpmn`).should('not.exist'); cy.contains(`${dmnFileName}.dmn`).should('not.exist'); cy.contains(`${jsonFileName}.json`).should('not.exist'); @@ -73,7 +92,7 @@ describe('process-models', () => { cy.contains(`Process Model File: ${bpmnFileName}`); cy.contains(modelId).click(); cy.contains(`Process Model: ${modelDisplayName}`); - cy.getBySel('files-accordion').click(); + // cy.getBySel('files-accordion').click(); cy.contains(`${bpmnFileName}.bpmn`).should('exist'); // add new dmn file @@ -81,13 +100,17 @@ describe('process-models', () => { cy.contains(/^Process Model File$/); cy.get('g[data-element-id=decision_1]').click().should('exist'); cy.contains('General').click(); + cy.get('#bio-properties-panel-id') + .clear() + .type('decision_acceptance_test_1'); + cy.contains('General').click(); cy.contains('Save').click(); cy.get('input[name=file_name]').type(dmnFileName); cy.contains('Save Changes').click(); cy.contains(`Process Model File: ${dmnFileName}`); cy.contains(modelId).click(); cy.contains(`Process Model: ${modelDisplayName}`); - cy.getBySel('files-accordion').click(); + // cy.getBySel('files-accordion').click(); cy.contains(`${dmnFileName}.dmn`).should('exist'); // add new json file @@ -103,35 +126,47 @@ describe('process-models', () => { cy.wait(500); cy.contains(modelId).click(); cy.contains(`Process Model: ${modelDisplayName}`); - cy.getBySel('files-accordion').click(); + // cy.getBySel('files-accordion').click(); cy.contains(`${jsonFileName}.json`).should('exist'); - cy.contains('Edit process model').click(); - cy.contains('Delete').click(); + cy.getBySel('delete-process-model-button').click(); cy.contains('Are you sure'); - cy.getBySel('modal-confirmation-dialog').find('.cds--btn--danger').click(); - cy.url().should('include', `process-groups/${groupId}`); + cy.getBySel('delete-process-model-button-modal-confirmation-dialog') + .find('.cds--btn--danger') + .click(); + cy.url().should( + 'include', + 
`process-groups/${modifyProcessIdentifierForPathParam(groupId)}` + ); cy.contains(modelId).should('not.exist'); + cy.contains(modelDisplayName).should('not.exist'); }); it('can upload and run a bpmn file', () => { const uuid = () => Cypress._.random(0, 1e6); const id = uuid(); - const groupId = 'acceptance-tests-group-one'; + const directParentGroupId = 'acceptance-tests-group-one'; + const groupId = `misc/${directParentGroupId}`; const groupDisplayName = 'Acceptance Tests Group One'; const modelDisplayName = `Test Model 2 ${id}`; const modelId = `test-model-2-${id}`; cy.contains('Add a process group'); + cy.contains('99-Shared Resources').click(); + cy.wait(500); cy.contains(groupDisplayName).click(); cy.createModel(groupId, modelId, modelDisplayName); - cy.contains(`${groupId}`).click(); + cy.contains(`${directParentGroupId}`).click(); cy.contains('Add a process model'); - cy.contains(modelId).click(); - cy.url().should('include', `process-models/${groupId}:${modelId}`); + cy.contains(modelDisplayName).click(); + cy.url().should( + 'include', + `process-models/${modifyProcessIdentifierForPathParam( + groupId + )}:${modelId}` + ); cy.contains(`Process Model: ${modelDisplayName}`); - cy.getBySel('files-accordion').click(); cy.getBySel('upload-file-button').click(); cy.contains('Add file').selectFile( 'cypress/fixtures/test_bpmn_file_upload.bpmn' @@ -142,31 +177,41 @@ describe('process-models', () => { .click(); cy.runPrimaryBpmnFile(); - cy.getBySel('process-instance-list-link').click(); + // cy.getBySel('process-instance-list-link').click(); cy.getBySel('process-instance-show-link').click(); cy.getBySel('process-instance-delete').click(); cy.contains('Are you sure'); - cy.getBySel('modal-confirmation-dialog').find('.cds--btn--danger').click(); + cy.getBySel('process-instance-delete-modal-confirmation-dialog') + .find('.cds--btn--danger') + .click(); // in breadcrumb cy.contains(modelId).click(); - cy.contains('Edit process model').click(); - cy.contains('Delete').click(); + cy.getBySel('delete-process-model-button').click(); cy.contains('Are you sure'); - cy.getBySel('modal-confirmation-dialog').find('.cds--btn--danger').click(); - cy.url().should('include', `process-groups/${groupId}`); + cy.getBySel('delete-process-model-button-modal-confirmation-dialog') + .find('.cds--btn--danger') + .click(); + cy.url().should( + 'include', + `process-groups/${modifyProcessIdentifierForPathParam(groupId)}` + ); cy.contains(modelId).should('not.exist'); + cy.contains(modelDisplayName).should('not.exist'); }); - it('can paginate items', () => { - cy.contains('Acceptance Tests Group One').click(); - cy.basicPaginationTest(); - }); + // process models no longer has pagination post-tiles + // it.only('can paginate items', () => { + // cy.contains('99-Shared Resources').click(); + // cy.wait(500); + // cy.contains('Acceptance Tests Group One').click(); + // cy.basicPaginationTest(); + // }); it('can allow searching for model', () => { cy.getBySel('process-model-selection').click().type('model-3'); cy.contains('acceptance-tests-group-one/acceptance-tests-model-3').click(); - cy.contains('List').click(); + cy.contains('Acceptance Tests Model 3'); }); }); diff --git a/cypress/e2e/tasks.cy.js b/cypress/e2e/tasks.cy.js index 9d5b836a..e58566b8 100644 --- a/cypress/e2e/tasks.cy.js +++ b/cypress/e2e/tasks.cy.js @@ -1,18 +1,27 @@ const submitInputIntoFormField = (taskName, fieldKey, fieldValue) => { - cy.contains(`Task: ${taskName}`); + cy.contains(`Task: ${taskName}`, { timeout: 10000 }); 
cy.get(fieldKey).clear().type(fieldValue); cy.contains('Submit').click(); }; const checkFormFieldIsReadOnly = (formName, fieldKey) => { cy.contains(`Task: ${formName}`); - cy.get(fieldKey).invoke('attr', 'readonly').should('exist'); + cy.get(fieldKey).invoke('attr', 'disabled').should('exist'); }; const checkTaskHasClass = (taskName, className) => { cy.get(`g[data-element-id=${taskName}]`).should('have.class', className); }; +const kickOffModelWithForm = (modelId, formName) => { + cy.navigateToProcessModel( + 'Acceptance Tests Group One', + 'Acceptance Tests Model 2', + 'acceptance-tests-model-2' + ); + cy.runPrimaryBpmnFile(true); +}; + describe('tasks', () => { beforeEach(() => { cy.login(); @@ -21,7 +30,6 @@ describe('tasks', () => { cy.logout(); }); - // TODO: need to fix the next_task thing to make this pass it('can complete and navigate a form', () => { const groupDisplayName = 'Acceptance Tests Group One'; const modelId = `acceptance-tests-model-2`; @@ -30,11 +38,7 @@ describe('tasks', () => { const activeTaskClassName = 'active-task-highlight'; cy.navigateToProcessModel(groupDisplayName, modelDisplayName, modelId); - - // avoid reloading so we can click on the task link that appears on running the process instance - cy.runPrimaryBpmnFile(false); - - cy.contains('my task').click(); + cy.runPrimaryBpmnFile(true); submitInputIntoFormField( 'get_user_generated_number_one', @@ -59,7 +63,6 @@ describe('tasks', () => { '#root_user_generated_number_1' ); - cy.getBySel('form-nav-form3').should('have.text', 'form3 - Current'); cy.getBySel('form-nav-form3').click(); submitInputIntoFormField( 'get_user_generated_number_three', @@ -111,18 +114,12 @@ describe('tasks', () => { }); it('can paginate items', () => { - cy.navigateToProcessModel( - 'Acceptance Tests Group One', - 'Acceptance Tests Model 2', - 'acceptance-tests-model-2' - ); - // make sure we have some tasks - cy.runPrimaryBpmnFile(); - cy.runPrimaryBpmnFile(); - cy.runPrimaryBpmnFile(); - cy.runPrimaryBpmnFile(); - cy.runPrimaryBpmnFile(); + kickOffModelWithForm(); + kickOffModelWithForm(); + kickOffModelWithForm(); + kickOffModelWithForm(); + kickOffModelWithForm(); cy.navigateToHome(); cy.basicPaginationTest(); diff --git a/cypress/support/commands.js b/cypress/support/commands.js index 40074518..f0034168 100644 --- a/cypress/support/commands.js +++ b/cypress/support/commands.js @@ -1,4 +1,5 @@ import { string } from 'prop-types'; +import { modifyProcessIdentifierForPathParam } from '../../src/helpers'; // *********************************************** // This example commands.js shows you how to @@ -31,9 +32,8 @@ Cypress.Commands.add('getBySel', (selector, ...args) => { }); Cypress.Commands.add('navigateToHome', () => { - cy.get('button[aria-label="Open menu"]').click(); + cy.getBySel('header-menu-expand-button').click(); cy.getBySel('side-nav-items').contains('Home').click(); - // cy.getBySel('nav-home').click(); }); Cypress.Commands.add('navigateToAdmin', () => { @@ -76,27 +76,39 @@ Cypress.Commands.add('createModel', (groupId, modelId, modelDisplayName) => { cy.get('input[name=id]').should('have.value', modelId); cy.contains('Submit').click(); - cy.url().should('include', `process-models/${groupId}:${modelId}`); + cy.url().should( + 'include', + `process-models/${modifyProcessIdentifierForPathParam(groupId)}:${modelId}` + ); cy.contains(`Process Model: ${modelDisplayName}`); }); -Cypress.Commands.add('runPrimaryBpmnFile', (reload = true) => { - cy.contains('Run').click(); - cy.contains(/Process Instance.*kicked off/); - if 
(reload) { - cy.reload(true); - cy.contains(/Process Instance.*kicked off/).should('not.exist'); +Cypress.Commands.add( + 'runPrimaryBpmnFile', + (expectAutoRedirectToHumanTask = false) => { + cy.contains('Run').click(); + if (expectAutoRedirectToHumanTask) { + // the url changes immediately, so also make sure we get some content from the next page, "Task:", or else when we try to interact with the page, it'll re-render and we'll get an error with cypress. + cy.url().should('include', `/tasks/`); + cy.contains('Task: '); + } else { + cy.contains(/Process Instance.*kicked off/); + cy.reload(true); + cy.contains(/Process Instance.*kicked off/).should('not.exist'); + } } -}); +); Cypress.Commands.add( 'navigateToProcessModel', (groupDisplayName, modelDisplayName, modelIdentifier) => { cy.navigateToAdmin(); + cy.contains('99-Shared Resources').click(); + cy.contains(`Process Group: 99-Shared Resources`, { timeout: 10000 }); cy.contains(groupDisplayName).click(); cy.contains(`Process Group: ${groupDisplayName}`); // https://stackoverflow.com/q/51254946/6090676 - cy.getBySel('process-model-show-link').contains(modelIdentifier).click(); + cy.getBySel('process-model-show-link').contains(modelDisplayName).click(); cy.contains(`Process Model: ${modelDisplayName}`); } ); @@ -120,13 +132,3 @@ Cypress.Commands.add('assertAtLeastOneItemInPaginatedResults', () => { Cypress.Commands.add('assertNoItemInPaginatedResults', () => { cy.contains(/\b0–0 of 0 items/); }); - -Cypress.Commands.add('modifyProcessModelPath', (path) => { - path.replace('/', ':'); - return path; -}); - -Cypress.Commands.add('modifyProcessModelPath', (path) => { - path.replace('/', ':'); - return path; -}); diff --git a/package-lock.json b/package-lock.json index f31017c8..ba233998 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7980,7 +7980,7 @@ }, "node_modules/bpmn-js-spiffworkflow": { "version": "0.0.8", - "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#e92f48da7cb4416310af71bb1699caaca87324cd", + "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#aca23dc56e5d37aa1ed0a3cf11acb55f76a36da7", "license": "MIT", "dependencies": { "inherits": "^2.0.4", @@ -37138,7 +37138,7 @@ } }, "bpmn-js-spiffworkflow": { - "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#e92f48da7cb4416310af71bb1699caaca87324cd", + "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#aca23dc56e5d37aa1ed0a3cf11acb55f76a36da7", "from": "bpmn-js-spiffworkflow@sartography/bpmn-js-spiffworkflow#main", "requires": { "inherits": "^2.0.4", diff --git a/public/index.html b/public/index.html index 8e5b00b0..ae3a2307 100644 --- a/public/index.html +++ b/public/index.html @@ -41,4 +41,3 @@ --> - diff --git a/src/components/ButtonWithConfirmation.tsx b/src/components/ButtonWithConfirmation.tsx index af2ec5eb..f8a56b25 100644 --- a/src/components/ButtonWithConfirmation.tsx +++ b/src/components/ButtonWithConfirmation.tsx @@ -46,7 +46,7 @@ export default function ButtonWithConfirmation({ + {processModelDisplayName} + + ); +} diff --git a/src/components/MyCompletedInstances.tsx b/src/components/MyCompletedInstances.tsx index fe665295..2d0fe26a 100644 --- a/src/components/MyCompletedInstances.tsx +++ b/src/components/MyCompletedInstances.tsx @@ -8,6 +8,8 @@ export default function MyCompletedInstances() { filtersEnabled={false} paginationQueryParamPrefix={paginationQueryParamPrefix} perPageOptions={[2, 5, 25]} + reportIdentifier="system_report_instances_initiated_by_me" + 
showReports={false} /> ); } diff --git a/src/components/NavigationBar.tsx b/src/components/NavigationBar.tsx index cc7137fb..47e0de99 100644 --- a/src/components/NavigationBar.tsx +++ b/src/components/NavigationBar.tsx @@ -74,7 +74,9 @@ export default function NavigationBar() { if (UserService.isLoggedIn()) { return ( <> - {UserService.getUsername()} + + {UserService.getUsername()} + {configurationElement()}
; return getHeaderLabel((column as any).Header); }); - const formatProcessInstanceId = (row: any, id: any) => { - const modifiedProcessModelId: String = modifyProcessModelPath( - row.process_model_identifier - ); + const formatProcessInstanceId = (row: ProcessInstance, id: number) => { + const modifiedProcessModelId: String = + modifyProcessIdentifierForPathParam(row.process_model_identifier); return ( {id} @@ -602,12 +1079,15 @@ export default function ProcessInstanceListTable({ const formatProcessModelIdentifier = (_row: any, identifier: any) => { return ( {identifier} ); }; + const formatSecondsForDisplay = (_row: any, seconds: any) => { return convertSecondsToFormattedDateTime(seconds) || '-'; }; @@ -615,14 +1095,16 @@ export default function ProcessInstanceListTable({ return value; }; - const columnFormatters: Record = { + const reportColumnFormatters: Record = { id: formatProcessInstanceId, process_model_identifier: formatProcessModelIdentifier, + process_model_display_name: FormatProcessModelDisplayName, start_in_seconds: formatSecondsForDisplay, end_in_seconds: formatSecondsForDisplay, }; const formattedColumn = (row: any, column: any) => { - const formatter = columnFormatters[column.accessor] ?? defaultFormatter; + const formatter = + reportColumnFormatters[column.accessor] ?? defaultFormatter; const value = row[column.accessor]; if (column.accessor === 'status') { return ( @@ -635,7 +1117,7 @@ export default function ProcessInstanceListTable({ }; const rows = processInstances.map((row: any) => { - const currentRow = (reportMetadata as any).columns.map((column: any) => { + const currentRow = reportColumns().map((column: any) => { return formattedColumn(row, column); }); return {currentRow}; @@ -664,6 +1146,25 @@ export default function ProcessInstanceListTable({ setShowFilterOptions(!showFilterOptions); }; + const reportSearchComponent = () => { + if (showReports) { + const columns = [ + + + , + ]; + return ( + + {columns} + + ); + } + return null; + }; + const filterComponent = () => { if (!filtersEnabled) { return null; @@ -671,14 +1172,17 @@ export default function ProcessInstanceListTable({ return ( <> + + {reportSearchComponent()} + + + ); + } return ( - ); } diff --git a/src/components/ProcessModelForm.tsx b/src/components/ProcessModelForm.tsx index 9725f242..7cfd4d61 100644 --- a/src/components/ProcessModelForm.tsx +++ b/src/components/ProcessModelForm.tsx @@ -1,10 +1,20 @@ import { useState } from 'react'; import { useNavigate } from 'react-router-dom'; +import { + Button, + ButtonSet, + Form, + Stack, + TextInput, + Grid, + Column, + // @ts-ignore +} from '@carbon/react'; // @ts-ignore -import { Button, ButtonSet, Form, Stack, TextInput } from '@carbon/react'; -import { modifyProcessModelPath, slugifyString } from '../helpers'; +import { AddAlt, TrashCan } from '@carbon/icons-react'; +import { modifyProcessIdentifierForPathParam, slugifyString } from '../helpers'; import HttpService from '../services/HttpService'; -import { ProcessModel } from '../interfaces'; +import { MetadataExtractionPath, ProcessModel } from '../interfaces'; type OwnProps = { mode: string; @@ -23,13 +33,13 @@ export default function ProcessModelForm({ const [idHasBeenUpdatedByUser, setIdHasBeenUpdatedByUser] = useState(false); const [displayNameInvalid, setDisplayNameInvalid] = useState(false); + useState(false); const navigate = useNavigate(); const navigateToProcessModel = (result: ProcessModel) => { if ('id' in result) { - const modifiedProcessModelPathFromResult = modifyProcessModelPath( - 
result.id - ); + const modifiedProcessModelPathFromResult = + modifyProcessIdentifierForPathParam(result.id); navigate(`/admin/process-models/${modifiedProcessModelPathFromResult}`); } }; @@ -52,14 +62,20 @@ export default function ProcessModelForm({ if (hasErrors) { return; } - const path = `/process-models/${processGroupId}`; + let path = `/process-models/${modifyProcessIdentifierForPathParam( + processGroupId || '' + )}`; let httpMethod = 'POST'; if (mode === 'edit') { httpMethod = 'PUT'; + path = `/process-models/${modifyProcessIdentifierForPathParam( + processModel.id + )}`; } const postBody = { display_name: processModel.display_name, description: processModel.description, + metadata_extraction_paths: processModel.metadata_extraction_paths, }; if (mode === 'new') { Object.assign(postBody, { @@ -83,6 +99,80 @@ export default function ProcessModelForm({ setProcessModel(processModelToCopy); }; + const metadataExtractionPathForm = ( + index: number, + metadataExtractionPath: MetadataExtractionPath + ) => { + return ( + + + { + const cep: MetadataExtractionPath[] = + processModel.metadata_extraction_paths || []; + const newMeta = { ...metadataExtractionPath }; + newMeta.key = event.target.value; + cep[index] = newMeta; + updateProcessModel({ metadata_extraction_paths: cep }); + }} + /> + + + { + const cep: MetadataExtractionPath[] = + processModel.metadata_extraction_paths || []; + const newMeta = { ...metadataExtractionPath }; + newMeta.path = event.target.value; + cep[index] = newMeta; + updateProcessModel({ metadata_extraction_paths: cep }); + }} + /> + + + + + + ); + return textInputs; }; diff --git a/src/components/ProcessModelListTiles.tsx b/src/components/ProcessModelListTiles.tsx index a10fd754..1412635c 100644 --- a/src/components/ProcessModelListTiles.tsx +++ b/src/components/ProcessModelListTiles.tsx @@ -5,15 +5,24 @@ import { // @ts-ignore } from '@carbon/react'; import HttpService from '../services/HttpService'; -import { ProcessModel, ProcessInstance } from '../interfaces'; -import { modifyProcessModelPath, truncateString } from '../helpers'; +import { ProcessModel, ProcessInstance, ProcessGroup } from '../interfaces'; +import { + modifyProcessIdentifierForPathParam, + truncateString, +} from '../helpers'; import ProcessInstanceRun from './ProcessInstanceRun'; type OwnProps = { headerElement?: ReactElement; + processGroup?: ProcessGroup; + checkPermissions?: boolean; }; -export default function ProcessModelListTiles({ headerElement }: OwnProps) { +export default function ProcessModelListTiles({ + headerElement, + processGroup, + checkPermissions = true, +}: OwnProps) { const [searchParams] = useSearchParams(); const [processModels, setProcessModels] = useState( null @@ -25,13 +34,18 @@ export default function ProcessModelListTiles({ headerElement }: OwnProps) { const setProcessModelsFromResult = (result: any) => { setProcessModels(result.results); }; - // only allow 10 for now until we get the backend only returnin certain models for user execution - const queryParams = '?per_page=10'; + // only allow 10 for now until we get the backend only returning certain models for user execution + let queryParams = '?per_page=20'; + if (processGroup) { + queryParams = `${queryParams}&process_group_identifier=${processGroup.id}`; + } else { + queryParams = `${queryParams}&recursive=true&filter_runnable_by_user=true`; + } HttpService.makeCallToBackend({ path: `/process-models${queryParams}`, successCallback: setProcessModelsFromResult, }); - }, [searchParams]); + }, [searchParams, 
processGroup]); const processInstanceRunResultTag = () => { if (processInstance) { @@ -40,9 +54,9 @@ export default function ProcessModelListTiles({ headerElement }: OwnProps) {

Process Instance {processInstance.id} kicked off ( view @@ -61,19 +75,29 @@ export default function ProcessModelListTiles({ headerElement }: OwnProps) { displayText = (processModels || []).map((row: ProcessModel) => { return (

-
{row.display_name}
+

- {truncateString(row.description || '', 25)} + {truncateString(row.description || '', 100)}

diff --git a/src/components/ProcessModelSearch.tsx b/src/components/ProcessModelSearch.tsx index 8d5ca90b..8a3c0b9f 100644 --- a/src/components/ProcessModelSearch.tsx +++ b/src/components/ProcessModelSearch.tsx @@ -35,7 +35,7 @@ export default function ProcessModelSearch({ if (processModel) { return `${processModel.id} (${truncateString( processModel.display_name, - 20 + 75 )})`; } return null; diff --git a/src/components/ProcessSearch.tsx b/src/components/ProcessSearch.tsx index 75ee69b2..ff0aa97d 100644 --- a/src/components/ProcessSearch.tsx +++ b/src/components/ProcessSearch.tsx @@ -41,7 +41,7 @@ export default function ProcessSearch({ if (process) { return `${process.display_name} (${truncateString( process.identifier, - 20 + 75 )})`; } return null; diff --git a/src/components/ReactDiagramEditor.tsx b/src/components/ReactDiagramEditor.tsx index 956ff6c6..11839e9c 100644 --- a/src/components/ReactDiagramEditor.tsx +++ b/src/components/ReactDiagramEditor.tsx @@ -429,7 +429,7 @@ export default function ReactDiagramEditor({ fetch(urlToUse) .then((response) => response.text()) .then((text) => { - const processId = `Proccess_${makeid(7)}`; + const processId = `Process_${makeid(7)}`; const newText = text.replace('{{PROCESS_ID}}', processId); setDiagramXMLString(newText); }) @@ -569,7 +569,7 @@ export default function ReactDiagramEditor({ a={targetUris.processModelFileShowPath} ability={ability} > - + ); diff --git a/src/components/TableCellWithTimeAgoInWords.tsx b/src/components/TableCellWithTimeAgoInWords.tsx new file mode 100644 index 00000000..9952c056 --- /dev/null +++ b/src/components/TableCellWithTimeAgoInWords.tsx @@ -0,0 +1,17 @@ +// @ts-ignore +import { TimeAgo } from '../helpers/timeago'; +import { convertSecondsToFormattedDateTime } from '../helpers'; + +type OwnProps = { + timeInSeconds: number; +}; + +export default function TableCellWithTimeAgoInWords({ + timeInSeconds, +}: OwnProps) { + return ( +
+ ); +} diff --git a/src/components/TasksForMyOpenProcesses.tsx b/src/components/TasksForMyOpenProcesses.tsx index 002f7408..deb2030e 100644 --- a/src/components/TasksForMyOpenProcesses.tsx +++ b/src/components/TasksForMyOpenProcesses.tsx @@ -6,13 +6,17 @@ import PaginationForTable from './PaginationForTable'; import { convertSecondsToFormattedDateTime, getPageInfoFromSearchParams, - modifyProcessModelPath, + modifyProcessIdentifierForPathParam, + refreshAtInterval, } from '../helpers'; import HttpService from '../services/HttpService'; import { PaginationObject } from '../interfaces'; +import TableCellWithTimeAgoInWords from './TableCellWithTimeAgoInWords'; const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5; const paginationQueryParamPrefix = 'tasks_for_my_open_processes'; +const REFRESH_INTERVAL = 5; +const REFRESH_TIMEOUT = 600; export default function MyOpenProcesses() { const [searchParams] = useSearchParams(); @@ -20,45 +24,50 @@ export default function MyOpenProcesses() { const [pagination, setPagination] = useState(null); useEffect(() => { - const { page, perPage } = getPageInfoFromSearchParams( - searchParams, - PER_PAGE_FOR_TASKS_ON_HOME_PAGE, - undefined, - paginationQueryParamPrefix - ); - const setTasksFromResult = (result: any) => { - setTasks(result.results); - setPagination(result.pagination); + const getTasks = () => { + const { page, perPage } = getPageInfoFromSearchParams( + searchParams, + PER_PAGE_FOR_TASKS_ON_HOME_PAGE, + undefined, + paginationQueryParamPrefix + ); + const setTasksFromResult = (result: any) => { + setTasks(result.results); + setPagination(result.pagination); + }; + HttpService.makeCallToBackend({ + path: `/tasks/for-my-open-processes?per_page=${perPage}&page=${page}`, + successCallback: setTasksFromResult, + }); }; - HttpService.makeCallToBackend({ - path: `/tasks/for-my-open-processes?per_page=${perPage}&page=${page}`, - successCallback: setTasksFromResult, - }); + getTasks(); + refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, getTasks); }, [searchParams]); const buildTable = () => { const rows = tasks.map((row) => { const rowToUse = row as any; const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`; - const modifiedProcessModelIdentifier = modifyProcessModelPath( - rowToUse.process_model_identifier - ); + const modifiedProcessModelIdentifier = + modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier); return ( - - + - - - - - - - + + + + + + @@ -112,7 +117,11 @@ export default function MyOpenProcesses() { const tasksComponent = () => { if (pagination && pagination.total < 1) { - return null; + return ( +

+ There are no tasks for processes you started at this time. +

+ ); } const { page, perPage } = getPageInfoFromSearchParams( searchParams, @@ -121,22 +130,27 @@ export default function MyOpenProcesses() { paginationQueryParamPrefix ); return ( - <> -

Tasks for my open processes

- - + ); }; - if (pagination) { - return tasksComponent(); - } - return null; + return ( + <> +

My open instances

+

+ These tasks are for processes you started which are not complete. You + may not have an action to take at this time. See below for tasks waiting + on you. +

+ {tasksComponent()} + + ); } diff --git a/src/components/TasksWaitingForMe.tsx b/src/components/TasksWaitingForMe.tsx index 53079dd2..7d06b7a3 100644 --- a/src/components/TasksWaitingForMe.tsx +++ b/src/components/TasksWaitingForMe.tsx @@ -6,10 +6,11 @@ import PaginationForTable from './PaginationForTable'; import { convertSecondsToFormattedDateTime, getPageInfoFromSearchParams, - modifyProcessModelPath, + modifyProcessIdentifierForPathParam, } from '../helpers'; import HttpService from '../services/HttpService'; import { PaginationObject } from '../interfaces'; +import TableCellWithTimeAgoInWords from './TableCellWithTimeAgoInWords'; const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5; @@ -39,25 +40,26 @@ export default function TasksWaitingForMe() { const rows = tasks.map((row) => { const rowToUse = row as any; const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`; - const modifiedProcessModelIdentifier = modifyProcessModelPath( - rowToUse.process_model_identifier - ); + const modifiedProcessModelIdentifier = + modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier); return (
- - + - - - - - - - - + + + + + + + @@ -113,7 +111,11 @@ export default function TasksWaitingForMe() { const tasksComponent = () => { if (pagination && pagination.total < 1) { - return null; + return ( +

+ You have no task assignments at this time. +

+ ); } const { page, perPage } = getPageInfoFromSearchParams( searchParams, @@ -122,22 +124,26 @@ export default function TasksWaitingForMe() { 'tasks_waiting_for_me' ); return ( - <> -

Tasks waiting for me

- - + ); }; - if (pagination) { - return tasksComponent(); - } - return null; + return ( + <> +

Tasks waiting for me

+

+ These processes are waiting on you to complete the next task. All are + processes created by others that are now actionable by you. +

+ {tasksComponent()} + + ); } diff --git a/src/components/TasksWaitingForMyGroups.tsx b/src/components/TasksWaitingForMyGroups.tsx index a60f826b..565cd4a5 100644 --- a/src/components/TasksWaitingForMyGroups.tsx +++ b/src/components/TasksWaitingForMyGroups.tsx @@ -6,59 +6,68 @@ import PaginationForTable from './PaginationForTable'; import { convertSecondsToFormattedDateTime, getPageInfoFromSearchParams, - modifyProcessModelPath, + modifyProcessIdentifierForPathParam, + refreshAtInterval, } from '../helpers'; import HttpService from '../services/HttpService'; import { PaginationObject } from '../interfaces'; +import TableCellWithTimeAgoInWords from './TableCellWithTimeAgoInWords'; const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5; const paginationQueryParamPrefix = 'tasks_waiting_for_my_groups'; +const REFRESH_INTERVAL = 5; +const REFRESH_TIMEOUT = 600; -export default function TasksForWaitingForMyGroups() { +export default function TasksWaitingForMyGroups() { const [searchParams] = useSearchParams(); const [tasks, setTasks] = useState([]); const [pagination, setPagination] = useState(null); useEffect(() => { - const { page, perPage } = getPageInfoFromSearchParams( - searchParams, - PER_PAGE_FOR_TASKS_ON_HOME_PAGE, - undefined, - paginationQueryParamPrefix - ); - const setTasksFromResult = (result: any) => { - setTasks(result.results); - setPagination(result.pagination); + const getTasks = () => { + const { page, perPage } = getPageInfoFromSearchParams( + searchParams, + PER_PAGE_FOR_TASKS_ON_HOME_PAGE, + undefined, + paginationQueryParamPrefix + ); + const setTasksFromResult = (result: any) => { + setTasks(result.results); + setPagination(result.pagination); + }; + HttpService.makeCallToBackend({ + path: `/tasks/for-my-groups?per_page=${perPage}&page=${page}`, + successCallback: setTasksFromResult, + }); }; - HttpService.makeCallToBackend({ - path: `/tasks/for-my-groups?per_page=${perPage}&page=${page}`, - successCallback: setTasksFromResult, - }); + getTasks(); + refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, getTasks); }, [searchParams]); const buildTable = () => { const rows = tasks.map((row) => { const rowToUse = row as any; const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`; - const modifiedProcessModelIdentifier = modifyProcessModelPath( - rowToUse.process_model_identifier - ); + const modifiedProcessModelIdentifier = + modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier); return (
- - + - - - - - - - - + + + + + + + @@ -114,7 +119,11 @@ export default function TasksForWaitingForMyGroups() { const tasksComponent = () => { if (pagination && pagination.total < 1) { - return null; + return ( +

+ Your groups have no task assignments at this time. +

+ ); } const { page, perPage } = getPageInfoFromSearchParams( searchParams, @@ -123,22 +132,25 @@ export default function TasksForWaitingForMyGroups() { paginationQueryParamPrefix ); return ( - <> -

Tasks waiting for my groups

- - + ); }; - if (pagination) { - return tasksComponent(); - } - return null; + return ( + <> +

Tasks waiting for my groups

+

+ This is a list of tasks for groups you belong to that can be completed + by any member of the group. +

+ {tasksComponent()} + + ); } diff --git a/src/config.tsx b/src/config.tsx index 5e7e96fe..b0816a39 100644 --- a/src/config.tsx +++ b/src/config.tsx @@ -14,6 +14,7 @@ export const PROCESS_STATUSES = [ 'complete', 'error', 'suspended', + 'terminated', ]; // with time: yyyy-MM-dd HH:mm:ss diff --git a/src/helpers.tsx b/src/helpers.tsx index ab97c8dc..6781ada9 100644 --- a/src/helpers.tsx +++ b/src/helpers.tsx @@ -174,18 +174,18 @@ export const getProcessModelFullIdentifierFromSearchParams = ( // https://stackoverflow.com/a/71352046/6090676 export const truncateString = (text: string, len: number) => { if (text.length > len && text.length > 0) { - return `${text.split(' ').slice(0, len).join(' ')} ...`; + return `${text.split('').slice(0, len).join('')} ...`; } return text; }; // Because of limitations in the way openapi defines parameters, we have to modify process models ids // which are basically paths to the models -export const modifyProcessModelPath = (path: string) => { +export const modifyProcessIdentifierForPathParam = (path: string) => { return path.replace(/\//g, ':') || ''; }; -export const unModifyProcessModelPath = (path: string) => { +export const unModifyProcessIdentifierForPathParam = (path: string) => { return path.replace(/:/g, '/') || ''; }; diff --git a/src/helpers/timeago.js b/src/helpers/timeago.js new file mode 100644 index 00000000..54d0969c --- /dev/null +++ b/src/helpers/timeago.js @@ -0,0 +1,62 @@ +/* eslint-disable no-restricted-syntax */ +// https://gist.github.com/caiotarifa/30ae974f2293c761f3139dd194abd9e5 +export const TimeAgo = (function awesomeFunc() { + const self = {}; + + // Public Methods + self.locales = { + prefix: '', + sufix: 'ago', + + seconds: 'less than a minute', + minute: 'about a minute', + minutes: '%d minutes', + hour: 'about an hour', + hours: 'about %d hours', + day: 'a day', + days: '%d days', + month: 'about a month', + months: '%d months', + year: 'about a year', + years: '%d years', + }; + + self.inWords = function inWords(timeAgo) { + const milliseconds = timeAgo * 1000; + const seconds = Math.floor( + (new Date() - parseInt(milliseconds, 10)) / 1000 + ); + const separator = this.locales.separator || ' '; + let words = this.locales.prefix + separator; + let interval = 0; + const intervals = { + year: seconds / 31536000, + month: seconds / 2592000, + day: seconds / 86400, + hour: seconds / 3600, + minute: seconds / 60, + }; + + let distance = this.locales.seconds; + + // eslint-disable-next-line guard-for-in + for (const key in intervals) { + interval = Math.floor(intervals[key]); + + if (interval > 1) { + distance = this.locales[`${key}s`]; + break; + } else if (interval === 1) { + distance = this.locales[key]; + break; + } + } + + distance = distance.replace(/%d/i, interval); + words += distance + separator + this.locales.sufix; + + return words.trim(); + }; + + return self; +})(); diff --git a/src/hooks/PermissionService.tsx b/src/hooks/PermissionService.tsx index e8a40b96..fad496a7 100644 --- a/src/hooks/PermissionService.tsx +++ b/src/hooks/PermissionService.tsx @@ -1,7 +1,7 @@ // We may need to update usage of Ability when we update. // They say they are going to rename PureAbility to Ability and remove the old class. 
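The TimeAgo helper added in src/helpers/timeago.js above takes a timestamp in epoch seconds (matching the backend's *_in_seconds fields) and renders it as relative words, which is how TableCellWithTimeAgoInWords uses it. A small usage sketch with made-up timestamps:

    import { TimeAgo } from '../helpers/timeago';

    // inWords multiplies its argument by 1000 internally, so pass seconds,
    // not milliseconds.
    const nowInSeconds = Math.floor(Date.now() / 1000);
    TimeAgo.inWords(nowInSeconds - 3600); // 'about an hour ago'
    TimeAgo.inWords(nowInSeconds - 90);   // 'about a minute ago'
    TimeAgo.inWords(nowInSeconds - 30);   // 'less than a minute ago'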
import { AbilityBuilder, Ability } from '@casl/ability'; -import { useContext, useEffect } from 'react'; +import { useContext, useEffect, useState } from 'react'; import { AbilityContext } from '../contexts/Can'; import { PermissionCheckResponseBody, PermissionsToCheck } from '../interfaces'; import HttpService from '../services/HttpService'; @@ -10,6 +10,7 @@ export const usePermissionFetcher = ( permissionsToCheck: PermissionsToCheck ) => { const ability = useContext(AbilityContext); + const [permissionsLoaded, setPermissionsLoaded] = useState(false); useEffect(() => { const processPermissionResult = (result: PermissionCheckResponseBody) => { @@ -34,15 +35,17 @@ export const usePermissionFetcher = ( } }); ability.update(rules); + setPermissionsLoaded(true); }; - - HttpService.makeCallToBackend({ - path: `/permissions-check`, - httpMethod: 'POST', - successCallback: processPermissionResult, - postBody: { requests_to_check: permissionsToCheck }, - }); + if (Object.keys(permissionsToCheck).length !== 0) { + HttpService.makeCallToBackend({ + path: `/permissions-check`, + httpMethod: 'POST', + successCallback: processPermissionResult, + postBody: { requests_to_check: permissionsToCheck }, + }); + } }); - return { ability }; + return { ability, permissionsLoaded }; }; diff --git a/src/hooks/UriListForPermissions.tsx b/src/hooks/UriListForPermissions.tsx index 9c61234b..f84465c8 100644 --- a/src/hooks/UriListForPermissions.tsx +++ b/src/hooks/UriListForPermissions.tsx @@ -1,20 +1,28 @@ +import { useMemo } from 'react'; import { useParams } from 'react-router-dom'; export const useUriListForPermissions = () => { const params = useParams(); - const targetUris = { - authenticationListPath: `/v1.0/authentications`, - messageInstanceListPath: '/v1.0/messages', - processGroupListPath: '/v1.0/process-groups', - processGroupShowPath: `/v1.0/process-groups/${params.process_group_id}`, - processInstanceActionPath: `/v1.0/process-models/${params.process_model_id}/process-instances`, - processInstanceListPath: '/v1.0/process-instances', - processModelCreatePath: `/v1.0/process-models/${params.process_group_id}`, - processModelFileCreatePath: `/v1.0/process-models/${params.process_model_id}/files`, - processModelFileShowPath: `/v1.0/process-models/${params.process_model_id}/files/${params.file_name}`, - processModelShowPath: `/v1.0/process-models/${params.process_model_id}`, - secretListPath: `/v1.0/secrets`, - }; + const targetUris = useMemo(() => { + return { + authenticationListPath: `/v1.0/authentications`, + messageInstanceListPath: '/v1.0/messages', + processGroupListPath: '/v1.0/process-groups', + processGroupShowPath: `/v1.0/process-groups/${params.process_group_id}`, + processInstanceCreatePath: `/v1.0/process-instances/${params.process_model_id}`, + processInstanceActionPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}`, + processInstanceListPath: '/v1.0/process-instances', + processInstanceLogListPath: `/v1.0/logs/${params.process_model_id}/${params.process_instance_id}`, + processInstanceReportListPath: '/v1.0/process-instances/reports', + processInstanceTaskListPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, + processModelCreatePath: `/v1.0/process-models/${params.process_group_id}`, + processModelFileCreatePath: `/v1.0/process-models/${params.process_model_id}/files`, + processModelFileShowPath: `/v1.0/process-models/${params.process_model_id}/files/${params.file_name}`, + processModelPublishPath: 
`/v1.0/process-models/${params.process_model_id}/publish`, + processModelShowPath: `/v1.0/process-models/${params.process_model_id}`, + secretListPath: `/v1.0/secrets`, + }; + }, [params]); return { targetUris }; }; diff --git a/src/index.css b/src/index.css index 0d267086..248a23d7 100644 --- a/src/index.css +++ b/src/index.css @@ -5,6 +5,15 @@ color: white; } +.megacondensed { + padding-left: 0px; +} + +/* defaults to 3rem, which isn't long sufficient for "elizabeth" */ +.cds--header__action.username-header-text { + width: 5rem; +} + h1 { font-weight: 400; font-size: 28px; @@ -60,6 +69,24 @@ h2 { color: black; } +/* match normal link colors */ +.cds--btn--ghost.button-link { + color: #0062fe; + padding-left: 0; +} +.cds--btn--ghost.button-link:visited { + color: #0062fe; + padding-left: 0; +} +.cds--btn--ghost.button-link:hover { + color: #0062fe; + padding-left: 0; +} +.cds--btn--ghost.button-link:visited:hover { + color: #0062fe; + padding-left: 0; +} + .cds--header__global .cds--btn--primary { background-color: #161616 } @@ -138,6 +165,26 @@ h1.with-icons { margin-bottom: 1em; } +.with-top-margin { + margin-top: 1em; +} + +.with-extra-top-margin { + margin-top: 1.3em; +} + +.with-tiny-top-margin { + margin-top: 4px; +} + +.with-large-bottom-margin { + margin-bottom: 3em; +} + +.with-tiny-bottom-margin { + margin-bottom: 4px; +} + .diagram-viewer-canvas { border:1px solid #000000; height:70vh; @@ -243,3 +290,83 @@ in on this with the react-jsonschema-form repo. This is just a patch fix to allo position: absolute; bottom: 1em; } + +.cds--tabs .cds--tabs__nav-link { + max-width: 20rem; +} + +.clear-left { + clear: left; +} + +td.actions-cell { + width: 1em; +} + +.no-results-message { + font-style: italic; + margin-left: 2em; + margin-top: 1em; + font-size: 14px; +} + +.data-table-description { + font-size: 14px; + line-height: 18px; + letter-spacing: 0.16px; + color: #525252; + margin-bottom: 1em; +} + +/* top and bottom margin since this is sort of the middle of three sections on the process model show page */ +.process-model-files-section { + margin: 2em 0; +} + +.filterIcon { + text-align: right; + padding-bottom: 10px; +} + +.cds--btn--ghost:not([disabled]) svg.red-icon { + fill: red; +} + +svg.green-icon { + fill: #198038; +} + +svg.notification-icon { + margin-right: 1rem; +} + +.failure-string { + color: red; +} + +.cds--btn--ghost.cds--btn--sm.button-tag-icon { + padding-left: 0; + padding-right: 0; + padding-top: 0; +} + +/* .no-wrap cds--label cds--label--inline cds--label--inline--md{ */ +.no-wrap .cds--label--inline{ + word-break: normal; +} + +.combo-box-in-modal { + height: 300px; +} + +.cds--btn.narrow-button { + max-width: 10rem; + min-width: 5rem; + word-break: normal; + +} + +/* lime green */ +.tag-type-green:hover { + background-color: #80ee90; +} diff --git a/src/interfaces.ts b/src/interfaces.ts index f4540f4f..079e4cdc 100644 --- a/src/interfaces.ts +++ b/src/interfaces.ts @@ -12,9 +12,8 @@ export interface RecentProcessModel { } export interface ProcessReference { - id: string; // The unique id of the process or decision table. name: string; // The process or decision Display name. 
- identifier: string; + identifier: string; // The unique id of the process display_name: string; process_group_id: string; process_model_id: string; @@ -39,6 +38,68 @@ export interface ProcessFile { export interface ProcessInstance { id: number; process_model_identifier: string; + process_model_display_name: string; +} + +export interface MessageCorrelationProperties { + [key: string]: string; +} + +export interface MessageCorrelations { + [key: string]: MessageCorrelationProperties; +} + +export interface MessageInstance { + id: number; + process_model_identifier: string; + process_model_display_name: string; + process_instance_id: number; + message_identifier: string; + message_type: string; + failure_cause: string; + status: string; + created_at_in_seconds: number; + message_correlations?: MessageCorrelations; +} + +export interface ReportFilter { + field_name: string; + field_value: string; + operator?: string; +} + +export interface ReportColumn { + Header: string; + accessor: string; + filterable: boolean; +} + +export interface ReportColumnForEditing extends ReportColumn { + filter_field_value: string; + filter_operator: string; +} + +export interface ReportMetadata { + columns: ReportColumn[]; + filter_by: ReportFilter[]; + order_by: string[]; +} + +export interface ProcessInstanceReport { + id: number; + identifier: string; + name: string; + report_metadata: ReportMetadata; +} + +export interface ProcessGroupLite { + id: string; + display_name: string; +} + +export interface MetadataExtractionPath { + key: string; + path: string; } export interface ProcessModel { @@ -47,6 +108,8 @@ export interface ProcessModel { display_name: string; primary_file_name: string; files: ProcessFile[]; + parent_groups?: ProcessGroupLite[]; + metadata_extraction_paths?: MetadataExtractionPath[]; } export interface ProcessGroup { @@ -55,10 +118,19 @@ export interface ProcessGroup { description?: string | null; process_models?: ProcessModel[]; process_groups?: ProcessGroup[]; + parent_groups?: ProcessGroupLite[]; } +export interface HotCrumbItemObject { + entityToExplode: ProcessModel | ProcessGroup | string; + entityType: string; + linkLastItem?: boolean; +} + +export type HotCrumbItemArray = [displayValue: string, url?: string]; + // tuple of display value and URL -export type HotCrumbItem = [displayValue: string, url?: string]; +export type HotCrumbItem = HotCrumbItemArray | HotCrumbItemObject; export interface ErrorForDisplay { message: string; diff --git a/src/routes/AdminRoutes.tsx b/src/routes/AdminRoutes.tsx index 91ae7ab0..da6cae35 100644 --- a/src/routes/AdminRoutes.tsx +++ b/src/routes/AdminRoutes.tsx @@ -71,11 +71,11 @@ export default function AdminRoutes() { element={} /> } /> } /> } /> } /> } /> diff --git a/src/routes/AuthenticationList.tsx b/src/routes/AuthenticationList.tsx index a249aa27..4f320df4 100644 --- a/src/routes/AuthenticationList.tsx +++ b/src/routes/AuthenticationList.tsx @@ -54,7 +54,7 @@ export default function AuthenticationList() {
-              <th>Instance Id</th>
-              <th>Process Model</th>
+              <th>Id</th>
+              <th>Process</th>
+              <th>Process Instance</th>
-              <th>Message Model</th>
+              <th>Name</th>
               <th>Type</th>
-              <th>Failure Cause</th>
+              <th>Correlations</th>
               <th>Status</th>
               <th>Created At</th>

               <td>{row.id}</td>
               <td>{row.message_identifier}</td>
               <td>{row.message_type}</td>
               <td>{row.failure_cause || '-'}</td>
               <td>{row.status}</td>

               <th>Process Instance</th>
               <th>Name</th>
               <th>Type</th>
-              <th>Failure Cause</th>
-              <th>Correlations</th>
+              <th>Details</th>
               <th>Status</th>
               <th>Created At</th>

               <th>{getHeaderLabel((column as any).Header)}</th>

+      {timeInSeconds ? TimeAgo.inWords(timeInSeconds) : '-'}
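The TimeAgo.inWords(timeInSeconds) fragment above relies on a relative-time helper that this excerpt never defines. A minimal sketch of the assumed behavior, hypothetical rather than recovered from the repo:

// Sketch only: assumes an epoch-seconds timestamp in the past.
export class TimeAgo {
  static inWords(timeInSeconds: number): string {
    const secondsAgo = Math.floor(Date.now() / 1000) - timeInSeconds;
    if (secondsAgo < 60) return 'just now';
    if (secondsAgo < 3600) return `${Math.floor(secondsAgo / 60)} minutes ago`;
    if (secondsAgo < 86400) return `${Math.floor(secondsAgo / 3600)} hours ago`;
    return `${Math.floor(secondsAgo / 86400)} days ago`;
  }
}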
-          <td>{rowToUse.process_model_display_name}</td>
+          <td>{rowToUse.process_instance_id}</td>
-          <td>View {rowToUse.process_instance_id}</td>
+          <td>{rowToUse.process_model_display_name}</td>
           <td>{rowToUse.task_title}</td>
           <td>{rowToUse.process_instance_status}</td>
           <td>{rowToUse.group_identifier || '-'}</td>
           <td>
             {convertSecondsToFormattedDateTime(
               rowToUse.created_at_in_seconds
             ) || '-'}
           </td>
-          <td>
-            {convertSecondsToFormattedDateTime(
-              rowToUse.updated_at_in_seconds
-            ) || '-'}
-          </td>

-            <th>Process Model</th>
-            <th>Process Instance</th>
-            <th>Task Name</th>
-            <th>Process Instance Status</th>
-            <th>Assigned Group</th>
-            <th>Process Started</th>
-            <th>Process Updated</th>
+            <th>Id</th>
+            <th>Process</th>
+            <th>Task</th>
+            <th>Waiting For</th>
+            <th>Date Started</th>
+            <th>Last Updated</th>
             <th>Actions</th>

-          <td>{rowToUse.process_model_display_name}</td>
+          <td>{rowToUse.process_instance_id}</td>
-          <td>View {rowToUse.process_instance_id}</td>
+          <td>{rowToUse.process_model_display_name}</td>
           <td>{rowToUse.username}</td>
           <td>{rowToUse.process_instance_status}</td>
           <td>{rowToUse.group_identifier || '-'}</td>
           <td>
             {convertSecondsToFormattedDateTime(
               rowToUse.created_at_in_seconds
             ) || '-'}
           </td>
-          <td>
-            {convertSecondsToFormattedDateTime(
-              rowToUse.updated_at_in_seconds
-            ) || '-'}
-          </td>

-            <th>Process Model</th>
-            <th>Process Instance</th>
-            <th>Task Name</th>
-            <th>Process Started By</th>
-            <th>Process Instance Status</th>
-            <th>Assigned Group</th>
-            <th>Process Started</th>
-            <th>Process Updated</th>
+            <th>Id</th>
+            <th>Process</th>
+            <th>Task</th>
+            <th>Started By</th>
+            <th>Waiting For</th>
+            <th>Date Started</th>
+            <th>Last Updated</th>
             <th>Actions</th>

-          <td>{rowToUse.process_model_display_name}</td>
+          <td>{rowToUse.process_instance_id}</td>
-          <td>View {rowToUse.process_instance_id}</td>
+          <td>{rowToUse.process_model_display_name}</td>
           <td>{rowToUse.username}</td>
           <td>{rowToUse.process_instance_status}</td>
           <td>{rowToUse.group_identifier || '-'}</td>
           <td>
             {convertSecondsToFormattedDateTime(
               rowToUse.created_at_in_seconds
             ) || '-'}
           </td>
-          <td>
-            {convertSecondsToFormattedDateTime(
-              rowToUse.updated_at_in_seconds
-            ) || '-'}
-          </td>

-            <th>Process Model</th>
-            <th>Process Instance</th>
-            <th>Task Name</th>
-            <th>Process Started By</th>
-            <th>Process Instance Status</th>
-            <th>Assigned Group</th>
-            <th>Process Started</th>
-            <th>Process Updated</th>
+            <th>Id</th>
+            <th>Process</th>
+            <th>Task</th>
+            <th>Started By</th>
+            <th>Waiting For</th>
+            <th>Date Started</th>
+            <th>Last Updated</th>
             <th>Actions</th>
- + {rows} diff --git a/src/routes/CompletedInstances.tsx b/src/routes/CompletedInstances.tsx index 237c21f3..f97bb5d5 100644 --- a/src/routes/CompletedInstances.tsx +++ b/src/routes/CompletedInstances.tsx @@ -1,5 +1,48 @@ -import MyCompletedInstances from '../components/MyCompletedInstances'; +import ProcessInstanceListTable from '../components/ProcessInstanceListTable'; export default function CompletedInstances() { - return ; + return ( + <> +

+      <h2>My completed instances</h2>
+      <p>
+        This is a list of instances you started that are now complete.
+      </p>
+      <ProcessInstanceListTable />
+
+      <h2>Tasks completed by me</h2>
+      <p>
+        This is a list of instances where you have completed tasks.
+      </p>
+      <ProcessInstanceListTable />
+
+      <h2>Tasks completed by my groups</h2>
+      <p>
+        This is a list of instances with tasks that were completed by groups you
+        belong to.
+      </p>
+ + + ); } diff --git a/src/routes/CreateNewInstance.tsx b/src/routes/CreateNewInstance.tsx index fbb3f844..24f1f9fc 100644 --- a/src/routes/CreateNewInstance.tsx +++ b/src/routes/CreateNewInstance.tsx @@ -3,7 +3,8 @@ import ProcessModelListTiles from '../components/ProcessModelListTiles'; export default function CreateNewInstance() { return ( Process models available to you} + headerElement={

+          <h2>Processes I can start</h2>
} + checkPermissions={false} /> ); } diff --git a/src/routes/GroupedTasks.tsx b/src/routes/GroupedTasks.tsx index a08959c5..9fe0d3a5 100644 --- a/src/routes/GroupedTasks.tsx +++ b/src/routes/GroupedTasks.tsx @@ -1,15 +1,15 @@ import TasksForMyOpenProcesses from '../components/TasksForMyOpenProcesses'; import TasksWaitingForMe from '../components/TasksWaitingForMe'; -import TasksForWaitingForMyGroups from '../components/TasksWaitingForMyGroups'; +import TasksWaitingForMyGroups from '../components/TasksWaitingForMyGroups'; export default function GroupedTasks() { return ( <> + {/* be careful moving these around since the first two have with-large-bottom-margin in order to get some space between the three table sections. */} + {/* i wish Stack worked to add space just between top-level elements */} -
-
-      <TasksForWaitingForMyGroups />
+      <TasksWaitingForMyGroups />
     </>
   );
 }
diff --git a/src/routes/HomePageRoutes.tsx b/src/routes/HomePageRoutes.tsx
index 42048451..872a7a69 100644
--- a/src/routes/HomePageRoutes.tsx
+++ b/src/routes/HomePageRoutes.tsx
@@ -18,12 +18,10 @@ export default function HomePageRoutes() {
   useEffect(() => {
     setErrorMessage(null);
     let newSelectedTabIndex = 0;
-    if (location.pathname.match(/^\/tasks\/grouped\b/)) {
+    if (location.pathname.match(/^\/tasks\/completed-instances\b/)) {
       newSelectedTabIndex = 1;
-    } else if (location.pathname.match(/^\/tasks\/completed-instances\b/)) {
-      newSelectedTabIndex = 2;
     } else if (location.pathname.match(/^\/tasks\/create-new-instance\b/)) {
-      newSelectedTabIndex = 3;
+      newSelectedTabIndex = 2;
     }
     setSelectedTabIndex(newSelectedTabIndex);
   }, [location, setErrorMessage]);
@@ -36,13 +34,13 @@ export default function HomePageRoutes() {
     <>
-          <Tab onClick={() => navigate('/tasks/my-tasks')}>My Tasks</Tab>
-          <Tab onClick={() => navigate('/tasks/grouped')}>Grouped Tasks</Tab>
+          {/* <Tab onClick={() => navigate('/tasks/my-tasks')}>My Tasks</Tab> */}
+          <Tab onClick={() => navigate('/tasks/grouped')}>In Progress</Tab>
           <Tab onClick={() => navigate('/tasks/completed-instances')}>
-            Completed Instances
+            Completed
           </Tab>
           <Tab onClick={() => navigate('/tasks/create-new-instance')}>
-            Create New Instance
+            Start New
           </Tab>
@@ -55,7 +53,7 @@ export default function HomePageRoutes() {
     <>
       {renderTabs()}
-          } />
+          } />
           } />
           } />
           } />
diff --git a/src/routes/JsonSchemaFormBuilder.tsx b/src/routes/JsonSchemaFormBuilder.tsx
index c97e959a..6d101101 100644
--- a/src/routes/JsonSchemaFormBuilder.tsx
+++ b/src/routes/JsonSchemaFormBuilder.tsx
@@ -3,7 +3,7 @@ import { useEffect, useState } from 'react';
 import { Button, Select, SelectItem, TextInput } from '@carbon/react';
 import { useParams } from 'react-router-dom';
 import { FormField } from '../interfaces';
-import { modifyProcessModelPath, slugifyString } from '../helpers';
+import { modifyProcessIdentifierForPathParam, slugifyString } from '../helpers';
 import HttpService from '../services/HttpService';
 
 export default function JsonSchemaFormBuilder() {
@@ -28,7 +28,7 @@ export default function JsonSchemaFormBuilder() {
   const [formFieldTitle, setFormFieldTitle] = useState('');
   const [formFieldType, setFormFieldType] = useState('');
 
-  const modifiedProcessModelId = modifyProcessModelPath(
+  const modifiedProcessModelId = modifyProcessIdentifierForPathParam(
     `${params.process_model_id}`
   );
diff --git a/src/routes/MessageInstanceList.tsx b/src/routes/MessageInstanceList.tsx
index 3ead5462..a9ec6b69 100644
--- a/src/routes/MessageInstanceList.tsx
+++ b/src/routes/MessageInstanceList.tsx
@@ -1,16 +1,19 @@
 import { useEffect, useState } from 'react';
 // @ts-ignore
-import { Table } from '@carbon/react';
+import { ErrorOutline } from '@carbon/icons-react';
+// @ts-ignore
+import { Table, Modal, Button } from '@carbon/react';
 import { Link, useParams, useSearchParams } from 'react-router-dom';
 import PaginationForTable from '../components/PaginationForTable';
 import ProcessBreadcrumb from '../components/ProcessBreadcrumb';
 import {
-  convertSecondsToFormattedDateString,
+  convertSecondsToFormattedDateTime,
   getPageInfoFromSearchParams,
-  modifyProcessModelPath,
-  unModifyProcessModelPath,
+  modifyProcessIdentifierForPathParam,
 } from '../helpers';
 import HttpService from '../services/HttpService';
+import { FormatProcessModelDisplayName } from '../components/MiniComponents';
+import { MessageInstance } from '../interfaces';
 
 export default function MessageInstanceList() {
   const params = useParams();
@@ -18,6 +21,9 @@ export default function MessageInstanceList() {
   const [messageIntances, setMessageInstances] = useState([]);
const [pagination, setPagination] = useState(null); + const [messageInstanceForModal, setMessageInstanceForModal] = + useState(null); + useEffect(() => { const setMessageInstanceListFromResult = (result: any) => { setMessageInstances(result.results); @@ -36,41 +42,89 @@ export default function MessageInstanceList() { }); }, [searchParams, params]); - const buildTable = () => { - // return null; - const rows = messageIntances.map((row) => { - const rowToUse = row as any; + const handleCorrelationDisplayClose = () => { + setMessageInstanceForModal(null); + }; + + const correlationsDisplayModal = () => { + if (messageInstanceForModal) { + let failureCausePre = null; + if (messageInstanceForModal.failure_cause) { + failureCausePre = ( + <> +

+          <pre>{messageInstanceForModal.failure_cause}</pre>
+        </>
+      );
+    }
+    return (
+      <Modal
+        open={!!messageInstanceForModal}
+        onRequestClose={handleCorrelationDisplayClose}
+      >
+        {failureCausePre}
+        <h2>Correlations:</h2>
+        <pre>
+          {JSON.stringify(
+            messageInstanceForModal.message_correlations,
+            null,
+            2
+          )}
+        </pre>
+      </Modal>
+    );
+  }
+  return null;
+};
+
+  const buildTable = () => {
+    const rows = messageIntances.map((row: MessageInstance) => {
+      let errorIcon = null;
+      let errorTitle = null;
+      if (row.failure_cause) {
+        errorTitle = 'Instance has an error';
+        errorIcon = (
+          <>
+            &nbsp;
+            <ErrorOutline className="red-icon" title={errorTitle} />
+          </>
+        );
+      }
+      return (
-              <th>ID</th>
+              <th>Id</th>
-              <td>{rowToUse.id}</td>
-              <td>
-                {rowToUse.process_model_identifier}
-              </td>
+              <td>{row.id}</td>
+              <td>{FormatProcessModelDisplayName(row)}</td>
-              <td>{rowToUse.process_instance_id}</td>
+              <td>{row.process_instance_id}</td>
-              <td>{rowToUse.message_identifier}</td>
-              <td>{rowToUse.message_type}</td>
-              <td>{rowToUse.failure_cause || '-'}</td>
-              <td>{rowToUse.status}</td>
+              <td>{row.message_identifier}</td>
+              <td>{row.message_type}</td>
-              <td>
-                {convertSecondsToFormattedDateString(
-                  rowToUse.created_at_in_seconds
-                )}
-              </td>
+              <td>
+                <Button onClick={() => setMessageInstanceForModal(row)}>
+                  View
+                </Button>
+              </td>
+              <td>
+                {row.status}
+                {errorIcon}
+              </td>
+              <td>{convertSecondsToFormattedDateTime(row.created_at_in_seconds)}</td>
@@ -102,17 +156,16 @@ export default function MessageInstanceList() {
       {breadcrumbElement}

       <h1>Messages</h1>
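The modal above pretty-prints message_correlations with JSON.stringify(value, null, 2). The nested index signatures added to interfaces.ts earlier in this patch make the expected shape concrete; a small example value that type-checks against them (the keys are invented for illustration):

import { MessageCorrelations } from '../interfaces';

// Example only: correlations are keyed by correlation name, then property name.
const exampleCorrelations: MessageCorrelations = {
  order: { order_number: '12345' },
  customer: { customer_id: 'abc-789' },
};
console.log(JSON.stringify(exampleCorrelations, null, 2));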

+ {correlationsDisplayModal()} (null); + const [processInstance, setProcessInstance] = + useState(null); useEffect(() => { const getTasks = () => { @@ -40,6 +48,28 @@ export default function MyTasks() { refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, getTasks); }, [searchParams]); + const processInstanceRunResultTag = () => { + if (processInstance) { + return ( +
+

+ Process Instance {processInstance.id} kicked off ( + + view + + ). +

+
+ ); + } + return null; + }; + let recentProcessModels: RecentProcessModel[] = []; const recentProcessModelsString = localStorage.getItem('recentProcessModels'); if (recentProcessModelsString !== null) { @@ -50,9 +80,8 @@ export default function MyTasks() { const rows = tasks.map((row) => { const rowToUse = row as any; const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.id}`; - const modifiedProcessModelIdentifier = modifyProcessModelPath( - rowToUse.process_model_identifier - ); + const modifiedProcessModelIdentifier = + modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier); return (
+ + ); }); return ( <> -

-      <h2>Recently viewed process models</h2>
+      <h2>Recently instantiated process models</h2>
@@ -66,9 +95,9 @@ export default function MyTasks() { - View {rowToUse.process_instance_id} + {rowToUse.process_instance_id} { - const rows = recentProcessModels.map((row) => { - const rowToUse = row as any; - const modifiedProcessModelId = modifyProcessModelPath( - rowToUse.processModelIdentifier + const rows = recentProcessModels.map((row: RecentProcessModel) => { + const processModel: ProcessModel = { + id: row.processModelIdentifier, + description: '', + display_name: '', + primary_file_name: '', + files: [], + }; + const modifiedProcessModelId = modifyProcessIdentifierForPathParam( + row.processModelIdentifier ); return ( -
- {rowToUse.processModelDisplayName} + {row.processModelDisplayName} + +
+ {rows} @@ -176,6 +216,7 @@ export default function MyTasks() { } return ( <> + {processInstanceRunResultTag()} {tasksWaitingForMe}
{relevantProcessModelSection} diff --git a/src/routes/ProcessGroupEdit.tsx b/src/routes/ProcessGroupEdit.tsx index e9f88c0e..d15aac57 100644 --- a/src/routes/ProcessGroupEdit.tsx +++ b/src/routes/ProcessGroupEdit.tsx @@ -27,10 +27,11 @@ export default function ProcessGroupEdit() {

       <h1>Edit Process Group: {(processGroup as any).id}</h1>
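Since HotCrumbItem is now a union of the legacy tuple and the new object form (see the interfaces.ts hunk earlier in this patch, and the hotCrumbs.push of an object in the ProcessGroupNew hunk below), consumers have to discriminate between the two shapes. A sketch of one way to do that, assuming Array.isArray is an acceptable discriminator:

import { HotCrumbItem, HotCrumbItemObject } from '../interfaces';

// Sketch: tuples are arrays; the new breadcrumb descriptors are plain objects.
const isHotCrumbItemObject = (
  item: HotCrumbItem
): item is HotCrumbItemObject => !Array.isArray(item);

const crumbLabel = (item: HotCrumbItem): string => {
  if (isHotCrumbItemObject(item)) {
    const entity = item.entityToExplode;
    return typeof entity === 'string' ? entity : entity.display_name;
  }
  return item[0]; // tuple form: [displayValue, url?]
};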

diff --git a/src/routes/ProcessGroupList.tsx b/src/routes/ProcessGroupList.tsx index 4c448f08..d9ceaf59 100644 --- a/src/routes/ProcessGroupList.tsx +++ b/src/routes/ProcessGroupList.tsx @@ -7,7 +7,7 @@ import { import { Can } from '@casl/react'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; import HttpService from '../services/HttpService'; -import { modifyProcessModelPath } from '../helpers'; +import { modifyProcessIdentifierForPathParam } from '../helpers'; import { CarbonComboBoxSelection, PermissionsToCheck } from '../interfaces'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { usePermissionFetcher } from '../hooks/PermissionService'; @@ -39,7 +39,7 @@ export default function ProcessGroupList() { }; // for search box HttpService.makeCallToBackend({ - path: `/process-models?per_page=1000`, + path: `/process-models?per_page=1000&recursive=true`, successCallback: processResultForProcessModels, }); }, [searchParams]); @@ -48,7 +48,9 @@ export default function ProcessGroupList() { const processModelSearchOnChange = (selection: CarbonComboBoxSelection) => { const processModel = selection.selectedItem; navigate( - `/admin/process-models/${modifyProcessModelPath(processModel.id)}` + `/admin/process-models/${modifyProcessIdentifierForPathParam( + processModel.id + )}` ); }; return ( diff --git a/src/routes/ProcessGroupNew.tsx b/src/routes/ProcessGroupNew.tsx index ca20fc47..d762a2b2 100644 --- a/src/routes/ProcessGroupNew.tsx +++ b/src/routes/ProcessGroupNew.tsx @@ -14,7 +14,11 @@ export default function ProcessGroupNew() { const hotCrumbs: HotCrumbItem[] = [['Process Groups', '/admin']]; if (parentGroupId) { - hotCrumbs.push(['', `process_group:${parentGroupId}:link`]); + hotCrumbs.push({ + entityToExplode: parentGroupId, + entityType: 'process-group-id', + linkLastItem: true, + }); } return ( diff --git a/src/routes/ProcessGroupShow.tsx b/src/routes/ProcessGroupShow.tsx index 3d6a0ee4..e4f467c4 100644 --- a/src/routes/ProcessGroupShow.tsx +++ b/src/routes/ProcessGroupShow.tsx @@ -1,39 +1,51 @@ import { useEffect, useState } from 'react'; -import { Link, useSearchParams, useParams } from 'react-router-dom'; +import { + // Link, + useSearchParams, + useParams, + useNavigate, +} from 'react-router-dom'; +import { + TrashCan, + Edit, + // @ts-ignore +} from '@carbon/icons-react'; // @ts-ignore -import { Button, Table, Stack } from '@carbon/react'; +import { Button, Stack } from '@carbon/react'; import { Can } from '@casl/react'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; -import PaginationForTable from '../components/PaginationForTable'; import HttpService from '../services/HttpService'; import { getPageInfoFromSearchParams, - modifyProcessModelPath, - unModifyProcessModelPath, + modifyProcessIdentifierForPathParam, + unModifyProcessIdentifierForPathParam, } from '../helpers'; import { PaginationObject, PermissionsToCheck, ProcessGroup, - ProcessModel, + // ProcessModel, } from '../interfaces'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { usePermissionFetcher } from '../hooks/PermissionService'; import ProcessGroupListTiles from '../components/ProcessGroupListTiles'; +import ButtonWithConfirmation from '../components/ButtonWithConfirmation'; +import ProcessModelListTiles from '../components/ProcessModelListTiles'; export default function ProcessGroupShow() { const params = useParams(); const [searchParams] = useSearchParams(); + const navigate = useNavigate(); const [processGroup, 
setProcessGroup] = useState<ProcessGroup | null>(null);
-  const [processModels, setProcessModels] = useState([]);
+  // const [processModels, setProcessModels] = useState([]);
   const [modelPagination, setModelPagination] =
     useState<PaginationObject | null>(null);
   const { targetUris } = useUriListForPermissions();
   const permissionRequestData: PermissionsToCheck = {
     [targetUris.processGroupListPath]: ['POST'],
-    [targetUris.processGroupShowPath]: ['PUT'],
+    [targetUris.processGroupShowPath]: ['PUT', 'DELETE'],
     [targetUris.processModelCreatePath]: ['POST'],
   };
   const { ability } = usePermissionFetcher(permissionRequestData);
@@ -42,12 +54,12 @@ export default function ProcessGroupShow() {
     const { page, perPage } = getPageInfoFromSearchParams(searchParams);
 
     const setProcessModelFromResult = (result: any) => {
-      setProcessModels(result.results);
+      // setProcessModels(result.results);
       setModelPagination(result.pagination);
     };
     const processResult = (result: any) => {
       setProcessGroup(result);
-      const unmodifiedProcessGroupId = unModifyProcessModelPath(
+      const unmodifiedProcessGroupId = unModifyProcessIdentifierForPathParam(
         (params as any).process_group_id
       );
       HttpService.makeCallToBackend({
@@ -61,56 +73,105 @@ export default function ProcessGroupShow() {
     });
   }, [params, searchParams]);
 
-  const buildModelTable = () => {
-    if (processGroup === null) {
-      return null;
+  // const buildModelTable = () => {
+  //   if (processGroup === null) {
+  //     return null;
+  //   }
+ // + // + // + // ); + // }); + // return ( + //
+ //

Process Models

+ //
Process ModelActions
+ // + // {row.id} + // + // {row.display_name}
+ // + // + // + // + // + // + // {rows} + //
Process Model IdDisplay Name
+ // + // ); + // }; + + const navigateToProcessGroups = (_result: any) => { + navigate(`/admin/process-groups`); + }; + + const deleteProcessGroup = () => { + if (processGroup) { + HttpService.makeCallToBackend({ + path: `/process-groups/${modifyProcessIdentifierForPathParam( + processGroup.id + )}`, + successCallback: navigateToProcessGroups, + httpMethod: 'DELETE', + }); } - const rows = processModels.map((row: ProcessModel) => { - const modifiedProcessModelId: String = modifyProcessModelPath( - (row as any).id - ); - return ( - - - - {row.id} - - - {row.display_name} - - ); - }); - return ( -
-

Process Models

- - - - - - - - {rows} -
Process Model IdDisplay Name
-
- ); }; if (processGroup && modelPagination) { - const { page, perPage } = getPageInfoFromSearchParams(searchParams); - const modifiedProcessGroupId = modifyProcessModelPath(processGroup.id); + // const { page, perPage } = getPageInfoFromSearchParams(searchParams); + const modifiedProcessGroupId = modifyProcessIdentifierForPathParam( + processGroup.id + ); return ( <> -

Process Group: {processGroup.display_name}

+ +

+ Process Group: {processGroup.display_name} +

+ + + + + + +
+

{processGroup.description}

    @@ -131,30 +192,27 @@ export default function ProcessGroupShow() { Add a process model - - -

    + Process Models} + processGroup={processGroup} + /> {/* eslint-disable-next-line sonarjs/no-gratuitous-expressions */} - {modelPagination && modelPagination.total > 0 && ( + {/* {modelPagination && modelPagination.total > 0 && ( - )} + )} */}

-        headerElement={<h2>Process Groups</h2>}
+        headerElement={
+          <h2>Process Groups</h2>
+        }
       />
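deleteProcessGroup above follows the callback style that HttpService.makeCallToBackend uses throughout this patch: pass a path, an httpMethod, and a successCallback. The same shape generalizes to any destructive action followed by a redirect; a sketch (only the option names actually visible in this patch are assumed to exist):

import HttpService from '../services/HttpService';

// Sketch: DELETE an entity, then run a follow-up such as a navigate().
const deleteEntity = (path: string, onDone: (result: any) => void) => {
  HttpService.makeCallToBackend({
    path,
    successCallback: onDone,
    httpMethod: 'DELETE',
  });
};

// Usage mirroring deleteProcessGroup:
//   deleteEntity(`/process-groups/${modifiedId}`, () => navigate('/admin/process-groups'));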
diff --git a/src/routes/ProcessInstanceLogList.tsx b/src/routes/ProcessInstanceLogList.tsx index 214a0eac..37ef5519 100644 --- a/src/routes/ProcessInstanceLogList.tsx +++ b/src/routes/ProcessInstanceLogList.tsx @@ -1,25 +1,27 @@ import { useEffect, useState } from 'react'; // @ts-ignore -import { Table } from '@carbon/react'; +import { Table, Tabs, TabList, Tab } from '@carbon/react'; import { useParams, useSearchParams, Link } from 'react-router-dom'; import PaginationForTable from '../components/PaginationForTable'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; import { getPageInfoFromSearchParams, - modifyProcessModelPath, - unModifyProcessModelPath, + modifyProcessIdentifierForPathParam, convertSecondsToFormattedDateTime, } from '../helpers'; import HttpService from '../services/HttpService'; +import { useUriListForPermissions } from '../hooks/UriListForPermissions'; export default function ProcessInstanceLogList() { const params = useParams(); - const [searchParams] = useSearchParams(); + const [searchParams, setSearchParams] = useSearchParams(); const [processInstanceLogs, setProcessInstanceLogs] = useState([]); const [pagination, setPagination] = useState(null); - const modifiedProcessModelId = modifyProcessModelPath( + const modifiedProcessModelId = modifyProcessIdentifierForPathParam( `${params.process_model_id}` ); + const { targetUris } = useUriListForPermissions(); + const isDetailedView = searchParams.get('detailed') === 'true'; useEffect(() => { const setProcessInstanceLogListFromResult = (result: any) => { @@ -28,26 +30,36 @@ export default function ProcessInstanceLogList() { }; const { page, perPage } = getPageInfoFromSearchParams(searchParams); HttpService.makeCallToBackend({ - path: `/process-instances/${params.process_instance_id}/logs?per_page=${perPage}&page=${page}`, + path: `${targetUris.processInstanceLogListPath}?per_page=${perPage}&page=${page}&detailed=${isDetailedView}`, successCallback: setProcessInstanceLogListFromResult, }); - }, [searchParams, params]); + }, [ + searchParams, + params, + targetUris.processInstanceLogListPath, + isDetailedView, + ]); const buildTable = () => { const rows = processInstanceLogs.map((row) => { const rowToUse = row as any; return ( - {rowToUse.bpmn_process_identifier} + {rowToUse.id} {rowToUse.message} - {rowToUse.bpmn_task_identifier} {rowToUse.bpmn_task_name} - {rowToUse.bpmn_task_type} + {isDetailedView && ( + <> + {rowToUse.bpmn_task_identifier} + {rowToUse.bpmn_task_type} + {rowToUse.bpmn_process_identifier} + + )} {rowToUse.username} {convertSecondsToFormattedDateTime(rowToUse.timestamp)} @@ -59,11 +71,16 @@ export default function ProcessInstanceLogList() { - + - - + {isDetailedView && ( + <> + + + + + )} @@ -72,34 +89,57 @@ export default function ProcessInstanceLogList() {
-                <th>Bpmn Process Identifier</th>
+                <th>Id</th>
                 <th>Message</th>
-                <th>Task Identifier</th>
                 <th>Task Name</th>
-                <th>Task Type</th>
+                {isDetailedView && (
+                  <>
+                    <th>Task Identifier</th>
+                    <th>Task Type</th>
+                    <th>Bpmn Process Identifier</th>
+                  </>
+                )}
                 <th>User</th>
                 <th>Timestamp</th>
); }; + const selectedTabIndex = isDetailedView ? 1 : 0; if (pagination) { const { page, perPage } = getPageInfoFromSearchParams(searchParams); return ( -
+      <>
+        <Tabs selectedIndex={selectedTabIndex}>
+          <TabList aria-label="List of tabs">
+            <Tab
+              onClick={() => {
+                searchParams.set('detailed', 'false');
+                setSearchParams(searchParams);
+              }}
+            >
+              Simple
+            </Tab>
+            <Tab
+              onClick={() => {
+                searchParams.set('detailed', 'true');
+                setSearchParams(searchParams);
+              }}
+            >
+              Detailed
+            </Tab>
+          </TabList>
+        </Tabs>
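Keeping the Simple/Detailed choice in the detailed query param, as the tabs above do, makes the view mode bookmarkable and reload-safe. The pattern reduces to a small hook; a sketch assuming react-router-dom's useSearchParams:

import { useSearchParams } from 'react-router-dom';

// Sketch: a boolean view flag mirrored into the URL query string.
export const useBooleanSearchParam = (
  name: string
): [boolean, (value: boolean) => void] => {
  const [searchParams, setSearchParams] = useSearchParams();
  const current = searchParams.get(name) === 'true';
  const setValue = (value: boolean) => {
    searchParams.set(name, value ? 'true' : 'false');
    setSearchParams(searchParams);
  };
  return [current, setValue];
};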
-
+ ); } return null; diff --git a/src/routes/ProcessInstanceReportList.tsx b/src/routes/ProcessInstanceReportList.tsx index 298008d1..906fb314 100644 --- a/src/routes/ProcessInstanceReportList.tsx +++ b/src/routes/ProcessInstanceReportList.tsx @@ -2,12 +2,22 @@ import { useEffect, useState } from 'react'; // @ts-ignore import { Button, Table } from '@carbon/react'; import { useParams, Link } from 'react-router-dom'; +import { Can } from '@casl/react'; import HttpService from '../services/HttpService'; +import { useUriListForPermissions } from '../hooks/UriListForPermissions'; +import { PermissionsToCheck } from '../interfaces'; +import { usePermissionFetcher } from '../hooks/PermissionService'; export default function ProcessInstanceReportList() { const params = useParams(); const [processInstanceReports, setProcessInstanceReports] = useState([]); + const { targetUris } = useUriListForPermissions(); + const permissionRequestData: PermissionsToCheck = { + [targetUris.processInstanceReportListPath]: ['POST'], + }; + const { ability } = usePermissionFetcher(permissionRequestData); + useEffect(() => { HttpService.makeCallToBackend({ path: `/process-instances/reports`, @@ -45,9 +55,11 @@ export default function ProcessInstanceReportList() { const headerStuff = ( <>

Process Instance Perspectives

- + + + ); if (processInstanceReports?.length > 0) { diff --git a/src/routes/ProcessInstanceReportShow.tsx b/src/routes/ProcessInstanceReportShow.tsx index 46ecce58..38622691 100644 --- a/src/routes/ProcessInstanceReportShow.tsx +++ b/src/routes/ProcessInstanceReportShow.tsx @@ -76,9 +76,7 @@ export default function ProcessInstanceReport() { return (

Process Instance Perspective: {params.report_identifier}