Work in SpiffWorkflow has been approved and merged upstream; update the dependency.

commit ce98cd75de
@@ -16,9 +16,10 @@ jobs:
      fail-fast: false
      matrix:
        include:
          - { python: "3.11", os: "ubuntu-latest", session: "pre-commit" }
          - { python: "3.11", os: "ubuntu-latest", session: "safety" }
          - { python: "3.11", os: "ubuntu-latest", session: "mypy" }
          - { python: "3.10", os: "ubuntu-latest", session: "mypy" }
          - { python: "3.9", os: "ubuntu-latest", session: "mypy" }
          - {
              python: "3.11",
              os: "ubuntu-latest",
@@ -37,6 +38,24 @@ jobs:
              session: "tests",
              database: "sqlite",
            }
          - {
              python: "3.10",
              os: "ubuntu-latest",
              session: "tests",
              database: "sqlite",
            }
          - {
              python: "3.9",
              os: "ubuntu-latest",
              session: "tests",
              database: "sqlite",
            }
          - {
              python: "3.10",
              os: "windows-latest",
              session: "tests",
              database: "sqlite",
            }
          - {
              python: "3.11",
              os: "macos-latest",
@@ -97,29 +116,29 @@ jobs:
          pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
          nox --version

      - name: Compute pre-commit cache key
        if: matrix.session == 'pre-commit'
        id: pre-commit-cache
        shell: python
        run: |
          import hashlib
          import sys

          python = "py{}.{}".format(*sys.version_info[:2])
          payload = sys.version.encode() + sys.executable.encode()
          digest = hashlib.sha256(payload).hexdigest()
          result = "${{ runner.os }}-{}-{}-pre-commit".format(python, digest[:8])

          print("::set-output name=result::{}".format(result))

      - name: Restore pre-commit cache
        uses: actions/cache@v3.0.11
        if: matrix.session == 'pre-commit'
        with:
          path: ~/.cache/pre-commit
          key: ${{ steps.pre-commit-cache.outputs.result }}-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
            ${{ steps.pre-commit-cache.outputs.result }}-
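Note: the cache-key step above publishes its result with the deprecated `::set-output` workflow command. As a rough sketch only (not part of this diff), the same hash computation written against the newer `GITHUB_OUTPUT` file mechanism, which this commit already uses in the `Get current date` step further down, might look like this; the hard-coded `Linux` stands in for `runner.os`:

```python
# Hypothetical standalone version of the cache-key step above. Not part of this
# diff; it only illustrates publishing the value via the GITHUB_OUTPUT file
# instead of the deprecated ::set-output command.
import hashlib
import os
import sys

python = "py{}.{}".format(*sys.version_info[:2])
payload = sys.version.encode() + sys.executable.encode()
digest = hashlib.sha256(payload).hexdigest()
# runner.os would be interpolated by GitHub Actions; hard-coded for illustration.
result = "Linux-{}-{}-pre-commit".format(python, digest[:8])

output_path = os.environ.get("GITHUB_OUTPUT")
if output_path:
    with open(output_path, "a") as handle:
        handle.write("result={}\n".format(result))
else:
    # fallback when running outside of GitHub Actions
    print(result)
```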
      # - name: Compute pre-commit cache key
      #   if: matrix.session == 'pre-commit'
      #   id: pre-commit-cache
      #   shell: python
      #   run: |
      #     import hashlib
      #     import sys
      #
      #     python = "py{}.{}".format(*sys.version_info[:2])
      #     payload = sys.version.encode() + sys.executable.encode()
      #     digest = hashlib.sha256(payload).hexdigest()
      #     result = "${{ runner.os }}-{}-{}-pre-commit".format(python, digest[:8])
      #
      #     print("::set-output name=result::{}".format(result))
      #
      # - name: Restore pre-commit cache
      #   uses: actions/cache@v3.0.11
      #   if: matrix.session == 'pre-commit'
      #   with:
      #     path: ~/.cache/pre-commit
      #     key: ${{ steps.pre-commit-cache.outputs.result }}-${{ hashFiles('.pre-commit-config.yaml') }}
      #     restore-keys: |
      #       ${{ steps.pre-commit-cache.outputs.result }}-
      - name: Setup Mysql
        uses: mirromutth/mysql-action@v1.1
        with:
@@ -140,11 +159,12 @@ jobs:

      - name: Upload coverage data
        # pin to upload coverage from only one matrix entry, otherwise coverage gets confused later
        if: always() && matrix.session == 'tests' && matrix.python == '3.11' && matrix.os == 'ubuntu-latest'
        if: always() && matrix.session == 'tests' && matrix.python == '3.11' && matrix.os == 'ubuntu-latest' && matrix.database == 'mysql'
        uses: "actions/upload-artifact@v3.0.0"
        # this action doesn't seem to respect working-directory so include working-directory value in path
        with:
          name: coverage-data
          path: ".coverage.*"
          path: "spiffworkflow-backend/.coverage.*"

      - name: Upload documentation
        if: matrix.session == 'docs-build'
@@ -160,6 +180,30 @@ jobs:
          name: logs-${{matrix.python}}-${{matrix.os}}-${{matrix.database}}
          path: "./log/*.log"

  run_pre_commit_checks:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: .
    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v4.2.0
        with:
          python-version: "3.11"
      - name: Install Poetry
        run: |
          pipx install poetry
          poetry --version
      - name: Poetry Install
        run: poetry install
      - name: run_pre_commit
        run: ./bin/run_pre_commit_in_ci

  check_docker_start_script:
    runs-on: ubuntu-latest
    steps:
@@ -178,7 +222,7 @@ jobs:

  coverage:
    runs-on: ubuntu-latest
    needs: tests
    needs: [tests, run_pre_commit_checks, check_docker_start_script]
    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2
@@ -211,6 +255,8 @@ jobs:
        uses: actions/download-artifact@v3.0.1
        with:
          name: coverage-data
          # this action doesn't seem to respect working-directory so include working-directory value in path
          path: spiffworkflow-backend

      - name: Combine coverage data and display human readable report
        run: |
@@ -249,3 +295,49 @@ jobs:
        with:
          name: pr_number
          path: pr/

  build-and-push-image:
    needs: coverage
    if: ${{ github.ref_name == 'main' && github.event_name == 'push' }}
    env:
      REGISTRY: ghcr.io
      IMAGE_NAME: sartography/spiffworkflow-backend
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
          fetch-depth: 0
      - name: Get current date
        id: date
        run: echo "date=$(date +%s)" >> $GITHUB_OUTPUT
      - name: Log in to the Container registry
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Build and push Docker image
        uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
        env:
          # if we ever support more than main we will need to format the ref name
          # like sub '/' with '-'
          TAG: ${{ github.ref_name }}-${{ steps.date.outputs.date }}
        with:
          # this action doesn't seem to respect working-directory so set context
          context: spiffworkflow-backend
          push: true
          tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ env.TAG }}
          labels: ${{ steps.meta.outputs.labels }}
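For reference, the image tag above is just the branch ref name joined to the epoch timestamp captured by the `Get current date` step. A tiny illustration with made-up values:

```python
# Illustration only: how the TAG env var above is composed. The ref name and
# timestamp here are example values, not taken from an actual workflow run.
import time

ref_name = "main"            # github.ref_name
date = int(time.time())      # from: echo "date=$(date +%s)" >> $GITHUB_OUTPUT
tag = f"{ref_name}-{date}"
print(f"ghcr.io/sartography/spiffworkflow-backend:{tag}")
# e.g. ghcr.io/sartography/spiffworkflow-backend:main-1668025000
```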
@@ -1,9 +1,13 @@
# spiff-arena

This is a monorepo based on git subtrees that pulls together various
spiffworkflow-related projects. Here's an example command to push back to one
project:

git subtree push --prefix=spiffworkflow-frontend git@github.com:sartography/spiffworkflow-frontend.git add_md_file
git subtree push --prefix=spiffworkflow-frontend git@github.com:sartography/spiffworkflow-frontend.git add_md_file

# run all lint checks and tests

# run pyl
`./bin/run_pyl`

Requires at root:

@@ -0,0 +1,10 @@
#!/usr/bin/env bash

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

poetry run pre-commit run --verbose --all-files
@@ -604,6 +604,7 @@ flask-mail = "*"
flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
greenlet = "^2.0.1"
sentry-sdk = "*"
sphinx-autoapi = "^2.0.0"
spiffworkflow = "*"

@@ -613,7 +614,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "17434e0907cc35914d013614bb79288eed1bd437"
resolved_reference = "5e40777f4013f71f2c1237f13f7dba1bdd5c0de3"

[[package]]
name = "flask-cors"

@@ -739,14 +740,15 @@ gitdb = ">=4.0.1,<5"

[[package]]
name = "greenlet"
version = "1.1.3.post0"
version = "2.0.1"
description = "Lightweight in-process concurrent programming"
category = "main"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"

[package.extras]
docs = ["Sphinx"]
docs = ["Sphinx", "docutils (<0.18)"]
test = ["faulthandler", "objgraph", "psutil"]

[[package]]
name = "gunicorn"

@@ -1771,7 +1773,7 @@ test = ["pytest"]
[[package]]
name = "SpiffWorkflow"
version = "1.2.1"
description = ""
description = "A workflow framework and BPMN/DMN Processor"
category = "main"
optional = false
python-versions = "*"

@@ -1786,7 +1788,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "8d820dce1f439bb76bc07e39629832d998d6f634"
resolved_reference = "580939cc8cb0b7ade1571483bd1e28f554434ac4"

[[package]]
name = "sqlalchemy"

@@ -1877,6 +1879,14 @@ category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"

[[package]]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
category = "dev"
optional = false
python-versions = ">=3.7"

[[package]]
name = "tornado"
version = "6.2"

@@ -2141,7 +2151,7 @@ tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "p
[metadata]
lock-version = "1.1"
python-versions = ">=3.11,<3.12"
content-hash = "2b730b9fd814bf805d68d2d7e3bc1caaf2e907c9f4bab4085f34322995670d27"
content-hash = "8c37333988fdd68bc6868faf474e628a690582acd17ee3b31b18e005a864fecf"

[metadata.files]
alabaster = [
@ -2437,71 +2447,63 @@ gitpython = [
|
|||
{file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"},
|
||||
]
|
||||
greenlet = [
|
||||
{file = "greenlet-1.1.3.post0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:949c9061b8c6d3e6e439466a9be1e787208dec6246f4ec5fffe9677b4c19fcc3"},
|
||||
{file = "greenlet-1.1.3.post0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:d7815e1519a8361c5ea2a7a5864945906f8e386fa1bc26797b4d443ab11a4589"},
|
||||
{file = "greenlet-1.1.3.post0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9649891ab4153f217f319914455ccf0b86986b55fc0573ce803eb998ad7d6854"},
|
||||
{file = "greenlet-1.1.3.post0-cp27-cp27m-win32.whl", hash = "sha256:11fc7692d95cc7a6a8447bb160d98671ab291e0a8ea90572d582d57361360f05"},
|
||||
{file = "greenlet-1.1.3.post0-cp27-cp27m-win_amd64.whl", hash = "sha256:05ae7383f968bba4211b1fbfc90158f8e3da86804878442b4fb6c16ccbcaa519"},
|
||||
{file = "greenlet-1.1.3.post0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ccbe7129a282ec5797df0451ca1802f11578be018a32979131065565da89b392"},
|
||||
{file = "greenlet-1.1.3.post0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a8b58232f5b72973350c2b917ea3df0bebd07c3c82a0a0e34775fc2c1f857e9"},
|
||||
{file = "greenlet-1.1.3.post0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:f6661b58412879a2aa099abb26d3c93e91dedaba55a6394d1fb1512a77e85de9"},
|
||||
{file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c6e942ca9835c0b97814d14f78da453241837419e0d26f7403058e8db3e38f8"},
|
||||
{file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a812df7282a8fc717eafd487fccc5ba40ea83bb5b13eb3c90c446d88dbdfd2be"},
|
||||
{file = "greenlet-1.1.3.post0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a7a6560df073ec9de2b7cb685b199dfd12519bc0020c62db9d1bb522f989fa"},
|
||||
{file = "greenlet-1.1.3.post0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:17a69967561269b691747e7f436d75a4def47e5efcbc3c573180fc828e176d80"},
|
||||
{file = "greenlet-1.1.3.post0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:60839ab4ea7de6139a3be35b77e22e0398c270020050458b3d25db4c7c394df5"},
|
||||
{file = "greenlet-1.1.3.post0-cp310-cp310-win_amd64.whl", hash = "sha256:8926a78192b8b73c936f3e87929931455a6a6c6c385448a07b9f7d1072c19ff3"},
|
||||
{file = "greenlet-1.1.3.post0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:c6f90234e4438062d6d09f7d667f79edcc7c5e354ba3a145ff98176f974b8132"},
|
||||
{file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:814f26b864ed2230d3a7efe0336f5766ad012f94aad6ba43a7c54ca88dd77cba"},
|
||||
{file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fda1139d87ce5f7bd80e80e54f9f2c6fe2f47983f1a6f128c47bf310197deb6"},
|
||||
{file = "greenlet-1.1.3.post0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0643250dd0756f4960633f5359884f609a234d4066686754e834073d84e9b51"},
|
||||
{file = "greenlet-1.1.3.post0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cb863057bed786f6622982fb8b2c122c68e6e9eddccaa9fa98fd937e45ee6c4f"},
|
||||
{file = "greenlet-1.1.3.post0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8c0581077cf2734569f3e500fab09c0ff6a2ab99b1afcacbad09b3c2843ae743"},
|
||||
{file = "greenlet-1.1.3.post0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:695d0d8b5ae42c800f1763c9fce9d7b94ae3b878919379150ee5ba458a460d57"},
|
||||
{file = "greenlet-1.1.3.post0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5662492df0588a51d5690f6578f3bbbd803e7f8d99a99f3bf6128a401be9c269"},
|
||||
{file = "greenlet-1.1.3.post0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:bffba15cff4802ff493d6edcf20d7f94ab1c2aee7cfc1e1c7627c05f1102eee8"},
|
||||
{file = "greenlet-1.1.3.post0-cp35-cp35m-win32.whl", hash = "sha256:7afa706510ab079fd6d039cc6e369d4535a48e202d042c32e2097f030a16450f"},
|
||||
{file = "greenlet-1.1.3.post0-cp35-cp35m-win_amd64.whl", hash = "sha256:3a24f3213579dc8459e485e333330a921f579543a5214dbc935bc0763474ece3"},
|
||||
{file = "greenlet-1.1.3.post0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:64e10f303ea354500c927da5b59c3802196a07468332d292aef9ddaca08d03dd"},
|
||||
{file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:eb6ac495dccb1520667cfea50d89e26f9ffb49fa28496dea2b95720d8b45eb54"},
|
||||
{file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:88720794390002b0c8fa29e9602b395093a9a766b229a847e8d88349e418b28a"},
|
||||
{file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39464518a2abe9c505a727af7c0b4efff2cf242aa168be5f0daa47649f4d7ca8"},
|
||||
{file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0914f02fcaa8f84f13b2df4a81645d9e82de21ed95633765dd5cc4d3af9d7403"},
|
||||
{file = "greenlet-1.1.3.post0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96656c5f7c95fc02c36d4f6ef32f4e94bb0b6b36e6a002c21c39785a4eec5f5d"},
|
||||
{file = "greenlet-1.1.3.post0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4f74aa0092602da2069df0bc6553919a15169d77bcdab52a21f8c5242898f519"},
|
||||
{file = "greenlet-1.1.3.post0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:3aeac044c324c1a4027dca0cde550bd83a0c0fbff7ef2c98df9e718a5086c194"},
|
||||
{file = "greenlet-1.1.3.post0-cp36-cp36m-win32.whl", hash = "sha256:fe7c51f8a2ab616cb34bc33d810c887e89117771028e1e3d3b77ca25ddeace04"},
|
||||
{file = "greenlet-1.1.3.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:70048d7b2c07c5eadf8393e6398595591df5f59a2f26abc2f81abca09610492f"},
|
||||
{file = "greenlet-1.1.3.post0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:66aa4e9a726b70bcbfcc446b7ba89c8cec40f405e51422c39f42dfa206a96a05"},
|
||||
{file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:025b8de2273d2809f027d347aa2541651d2e15d593bbce0d5f502ca438c54136"},
|
||||
{file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:82a38d7d2077128a017094aff334e67e26194f46bd709f9dcdacbf3835d47ef5"},
|
||||
{file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7d20c3267385236b4ce54575cc8e9f43e7673fc761b069c820097092e318e3b"},
|
||||
{file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8ece5d1a99a2adcb38f69af2f07d96fb615415d32820108cd340361f590d128"},
|
||||
{file = "greenlet-1.1.3.post0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2794eef1b04b5ba8948c72cc606aab62ac4b0c538b14806d9c0d88afd0576d6b"},
|
||||
{file = "greenlet-1.1.3.post0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a8d24eb5cb67996fb84633fdc96dbc04f2d8b12bfcb20ab3222d6be271616b67"},
|
||||
{file = "greenlet-1.1.3.post0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0120a879aa2b1ac5118bce959ea2492ba18783f65ea15821680a256dfad04754"},
|
||||
{file = "greenlet-1.1.3.post0-cp37-cp37m-win32.whl", hash = "sha256:bef49c07fcb411c942da6ee7d7ea37430f830c482bf6e4b72d92fd506dd3a427"},
|
||||
{file = "greenlet-1.1.3.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:62723e7eb85fa52e536e516ee2ac91433c7bb60d51099293671815ff49ed1c21"},
|
||||
{file = "greenlet-1.1.3.post0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d25cdedd72aa2271b984af54294e9527306966ec18963fd032cc851a725ddc1b"},
|
||||
{file = "greenlet-1.1.3.post0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:924df1e7e5db27d19b1359dc7d052a917529c95ba5b8b62f4af611176da7c8ad"},
|
||||
{file = "greenlet-1.1.3.post0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ec615d2912b9ad807afd3be80bf32711c0ff9c2b00aa004a45fd5d5dde7853d9"},
|
||||
{file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0971d37ae0eaf42344e8610d340aa0ad3d06cd2eee381891a10fe771879791f9"},
|
||||
{file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:325f272eb997916b4a3fc1fea7313a8adb760934c2140ce13a2117e1b0a8095d"},
|
||||
{file = "greenlet-1.1.3.post0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75afcbb214d429dacdf75e03a1d6d6c5bd1fa9c35e360df8ea5b6270fb2211c"},
|
||||
{file = "greenlet-1.1.3.post0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5c2d21c2b768d8c86ad935e404cc78c30d53dea009609c3ef3a9d49970c864b5"},
|
||||
{file = "greenlet-1.1.3.post0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:467b73ce5dcd89e381292fb4314aede9b12906c18fab903f995b86034d96d5c8"},
|
||||
{file = "greenlet-1.1.3.post0-cp38-cp38-win32.whl", hash = "sha256:8149a6865b14c33be7ae760bcdb73548bb01e8e47ae15e013bf7ef9290ca309a"},
|
||||
{file = "greenlet-1.1.3.post0-cp38-cp38-win_amd64.whl", hash = "sha256:104f29dd822be678ef6b16bf0035dcd43206a8a48668a6cae4d2fe9c7a7abdeb"},
|
||||
{file = "greenlet-1.1.3.post0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:c8c9301e3274276d3d20ab6335aa7c5d9e5da2009cccb01127bddb5c951f8870"},
|
||||
{file = "greenlet-1.1.3.post0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8415239c68b2ec9de10a5adf1130ee9cb0ebd3e19573c55ba160ff0ca809e012"},
|
||||
{file = "greenlet-1.1.3.post0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:3c22998bfef3fcc1b15694818fc9b1b87c6cc8398198b96b6d355a7bcb8c934e"},
|
||||
{file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa1845944e62f358d63fcc911ad3b415f585612946b8edc824825929b40e59e"},
|
||||
{file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:890f633dc8cb307761ec566bc0b4e350a93ddd77dc172839be122be12bae3e10"},
|
||||
{file = "greenlet-1.1.3.post0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cf37343e43404699d58808e51f347f57efd3010cc7cee134cdb9141bd1ad9ea"},
|
||||
{file = "greenlet-1.1.3.post0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5edf75e7fcfa9725064ae0d8407c849456553a181ebefedb7606bac19aa1478b"},
|
||||
{file = "greenlet-1.1.3.post0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a954002064ee919b444b19c1185e8cce307a1f20600f47d6f4b6d336972c809"},
|
||||
{file = "greenlet-1.1.3.post0-cp39-cp39-win32.whl", hash = "sha256:2ccdc818cc106cc238ff7eba0d71b9c77be868fdca31d6c3b1347a54c9b187b2"},
|
||||
{file = "greenlet-1.1.3.post0-cp39-cp39-win_amd64.whl", hash = "sha256:91a84faf718e6f8b888ca63d0b2d6d185c8e2a198d2a7322d75c303e7097c8b7"},
|
||||
{file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"},
|
||||
{file = "greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"},
|
||||
{file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"},
|
||||
{file = "greenlet-2.0.1-cp27-cp27m-win_amd64.whl", hash = "sha256:3001d00eba6bbf084ae60ec7f4bb8ed375748f53aeaefaf2a37d9f0370558524"},
|
||||
{file = "greenlet-2.0.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d566b82e92ff2e09dd6342df7e0eb4ff6275a3f08db284888dcd98134dbd4243"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0722c9be0797f544a3ed212569ca3fe3d9d1a1b13942d10dd6f0e8601e484d26"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d37990425b4687ade27810e3b1a1c37825d242ebc275066cfee8cb6b8829ccd"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be35822f35f99dcc48152c9839d0171a06186f2d71ef76dc57fa556cc9bf6b45"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c140e7eb5ce47249668056edf3b7e9900c6a2e22fb0eaf0513f18a1b2c14e1da"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d21681f09e297a5adaa73060737e3aa1279a13ecdcfcc6ef66c292cb25125b2d"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb412b7db83fe56847df9c47b6fe3f13911b06339c2aa02dcc09dce8bbf582cd"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6a08799e9e88052221adca55741bf106ec7ea0710bca635c208b751f0d5b617"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e112e03d37987d7b90c1e98ba5e1b59e1645226d78d73282f45b326f7bddcb9"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56961cfca7da2fdd178f95ca407fa330c64f33289e1804b592a77d5593d9bd94"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13ba6e8e326e2116c954074c994da14954982ba2795aebb881c07ac5d093a58a"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bf633a50cc93ed17e494015897361010fc08700d92676c87931d3ea464123ce"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9f2c221eecb7ead00b8e3ddb913c67f75cba078fd1d326053225a3f59d850d72"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:13ebf93c343dd8bd010cd98e617cb4c1c1f352a0cf2524c82d3814154116aa82"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f61d71bbc9b4a3de768371b210d906726535d6ca43506737682caa754b956cd"},
|
||||
{file = "greenlet-2.0.1-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:2d0bac0385d2b43a7bd1d651621a4e0f1380abc63d6fb1012213a401cbd5bf8f"},
|
||||
{file = "greenlet-2.0.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6327b6907b4cb72f650a5b7b1be23a2aab395017aa6f1adb13069d66360eb3f"},
|
||||
{file = "greenlet-2.0.1-cp35-cp35m-win32.whl", hash = "sha256:81b0ea3715bf6a848d6f7149d25bf018fd24554a4be01fcbbe3fdc78e890b955"},
|
||||
{file = "greenlet-2.0.1-cp35-cp35m-win_amd64.whl", hash = "sha256:38255a3f1e8942573b067510f9611fc9e38196077b0c8eb7a8c795e105f9ce77"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:04957dc96669be041e0c260964cfef4c77287f07c40452e61abe19d647505581"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:4aeaebcd91d9fee9aa768c1b39cb12214b30bf36d2b7370505a9f2165fedd8d9"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974a39bdb8c90a85982cdb78a103a32e0b1be986d411303064b28a80611f6e51"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dca09dedf1bd8684767bc736cc20c97c29bc0c04c413e3276e0962cd7aeb148"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c0757db9bd08470ff8277791795e70d0bf035a011a528ee9a5ce9454b6cba2"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5067920de254f1a2dee8d3d9d7e4e03718e8fd2d2d9db962c8c9fa781ae82a39"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5a8e05057fab2a365c81abc696cb753da7549d20266e8511eb6c9d9f72fe3e92"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:3d75b8d013086b08e801fbbb896f7d5c9e6ccd44f13a9241d2bf7c0df9eda928"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:097e3dae69321e9100202fc62977f687454cd0ea147d0fd5a766e57450c569fd"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:cb242fc2cda5a307a7698c93173d3627a2a90d00507bccf5bc228851e8304963"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:72b00a8e7c25dcea5946692a2485b1a0c0661ed93ecfedfa9b6687bd89a24ef5"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:cd4ccc364cf75d1422e66e247e52a93da6a9b73cefa8cad696f3cbbb75af179d"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c8b1c43e75c42a6cafcc71defa9e01ead39ae80bd733a2608b297412beede68"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b1992ba9d4780d9af9726bbcef6a1db12d9ab1ccc35e5773685a24b7fb2758eb"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b5e83e4de81dcc9425598d9469a624826a0b1211380ac444c7c791d4a2137c19"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"},
|
||||
{file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"},
|
||||
]
|
||||
gunicorn = [
|
||||
{file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"},
|
||||
|
@ -2909,6 +2911,8 @@ protobuf = [
|
|||
psycopg2 = [
|
||||
{file = "psycopg2-2.9.5-cp310-cp310-win32.whl", hash = "sha256:d3ef67e630b0de0779c42912fe2cbae3805ebaba30cda27fea2a3de650a9414f"},
|
||||
{file = "psycopg2-2.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:4cb9936316d88bfab614666eb9e32995e794ed0f8f6b3b718666c22819c1d7ee"},
|
||||
{file = "psycopg2-2.9.5-cp311-cp311-win32.whl", hash = "sha256:093e3894d2d3c592ab0945d9eba9d139c139664dcf83a1c440b8a7aa9bb21955"},
|
||||
{file = "psycopg2-2.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:920bf418000dd17669d2904472efeab2b20546efd0548139618f8fa305d1d7ad"},
|
||||
{file = "psycopg2-2.9.5-cp36-cp36m-win32.whl", hash = "sha256:b9ac1b0d8ecc49e05e4e182694f418d27f3aedcfca854ebd6c05bb1cffa10d6d"},
|
||||
{file = "psycopg2-2.9.5-cp36-cp36m-win_amd64.whl", hash = "sha256:fc04dd5189b90d825509caa510f20d1d504761e78b8dfb95a0ede180f71d50e5"},
|
||||
{file = "psycopg2-2.9.5-cp37-cp37m-win32.whl", hash = "sha256:922cc5f0b98a5f2b1ff481f5551b95cd04580fd6f0c72d9b22e6c0145a4840e0"},
|
||||
|
@ -2920,18 +2924,7 @@ psycopg2 = [
|
|||
{file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"},
|
||||
]
|
||||
pyasn1 = [
|
||||
{file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
|
||||
{file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
|
||||
{file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
|
||||
{file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
|
||||
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
|
||||
{file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
|
||||
{file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
|
||||
{file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
|
||||
{file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
|
||||
{file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
|
||||
{file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
|
||||
{file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
|
||||
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
|
||||
]
|
||||
pycodestyle = [
|
||||
|
@ -3282,6 +3275,10 @@ toml = [
|
|||
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
|
||||
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
|
||||
]
|
||||
tomli = [
|
||||
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
|
||||
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
|
||||
]
|
||||
tornado = [
|
||||
{file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"},
|
||||
{file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"},
|
||||
|
|
|
@@ -104,6 +104,9 @@ MonkeyType = "^22.2.0"
[tool.poetry.scripts]
spiffworkflow-backend = "spiffworkflow_backend.__main__:main"

[tool.poetry.group.dev.dependencies]
tomli = "^2.0.1"

[tool.pytest.ini_options]
# ignore deprecation warnings from various packages that we don't control
filterwarnings = [
@@ -156,7 +156,7 @@ jobs:

      - name: Upload coverage data
        # pin to upload coverage from only one matrix entry, otherwise coverage gets confused later
        if: always() && matrix.session == 'tests' && matrix.python == '3.11' && matrix.os == 'ubuntu-latest'
        if: always() && matrix.session == 'tests' && matrix.python == '3.11' && matrix.os == 'ubuntu-latest' && matrix.database == 'mysql'
        uses: "actions/upload-artifact@v3.0.0"
        with:
          name: coverage-data
@@ -2,6 +2,7 @@
import csv

from flask_bpmn.models.db import db

from spiffworkflow_backend import get_hacked_up_app_for_script
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
@@ -2,6 +2,7 @@
import time

from apscheduler.schedulers.background import BlockingScheduler  # type: ignore

from spiffworkflow_backend import create_app
from spiffworkflow_backend import start_scheduler
from spiffworkflow_backend.helpers.db_helper import try_to_connect
@@ -7,6 +7,8 @@ from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel

@@ -17,7 +19,6 @@ from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

# from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@@ -643,7 +643,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "5e40777f4013f71f2c1237f13f7dba1bdd5c0de3"
resolved_reference = "886bfdc31aade43e9683439e6d29b06acb235081"

[[package]]
name = "Flask-Cors"

@@ -1875,8 +1875,8 @@ lxml = "*"
[package.source]
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "feature/parser_info_features"
resolved_reference = "849c223ee904f528a116818615c1fc08506ba63b"
reference = "main"
resolved_reference = "14d3d8c3f69af880eaf994be1689ee9fcc72e829"

[[package]]
name = "SQLAlchemy"

@@ -2259,7 +2259,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
[metadata]
lock-version = "1.1"
python-versions = ">=3.9,<3.12"
content-hash = "eedbd5bc2c365d144f88dfe95fafc74f3708cde585bf6e29e78005dd51793284"
content-hash = "a6d3882a3ab142b82201b83ee8a0552fd16112c4540e2a1dbcb5c38599b917c1"

[metadata.files]
alabaster = [
@@ -27,7 +27,7 @@ flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/parser_info_features"}
SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
#SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
sentry-sdk = "^1.10"
sphinx-autoapi = "^2.0"
@@ -5,7 +5,6 @@ from typing import Any
import connexion  # type: ignore
import flask.app
import flask.json
import spiffworkflow_backend.load_database_models  # noqa: F401
import sqlalchemy
from apscheduler.schedulers.background import BackgroundScheduler  # type: ignore
from apscheduler.schedulers.base import BaseScheduler  # type: ignore

@@ -15,6 +14,9 @@ from flask_bpmn.models.db import db
from flask_bpmn.models.db import migrate
from flask_cors import CORS  # type: ignore
from flask_mail import Mail  # type: ignore
from werkzeug.exceptions import NotFound

import spiffworkflow_backend.load_database_models  # noqa: F401
from spiffworkflow_backend.config import setup_config
from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint

@@ -24,7 +26,6 @@ from spiffworkflow_backend.services.authorization_service import AuthorizationSe
from spiffworkflow_backend.services.background_processing_service import (
    BackgroundProcessingService,
)
from werkzeug.exceptions import NotFound


class MyJSONEncoder(DefaultJSONProvider):

@@ -38,11 +39,14 @@ class MyJSONEncoder(DefaultJSONProvider):
            return_dict = {}
            for row_key in obj.keys():
                row_value = obj[row_key]
                if hasattr(row_value, "__dict__"):
                if hasattr(row_value, "serialized"):
                    return_dict.update(row_value.serialized)
                elif hasattr(row_value, "__dict__"):
                    return_dict.update(row_value.__dict__)
                else:
                    return_dict.update({row_key: row_value})
            return_dict.pop("_sa_instance_state")
            if "_sa_instance_state" in return_dict:
                return_dict.pop("_sa_instance_state")
            return return_dict
        return super().default(obj)
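For context, here is a minimal sketch of a fallback routine shaped like the one changed above: it prefers an object's explicit `serialized` dict, falls back to `__dict__`, and strips SQLAlchemy's `_sa_instance_state` bookkeeping key only when it is present. This is an illustration of the pattern, not the backend's exact class.

```python
# Sketch of the serialization fallback logic in the diff above (illustrative only).
from typing import Any


def row_to_dict(obj: Any) -> dict:
    """Flatten a mapping-like DB row into a plain dict for JSON encoding."""
    return_dict: dict = {}
    for row_key in obj.keys():
        row_value = obj[row_key]
        if hasattr(row_value, "serialized"):
            # the model exposes an explicit serialized form -- prefer it
            return_dict.update(row_value.serialized)
        elif hasattr(row_value, "__dict__"):
            # fall back to the instance dict for plain model objects
            return_dict.update(row_value.__dict__)
        else:
            return_dict.update({row_key: row_value})
    # SQLAlchemy adds this bookkeeping key to model __dict__; drop it if present
    if "_sa_instance_state" in return_dict:
        return_dict.pop("_sa_instance_state")
    return return_dict


# usage with a plain dict standing in for a SQLAlchemy Row
print(row_to_dict({"id": 1, "status": "complete"}))
```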
@@ -872,6 +872,64 @@ paths:
              items:
                $ref: "#/components/schemas/Task"

  /tasks/for-my-open-processes:
    parameters:
      - name: page
        in: query
        required: false
        description: The page number to return. Defaults to page 1.
        schema:
          type: integer
      - name: per_page
        in: query
        required: false
        description: The page number to return. Defaults to page 1.
        schema:
          type: integer
    get:
      tags:
        - Process Instances
      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_my_open_processes
      summary: returns the list of tasks for given user's open process instances
      responses:
        "200":
          description: list of tasks
          content:
            application/json:
              schema:
                type: array
                items:
                  $ref: "#/components/schemas/Task"

  /tasks/for-processes-started-by-others:
    parameters:
      - name: page
        in: query
        required: false
        description: The page number to return. Defaults to page 1.
        schema:
          type: integer
      - name: per_page
        in: query
        required: false
        description: The page number to return. Defaults to page 1.
        schema:
          type: integer
    get:
      tags:
        - Process Instances
      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_processes_started_by_others
      summary: returns the list of tasks for given user's open process instances
      responses:
        "200":
          description: list of tasks
          content:
            application/json:
              schema:
                type: array
                items:
                  $ref: "#/components/schemas/Task"

  /process-instance/{process_instance_id}/tasks:
    parameters:
      - name: process_instance_id
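A hedged example of calling the two new endpoints with the pagination query parameters they declare; the base URL, API path prefix, and Authorization header are placeholders for illustration, not values taken from this repository.

```python
# Illustrative client calls for the new task-list endpoints added above.
# The base URL and bearer token are assumptions, not values from this commit.
import requests

BASE_URL = "http://localhost:7000/v1.0"  # assumed backend address and path prefix
HEADERS = {"Authorization": "Bearer YOUR_TOKEN_HERE"}

for path in ("/tasks/for-my-open-processes", "/tasks/for-processes-started-by-others"):
    response = requests.get(
        f"{BASE_URL}{path}",
        params={"page": 1, "per_page": 10},  # both endpoints accept page/per_page
        headers=HEADERS,
        timeout=10,
    )
    response.raise_for_status()
    tasks = response.json()  # a JSON array of Task objects per the spec
    print(path, len(tasks))
```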
@@ -3,9 +3,10 @@ import os
import threading

from flask.app import Flask
from spiffworkflow_backend.services.logging_service import setup_logger
from werkzeug.utils import ImportStringError

from spiffworkflow_backend.services.logging_service import setup_logger


class ConfigurationError(Exception):
    """ConfigurationError."""
@ -6,13 +6,14 @@ from typing import TYPE_CHECKING
|
|||
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.orm import RelationshipProperty
|
||||
|
||||
from spiffworkflow_backend.models.group import GroupModel
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
from spiffworkflow_backend.models.task import Task
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.orm import RelationshipProperty
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
|
|
@ -5,9 +5,10 @@ from dataclasses import dataclass
|
|||
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from sqlalchemy import ForeignKey
|
||||
|
||||
from spiffworkflow_backend.models.active_task import ActiveTaskModel
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
from sqlalchemy import ForeignKey
|
||||
|
||||
|
||||
@dataclass
|
||||
|
|
|
@ -7,6 +7,7 @@ from typing import Optional
|
|||
import marshmallow
|
||||
from marshmallow import INCLUDE
|
||||
from marshmallow import Schema
|
||||
|
||||
from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
|
||||
|
||||
|
||||
|
|
|
@ -4,12 +4,13 @@ from typing import TYPE_CHECKING
|
|||
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from spiffworkflow_backend.models.message_correlation_property import (
|
||||
MessageCorrelationPropertyModel,
|
||||
)
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from spiffworkflow_backend.models.message_correlation_message_instance import ( # noqa: F401
|
||||
|
@@ -43,6 +44,7 @@ class MessageCorrelationModel(SpiffworkflowBaseDBModel):
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)

    message_correlation_property = relationship("MessageCorrelationPropertyModel")
    message_correlations_message_instances = relationship(
        "MessageCorrelationMessageInstanceModel", cascade="delete"
    )
@ -3,9 +3,10 @@ from dataclasses import dataclass
|
|||
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from sqlalchemy import ForeignKey
|
||||
|
||||
from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
|
||||
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
|
||||
from sqlalchemy import ForeignKey
|
||||
|
||||
|
||||
@dataclass
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
"""Message_correlation_property."""
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from spiffworkflow_backend.models.message_model import MessageModel
|
||||
from sqlalchemy import ForeignKey
|
||||
|
||||
from spiffworkflow_backend.models.message_model import MessageModel
|
||||
|
||||
|
||||
class MessageCorrelationPropertyModel(SpiffworkflowBaseDBModel):
|
||||
"""MessageCorrelationPropertyModel."""
|
||||
|
|
|
@ -7,14 +7,15 @@ from typing import TYPE_CHECKING
|
|||
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from spiffworkflow_backend.models.message_model import MessageModel
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.event import listens_for
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy.orm import validates
|
||||
|
||||
from spiffworkflow_backend.models.message_model import MessageModel
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from spiffworkflow_backend.models.message_correlation_message_instance import ( # noqa: F401
|
||||
MessageCorrelationMessageInstanceModel,
|
||||
|
@@ -58,6 +59,8 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)

    message_correlations: dict | None = None

    @validates("message_type")
    def validate_message_type(self, key: str, value: Any) -> Any:
        """Validate_message_type."""
@ -1,9 +1,10 @@
|
|||
"""Message_correlation_property."""
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from spiffworkflow_backend.models.message_model import MessageModel
|
||||
from sqlalchemy import ForeignKey
|
||||
|
||||
from spiffworkflow_backend.models.message_model import MessageModel
|
||||
|
||||
|
||||
class MessageTriggerableProcessModel(SpiffworkflowBaseDBModel):
|
||||
"""MessageTriggerableProcessModel."""
|
||||
|
|
|
@ -4,11 +4,12 @@ from typing import Any
|
|||
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
|
||||
from spiffworkflow_backend.models.principal import PrincipalModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import validates
|
||||
|
||||
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
|
||||
from spiffworkflow_backend.models.principal import PrincipalModel
|
||||
|
||||
|
||||
class PermitDeny(enum.Enum):
|
||||
"""PermitDeny."""
|
||||
|
|
|
@ -3,12 +3,13 @@ from dataclasses import dataclass
|
|||
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from spiffworkflow_backend.models.group import GroupModel
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.schema import CheckConstraint
|
||||
|
||||
from spiffworkflow_backend.models.group import GroupModel
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
|
||||
|
||||
class DataValidityError(Exception):
|
||||
"""DataValidityError."""
|
||||
|
|
|
@ -9,6 +9,7 @@ from typing import Any
|
|||
import marshmallow
|
||||
from marshmallow import post_load
|
||||
from marshmallow import Schema
|
||||
|
||||
from spiffworkflow_backend.models.process_model import ProcessModelInfo
|
||||
|
||||
|
||||
|
@@ -26,9 +27,7 @@ class ProcessGroup:
    process_models: list[ProcessModelInfo] = field(
        default_factory=list[ProcessModelInfo]
    )
    process_groups: list[ProcessGroup] = field(
        default_factory=list['ProcessGroup']
    )
    process_groups: list[ProcessGroup] = field(default_factory=list["ProcessGroup"])

    def __post_init__(self) -> None:
        """__post_init__."""
@ -12,15 +12,16 @@ from marshmallow import INCLUDE
|
|||
from marshmallow import Schema
|
||||
from marshmallow_enum import EnumField # type: ignore
|
||||
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import deferred
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.orm import validates
|
||||
|
||||
from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
|
||||
from spiffworkflow_backend.models.process_model import ProcessModelInfo
|
||||
from spiffworkflow_backend.models.task import Task
|
||||
from spiffworkflow_backend.models.task import TaskSchema
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import deferred
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.orm import validates
|
||||
|
||||
|
||||
class NavigationItemSchema(Schema):
|
||||
|
@@ -99,17 +100,17 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
        local_bpmn_xml_file_contents = ""
        if self.bpmn_xml_file_contents:
            local_bpmn_xml_file_contents = self.bpmn_xml_file_contents.decode("utf-8")

        return {
            "id": self.id,
            "process_model_identifier": self.process_model_identifier,
            "process_group_identifier": self.process_group_identifier,
            "status": self.status,
            "bpmn_json": self.bpmn_json,
            "start_in_seconds": self.start_in_seconds,
            "end_in_seconds": self.end_in_seconds,
            "process_initiator_id": self.process_initiator_id,
            "bpmn_xml_file_contents": local_bpmn_xml_file_contents,
            "bpmn_version_control_identifier": self.bpmn_version_control_identifier,
            "bpmn_version_control_type": self.bpmn_version_control_type,
            "spiff_step": self.spiff_step,
        }
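The `serialized` property above returns a plain dict, which is what the JSON provider change earlier in this commit looks for first. An illustrative shape with made-up values:

```python
# Example (made-up values) of the dict returned by the serialized property above,
# matching the keys listed in the diff.
example_serialized = {
    "id": 42,
    "process_model_identifier": "hello_world",
    "process_group_identifier": "examples",
    "status": "complete",
    "bpmn_json": None,
    "start_in_seconds": 1668000000,
    "end_in_seconds": 1668000060,
    "process_initiator_id": 1,
    "bpmn_xml_file_contents": "",
    "bpmn_version_control_identifier": "abc123",
    "bpmn_version_control_type": "git",
    "spiff_step": 3,
}
print(sorted(example_serialized.keys()))
```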
@ -9,6 +9,10 @@ from typing import TypedDict
|
|||
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import deferred
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
|
||||
ProcessEntityNotFoundError,
|
||||
)
|
||||
|
@ -17,9 +21,6 @@ from spiffworkflow_backend.models.user import UserModel
|
|||
from spiffworkflow_backend.services.process_instance_processor import (
|
||||
ProcessInstanceProcessor,
|
||||
)
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import deferred
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
|
||||
ReportMetadata = dict[str, Any]
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import enum
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from dataclasses import field
|
||||
from typing import Any
|
||||
|
@ -9,6 +10,7 @@ from typing import Any
|
|||
import marshmallow
|
||||
from marshmallow import Schema
|
||||
from marshmallow.decorators import post_load
|
||||
|
||||
from spiffworkflow_backend.models.file import File
|
||||
|
||||
|
||||
|
@@ -49,6 +51,11 @@ class ProcessModelInfo:
            return True
        return False

    # for use with os.path.join so it can work on windows
    def id_for_file_path(self) -> str:
        """Id_for_file_path."""
        return self.id.replace("/", os.sep)


class ProcessModelInfoSchema(Schema):
    """ProcessModelInfoSchema."""
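`id_for_file_path` swaps the URL-style slashes in a process model id for the OS path separator so the id can be joined onto a filesystem path on Windows as well as Linux. A tiny sketch of the same idea as a standalone function, using a hypothetical id:

```python
# Illustration of the id_for_file_path behavior added above, with a made-up id.
import os


def id_for_file_path(model_id: str) -> str:
    """Convert a process-model id like 'group/model' into an OS-specific relative path."""
    return model_id.replace("/", os.sep)


relative_path = id_for_file_path("example-group/example-model")
# On Linux/macOS this stays 'example-group/example-model';
# on Windows it becomes 'example-group\\example-model'.
print(os.path.join("process_models", relative_path))
```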
@ -4,9 +4,10 @@ from dataclasses import dataclass
|
|||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from marshmallow import Schema
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
from sqlalchemy import ForeignKey
|
||||
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
|
||||
|
||||
@dataclass()
|
||||
class SecretModel(SpiffworkflowBaseDBModel):
|
||||
|
|
|
@ -10,12 +10,13 @@ from flask_bpmn.api.api_error import ApiError
|
|||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from marshmallow import Schema
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.orm import validates
|
||||
|
||||
from spiffworkflow_backend.models.group import GroupModel
|
||||
from spiffworkflow_backend.services.authentication_service import (
|
||||
AuthenticationProviderTypes,
|
||||
)
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.orm import validates
|
||||
|
||||
|
||||
class UserNotFoundError(Exception):
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
"""UserGroupAssignment."""
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from spiffworkflow_backend.models.group import GroupModel
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from spiffworkflow_backend.models.group import GroupModel
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
|
||||
|
||||
class UserGroupAssignmentModel(SpiffworkflowBaseDBModel):
|
||||
"""UserGroupAssignmentModel."""
|
||||
|
|
|
@@ -7,6 +7,8 @@ from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from werkzeug.wrappers import Response

from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)

@@ -16,7 +18,6 @@ from spiffworkflow_backend.services.process_instance_service import (
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.user_service import UserService
from werkzeug.wrappers import Response

admin_blueprint = Blueprint(
    "admin", __name__, template_folder="templates", static_folder="static"
@@ -28,12 +28,18 @@ from lxml import etree  # type: ignore
from lxml.builder import ElementMaker  # type: ignore
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
from SpiffWorkflow.task import TaskState
from sqlalchemy import and_
from sqlalchemy import asc
from sqlalchemy import desc

from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
    ProcessEntityNotFoundError,
)
from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.file import FileSchema
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.message_model import MessageModel
from spiffworkflow_backend.models.message_triggerable_process_model import (

@@ -75,8 +81,6 @@ from spiffworkflow_backend.services.secret_service import SecretService
from spiffworkflow_backend.services.service_task_service import ServiceTaskService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.user_service import UserService
from sqlalchemy import asc
from sqlalchemy import desc


class TaskDataSelectOption(TypedDict):
@@ -178,10 +182,14 @@ def process_group_update(
    return make_response(jsonify(process_group), 200)


def process_groups_list(process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
def process_groups_list(
    process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Process_groups_list."""
    if process_group_identifier is not None:
        process_groups = ProcessModelService().get_process_groups(process_group_identifier)
        process_groups = ProcessModelService().get_process_groups(
            process_group_identifier
        )
    else:
        process_groups = ProcessModelService().get_process_groups()
    batch = ProcessModelService().get_batch(
@@ -572,16 +580,35 @@ def message_instance_list(
            MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore
            MessageInstanceModel.id.desc(), # type: ignore
        )
        .join(MessageModel)
        .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id)
        .join(ProcessInstanceModel)
        .add_columns(
            MessageModel.identifier.label("message_identifier"),
            ProcessInstanceModel.process_model_identifier,
            ProcessInstanceModel.process_group_identifier,
        )
        .paginate(page=page, per_page=per_page, error_out=False)
    )

    for message_instance in message_instances:
        message_correlations: dict = {}
        for (
            mcmi
        ) in (
            message_instance.MessageInstanceModel.message_correlations_message_instances
        ):
            mc = MessageCorrelationModel.query.filter_by(
                id=mcmi.message_correlation_id
            ).all()
            for m in mc:
                if m.name not in message_correlations:
                    message_correlations[m.name] = {}
                message_correlations[m.name][
                    m.message_correlation_property.identifier
                ] = m.value
        message_instance.MessageInstanceModel.message_correlations = (
            message_correlations
        )

    response_json = {
        "results": message_instances.items,
        "pagination": {
@@ -993,6 +1020,67 @@ def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Res
    return make_response(jsonify(response_json), 200)


def task_list_for_my_open_processes(
    page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Task_list_for_my_open_processes."""
    return get_tasks(page=page, per_page=per_page)


def task_list_for_processes_started_by_others(
    page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Task_list_for_processes_started_by_others."""
    return get_tasks(processes_started_by_user=False, page=page, per_page=per_page)


def get_tasks(
    processes_started_by_user: bool = True, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Get_tasks."""
    user_id = g.user.id
    active_tasks_query = (
        ActiveTaskModel.query.outerjoin(
            GroupModel, GroupModel.id == ActiveTaskModel.lane_assignment_id
        )
        .join(ProcessInstanceModel)
        .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
    )

    if processes_started_by_user:
        active_tasks_query = active_tasks_query.filter(
            ProcessInstanceModel.process_initiator_id == user_id
        ).outerjoin(ActiveTaskUserModel, and_(ActiveTaskUserModel.user_id == user_id))
    else:
        active_tasks_query = active_tasks_query.filter(
            ProcessInstanceModel.process_initiator_id != user_id
        ).join(ActiveTaskUserModel, and_(ActiveTaskUserModel.user_id == user_id))

    active_tasks = active_tasks_query.add_columns(
        ProcessInstanceModel.process_model_identifier,
        ProcessInstanceModel.status.label("process_instance_status"), # type: ignore
        ProcessInstanceModel.updated_at_in_seconds,
        ProcessInstanceModel.created_at_in_seconds,
        UserModel.username,
        GroupModel.identifier.label("group_identifier"),
        ActiveTaskModel.task_name,
        ActiveTaskModel.task_title,
        ActiveTaskModel.process_model_display_name,
        ActiveTaskModel.process_instance_id,
        ActiveTaskUserModel.user_id.label("current_user_is_potential_owner"),
    ).paginate(page=page, per_page=per_page, error_out=False)

    response_json = {
        "results": active_tasks.items,
        "pagination": {
            "count": len(active_tasks.items),
            "total": active_tasks.total,
            "pages": active_tasks.pages,
        },
    }
    return make_response(jsonify(response_json), 200)


def process_instance_task_list(
    process_instance_id: int, all_tasks: bool = False, spiff_step: int = 0
) -> flask.wrappers.Response:
@@ -1326,9 +1414,18 @@ def find_process_instance_by_id_or_raise(
    process_instance_id: int,
) -> ProcessInstanceModel:
    """Find_process_instance_by_id_or_raise."""
    process_instance = ProcessInstanceModel.query.filter_by(
    process_instance_query = ProcessInstanceModel.query.filter_by(
        id=process_instance_id
    ).first()
    )

    # we had a frustrating session trying to do joins and access columns from two tables. here's some notes for our future selves:
    # this returns an object that allows you to do: process_instance.UserModel.username
    # process_instance = db.session.query(ProcessInstanceModel, UserModel).filter_by(id=process_instance_id).first()
    # you can also use splat with add_columns, but it still didn't ultimately give us access to the process instance
    # attributes or username like we wanted:
    # process_instance_query.join(UserModel).add_columns(*ProcessInstanceModel.__table__.columns, UserModel.username)

    process_instance = process_instance_query.first()
    if process_instance is None:
        raise (
            ApiError(
@@ -12,13 +12,14 @@ from flask import g
from flask import redirect
from flask import request
from flask_bpmn.api.api_error import ApiError
from werkzeug.wrappers import Response

from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authentication_service import (
    AuthenticationService,
)
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.user_service import UserService
from werkzeug.wrappers import Response

"""
.. module:: crc.api.user

@@ -9,10 +9,11 @@ from flask import request
from flask import Response
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from sqlalchemy.exc import IntegrityError

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
from sqlalchemy.exc import IntegrityError

APPLICATION_JSON: Final = "application/json"

@@ -2,6 +2,7 @@
from typing import Any

from flask import g

from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)

@@ -2,6 +2,7 @@
from typing import Any

from flask import current_app

from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)

@@ -4,6 +4,7 @@ from typing import Any

import pytz
from flask_bpmn.api.api_error import ApiError

from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)

@@ -9,6 +9,7 @@ from typing import Any
from typing import Callable

from flask_bpmn.api.api_error import ApiError

from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)
@@ -4,9 +4,10 @@ import time

from flask import current_app
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from tests.spiffworkflow_backend.helpers.base_test import BaseTest


def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:

@@ -11,9 +11,10 @@ from flask import current_app
from flask import redirect
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from spiffworkflow_backend.models.refresh_token import RefreshTokenModel
from werkzeug.wrappers import Response

from spiffworkflow_backend.models.refresh_token import RefreshTokenModel


class AuthenticationProviderTypes(enum.Enum):
    """AuthenticationServiceProviders."""

@@ -11,6 +11,8 @@ from flask import request
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from sqlalchemy import text

from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel

@@ -25,7 +27,6 @@ from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.user_service import UserService
from sqlalchemy import text


class PermissionsFileNotSetError(Exception):
@@ -1,5 +1,6 @@
"""Background_processing_service."""
import flask

from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,

@@ -1,5 +1,6 @@
"""Data_setup_service."""
from flask import current_app

from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService

@@ -5,6 +5,7 @@ from typing import Union

from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db

from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.services.email_service import EmailService

@@ -7,6 +7,7 @@ from typing import Optional
import pytz
from flask import current_app
from flask_bpmn.api.api_error import ApiError

from spiffworkflow_backend.models.file import CONTENT_TYPES
from spiffworkflow_backend.models.file import File
from spiffworkflow_backend.models.file import FileType

@@ -2,6 +2,7 @@
import os

from flask import current_app

from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.services.file_system_service import FileSystemService

@@ -2,6 +2,7 @@
from typing import Optional

from flask_bpmn.models.db import db

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.services.user_service import UserService

@@ -8,6 +8,7 @@ from typing import Optional
from flask import g
from flask.app import Flask
from flask_bpmn.models.db import db

from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
@@ -3,6 +3,10 @@ from typing import Any
from typing import Optional

from flask_bpmn.models.db import db
from sqlalchemy import and_
from sqlalchemy import or_
from sqlalchemy import select

from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
from spiffworkflow_backend.models.message_correlation_message_instance import (
    MessageCorrelationMessageInstanceModel,

@@ -19,9 +23,6 @@ from spiffworkflow_backend.services.process_instance_processor import (
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from sqlalchemy import and_
from sqlalchemy import or_
from sqlalchemy import select


class MessageServiceError(Exception):

@@ -65,6 +65,7 @@ from SpiffWorkflow.spiff.serializer.task_spec_converters import UserTaskConverte
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore

from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
@@ -7,6 +7,7 @@ from flask import current_app
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.task import Task as SpiffTask # type: ignore

from spiffworkflow_backend.models.process_instance import ProcessInstanceApi
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus

@@ -8,6 +8,7 @@ from typing import Optional
from typing import TypeVar

from flask_bpmn.api.api_error import ApiError

from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
    ProcessEntityNotFoundError,
)

@@ -160,7 +161,9 @@ class ProcessModelService(FileSystemService):
        process_models.sort()
        return process_models

    def get_process_groups(self, process_group_id: Optional[str] = None) -> list[ProcessGroup]:
    def get_process_groups(
        self, process_group_id: Optional[str] = None
    ) -> list[ProcessGroup]:
        """Returns the process_groups as a list in display order."""
        process_groups = self.__scan_process_groups(process_group_id)
        process_groups.sort()
@@ -215,7 +218,7 @@ class ProcessModelService(FileSystemService):
    def __get_all_nested_models(self, group_path: str) -> list:
        """__get_all_nested_models."""
        all_nested_models = []
        for root, dirs, files in os.walk(group_path):
        for _root, dirs, _files in os.walk(group_path):
            for dir in dirs:
                model_dir = os.path.join(group_path, dir)
                if ProcessModelService().is_model(model_dir):

@@ -254,7 +257,9 @@ class ProcessModelService(FileSystemService):
            index += 1
        return process_groups

    def __scan_process_groups(self, process_group_id: Optional[str] = None) -> list[ProcessGroup]:
    def __scan_process_groups(
        self, process_group_id: Optional[str] = None
    ) -> list[ProcessGroup]:
        """__scan_process_groups."""
        if not os.path.exists(FileSystemService.root_path()):
            return [] # Nothing to scan yet. There are no files.

@@ -303,9 +308,7 @@ class ProcessModelService(FileSystemService):
            if self.is_group(nested_item.path):
                # This is a nested group
                process_group.process_groups.append(
                    self.__scan_process_group(
                        nested_item.path
                    )
                    self.__scan_process_group(nested_item.path)
                )
            elif self.is_model(nested_item.path):
                process_group.process_models.append(
@@ -8,6 +8,7 @@ from typing import Optional

from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore

from spiffworkflow_backend.services.process_instance_processor import (
    CustomBpmnScriptEngine,
)

@@ -3,6 +3,7 @@ from typing import Optional

from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db

from spiffworkflow_backend.models.secret_model import SecretModel

# from cryptography.fernet import Fernet

@@ -5,6 +5,7 @@ from typing import Any
import requests
from flask import current_app
from flask import g

from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.secret_service import SecretService

@@ -14,6 +14,7 @@ from lxml import etree # type: ignore
from lxml.etree import _Element # type: ignore
from lxml.etree import Element as EtreeElement
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException # type: ignore

from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
from spiffworkflow_backend.models.file import File
from spiffworkflow_backend.models.file import FileReference

@@ -6,6 +6,7 @@ from flask import current_app
from flask import g
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db

from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.group import GroupModel

@@ -9,5 +9,3 @@
    },
    "ui:order": ["name", "department"]
}
@@ -8,10 +8,12 @@ from typing import Dict
from typing import Optional

from flask import current_app
from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from werkzeug.test import TestResponse # type: ignore

from spiffworkflow_backend.models.permission_assignment import Permission
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.process_group import ProcessGroup

@@ -25,8 +27,6 @@ from spiffworkflow_backend.services.authorization_service import AuthorizationSe
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.user_service import UserService
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from werkzeug.test import TestResponse # type: ignore

# from tests.spiffworkflow_backend.helpers.test_data import logged_in_headers

@@ -4,6 +4,7 @@ import os
from typing import Optional

from flask import current_app

from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
@@ -1,13 +1,14 @@
"""User."""
from typing import Optional

from tests.spiffworkflow_backend.helpers.example_data import ExampleDataLoader

from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
    ProcessEntityNotFoundError,
)
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from tests.spiffworkflow_backend.helpers.example_data import ExampleDataLoader


def assure_process_group_exists(process_group_id: Optional[str] = None) -> ProcessGroup:

@@ -2,10 +2,11 @@
import ast
import base64

from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.services.authentication_service import (
    AuthenticationService,
)
from tests.spiffworkflow_backend.helpers.base_test import BaseTest


class TestAuthentication(BaseTest):

@@ -1,9 +1,10 @@
"""Test_logging_service."""
from flask.app import Flask
from flask.testing import FlaskClient
from spiffworkflow_backend.models.user import UserModel
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.user import UserModel


class TestLoggingService(BaseTest):
    """Test logging service."""
@@ -3,6 +3,8 @@ import json

from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.models.process_model import ProcessModelInfo

@@ -11,7 +13,6 @@ from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from tests.spiffworkflow_backend.helpers.base_test import BaseTest


class TestNestedGroups(BaseTest):

@@ -9,6 +9,9 @@ import pytest
from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
    ProcessEntityNotFoundError,
)

@@ -32,8 +35,6 @@ from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

# from spiffworkflow_backend.services.git_service import GitService
@@ -622,7 +623,9 @@ class TestProcessApi(BaseTest):
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_process_model_file_update."""
        process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
        process_model_identifier = self.create_group_and_model_with_bpmn(
            client, with_super_admin_user
        )
        modified_process_model_id = process_model_identifier.replace("/", ":")

        data = {"key1": "THIS DATA"}

@@ -646,7 +649,9 @@ class TestProcessApi(BaseTest):
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_process_model_file_update."""
        process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
        process_model_identifier = self.create_group_and_model_with_bpmn(
            client, with_super_admin_user
        )
        modified_process_model_id = process_model_identifier.replace("/", ":")

        data = {"file": (io.BytesIO(b""), "random_fact.svg")}

@@ -722,7 +727,9 @@ class TestProcessApi(BaseTest):
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_process_model_file_update."""
        process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
        process_model_identifier = self.create_group_and_model_with_bpmn(
            client, with_super_admin_user
        )
        # self.create_spec_file(client, user=with_super_admin_user)

        # process_model = load_test_spec("random_fact")

@@ -748,7 +755,9 @@ class TestProcessApi(BaseTest):
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_process_model_file_update."""
        process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
        process_model_identifier = self.create_group_and_model_with_bpmn(
            client, with_super_admin_user
        )
        modified_process_model_identifier = process_model_identifier.replace("/", ":")

        response = client.delete(

@@ -769,7 +778,9 @@ class TestProcessApi(BaseTest):
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_process_model_file_update."""
        process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
        process_model_identifier = self.create_group_and_model_with_bpmn(
            client, with_super_admin_user
        )
        modified_process_model_identifier = process_model_identifier.replace("/", ":")

        response = client.delete(

@@ -796,7 +807,9 @@ class TestProcessApi(BaseTest):
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_get_file."""
        process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
        process_model_identifier = self.create_group_and_model_with_bpmn(
            client, with_super_admin_user
        )
        modified_process_model_identifier = process_model_identifier.replace("/", ":")

        response = client.get(

@@ -816,7 +829,9 @@ class TestProcessApi(BaseTest):
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_get_workflow_from_workflow_spec."""
        process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
        process_model_identifier = self.create_group_and_model_with_bpmn(
            client, with_super_admin_user
        )
        modified_process_model_identifier = process_model_identifier.replace("/", ":")

        response = client.post(

@@ -871,7 +886,9 @@ class TestProcessApi(BaseTest):
        with_super_admin_user: UserModel,
    ) -> None:
        """Test_get_process_group_when_found."""
        process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
        process_model_identifier = self.create_group_and_model_with_bpmn(
            client, with_super_admin_user
        )
        process_group_id, process_model_id = os.path.split(process_model_identifier)

        response = client.get(

@@ -937,7 +954,9 @@ class TestProcessApi(BaseTest):
        """Test_process_instance_create."""
        test_process_model_id = "runs_without_input/sample"
        headers = self.logged_in_headers(with_super_admin_user)
        response = self.create_process_instance_from_process_model_id(client, test_process_model_id, headers)
        response = self.create_process_instance_from_process_model_id(
            client, test_process_model_id, headers
        )
        assert response.json is not None
        assert response.json["updated_at_in_seconds"] is not None
        assert response.json["status"] == "not_started"
@@ -1305,7 +1324,9 @@ class TestProcessApi(BaseTest):
        )

        headers = self.logged_in_headers(with_super_admin_user)
        self.create_process_instance_from_process_model_id(client, process_model_identifier, headers)
        self.create_process_instance_from_process_model_id(
            client, process_model_identifier, headers
        )

        response = client.get(
            "/v1.0/process-instances",

@@ -1350,11 +1371,21 @@ class TestProcessApi(BaseTest):
            bpmn_file_location=bpmn_file_location,
        )
        headers = self.logged_in_headers(with_super_admin_user)
        self.create_process_instance_from_process_model_id(client, process_model_identifier, headers)
        self.create_process_instance_from_process_model_id(client, process_model_identifier, headers)
        self.create_process_instance_from_process_model_id(client, process_model_identifier, headers)
        self.create_process_instance_from_process_model_id(client, process_model_identifier, headers)
        self.create_process_instance_from_process_model_id(client, process_model_identifier, headers)
        self.create_process_instance_from_process_model_id(
            client, process_model_identifier, headers
        )
        self.create_process_instance_from_process_model_id(
            client, process_model_identifier, headers
        )
        self.create_process_instance_from_process_model_id(
            client, process_model_identifier, headers
        )
        self.create_process_instance_from_process_model_id(
            client, process_model_identifier, headers
        )
        self.create_process_instance_from_process_model_id(
            client, process_model_identifier, headers
        )

        response = client.get(
            "/v1.0/process-instances?per_page=2&page=3",
@@ -1410,7 +1441,7 @@ class TestProcessApi(BaseTest):
                updated_at_in_seconds=round(time.time()),
                start_in_seconds=(1000 * i) + 1000,
                end_in_seconds=(1000 * i) + 2000,
                bpmn_json=json.dumps({"i": i}),
                bpmn_version_control_identifier=i,
            )
            db.session.add(process_instance)
        db.session.commit()

@@ -1456,7 +1487,12 @@ class TestProcessApi(BaseTest):
        results = response.json["results"]
        assert len(results) == 4
        for i in range(4):
            assert json.loads(results[i]["bpmn_json"])["i"] in (1, 2, 3, 4)
            assert json.loads(results[i]["bpmn_version_control_identifier"]) in (
                1,
                2,
                3,
                4,
            )

        # start > 2000, end < 5000 - this should eliminate the first 2 and the last
        response = client.get(

@@ -1466,8 +1502,8 @@ class TestProcessApi(BaseTest):
        assert response.json is not None
        results = response.json["results"]
        assert len(results) == 2
        assert json.loads(results[0]["bpmn_json"])["i"] in (2, 3)
        assert json.loads(results[1]["bpmn_json"])["i"] in (2, 3)
        assert json.loads(results[0]["bpmn_version_control_identifier"]) in (2, 3)
        assert json.loads(results[1]["bpmn_version_control_identifier"]) in (2, 3)

        # start > 1000, start < 4000 - this should eliminate the first and the last 2
        response = client.get(

@@ -1477,8 +1513,8 @@ class TestProcessApi(BaseTest):
        assert response.json is not None
        results = response.json["results"]
        assert len(results) == 2
        assert json.loads(results[0]["bpmn_json"])["i"] in (1, 2)
        assert json.loads(results[1]["bpmn_json"])["i"] in (1, 2)
        assert json.loads(results[0]["bpmn_version_control_identifier"]) in (1, 2)
        assert json.loads(results[1]["bpmn_version_control_identifier"]) in (1, 2)

        # end > 2000, end < 6000 - this should eliminate the first and the last
        response = client.get(

@@ -1489,7 +1525,11 @@ class TestProcessApi(BaseTest):
        results = response.json["results"]
        assert len(results) == 3
        for i in range(3):
            assert json.loads(results[i]["bpmn_json"])["i"] in (1, 2, 3)
            assert json.loads(results[i]["bpmn_version_control_identifier"]) in (
                1,
                2,
                3,
            )

    def test_process_instance_report_list(
        self,
@@ -1657,7 +1697,9 @@ class TestProcessApi(BaseTest):
    ) -> Any:
        """Setup_testing_instance."""
        headers = self.logged_in_headers(with_super_admin_user)
        response = self.create_process_instance_from_process_model_id(client, process_model_id, headers)
        response = self.create_process_instance_from_process_model_id(
            client, process_model_id, headers
        )
        process_instance = response.json
        assert isinstance(process_instance, dict)
        process_instance_id = process_instance["id"]

@@ -6,14 +6,15 @@ import pytest
from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.api.api_error import ApiError
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from werkzeug.test import TestResponse # type: ignore

from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.secret_model import SecretModel
from spiffworkflow_backend.models.secret_model import SecretModelSchema
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.secret_service import SecretService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from werkzeug.test import TestResponse # type: ignore


class SecretServiceTestHelpers(BaseTest):

@@ -2,14 +2,15 @@
from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.user_service import UserService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec


class TestGetGroupMembers(BaseTest):
@@ -4,6 +4,9 @@ import datetime
import pytz
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)

@@ -14,8 +17,6 @@ from spiffworkflow_backend.services.process_instance_processor import (
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec


class TestGetLocaltime(BaseTest):

@@ -1,5 +1,6 @@
"""Test_acceptance_test_fixtures."""
from flask.app import Flask

from spiffworkflow_backend.services.acceptance_test_fixtures import (
    load_acceptance_test_fixtures,
)

@@ -2,6 +2,8 @@
import pytest
from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.services.authorization_service import AuthorizationService

@@ -12,7 +14,6 @@ from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest


class TestAuthorizationService(BaseTest):

@@ -1,6 +1,8 @@
"""Test_various_bpmn_constructs."""
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,

@@ -8,7 +10,6 @@ from spiffworkflow_backend.services.process_instance_processor import (
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from tests.spiffworkflow_backend.helpers.base_test import BaseTest


class TestDotNotation(BaseTest):
@@ -1,9 +1,10 @@
"""Test_environment_var_script."""
from flask import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from tests.spiffworkflow_backend.helpers.base_test import BaseTest


class TestEnvironmentVarScript(BaseTest):

@@ -3,11 +3,12 @@ import pytest
from flask import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.message_model import MessageModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest


class TestMessageInstance(BaseTest):

@@ -1,6 +1,9 @@
"""Test_message_service."""
from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
from spiffworkflow_backend.models.message_correlation_message_instance import (
    MessageCorrelationMessageInstanceModel,

@@ -15,8 +18,6 @@ from spiffworkflow_backend.services.process_instance_processor import (
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec


class TestMessageService(BaseTest):

@@ -2,11 +2,12 @@
import pytest
from flask.app import Flask
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.permission_target import (
    InvalidPermissionTargetUriError,
)
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from tests.spiffworkflow_backend.helpers.base_test import BaseTest


class TestPermissionTarget(BaseTest):
@@ -2,14 +2,15 @@
from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.user_service import UserService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec


# we think we can get the list of roles for a user.

@@ -1,5 +1,6 @@
"""Process Model."""
from flask.app import Flask

from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.services.process_model_service import ProcessModelService

@@ -3,6 +3,9 @@ import pytest
from flask import g
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.user import UserModel

@@ -16,8 +19,6 @@ from spiffworkflow_backend.services.process_instance_processor import (
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec


class TestProcessInstanceProcessor(BaseTest):

@@ -2,11 +2,12 @@
from typing import Optional

from flask.app import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_report import (
    ProcessInstanceReportModel,
)
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

# from tests.spiffworkflow_backend.helpers.test_data import find_or_create_process_group
# from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
@@ -2,14 +2,15 @@
from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec


class TestProcessModel(BaseTest):

@@ -1,11 +1,12 @@
"""Test_process_model_service."""
from flask import Flask
from flask.testing import FlaskClient
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService


class TestProcessModelService(BaseTest):
    """TestProcessModelService."""

@@ -3,12 +3,13 @@ import pytest
from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.api.api_error import ApiError
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec


class TestOpenFile(BaseTest):

@@ -1,14 +1,15 @@
"""Test Permissions."""
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.script_unit_test_runner import PythonScriptContext
from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec


class TestScriptUnitTestRunner(BaseTest):
@@ -1,8 +1,9 @@
"""Test_various_bpmn_constructs."""
from flask.app import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.services.secret_service import SecretService
from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate
from tests.spiffworkflow_backend.helpers.base_test import BaseTest


class TestServiceTaskDelegate(BaseTest):

@@ -7,6 +7,9 @@ from flask.testing import FlaskClient
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService

@@ -3,10 +3,11 @@ from decimal import Decimal

from flask.app import Flask
from flask_bpmn.models.db import db
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel


class TestSpiffLogging(BaseTest):
    """TestSpiffLogging."""

@@ -1,12 +1,13 @@
"""Test_various_bpmn_constructs."""
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest


class TestVariousBpmnConstructs(BaseTest):

@@ -1,6 +1,7 @@
"""Test cases for the __main__ module."""
import pytest
from click.testing import CliRunner

from spiffworkflow_backend import __main__
@@ -169,14 +169,13 @@ describe('process-instances', () => {
    cy.getBySel('process-instance-list-link').click();
    cy.assertAtLeastOneItemInPaginatedResults();

    const statusSelect = '#process-instance-status-select';
    PROCESS_STATUSES.forEach((processStatus) => {
      if (!['all', 'waiting'].includes(processStatus)) {
        cy.get('#process-instance-status-select').click();
        cy.get('#process-instance-status-select')
          .contains(processStatus)
          .click();
        cy.get(statusSelect).click();
        cy.get(statusSelect).contains(processStatus).click();
        // close the dropdown again
        cy.get('#process-instance-status-select').click();
        cy.get(statusSelect).click();
        cy.getBySel('filter-button').click();
        cy.assertAtLeastOneItemInPaginatedResults();
        cy.getBySel(`process-instance-status-${processStatus}`).contains(

@@ -144,10 +144,11 @@ describe('process-models', () => {

    cy.getBySel('process-instance-list-link').click();
    cy.getBySel('process-instance-show-link').click();
    cy.contains('Delete').click();
    cy.getBySel('process-instance-delete').click();
    cy.contains('Are you sure');
    cy.getBySel('modal-confirmation-dialog').find('.cds--btn--danger').click();
    cy.contains(`Process Instances for: ${groupId}/${modelId}`);

    // in breadcrumb
    cy.contains(modelId).click();

    cy.contains('Edit process model').click();
@@ -46545,7 +46545,7 @@
        "@csstools/postcss-text-decoration-shorthand": "^1.0.0",
        "@csstools/postcss-trigonometric-functions": "^1.0.2",
        "@csstools/postcss-unset-value": "^1.0.2",
        "autoprefixer": "10.4.8",
        "autoprefixer": "10.4.5",
        "browserslist": "^4.21.3",
        "css-blank-pseudo": "^3.0.3",
        "css-has-pseudo": "^3.0.4",

@@ -46583,7 +46583,8 @@
      },
      "dependencies": {
        "autoprefixer": {
          "version": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.5.tgz",
          "version": "10.4.5",
          "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.5.tgz",
          "integrity": "sha512-Fvd8yCoA7lNX/OUllvS+aS1I7WRBclGXsepbvT8ZaPgrH24rgXpZzF0/6Hh3ZEkwg+0AES/Osd196VZmYoEFtw==",
          "requires": {
            "browserslist": "^4.20.2",
@@ -6,8 +6,7 @@ import { BrowserRouter, Routes, Route } from 'react-router-dom';
import ErrorContext from './contexts/ErrorContext';
import NavigationBar from './components/NavigationBar';

import HomePage from './routes/HomePage';
import TaskShow from './routes/TaskShow';
import HomePageRoutes from './routes/HomePageRoutes';
import ErrorBoundary from './components/ErrorBoundary';
import AdminRoutes from './routes/AdminRoutes';
import { ErrorForDisplay } from './interfaces';

@@ -54,17 +53,9 @@ export default function App() {
        {errorTag}
        <ErrorBoundary>
          <Routes>
            <Route path="/" element={<HomePage />} />
            <Route path="/tasks" element={<HomePage />} />
            <Route path="/" element={<HomePageRoutes />} />
            <Route path="/tasks/*" element={<HomePageRoutes />} />
            <Route path="/admin/*" element={<AdminRoutes />} />
            <Route
              path="/tasks/:process_instance_id/:task_id"
              element={<TaskShow />}
            />
            <Route
              path="/tasks/:process_instance_id/:task_id"
              element={<TaskShow />}
            />
          </Routes>
        </ErrorBoundary>
      </Content>

@@ -3,6 +3,7 @@ import { useState } from 'react';
import { Button, Modal } from '@carbon/react';

type OwnProps = {
  'data-qa'?: string;
  description?: string;
  buttonLabel?: string;
  onConfirmation: (..._args: any[]) => any;

@@ -18,6 +19,7 @@ export default function ButtonWithConfirmation({
  description,
  buttonLabel,
  onConfirmation,
  'data-qa': dataQa,
  title = 'Are you sure?',
  confirmButtonLabel = 'OK',
  kind = 'danger',

@@ -58,6 +60,7 @@ export default function ButtonWithConfirmation({
  return (
    <>
      <Button
        data-qa={dataQa}
        onClick={handleShowConfirmationPrompt}
        kind={kind}
        renderIcon={renderIcon}
@@ -0,0 +1,139 @@
import { useEffect, useState } from 'react';
// @ts-ignore
import { Button, Table } from '@carbon/react';
import { Link, useSearchParams } from 'react-router-dom';
import PaginationForTable from './PaginationForTable';
import {
  convertSecondsToFormattedDateTime,
  getPageInfoFromSearchParams,
  modifyProcessModelPath,
} from '../helpers';
import HttpService from '../services/HttpService';
import { PaginationObject } from '../interfaces';

const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5;

export default function MyTasksForProcessesStartedByOthers() {
  const [searchParams] = useSearchParams();
  const [tasks, setTasks] = useState([]);
  const [pagination, setPagination] = useState<PaginationObject | null>(null);

  useEffect(() => {
    const { page, perPage } = getPageInfoFromSearchParams(
      searchParams,
      PER_PAGE_FOR_TASKS_ON_HOME_PAGE
    );
    const setTasksFromResult = (result: any) => {
      setTasks(result.results);
      setPagination(result.pagination);
    };
    HttpService.makeCallToBackend({
      path: `/tasks/for-processes-started-by-others?per_page=${perPage}&page=${page}`,
      successCallback: setTasksFromResult,
    });
  }, [searchParams]);

  const buildTable = () => {
    const rows = tasks.map((row) => {
      const rowToUse = row as any;
      const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`;
      const modifiedProcessModelIdentifier = modifyProcessModelPath(
        rowToUse.process_model_identifier
      );
      return (
        <tr key={rowToUse.id}>
          <td>
            <Link
              data-qa="process-model-show-link"
              to={`/admin/process-models/${modifiedProcessModelIdentifier}`}
            >
              {rowToUse.process_model_display_name}
            </Link>
          </td>
          <td>
            <Link
              data-qa="process-instance-show-link"
              to={`/admin/process-models/${modifiedProcessModelIdentifier}/process-instances/${rowToUse.process_instance_id}`}
            >
              View {rowToUse.process_instance_id}
            </Link>
          </td>
          <td
            title={`task id: ${rowToUse.name}, spiffworkflow task guid: ${rowToUse.id}`}
          >
            {rowToUse.task_title}
          </td>
          <td>{rowToUse.username}</td>
          <td>{rowToUse.process_instance_status}</td>
          <td>{rowToUse.group_identifier || '-'}</td>
          <td>
            {convertSecondsToFormattedDateTime(
              rowToUse.created_at_in_seconds
            ) || '-'}
          </td>
          <td>
            {convertSecondsToFormattedDateTime(
              rowToUse.updated_at_in_seconds
            ) || '-'}
          </td>
          <td>
            <Button
              variant="primary"
              href={taskUrl}
              hidden={rowToUse.process_instance_status === 'suspended'}
              disabled={!rowToUse.current_user_is_potential_owner}
            >
              Go
            </Button>
          </td>
        </tr>
      );
    });
    return (
      <Table striped bordered>
        <thead>
          <tr>
            <th>Process Model</th>
            <th>Process Instance</th>
            <th>Task Name</th>
            <th>Process Started By</th>
            <th>Process Instance Status</th>
            <th>Assigned Group</th>
            <th>Process Started</th>
            <th>Process Updated</th>
            <th>Actions</th>
          </tr>
        </thead>
        <tbody>{rows}</tbody>
      </Table>
    );
  };

  const tasksComponent = () => {
    if (pagination && pagination.total < 1) {
      return null;
    }
    const { page, perPage } = getPageInfoFromSearchParams(
      searchParams,
      PER_PAGE_FOR_TASKS_ON_HOME_PAGE
    );
    return (
      <>
        <h1>Tasks waiting for me</h1>
        <PaginationForTable
          page={page}
          perPage={perPage}
          perPageOptions={[2, PER_PAGE_FOR_TASKS_ON_HOME_PAGE, 25]}
          pagination={pagination}
          tableToDisplay={buildTable()}
          path="/tasks/for-my-open-processes"
        />
      </>
    );
  };

  if (pagination) {
    return tasksComponent();
  }
  return null;
}
@@ -0,0 +1,137 @@
import { useEffect, useState } from 'react';
// @ts-ignore
import { Button, Table } from '@carbon/react';
import { Link, useSearchParams } from 'react-router-dom';
import PaginationForTable from './PaginationForTable';
import {
  convertSecondsToFormattedDateTime,
  getPageInfoFromSearchParams,
  modifyProcessModelPath,
} from '../helpers';
import HttpService from '../services/HttpService';
import { PaginationObject } from '../interfaces';

const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5;

export default function MyOpenProcesses() {
  const [searchParams] = useSearchParams();
  const [tasks, setTasks] = useState([]);
  const [pagination, setPagination] = useState<PaginationObject | null>(null);

  useEffect(() => {
    const { page, perPage } = getPageInfoFromSearchParams(
      searchParams,
      PER_PAGE_FOR_TASKS_ON_HOME_PAGE
    );
    const setTasksFromResult = (result: any) => {
      setTasks(result.results);
      setPagination(result.pagination);
    };
    HttpService.makeCallToBackend({
      path: `/tasks/for-my-open-processes?per_page=${perPage}&page=${page}`,
      successCallback: setTasksFromResult,
    });
  }, [searchParams]);

  const buildTable = () => {
    const rows = tasks.map((row) => {
      const rowToUse = row as any;
      const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`;
      const modifiedProcessModelIdentifier = modifyProcessModelPath(
        rowToUse.process_model_identifier
      );
      return (
        <tr key={rowToUse.id}>
          <td>
            <Link
              data-qa="process-model-show-link"
              to={`/admin/process-models/${modifiedProcessModelIdentifier}`}
            >
              {rowToUse.process_model_display_name}
            </Link>
          </td>
          <td>
            <Link
              data-qa="process-instance-show-link"
              to={`/admin/process-models/${modifiedProcessModelIdentifier}/process-instances/${rowToUse.process_instance_id}`}
            >
              View {rowToUse.process_instance_id}
            </Link>
          </td>
          <td
            title={`task id: ${rowToUse.name}, spiffworkflow task guid: ${rowToUse.id}`}
          >
            {rowToUse.task_title}
          </td>
          <td>{rowToUse.process_instance_status}</td>
          <td>{rowToUse.group_identifier || '-'}</td>
          <td>
            {convertSecondsToFormattedDateTime(
              rowToUse.created_at_in_seconds
            ) || '-'}
          </td>
          <td>
            {convertSecondsToFormattedDateTime(
              rowToUse.updated_at_in_seconds
            ) || '-'}
          </td>
          <td>
            <Button
              variant="primary"
              href={taskUrl}
              hidden={rowToUse.process_instance_status === 'suspended'}
              disabled={!rowToUse.current_user_is_potential_owner}
            >
              Go
            </Button>
          </td>
        </tr>
      );
    });
    return (
      <Table striped bordered>
        <thead>
          <tr>
            <th>Process Model</th>
            <th>Process Instance</th>
            <th>Task Name</th>
            <th>Process Instance Status</th>
            <th>Assigned Group</th>
            <th>Process Started</th>
            <th>Process Updated</th>
            <th>Actions</th>
          </tr>
        </thead>
        <tbody>{rows}</tbody>
      </Table>
    );
  };

  const tasksComponent = () => {
    if (pagination && pagination.total < 1) {
      return null;
    }
    const { page, perPage } = getPageInfoFromSearchParams(
      searchParams,
      PER_PAGE_FOR_TASKS_ON_HOME_PAGE
    );
    return (
      <>
        <h1>Tasks for my open processes</h1>
        <PaginationForTable
          page={page}
          perPage={perPage}
          perPageOptions={[2, PER_PAGE_FOR_TASKS_ON_HOME_PAGE, 25]}
          pagination={pagination}
          tableToDisplay={buildTable()}
          path="/tasks/for-my-open-processes"
        />
      </>
    );
  };

  if (pagination) {
    return tasksComponent();
  }
  return null;
}
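Both task tables above cast each backend row to `any`; the shape they rely on can be read off the accessors in `buildTable()`. A hedged TypeScript sketch of that shape follows. The field names come from the code itself, but the types, the interface names, and the exact pagination fields are assumptions for illustration and are not part of this commit:

import { PaginationObject } from '../interfaces';

// Inferred from buildTable() and setTasksFromResult(); types are best guesses.
interface GroupedTaskRow {
  id: string; // spiffworkflow task guid
  name: string; // task id from the BPMN diagram
  task_id: string;
  task_title: string;
  username: string; // process starter; only rendered in MyTasksForProcessesStartedByOthers
  group_identifier: string | null;
  process_instance_id: number;
  process_instance_status: string;
  process_model_identifier: string;
  process_model_display_name: string;
  created_at_in_seconds: number;
  updated_at_in_seconds: number;
  current_user_is_potential_owner: boolean;
}

// The paginated response consumed by setTasksFromResult.
interface GroupedTaskResponse {
  results: GroupedTaskRow[];
  pagination: PaginationObject; // only `total` is read directly in these components
}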
@@ -0,0 +1,12 @@
import MyTasksForProcessesStartedByOthers from '../components/MyTasksForProcessesStartedByOthers';
import TasksForMyOpenProcesses from '../components/TasksForMyOpenProcesses';

export default function GroupedTasks() {
  return (
    <>
      <TasksForMyOpenProcesses />
      <br />
      <MyTasksForProcessesStartedByOthers />
    </>
  );
}
@@ -0,0 +1,42 @@
import { useContext, useEffect, useState } from 'react';
import { Route, Routes, useLocation, useNavigate } from 'react-router-dom';
// @ts-ignore
import { Tabs, TabList, Tab } from '@carbon/react';
import TaskShow from './TaskShow';
import ErrorContext from '../contexts/ErrorContext';
import MyTasks from './MyTasks';
import GroupedTasks from './GroupedTasks';

export default function HomePageRoutes() {
  const location = useLocation();
  const setErrorMessage = (useContext as any)(ErrorContext)[1];
  const [selectedTabIndex, setSelectedTabIndex] = useState<number>(0);
  const navigate = useNavigate();

  useEffect(() => {
    setErrorMessage(null);
    let newSelectedTabIndex = 0;
    if (location.pathname.match(/^\/tasks\/grouped\b/)) {
      newSelectedTabIndex = 1;
    }
    setSelectedTabIndex(newSelectedTabIndex);
  }, [location, setErrorMessage]);

  return (
    <>
      <Tabs selectedIndex={selectedTabIndex}>
        <TabList aria-label="List of tabs">
          <Tab onClick={() => navigate('/tasks/my-tasks')}>My Tasks</Tab>
          <Tab onClick={() => navigate('/tasks/grouped')}>Grouped Tasks</Tab>
        </TabList>
      </Tabs>
      <br />
      <Routes>
        <Route path="/" element={<MyTasks />} />
        <Route path="my-tasks" element={<MyTasks />} />
        <Route path=":process_instance_id/:task_id" element={<TaskShow />} />
        <Route path="grouped" element={<GroupedTasks />} />
      </Routes>
    </>
  );
}
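The tab handlers navigate to absolute `/tasks/...` paths while the nested `<Route>` paths are relative, which only lines up if `HomePageRoutes` itself is mounted under a `/tasks/*` route. A minimal sketch under that assumption; the `App` wrapper and import path are illustrative and not taken from this diff:

import { BrowserRouter, Route, Routes } from 'react-router-dom';
import HomePageRoutes from './routes/HomePageRoutes';

export default function App() {
  return (
    <BrowserRouter>
      <Routes>
        {/* "my-tasks", "grouped", and ":process_instance_id/:task_id" resolve relative to /tasks */}
        <Route path="/tasks/*" element={<HomePageRoutes />} />
      </Routes>
    </BrowserRouter>
  );
}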
@@ -65,7 +65,7 @@ export default function MessageInstanceList() {
          </td>
          <td>{rowToUse.message_identifier}</td>
          <td>{rowToUse.message_type}</td>
          <td>{rowToUse.failure_cause}</td>
          <td>{rowToUse.failure_cause || '-'}</td>
          <td>{rowToUse.status}</td>
          <td>
            {convertSecondsToFormattedDate(rowToUse.created_at_in_seconds)}
@@ -12,7 +12,7 @@ import { PaginationObject, RecentProcessModel } from '../interfaces';

const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5;

export default function HomePage() {
export default function MyTasks() {
  const [searchParams] = useSearchParams();
  const [tasks, setTasks] = useState([]);
  const [pagination, setPagination] = useState<PaginationObject | null>(null);
Some files were not shown because too many files have changed in this diff.