Merge pull request #32 from sartography/feature/call_activity_selection

Feature/call activity selection
Dan Funk 2022-11-14 10:50:33 -05:00 committed by GitHub
commit 369f84af05
13 changed files with 227 additions and 459 deletions

View File

@@ -1,9 +1,6 @@
Spiffworkflow Backend
==========
|Tests| |Codecov|
|pre-commit| |Black|
|Tests| |Codecov| |pre-commit| |Black|
.. |Tests| image:: https://github.com/sartography/spiffworkflow-backend/workflows/Tests/badge.svg
:target: https://github.com/sartography/spiffworkflow-backend/actions?workflow=Tests
@@ -90,5 +87,3 @@ This project was generated from `@cjolowicz`_'s `Hypermodern Python Cookiecutter
.. github-only
.. _Contributor Guide: CONTRIBUTING.rst
.. _Usage: https://spiffworkflow-backend.readthedocs.io/en/latest/usage.html
(test)

View File

@@ -23,6 +23,7 @@ def upgrade():
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("bpmn_process_identifier", sa.String(length=255), nullable=True),
sa.Column("bpmn_file_relative_path", sa.String(length=255), nullable=True),
sa.Column("display_name", sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
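
The hunk above shows only the new display_name column; the table name does not appear in this excerpt. For orientation, a minimal sketch of the matching downgrade, assuming the table is named bpmn_process_id_lookup (inferred from the model further down, not from this migration):

def downgrade():
    # Assumed table name; the surrounding create_table call is cut off in this view.
    op.drop_column("bpmn_process_id_lookup", "display_name")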

View File

@@ -98,7 +98,7 @@ python-versions = ">=3.5"
dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
[[package]]
name = "Babel"
@@ -271,7 +271,7 @@ optional = false
python-versions = ">=3.6.0"
[package.extras]
unicode-backport = ["unicodedata2"]
unicode_backport = ["unicodedata2"]
[[package]]
name = "classify-imports"
@@ -643,7 +643,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "df9ab9a12078e4f908c87778371725e0af414a11"
resolved_reference = "886bfdc31aade43e9683439e6d29b06acb235081"
[[package]]
name = "Flask-Cors"
@@ -825,7 +825,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "importlib-metadata"
version = "5.0.0"
version = "4.13.0"
description = "Read metadata from Python packages"
category = "main"
optional = false
@@ -1517,7 +1517,7 @@ urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-toolbelt"
@@ -1630,7 +1630,7 @@ falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"]
httpx = ["httpx (>=0.16.0)"]
pure-eval = ["asttokens", "executing", "pure-eval"]
pure_eval = ["asttokens", "executing", "pure-eval"]
pyspark = ["pyspark (>=2.4.4)"]
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
rq = ["rq (>=0.6)"]
@@ -1876,7 +1876,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "580939cc8cb0b7ade1571483bd1e28f554434ac4"
resolved_reference = "14d3d8c3f69af880eaf994be1689ee9fcc72e829"
[[package]]
name = "SQLAlchemy"
@@ -1894,19 +1894,19 @@ aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"]
mssql = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
mssql_pymssql = ["pymssql"]
mssql_pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
mysql-connector = ["mysql-connector-python"]
mysql_connector = ["mysql-connector-python"]
oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
postgresql_psycopg2binary = ["psycopg2-binary"]
postgresql_psycopg2cffi = ["psycopg2cffi"]
pymysql = ["pymysql", "pymysql (<1)"]
sqlcipher = ["sqlcipher3_binary"]
@@ -2596,6 +2596,7 @@ greenlet = [
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
{file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"},
{file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"},
{file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"},
{file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"},
@@ -2604,6 +2605,7 @@ greenlet = [
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"},
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"},
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"},
{file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"},
{file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"},
{file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"},
{file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"},
@@ -2612,6 +2614,7 @@ greenlet = [
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"},
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"},
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"},
{file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"},
{file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"},
{file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"},
{file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"},
@@ -2634,8 +2637,8 @@ imagesize = [
{file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
]
importlib-metadata = [
{file = "importlib_metadata-5.0.0-py3-none-any.whl", hash = "sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43"},
{file = "importlib_metadata-5.0.0.tar.gz", hash = "sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab"},
{file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
{file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
]
inflection = [
{file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"},
@@ -2940,10 +2943,7 @@ orjson = [
{file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"},
{file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"},
{file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"},
{file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"},
{file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"},
{file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"},
{file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"},
{file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"},
{file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"},
{file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"},

View File

@@ -10,4 +10,5 @@ class BpmnProcessIdLookup(SpiffworkflowBaseDBModel):
id = db.Column(db.Integer, primary_key=True)
bpmn_process_identifier = db.Column(db.String(255), unique=True, index=True)
display_name = db.Column(db.String(255), unique=True, index=True)
bpmn_file_relative_path = db.Column(db.String(255))
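
A short usage sketch (the identifier value is hypothetical) of reading this cache, including the new display_name column:

from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup

# Resolve where a called process lives on disk and how to label it in a picker.
lookup = BpmnProcessIdLookup.query.filter_by(bpmn_process_identifier="Level3").first()
if lookup is not None:
    print(lookup.display_name, lookup.bpmn_file_relative_path)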

View File

@@ -74,7 +74,13 @@ class FileReference:
id: str
name: str
type: str # can be 'process', 'decision', or just 'file'
file_name: str
file_path: str
has_lanes: bool
executable: bool
messages: dict
correlations: dict
start_messages: list
@dataclass(order=True)
class File:
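
For reference, a hedged sketch of a fully populated instance after this change (all values are hypothetical, echoing the docstring example in SpecFileService.get_references_for_file below; this assumes FileReference is a dataclass like File):

ref = FileReference(
    id="Level3",
    name="Level 3",
    type="process",
    file_name="call_activity_level_3.bpmn",
    file_path="misc/example_group/call_activity_level_3.bpmn",
    has_lanes=False,
    executable=True,
    messages={},
    correlations={},
    start_messages=[],
)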

View File

@@ -16,6 +16,7 @@ class MessageTriggerableProcessModel(SpiffworkflowBaseDBModel):
ForeignKey(MessageModel.id), nullable=False, unique=True
)
process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True)
# fixme: Maybe we don't need this anymore?
process_group_identifier: str = db.Column(db.String(50), nullable=False, index=True)
updated_at_in_seconds: int = db.Column(db.Integer)

View File

@@ -64,11 +64,11 @@ from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsMode
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.custom_parser import MyCustomParser
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import MyCustomParser
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@@ -309,9 +309,7 @@ def process_model_show(modified_process_model_identifier: str) -> Any:
files = sorted(SpecFileService.get_files(process_model))
process_model.files = files
for file in process_model.files:
file.references = SpecFileService.get_references_for_file(
file, process_model, MyCustomParser
)
file.references = SpecFileService.get_references_for_file(file, process_model)
process_model_json = ProcessModelInfoSchema().dump(process_model)
return process_model_json
@@ -1147,26 +1145,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
task.data = spiff_task.data
task.process_model_display_name = process_model.display_name
process_model_with_form = process_model
all_processes = SpecFileService.get_all_bpmn_process_identifiers_for_process_model(
process_model
)
if task.process_name not in all_processes:
bpmn_file_full_path = (
ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
task.process_name
)
)
relative_path = os.path.relpath(
bpmn_file_full_path, start=FileSystemService.root_path()
)
process_model_relative_path = os.path.dirname(relative_path)
process_model_with_form = (
ProcessModelService.get_process_model_from_relative_path(
process_model_relative_path
)
)
if task.type == "User Task":
if not form_schema_file_name:
@@ -1303,9 +1282,7 @@ def script_unit_test_create(
# TODO: move this to an xml service or something
file_contents = SpecFileService.get_data(process_model, file.name)
bpmn_etree_element = SpecFileService.get_etree_element_from_binary_data(
file_contents, file.name
)
bpmn_etree_element = etree.fromstring(file_contents)
nsmap = bpmn_etree_element.nsmap
spiff_element_maker = ElementMaker(
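
Note the inlined parse above: a bare etree.fromstring raises lxml's XMLSyntaxError directly, whereas the removed SpecFileService helper wrapped it in an ApiError. A minimal sketch of the new call path (the XML bytes are hypothetical):

from lxml import etree

file_contents = b'<definitions xmlns="http://www.omg.org/spec/BPMN/20100524/MODEL"/>'
bpmn_etree_element = etree.fromstring(file_contents)
print(bpmn_etree_element.nsmap)  # the namespace map later fed to ElementMaker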

View File

@@ -0,0 +1,9 @@
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser
class MyCustomParser(BpmnDmnParser): # type: ignore
"""A BPMN and DMN parser that can also parse spiffworkflow-specific extensions."""
OVERRIDE_PARSER_CLASSES = BpmnDmnParser.OVERRIDE_PARSER_CLASSES
OVERRIDE_PARSER_CLASSES.update(SpiffBpmnParser.OVERRIDE_PARSER_CLASSES)
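
A brief usage sketch (the file path is hypothetical): this is the parser that SpecFileService and ProcessInstanceProcessor now share.

parser = MyCustomParser()
parser.add_bpmn_file("/path/to/some_model.bpmn")
for process_parser in parser.process_parsers.values():
    print(process_parser.get_id(), process_parser.get_name())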

View File

@@ -91,6 +91,7 @@ from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsMode
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserModelSchema
from spiffworkflow_backend.scripts.script import Script
from spiffworkflow_backend.services.custom_parser import MyCustomParser
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate
@@ -239,13 +240,6 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
)
class MyCustomParser(BpmnDmnParser): # type: ignore
"""A BPMN and DMN parser that can also parse spiffworkflow-specific extensions."""
OVERRIDE_PARSER_CLASSES = BpmnDmnParser.OVERRIDE_PARSER_CLASSES
OVERRIDE_PARSER_CLASSES.update(SpiffBpmnParser.OVERRIDE_PARSER_CLASSES)
IdToBpmnProcessSpecMapping = NewType(
"IdToBpmnProcessSpecMapping", dict[str, BpmnProcessSpec]
)
@@ -680,41 +674,18 @@ class ProcessInstanceProcessor:
return parser
@staticmethod
def backfill_missing_bpmn_process_id_lookup_records(
bpmn_process_identifier: str,
) -> Optional[str]:
def backfill_missing_bpmn_process_id_lookup_records(bpmn_process_identifier: str) -> Optional[str]:
"""Backfill_missing_bpmn_process_id_lookup_records."""
process_models = ProcessModelService().get_process_models()
for process_model in process_models:
if process_model.primary_file_name:
try:
etree_element = SpecFileService.get_etree_element_from_file_name(
process_model, process_model.primary_file_name
)
bpmn_process_identifiers = []
except ProcessModelFileNotFoundError:
# if primary_file_name doesn't actually exist on disk, then just go on to the next process_model
continue
try:
bpmn_process_identifiers = (
SpecFileService.get_executable_bpmn_process_identifiers(
etree_element
)
)
except ValidationException:
# ignore validation errors here
pass
if bpmn_process_identifier in bpmn_process_identifiers:
SpecFileService.store_bpmn_process_identifiers(
process_model,
process_model.primary_file_name,
etree_element,
)
return FileSystemService.full_path_to_process_model_file(
process_model
)
refs = SpecFileService.reference_map(SpecFileService.get_references_for_process(process_model))
bpmn_process_identifiers = refs.keys()
if bpmn_process_identifier in bpmn_process_identifiers:
SpecFileService.update_process_cache(refs[bpmn_process_identifier])
return FileSystemService.full_path_to_process_model_file(
process_model
)
return None
@staticmethod
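
In short, the backfill now consults the reference map instead of re-parsing XML by hand. A condensed sketch of the new flow (the identifier is hypothetical):

refs = SpecFileService.reference_map(
    SpecFileService.get_references_for_process(process_model)
)
if "Level3" in refs:
    SpecFileService.update_process_cache(refs["Level3"])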

View File

@@ -2,10 +2,12 @@
import os
import shutil
from datetime import datetime
from typing import Any
from typing import Any, Type
from typing import List
from typing import Optional
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
from SpiffWorkflow.bpmn.parser.ProcessParser import ProcessParser
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from lxml import etree # type: ignore
@@ -25,6 +27,7 @@ from spiffworkflow_backend.models.message_triggerable_process_model import (
MessageTriggerableProcessModel,
)
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.services.custom_parser import MyCustomParser
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@@ -58,38 +61,72 @@ class SpecFileService(FileSystemService):
return files
@staticmethod
def get_references_for_file(
file: File, process_model_info: ProcessModelInfo, parser_class: Any
) -> list[FileReference]:
def reference_map(references: list[FileReference]) -> dict[str, FileReference]:
""" Creates a dict with provided references organized by id. """
ref_map = {}
for ref in references:
ref_map[ref.id] = ref
return ref_map
@staticmethod
def get_references(process_models: List[ProcessModelInfo]) -> list[FileReference]:
"""Returns all references -- process_ids, and decision ids, across all process models provided"""
references = []
for process_model in process_models:
        references.extend(SpecFileService.get_references_for_process(process_model))
    return references
@staticmethod
def get_references_for_process(process_model_info: ProcessModelInfo) -> list[FileReference]:
files = SpecFileService.get_files(process_model_info)
references = []
for file in files:
references.extend(SpecFileService.get_references_for_file(file, process_model_info))
return references
@staticmethod
def get_references_for_file(file: File, process_model_info: ProcessModelInfo) -> list[FileReference]:
"""Uses spiffworkflow to parse BPMN and DMN files to determine how they can be externally referenced.
Returns a list of Reference objects that contain the type of reference, the id, and the name.
Ex.
id = {str} 'Level3'
name = {str} 'Level 3'
type = {str} 'process'
type = {str} 'process' / 'decision'
"""
references: list[FileReference] = []
file_path = SpecFileService.file_path(process_model_info, file.name)
parser = parser_class()
full_file_path = SpecFileService.file_path(process_model_info, file.name)
file_path = os.path.join(process_model_info.id, file.name)
parser = MyCustomParser()
parser_type = None
sub_parser = None
has_lanes = False
executable = True
messages = {}
correlations = {}
start_messages = []
if file.type == FileType.bpmn.value:
parser.add_bpmn_file(file_path)
parser.add_bpmn_file(full_file_path)
parser_type = "process"
sub_parsers = list(parser.process_parsers.values())
messages = parser.messages
correlations = parser.correlations
elif file.type == FileType.dmn.value:
parser.add_dmn_file(file_path)
parser.add_dmn_file(full_file_path)
sub_parsers = list(parser.dmn_parsers.values())
parser_type = "decision"
else:
return references
for sub_parser in sub_parsers:
references.append(
FileReference(
id=sub_parser.get_id(), name=sub_parser.get_name(), type=parser_type
)
)
if parser_type == 'process':
has_lanes = sub_parser.has_lanes()
executable = sub_parser.process_executable
start_messages = sub_parser.start_messages()
references.append(FileReference(
id=sub_parser.get_id(), name=sub_parser.get_name(), type=parser_type,
file_name=file.name, file_path=file_path, has_lanes=has_lanes,
executable=executable, messages=messages,
correlations=correlations, start_messages=start_messages
))
return references
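
Putting the new helpers together, a hedged sketch (process_model_info is assumed to be loaded already) of walking every reference in one process model:

for ref in SpecFileService.get_references_for_process(process_model_info):
    print(ref.type, ref.id, ref.name, ref.file_name)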
@staticmethod
@@ -101,8 +138,7 @@ class SpecFileService(FileSystemService):
return SpecFileService.update_file(process_model_info, file_name, binary_data)
@staticmethod
def update_file(
process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
def update_file(process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
) -> File:
"""Update_file."""
SpecFileService.assert_valid_file_name(file_name)
@@ -121,12 +157,21 @@
):
# If no primary process exists, make this primary process.
set_primary_file = True
SpecFileService.process_bpmn_file(
process_model_info,
file_name,
binary_data,
set_primary_file=set_primary_file,
)
references = SpecFileService.get_references_for_file(file, process_model_info)
for ref in references:
if ref.type == "process":
ProcessModelService().update_spec(
process_model_info, {
"primary_process_id": ref.id,
"primary_file_name": file_name,
"is_review": ref.has_lanes,
}
)
SpecFileService.update_process_cache(ref)
SpecFileService.update_message_cache(ref)
SpecFileService.update_message_trigger_cache(ref, process_model_info)
SpecFileService.update_correlation_cache(ref)
break
return file
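
Note the break above: only the first process reference in the file sets the primary process. Usage-wise, a single call now drives all four cache updates; a minimal sketch (process_model and bpmn_bytes are hypothetical):

saved_file = SpecFileService.update_file(process_model, "approval.bpmn", bpmn_bytes)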
@@ -181,354 +226,117 @@
if os.path.exists(dir_path):
shutil.rmtree(dir_path)
@staticmethod
def get_etree_element_from_file_name(
process_model_info: ProcessModelInfo, file_name: str
) -> EtreeElement:
"""Get_etree_element_from_file_name."""
binary_data = SpecFileService.get_data(process_model_info, file_name)
return SpecFileService.get_etree_element_from_binary_data(
binary_data, file_name
)
# fixme: Place all the caching stuff in a different service.
@staticmethod
def get_etree_element_from_binary_data(
binary_data: bytes, file_name: str
) -> EtreeElement:
"""Get_etree_element_from_binary_data."""
try:
return etree.fromstring(binary_data)
except etree.XMLSyntaxError as xse:
raise ApiError(
"invalid_xml",
"Failed to parse xml: " + str(xse),
file_name=file_name,
) from xse
@staticmethod
def process_bpmn_file(
process_model_info: ProcessModelInfo,
file_name: str,
binary_data: Optional[bytes] = None,
set_primary_file: Optional[bool] = False,
) -> None:
"""Set_primary_bpmn."""
# If this is a BPMN, extract the process id and determine if it contains swim lanes.
extension = SpecFileService.get_extension(file_name)
file_type = FileType[extension]
if file_type == FileType.bpmn:
if not binary_data:
binary_data = SpecFileService.get_data(process_model_info, file_name)
bpmn_etree_element: EtreeElement = (
SpecFileService.get_etree_element_from_binary_data(
binary_data, file_name
        )
    )
def update_process_cache(ref: FileReference) -> None:
process_id_lookup = BpmnProcessIdLookup.query.filter_by(bpmn_process_identifier=ref.id).first()
if process_id_lookup is None:
process_id_lookup = BpmnProcessIdLookup(
bpmn_process_identifier=ref.id,
display_name=ref.name,
bpmn_file_relative_path=ref.file_path,
)
try:
if set_primary_file:
attributes_to_update = {
"primary_process_id": (
SpecFileService.get_bpmn_process_identifier(
bpmn_etree_element
)
),
"primary_file_name": file_name,
"is_review": SpecFileService.has_swimlane(bpmn_etree_element),
}
ProcessModelService().update_spec(
process_model_info, attributes_to_update
)
SpecFileService.check_for_message_models(
bpmn_etree_element, process_model_info
)
SpecFileService.store_bpmn_process_identifiers(
process_model_info, file_name, bpmn_etree_element
)
except ValidationException as ve:
if ve.args[0].find("No executable process tag found") >= 0:
raise ApiError(
error_code="missing_executable_option",
message="No executable process tag found. Please make sure the Executable option is set in the workflow.",
) from ve
else:
raise ApiError(
error_code="validation_error",
message=f"There was an error validating your workflow. Original message is: {ve}",
) from ve
db.session.add(process_id_lookup)
else:
raise ApiError(
"invalid_xml",
"Only a BPMN can be the primary file.",
file_name=file_name,
)
@staticmethod
def has_swimlane(et_root: _Element) -> bool:
"""Look through XML and determine if there are any lanes present that have a label."""
elements = et_root.xpath(
"//bpmn:lane",
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
)
retval = False
for el in elements:
if el.get("name"):
retval = True
return retval
@staticmethod
def append_identifier_of_process_to_array(
process_element: _Element, process_identifiers: list[str]
) -> None:
"""Append_identifier_of_process_to_array."""
process_id_key = "id"
if "name" in process_element.attrib:
process_id_key = "name"
process_identifiers.append(process_element.attrib[process_id_key])
@staticmethod
def get_all_bpmn_process_identifiers_for_process_model(
process_model_info: ProcessModelInfo,
) -> list[str]:
"""Get_all_bpmn_process_identifiers_for_process_model."""
if process_model_info.primary_file_name is None:
return []
binary_data = SpecFileService.get_data(
process_model_info, process_model_info.primary_file_name
)
et_root: EtreeElement = SpecFileService.get_etree_element_from_binary_data(
binary_data, process_model_info.primary_file_name
)
process_identifiers: list[str] = []
for child in et_root:
if child.tag.endswith("process") and child.attrib.get(
"isExecutable", False
):
subprocesses = child.xpath(
"//bpmn:subProcess",
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
if ref.file_path != process_id_lookup.bpmn_file_relative_path:
full_bpmn_file_path = SpecFileService.full_path_from_relative_path(
process_id_lookup.bpmn_file_relative_path
)
for subprocess in subprocesses:
SpecFileService.append_identifier_of_process_to_array(
subprocess, process_identifiers
)
SpecFileService.append_identifier_of_process_to_array(
child, process_identifiers
)
if len(process_identifiers) == 0:
raise ValidationException("No executable process tag found")
return process_identifiers
@staticmethod
def get_executable_process_elements(et_root: _Element) -> list[_Element]:
"""Get_executable_process_elements."""
process_elements = []
for child in et_root:
if child.tag.endswith("process") and child.attrib.get(
"isExecutable", False
):
process_elements.append(child)
if len(process_elements) == 0:
raise ValidationException("No executable process tag found")
return process_elements
@staticmethod
def get_executable_bpmn_process_identifiers(et_root: _Element) -> list[str]:
"""Get_executable_bpmn_process_identifiers."""
process_elements = SpecFileService.get_executable_process_elements(et_root)
bpmn_process_identifiers = [pe.attrib["id"] for pe in process_elements]
return bpmn_process_identifiers
@staticmethod
def get_bpmn_process_identifier(et_root: _Element) -> str:
"""Get_bpmn_process_identifier."""
process_elements = SpecFileService.get_executable_process_elements(et_root)
# There are multiple root elements
if len(process_elements) > 1:
# Look for the element that has the startEvent in it
for e in process_elements:
this_element: EtreeElement = e
for child_element in list(this_element):
if child_element.tag.endswith("startEvent"):
# coerce Any to string
return str(this_element.attrib["id"])
raise ValidationException(
"No start event found in %s" % et_root.attrib["id"]
)
return str(process_elements[0].attrib["id"])
@staticmethod
def store_bpmn_process_identifiers(
process_model_info: ProcessModelInfo, bpmn_file_name: str, et_root: _Element
) -> None:
"""Store_bpmn_process_identifiers."""
relative_process_model_path = process_model_info.id_for_file_path()
relative_bpmn_file_path = os.path.join(
relative_process_model_path, bpmn_file_name
)
bpmn_process_identifiers = (
SpecFileService.get_executable_bpmn_process_identifiers(et_root)
)
for bpmn_process_identifier in bpmn_process_identifiers:
process_id_lookup = BpmnProcessIdLookup.query.filter_by(
bpmn_process_identifier=bpmn_process_identifier
).first()
if process_id_lookup is None:
process_id_lookup = BpmnProcessIdLookup(
bpmn_process_identifier=bpmn_process_identifier,
bpmn_file_relative_path=relative_bpmn_file_path,
)
db.session.add(process_id_lookup)
db.session.commit()
else:
if relative_bpmn_file_path != process_id_lookup.bpmn_file_relative_path:
full_bpmn_file_path = SpecFileService.full_path_from_relative_path(
process_id_lookup.bpmn_file_relative_path
)
# if the old relative bpmn file no longer exists, then assume things were moved around
# on the file system. Otherwise, assume it is a duplicate process id and error.
if os.path.isfile(full_bpmn_file_path):
raise ValidationException(
f"Process id ({bpmn_process_identifier}) has already been used for "
f"{process_id_lookup.bpmn_file_relative_path}. It cannot be reused."
)
else:
process_id_lookup.bpmn_file_relative_path = (
relative_bpmn_file_path
)
db.session.add(process_id_lookup)
db.session.commit()
@staticmethod
def check_for_message_models(
et_root: _Element, process_model_info: ProcessModelInfo
) -> None:
"""Check_for_message_models."""
for child in et_root:
if child.tag.endswith("message"):
message_model_identifier = child.attrib.get("id")
message_name = child.attrib.get("name")
if message_model_identifier is None:
                    raise ValidationException(
                        "Message identifier is missing from bpmn xml"
                    )
            # if the old relative bpmn file no longer exists, then assume things were moved around
            # on the file system. Otherwise, assume it is a duplicate process id and error.
            if os.path.isfile(full_bpmn_file_path):
                raise ValidationException(
                    f"Process id ({ref.id}) has already been used for "
                    f"{process_id_lookup.bpmn_file_relative_path}. It cannot be reused."
                )
            else:
                process_id_lookup.bpmn_file_relative_path = (
                    ref.file_path
                )
                db.session.add(process_id_lookup)
                db.session.commit()
@staticmethod
def update_message_cache(ref: FileReference) -> None:
"""Assure we have a record in the database of all possible message ids and names."""
for message_model_identifier in ref.messages.keys():
message_model = MessageModel.query.filter_by(identifier=message_model_identifier).first()
if message_model is None:
message_model = MessageModel(
identifier=message_model_identifier, name=ref.messages[message_model_identifier]
)
db.session.add(message_model)
db.session.commit()
@staticmethod
def update_message_trigger_cache(ref: FileReference, process_model_info: ProcessModelInfo) -> None:
"""assure we know which messages can trigger the start of a process."""
for message_model_identifier in ref.start_messages:
message_model = MessageModel.query.filter_by(
identifier=message_model_identifier
).first()
if message_model is None:
                raise ValidationException(
                    f"Could not find message model with identifier '{message_model_identifier}'"
                    f"Required by a Start Event in : {ref.file_name}"
                )
            if message_model is None:
                message_model = MessageModel(
                    identifier=message_model_identifier, name=message_name
                )
                db.session.add(message_model)
                db.session.commit()
for child in et_root:
if child.tag.endswith("}process"):
message_event_definitions = child.xpath(
"//bpmn:startEvent/bpmn:messageEventDefinition",
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
)
if message_event_definitions:
message_event_definition = message_event_definitions[0]
message_model_identifier = message_event_definition.attrib.get(
"messageRef"
)
if message_model_identifier is None:
raise ValidationException(
"Could not find messageRef from message event definition: {message_event_definition}"
)
message_model = MessageModel.query.filter_by(
identifier=message_model_identifier
message_triggerable_process_model = (
MessageTriggerableProcessModel.query.filter_by(
message_model_id=message_model.id,
).first()
if message_model is None:
raise ValidationException(
f"Could not find message model with identifier '{message_model_identifier}'"
f"specified by message event definition: {message_event_definition}"
)
)
if message_triggerable_process_model is None:
message_triggerable_process_model = (
MessageTriggerableProcessModel.query.filter_by(
MessageTriggerableProcessModel(
message_model_id=message_model.id,
).first()
)
if message_triggerable_process_model is None:
message_triggerable_process_model = (
MessageTriggerableProcessModel(
message_model_id=message_model.id,
process_model_identifier=process_model_info.id,
process_group_identifier="process_group_identifier",
)
process_model_identifier=process_model_info.id,
process_group_identifier="process_group_identifier"
)
db.session.add(message_triggerable_process_model)
db.session.commit()
else:
if (
message_triggerable_process_model.process_model_identifier
!= process_model_info.id
# or message_triggerable_process_model.process_group_identifier
# != process_model_info.process_group_id
):
raise ValidationException(
f"Message model is already used to start process model {process_model_info.id}"
)
for child in et_root:
if child.tag.endswith("correlationProperty"):
correlation_identifier = child.attrib.get("id")
if correlation_identifier is None:
raise ValidationException(
"Correlation identifier is missing from bpmn xml"
)
correlation_property_retrieval_expressions = child.xpath(
"//bpmn:correlationPropertyRetrievalExpression",
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
)
if not correlation_property_retrieval_expressions:
raise ValidationException(
f"Correlation is missing correlation property retrieval expressions: {correlation_identifier}"
)
for cpre in correlation_property_retrieval_expressions:
message_model_identifier = cpre.attrib.get("messageRef")
if message_model_identifier is None:
                        raise ValidationException(
                            f"Message identifier is missing from correlation property: {correlation_identifier}"
                        )
message_model = MessageModel.query.filter_by(
identifier=message_model_identifier
@staticmethod
def update_correlation_cache(ref: FileReference) -> None:
for correlation_identifier in ref.correlations.keys():
correlation_property_retrieval_expressions = \
ref.correlations[correlation_identifier]['retrieval_expressions']
for cpre in correlation_property_retrieval_expressions:
message_model_identifier = cpre["messageRef"]
message_model = MessageModel.query.filter_by(identifier=message_model_identifier).first()
if message_model is None:
raise ValidationException(
f"Could not find message model with identifier '{message_model_identifier}'"
f"specified by correlation property: {cpre}"
)
# fixme: I think we are currently ignoring the correlation properties.
            message_correlation_property = (
                MessageCorrelationPropertyModel.query.filter_by(
                    identifier=correlation_identifier,
                    message_model_id=message_model.id,
                ).first()
            )
            if message_correlation_property is None:
                message_correlation_property = MessageCorrelationPropertyModel(
                    identifier=correlation_identifier,
                    message_model_id=message_model.id,
                )
            db.session.add(message_correlation_property)
            db.session.commit()
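
update_correlation_cache assumes ref.correlations maps each correlation id to a dict holding a 'retrieval_expressions' list whose entries carry a messageRef. A hypothetical example of that shape (only the messageRef key is taken from the code above; the expression key is invented for illustration):

correlations = {
    "po_number": {
        "retrieval_expressions": [
            {"messageRef": "request_approval", "expression": "po_number"},
        ],
    },
}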

View File

@@ -29,6 +29,7 @@ class ExampleDataLoader:
If bpmn_file_name is None, we load all files in process_model_source_directory;
otherwise we only load bpmn_file_name.
"""
if process_model_source_directory is None:
raise Exception("You must include `process_model_source_directory`.")
@@ -80,15 +81,14 @@
try:
file = open(file_path, "rb")
data = file.read()
SpecFileService.add_file(
file_info = SpecFileService.add_file(
process_model_info=spec, file_name=filename, binary_data=data
)
if is_primary:
SpecFileService.process_bpmn_file(
spec, filename, data, set_primary_file=True
)
workflow_spec_service = ProcessModelService()
workflow_spec_service.save_process_model(spec)
references = SpecFileService.get_references_for_file(file_info, spec)
spec.primary_process_id = references[0].id
spec.primary_file_name = filename
ProcessModelService().save_process_model(spec)
finally:
if file:
file.close()

View File

@@ -2325,3 +2325,4 @@ class TestProcessApi(BaseTest):
)
print("test_script_unit_test_run")

View File

@@ -14,7 +14,9 @@ from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLoo
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
class TestSpecFileService(BaseTest):
"""TestSpecFileService."""
@@ -78,15 +80,15 @@ class TestSpecFileService(BaseTest):
bpmn_process_id_lookups[0].bpmn_file_relative_path
== self.call_activity_nested_relative_file_path
)
with pytest.raises(ApiError) as exception:
with pytest.raises(ValidationException) as exception:
load_test_spec(
"call_activity_nested_duplicate",
process_model_source_directory="call_activity_duplicate",
bpmn_file_name="call_activity_nested_duplicate",
)
assert f"Process id ({bpmn_process_identifier}) has already been used" in str(
exception.value
)
assert f"Process id ({bpmn_process_identifier}) has already been used" in str(
exception.value
)
def test_updates_relative_file_path_when_appropriate(
self,
@@ -162,18 +164,14 @@
files = SpecFileService.get_files(process_model_info)
file = next(filter(lambda f: f.name == "call_activity_level_3.bpmn", files))
ca_3 = SpecFileService.get_references_for_file(
file, process_model_info, BpmnDmnParser
)
ca_3 = SpecFileService.get_references_for_file(file, process_model_info)
assert len(ca_3) == 1
assert ca_3[0].name == "Level 3"
assert ca_3[0].id == "Level3"
assert ca_3[0].type == "process"
file = next(filter(lambda f: f.name == "level2c.dmn", files))
dmn1 = SpecFileService.get_references_for_file(
file, process_model_info, BpmnDmnParser
)
dmn1 = SpecFileService.get_references_for_file(file, process_model_info)
assert len(dmn1) == 1
assert dmn1[0].name == "Decision 1"
assert dmn1[0].id == "Decision_0vrtcmk"