spiff-element-units integration (#214)
parent 771acc3c2e
commit 1dd445de68
@@ -5,3 +5,4 @@ t
.dccache
*~
version_info.json
.coverage*
@@ -19,3 +19,4 @@ node_modules
/bin/import_secrets.py
/src/spiffworkflow_backend/config/secrets.py
*null-ls_*
/local_wheels/*.whl
@@ -0,0 +1,15 @@
# Local Wheels

If you have any wheels you wish to test locally, copy them into this folder, then run:

```
poetry add local_wheels/my.whl
```

Alternatively, you can sideload them:

```
poetry run pip install local_wheels/*.whl
```

when you boot the backend.
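In the context of this change, the wheel being tested locally would typically be a build of spiff-element-units. A minimal, hedged sketch of checking that a sideloaded wheel is importable (the module name matches what the new ElementUnitsService imports):

```
# run via: poetry run python
import spiff_element_units

# prints the path the module was loaded from, confirming the local wheel is in use
print(spiff_element_units.__file__)
```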
File diff suppressed because it is too large
@@ -86,6 +86,7 @@ prometheus-flask-exporter = "^0.22.3"
safety = "^2.3.5"
sqlalchemy = "^2.0.7"
marshmallow-sqlalchemy = "^0.29.0"
spiff-element-units = "^0.1.0"

[tool.poetry.dev-dependencies]
pytest = "^7.1.2"
@@ -145,3 +145,11 @@ SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB = environ.get(

# this is only used in CI. use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration
SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None)

SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED = (
    environ.get("SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED", default="false") == "true"
)

SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR = environ.get(
    "SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR", default=None
)
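A hedged sketch of supplying these settings for local development. The values below are illustrative; they would normally come from the deployment environment, and the feature only takes effect when the flag is "true" and a cache directory is configured:

```
import os

# illustrative local-dev values only; any writable directory can serve as the cache dir
os.environ["SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED"] = "true"
os.environ["SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR"] = "/tmp/spiff-element-unit-cache"
```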
@@ -0,0 +1,57 @@
import json
from typing import Any
from typing import Dict
from typing import Optional

from flask import current_app

BpmnSpecDict = Dict[str, Any]


class ElementUnitsService:
    """Feature-gated glue between the backend and spiff-element-units."""

    @classmethod
    def _cache_dir(cls) -> Optional[str]:
        return current_app.config["SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR"]  # type: ignore

    @classmethod
    def _enabled(cls) -> bool:
        enabled = current_app.config["SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED"]
        return enabled and cls._cache_dir() is not None

    @classmethod
    def cache_element_units_for_workflow(cls, cache_key: str, bpmn_spec_dict: BpmnSpecDict) -> None:
        if not cls._enabled():
            return None

        try:
            # for now we are importing inside each of these functions; not sure of the best
            # way to do this in an overall feature-flagged strategy, but this gets things
            # moving
            import spiff_element_units  # type: ignore

            bpmn_spec_json = json.dumps(bpmn_spec_dict)
            spiff_element_units.cache_element_units_for_workflow(cls._cache_dir(), cache_key, bpmn_spec_json)
        except Exception as e:
            current_app.logger.exception(e)
            return None

    @classmethod
    def workflow_from_cached_element_unit(cls, cache_key: str, element_id: str) -> Optional[BpmnSpecDict]:
        if not cls._enabled():
            return None

        try:
            # for now we are importing inside each of these functions; not sure of the best
            # way to do this in an overall feature-flagged strategy, but this gets things
            # moving
            import spiff_element_units

            bpmn_spec_json = spiff_element_units.workflow_from_cached_element_unit(
                cls._cache_dir(), cache_key, element_id
            )
            return json.loads(bpmn_spec_json)  # type: ignore
        except Exception as e:
            current_app.logger.exception(e)
            return None
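A minimal usage sketch of the service, mirroring the new tests further down. The Flask app here is a stand-in for the real backend app, "some-hash" is an illustrative cache key (the processor uses `full_process_model_hash`), and the spec comes from the fixture added in this commit:

```
import json
import tempfile

from flask import Flask

from spiffworkflow_backend.services.element_units_service import ElementUnitsService

# illustrative stand-in for the real backend app; only the two config keys matter here
app = Flask(__name__)
app.config["SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED"] = True

with tempfile.TemporaryDirectory() as cache_dir, app.app_context():
    app.config["SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR"] = cache_dir

    # the fixture added in this commit; any serialized workflow spec dict works
    with open("tests/data/specs-json/no-tasks.json") as f:
        bpmn_spec_dict = json.load(f)

    # seed the cache, then ask for the element unit covering the "no_tasks" process
    ElementUnitsService.cache_element_units_for_workflow("some-hash", bpmn_spec_dict)
    element_unit_dict = ElementUnitsService.workflow_from_cached_element_unit("some-hash", "no_tasks")

    # assuming the spiff-element-units wheel is installed, this simple spec round-trips whole
    assert element_unit_dict == bpmn_spec_dict
```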
@@ -91,6 +91,9 @@ from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.scripts.script import Script
from spiffworkflow_backend.services.custom_parser import MyCustomParser
from spiffworkflow_backend.services.element_units_service import (
    ElementUnitsService,
)
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@@ -673,6 +676,25 @@ class ProcessInstanceProcessor:
            bpmn_definition_to_task_definitions_mappings,
        )

        #
        # see if we have any cached element units and if so step on the spec and subprocess_specs.
        # in the early stages of development this will return the full workflow when the feature
        # flag is set to on. as time goes on we will need to think about how this plays in with the
        # bpmn definition tables more.
        #

        element_unit_process_dict = None
        full_process_model_hash = bpmn_process_definition.full_process_model_hash

        if full_process_model_hash is not None:
            element_unit_process_dict = ElementUnitsService.workflow_from_cached_element_unit(
                full_process_model_hash,
                bpmn_process_definition.bpmn_identifier,
            )
            if element_unit_process_dict is not None:
                spiff_bpmn_process_dict["spec"] = element_unit_process_dict["spec"]
                spiff_bpmn_process_dict["subprocess_specs"] = element_unit_process_dict["subprocess_specs"]

        bpmn_process = process_instance_model.bpmn_process
        if bpmn_process is not None:
            single_bpmn_process_dict = cls._get_bpmn_process_dict(bpmn_process, get_tasks=True)
@@ -1073,6 +1095,26 @@ class ProcessInstanceProcessor:
        )
        self.process_instance_model.bpmn_process_definition = bpmn_process_definition_parent

        #
        # builds and caches the element units for the parent bpmn process definition. these
        # element units can then be queried using the same hash for later execution.
        #
        # TODO: this seems to be run each time a process instance is started, so element
        # units will only be queried after a save/resume point. the hash used as the key
        # can be anything, so possibly some hash of all files required to form the process
        # definition and their hashes could be used? Not sure how that plays in with the
        # bpmn_process_definition hash though.
        #

        # TODO: the first time through for an instance, the bpmn_spec_dict seems to get mutated,
        # so for now we don't seed the cache until the second instance. not immediately a
        # problem and can be part of the larger discussion mentioned in the TODO above.

        full_process_model_hash = bpmn_process_definition_parent.full_process_model_hash

        if full_process_model_hash is not None and "task_specs" in bpmn_spec_dict["spec"]:
            ElementUnitsService.cache_element_units_for_workflow(full_process_model_hash, bpmn_spec_dict)

    def save(self) -> None:
        """Saves the current state of this processor to the database."""
        self.process_instance_model.spiff_serializer_version = self.SERIALIZER_VERSION
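One possibility raised in the TODO above is to key the cache on a hash of every file that makes up the process definition. A hedged sketch of that idea (the helper name and directory-walking approach are assumptions, not part of this change):

```
import hashlib
import os


def hash_of_process_model_files(process_model_dir: str) -> str:
    # walk the process model directory in a stable order and hash file contents,
    # so the same set of files always yields the same cache key
    digest = hashlib.sha256()
    for root, dirs, files in os.walk(process_model_dir):
        dirs.sort()
        for file_name in sorted(files):
            with open(os.path.join(root, file_name), "rb") as f:
                digest.update(f.read())
    return digest.hexdigest()
```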
@@ -0,0 +1,116 @@
{
  "serializer_version": "spiff-element-units-integration",
  "spec": {
    "correlation_keys": {},
    "data_objects": {},
    "description": "No Tasks",
    "file": "tests/data/process-models/test-cases/no-tasks/no-tasks.bpmn",
    "io_specification": null,
    "name": "no_tasks",
    "task_specs": {
      "End": {
        "description": "",
        "id": "no_tasks_3",
        "inputs": [
          "no_tasks.EndJoin"
        ],
        "internal": false,
        "lookahead": 2,
        "manual": false,
        "name": "End",
        "outputs": [],
        "typename": "Simple"
      },
      "Event_0qq9il3": {
        "data_input_associations": [],
        "data_output_associations": [],
        "description": null,
        "documentation": null,
        "event_definition": {
          "external": false,
          "internal": false,
          "typename": "NoneEventDefinition"
        },
        "extensions": {},
        "id": "no_tasks_5",
        "inputs": [
          "StartEvent_1"
        ],
        "internal": false,
        "io_specification": null,
        "lane": null,
        "lookahead": 2,
        "manual": false,
        "name": "Event_0qq9il3",
        "outputs": [
          "no_tasks.EndJoin"
        ],
        "position": {
          "x": 272.0,
          "y": 159.0
        },
        "typename": "EndEvent"
      },
      "Start": {
        "description": "",
        "id": "no_tasks_1",
        "inputs": [],
        "internal": false,
        "lookahead": 2,
        "manual": false,
        "name": "Start",
        "outputs": [
          "StartEvent_1"
        ],
        "typename": "StartTask"
      },
      "StartEvent_1": {
        "data_input_associations": [],
        "data_output_associations": [],
        "description": null,
        "documentation": null,
        "event_definition": {
          "external": false,
          "internal": false,
          "typename": "NoneEventDefinition"
        },
        "extensions": {},
        "id": "no_tasks_4",
        "inputs": [
          "Start"
        ],
        "internal": false,
        "io_specification": null,
        "lane": null,
        "lookahead": 2,
        "manual": false,
        "name": "StartEvent_1",
        "outputs": [
          "Event_0qq9il3"
        ],
        "position": {
          "x": 179.0,
          "y": 159.0
        },
        "typename": "StartEvent"
      },
      "no_tasks.EndJoin": {
        "description": "",
        "id": "no_tasks_2",
        "inputs": [
          "Event_0qq9il3"
        ],
        "internal": false,
        "lookahead": 2,
        "manual": false,
        "name": "no_tasks.EndJoin",
        "outputs": [
          "End"
        ],
        "typename": "_EndJoin"
      }
    },
    "typename": "BpmnProcessSpec"
  },
  "subprocess_specs": {}
}
@@ -0,0 +1,133 @@
import json
import os
import tempfile
from typing import Generator

import pytest
from flask.app import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.services.element_units_service import BpmnSpecDict
from spiffworkflow_backend.services.element_units_service import ElementUnitsService

#
# we don't want to fully flex every aspect of the spiff-element-units
# library here, mainly just checking that our interaction with it is
# as expected.
#


@pytest.fixture()
def app_no_cache_dir(app: Flask) -> Generator[Flask, None, None]:
    app.config["SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR"] = None
    yield app


@pytest.fixture()
def app_some_cache_dir(app: Flask) -> Generator[Flask, None, None]:
    app.config["SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR"] = "some_cache_dir"
    yield app


@pytest.fixture()
def app_disabled(app: Flask) -> Generator[Flask, None, None]:
    app.config["SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED"] = False
    yield app


@pytest.fixture()
def app_enabled(app_some_cache_dir: Flask) -> Generator[Flask, None, None]:
    app_some_cache_dir.config["SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED"] = True
    yield app_some_cache_dir


@pytest.fixture()
def app_enabled_tmp_cache_dir(app_enabled: Flask) -> Generator[Flask, None, None]:
    with tempfile.TemporaryDirectory() as tmpdirname:
        app_enabled.config["SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR"] = tmpdirname
        yield app_enabled


@pytest.fixture()
def example_specs_dict(app: Flask) -> Generator[BpmnSpecDict, None, None]:
    path = os.path.join(app.instance_path, "..", "..", "tests", "data", "specs-json", "no-tasks.json")
    with open(path) as f:
        yield json.loads(f.read())


class TestElementUnitsService(BaseTest):
    """Tests the ElementUnitsService."""

    def test_cache_dir_env_is_respected(
        self,
        app_some_cache_dir: Flask,
    ) -> None:
        assert ElementUnitsService._cache_dir() == "some_cache_dir"

    def test_feature_disabled_if_env_is_false(
        self,
        app_disabled: Flask,
    ) -> None:
        assert not ElementUnitsService._enabled()

    def test_feature_enabled_if_env_is_true(
        self,
        app_enabled: Flask,
    ) -> None:
        assert ElementUnitsService._enabled()

    def test_is_disabled_when_no_cache_dir(
        self,
        app_no_cache_dir: Flask,
    ) -> None:
        assert not ElementUnitsService._enabled()

    def test_ok_to_cache_when_disabled(
        self,
        app_disabled: Flask,
    ) -> None:
        result = ElementUnitsService.cache_element_units_for_workflow("", {})
        assert result is None

    def test_ok_to_read_workflow_from_cached_element_unit_when_disabled(
        self,
        app_disabled: Flask,
    ) -> None:
        result = ElementUnitsService.workflow_from_cached_element_unit("", "")
        assert result is None

    def test_can_write_to_cache(
        self,
        app_enabled_tmp_cache_dir: Flask,
        example_specs_dict: BpmnSpecDict,
    ) -> None:
        result = ElementUnitsService.cache_element_units_for_workflow("testing", example_specs_dict)
        assert result is None

    def test_can_write_to_cache_multiple_times(
        self,
        app_enabled_tmp_cache_dir: Flask,
        example_specs_dict: BpmnSpecDict,
    ) -> None:
        result = ElementUnitsService.cache_element_units_for_workflow("testing", example_specs_dict)
        assert result is None
        result = ElementUnitsService.cache_element_units_for_workflow("testing", example_specs_dict)
        assert result is None
        result = ElementUnitsService.cache_element_units_for_workflow("testing", example_specs_dict)
        assert result is None

    def test_can_read_element_unit_for_process_from_cache(
        self,
        app_enabled_tmp_cache_dir: Flask,
        example_specs_dict: BpmnSpecDict,
    ) -> None:
        ElementUnitsService.cache_element_units_for_workflow("testing", example_specs_dict)
        cached_specs_dict = ElementUnitsService.workflow_from_cached_element_unit("testing", "no_tasks")
        assert cached_specs_dict == example_specs_dict

    def test_reading_element_unit_for_uncached_process_returns_none(
        self,
        app_enabled_tmp_cache_dir: Flask,
    ) -> None:
        cached_specs_dict = ElementUnitsService.workflow_from_cached_element_unit("testing", "no_tasks")
        assert cached_specs_dict is None