File-based generic JSON data store (#490)

jbirddog 2023-09-18 15:55:24 -04:00 committed by GitHub
parent 7f870977c0
commit d154d83ce5
8 changed files with 225 additions and 2 deletions

View File

@@ -6,6 +6,7 @@ from SpiffWorkflow.bpmn.specs.data_spec import BpmnDataStoreSpecification # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.json_data_store import JSONDataStoreModel
from spiffworkflow_backend.services.file_system_service import FileSystemService
def _process_model_location_for_task(spiff_task: SpiffTask) -> str | None:
@@ -15,6 +16,14 @@ def _process_model_location_for_task(spiff_task: SpiffTask) -> str | None:
return None
def _data_store_filename(name: str) -> str:
return f"{name}.json"
def _data_store_exists_at_location(location: str, name: str) -> bool:
return FileSystemService.file_exists_at_relative_path(location, _data_store_filename(name))
class JSONDataStore(BpmnDataStoreSpecification): # type: ignore
"""JSONDataStore."""
@@ -22,7 +31,7 @@ class JSONDataStore(BpmnDataStoreSpecification): # type: ignore
"""get."""
model: JSONDataStoreModel | None = None
location = _process_model_location_for_task(my_task)
if location is not None:
if location is not None and _data_store_exists_at_location(location, self.bpmn_id):
model = db.session.query(JSONDataStoreModel).filter_by(name=self.bpmn_id, location=location).first()
if model is None:
raise Exception(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
@@ -31,7 +40,7 @@ class JSONDataStore(BpmnDataStoreSpecification): # type: ignore
def set(self, my_task: SpiffTask) -> None:
"""set."""
location = _process_model_location_for_task(my_task)
if location is None:
if location is None or not _data_store_exists_at_location(location, self.bpmn_id):
raise Exception(f"Unable to write to data store '{self.bpmn_id}' using location '{location}'.")
data = my_task.data[self.bpmn_id]
model = JSONDataStoreModel(
@@ -73,3 +82,55 @@ class JSONDataStoreConverter(BpmnSpecConverter): # type: ignore
def from_dict(self, dct: dict[str, Any]) -> JSONDataStore:
"""from_dict."""
return JSONDataStore(**dct)
class JSONFileDataStore(BpmnDataStoreSpecification): # type: ignore
"""JSONFileDataStore."""
def get(self, my_task: SpiffTask) -> None:
"""get."""
location = _process_model_location_for_task(my_task)
if location is None or not _data_store_exists_at_location(location, self.bpmn_id):
raise Exception(f"Unable to read from data store '{self.bpmn_id}' using location '{location}'.")
contents = FileSystemService.contents_of_json_file_at_relative_path(
location, _data_store_filename(self.bpmn_id)
)
my_task.data[self.bpmn_id] = contents
def set(self, my_task: SpiffTask) -> None:
"""set."""
location = _process_model_location_for_task(my_task)
if location is None or not _data_store_exists_at_location(location, self.bpmn_id):
raise Exception(f"Unable to write to data store '{self.bpmn_id}' using location '{location}'.")
data = my_task.data[self.bpmn_id]
FileSystemService.write_to_json_file_at_relative_path(location, _data_store_filename(self.bpmn_id), data)
del my_task.data[self.bpmn_id]
@staticmethod
def register_converter(spec_config: dict[str, Any]) -> None:
spec_config["task_specs"].append(JSONFileDataStoreConverter)
@staticmethod
def register_data_store_class(data_store_classes: dict[str, Any]) -> None:
data_store_classes["JSONFileDataStore"] = JSONFileDataStore
class JSONFileDataStoreConverter(BpmnSpecConverter): # type: ignore
"""JSONFileDataStoreConverter."""
def __init__(self, registry): # type: ignore
"""__init__."""
super().__init__(JSONFileDataStore, registry)
def to_dict(self, spec: Any) -> dict[str, Any]:
"""to_dict."""
return {
"bpmn_id": spec.bpmn_id,
"bpmn_name": spec.bpmn_name,
"capacity": spec.capacity,
"is_unlimited": spec.is_unlimited,
}
def from_dict(self, dct: dict[str, Any]) -> JSONFileDataStore:
"""from_dict."""
return JSONFileDataStore(**dct)
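
For orientation, here is a minimal standalone sketch (not part of the commit) of the file layout and read/write cycle that JSONFileDataStore delegates to FileSystemService. SPEC_ROOT, data_store_path, write_data_store, and read_data_store are hypothetical names used only for illustration; the real code resolves paths through FileSystemService.root_path().

```python
import json
import os

# Hypothetical stand-in for the configured BPMN spec root directory.
SPEC_ROOT = "/tmp/bpmn-specs"


def data_store_path(location: str, bpmn_id: str) -> str:
    # Mirrors _data_store_filename(): the store lives at "<location>/<bpmn_id>.json".
    return os.path.join(SPEC_ROOT, location, f"{bpmn_id}.json")


def write_data_store(location: str, bpmn_id: str, data) -> None:
    # JSONFileDataStore.set() serializes task data as pretty-printed, key-sorted JSON.
    with open(data_store_path(location, bpmn_id), "w") as f:
        f.write(json.dumps(data, indent=4, sort_keys=True))


def read_data_store(location: str, bpmn_id: str):
    # JSONFileDataStore.get() reads the file back into my_task.data[bpmn_id].
    with open(data_store_path(location, bpmn_id)) as f:
        return json.loads(f.read())
```

Note that both get() and set() above require the file to already exist at the process model location, so a model using a JSONFileDataStore is expected to ship a seed "<bpmn_id>.json" file alongside its BPMN.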

View File

@@ -3,6 +3,7 @@ from typing import Any
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore
from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser # type: ignore
from spiffworkflow_backend.data_stores.json import JSONDataStore
from spiffworkflow_backend.data_stores.json import JSONFileDataStore
from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
from spiffworkflow_backend.specs.start_event import StartEvent
@@ -18,4 +19,5 @@ class MyCustomParser(BpmnDmnParser): # type: ignore
DATA_STORE_CLASSES: dict[str, Any] = {}
JSONDataStore.register_data_store_class(DATA_STORE_CLASSES)
JSONFileDataStore.register_data_store_class(DATA_STORE_CLASSES)
TypeaheadDataStore.register_data_store_class(DATA_STORE_CLASSES)
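
DATA_STORE_CLASSES maps a class name to the spec class that implements it; the test model later in this commit declares <bpmn:dataStore name="JSONFileDataStore" />, which matches the key registered here. Below is a minimal sketch of that lookup, assuming the BPMN data store name selects the class; resolve_data_store_class is a hypothetical helper for illustration, not the parser's actual API.

```python
from typing import Any


def resolve_data_store_class(data_store_classes: dict[str, Any], bpmn_name: str) -> Any:
    # e.g. <bpmn:dataStore id="contacts_datastore" name="JSONFileDataStore" />
    # would resolve to the JSONFileDataStore spec class registered above.
    try:
        return data_store_classes[bpmn_name]
    except KeyError:
        raise Exception(f"Unknown data store class '{bpmn_name}'") from None
```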

View File

@@ -1,7 +1,9 @@
import json
import os
from collections.abc import Generator
from contextlib import contextmanager
from datetime import datetime
from typing import Any
import pytz
from flask import current_app
@@ -99,6 +101,32 @@ class FileSystemService:
def full_path_from_relative_path(relative_path: str) -> str:
return os.path.join(FileSystemService.root_path(), relative_path)
@classmethod
def file_exists_at_relative_path(cls, relative_path: str, file_name: str) -> bool:
full_path = cls.full_path_from_relative_path(os.path.join(relative_path, file_name))
return os.path.isfile(full_path)
@classmethod
def contents_of_file_at_relative_path(cls, relative_path: str, file_name: str) -> str:
full_path = cls.full_path_from_relative_path(os.path.join(relative_path, file_name))
with open(full_path) as f:
return f.read()
@classmethod
def contents_of_json_file_at_relative_path(cls, relative_path: str, file_name: str) -> Any:
contents = cls.contents_of_file_at_relative_path(relative_path, file_name)
return json.loads(contents)
@classmethod
def write_to_file_at_relative_path(cls, relative_path: str, file_name: str, contents: str) -> None:
full_path = cls.full_path_from_relative_path(os.path.join(relative_path, file_name))
with open(full_path, "w") as f:
f.write(contents)
@classmethod
def write_to_json_file_at_relative_path(cls, relative_path: str, file_name: str, contents: Any) -> None:
cls.write_to_file_at_relative_path(relative_path, file_name, json.dumps(contents, indent=4, sort_keys=True))
@staticmethod
def process_model_relative_path(process_model: ProcessModelInfo) -> str:
"""Get the file path to a process model relative to SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR.

View File

@@ -44,6 +44,7 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
from spiffworkflow_backend.data_stores.json import JSONDataStore
from spiffworkflow_backend.data_stores.json import JSONFileDataStore
from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
@@ -93,6 +94,7 @@ from sqlalchemy import and_
StartEvent.register_converter(SPIFF_SPEC_CONFIG)
JSONDataStore.register_converter(SPIFF_SPEC_CONFIG)
JSONFileDataStore.register_converter(SPIFF_SPEC_CONFIG)
TypeaheadDataStore.register_converter(SPIFF_SPEC_CONFIG)
# Sorry about all this crap. I wanted to move this thing to another file, but

View File

@@ -0,0 +1,85 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:dataStore id="contacts_datastore" name="JSONFileDataStore" />
<bpmn:process id="Process_fil0r1s" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_15zp7wu</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_15zp7wu" sourceRef="StartEvent_1" targetRef="Activity_1xf8a34" />
<bpmn:endEvent id="Event_1qwgen4">
<bpmn:incoming>Flow_0citdoo</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_114y4md" sourceRef="Activity_1xf8a34" targetRef="Activity_0t6yb39" />
<bpmn:scriptTask id="Activity_1xf8a34">
<bpmn:incoming>Flow_15zp7wu</bpmn:incoming>
<bpmn:outgoing>Flow_114y4md</bpmn:outgoing>
<bpmn:dataOutputAssociation id="DataOutputAssociation_0qt31ab">
<bpmn:targetRef>DataStoreReference_1b40zg5</bpmn:targetRef>
</bpmn:dataOutputAssociation>
<bpmn:script>contacts_datastore = [
{"contact": "Joe Bob", "company": "Some Job", "email": "joebob@email.ai"},
{"contact": "Sue Smith", "company": "Some Job", "email": "sue@email.ai", "notes": "Decision Maker\nDoes'nt answer emails."},
{"contact": "Some Person", "company": "Another Job", "email": "person@test.com"},
{"contact": "Them Person", "company": "Them Company", "email": "them@test.com"},
]</bpmn:script>
</bpmn:scriptTask>
<bpmn:dataStoreReference id="DataStoreReference_1b40zg5" name="Load Contacts" dataStoreRef="contacts_datastore" />
<bpmn:sequenceFlow id="Flow_0citdoo" sourceRef="Activity_0t6yb39" targetRef="Event_1qwgen4" />
<bpmn:scriptTask id="Activity_0t6yb39">
<bpmn:incoming>Flow_114y4md</bpmn:incoming>
<bpmn:outgoing>Flow_0citdoo</bpmn:outgoing>
<bpmn:property id="Property_1pbyq4i" name="__targetRef_placeholder" />
<bpmn:dataInputAssociation id="DataInputAssociation_0b9m2aj">
<bpmn:sourceRef>DataStoreReference_1nsdav3</bpmn:sourceRef>
<bpmn:targetRef>Property_1pbyq4i</bpmn:targetRef>
</bpmn:dataInputAssociation>
<bpmn:script>x = contacts_datastore[1]</bpmn:script>
</bpmn:scriptTask>
<bpmn:dataStoreReference id="DataStoreReference_1nsdav3" name="Read Contacts" dataStoreRef="contacts_datastore" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_fil0r1s">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="-428" y="-38" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1qwgen4_di" bpmnElement="Event_1qwgen4">
<dc:Bounds x="82" y="-38" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0y1vdup_di" bpmnElement="Activity_1xf8a34">
<dc:Bounds x="-220" y="-60" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="DataStoreReference_1b40zg5_di" bpmnElement="DataStoreReference_1b40zg5">
<dc:Bounds x="-195" y="145" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1e8abt4_di" bpmnElement="Activity_0t6yb39">
<dc:Bounds x="-70" y="-60" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="DataStoreReference_1nsdav3_di" bpmnElement="DataStoreReference_1nsdav3">
<dc:Bounds x="-45" y="145" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="-57" y="202" width="74" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_15zp7wu_di" bpmnElement="Flow_15zp7wu">
<di:waypoint x="-392" y="-20" />
<di:waypoint x="-220" y="-20" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_114y4md_di" bpmnElement="Flow_114y4md">
<di:waypoint x="-120" y="-20" />
<di:waypoint x="-70" y="-20" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="DataOutputAssociation_0qt31ab_di" bpmnElement="DataOutputAssociation_0qt31ab">
<di:waypoint x="-169" y="20" />
<di:waypoint x="-166" y="145" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0citdoo_di" bpmnElement="Flow_0citdoo">
<di:waypoint x="30" y="-20" />
<di:waypoint x="82" y="-20" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="DataInputAssociation_0b9m2aj_di" bpmnElement="DataInputAssociation_0b9m2aj">
<di:waypoint x="-20" y="145" />
<di:waypoint x="-20" y="20" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@@ -0,0 +1,9 @@
{
"description": "",
"display_name": "Test Level 1",
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"metadata_extraction_paths": null,
"primary_file_name": "load.bpmn",
"primary_process_id": "Process_fil0r1s"
}

View File

@@ -0,0 +1,35 @@
from flask.app import Flask
from flask.testing import FlaskClient
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
class TestJSONFileDataStore(BaseTest):
def test_can_execute_diagram(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
process_model = load_test_spec(
process_model_id="tests/data/json_file_data_store",
process_model_source_directory="json_file_data_store",
)
process_instance = self.create_process_instance_from_process_model(process_model=process_model)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps()
assert "x" in processor.bpmn_process_instance.data
result = processor.bpmn_process_instance.data["x"]
assert result == {
"company": "Some Job",
"contact": "Sue Smith",
"email": "sue@email.ai",
"notes": "Decision Maker\nDoes'nt answer emails.",
}