Add the ability to forcibly restart a workflow, while retaining that workflow's data.

A workflow specification knows its version number, which is generated from the versions of the files that make it up.
A workflow specification's version number is the version of the primary file (the lead BPMN) followed by a consistently ordered version of each extra file associated with the workflow.  A change in any file modifies the specification's version.
This commit is contained in:
Dan Funk 2020-03-04 17:08:45 -05:00
parent d0f0acc8cf
commit 78b6f040eb
3 changed files with 74 additions and 7 deletions

View File

@ -103,11 +103,17 @@ class WorkflowProcessor(object):
@staticmethod
def get_spec(workflow_spec_id):
"""Returns the last version of the specification."""
parser = WorkflowProcessor.get_parser()
major_version = 0 # The version of the primary file.
minor_version = [] # The versions of the minor files if any.
process_id = None
file_data_models = session.query(FileDataModel) \
.join(FileModel) \
.filter(FileModel.workflow_spec_id == workflow_spec_id).all()
.filter(FileModel.workflow_spec_id == workflow_spec_id)\
.filter(FileDataModel.version == FileModel.latest_version)\
.order_by(FileModel.id)\
.all()
for file_data in file_data_models:
if file_data.file_model.type == FileType.bpmn:
bpmn: ElementTree.Element = ElementTree.fromstring(file_data.data)
@ -117,9 +123,16 @@ class WorkflowProcessor(object):
elif file_data.file_model.type == FileType.dmn:
dmn: ElementTree.Element = ElementTree.fromstring(file_data.data)
parser.add_dmn_xml(dmn, filename=file_data.file_model.name)
if file_data.file_model.primary:
major_version = file_data.version
else:
minor_version.append(file_data.version)
if process_id is None:
raise(Exception("There is no primary BPMN model defined for workflow %s" % workflow_spec_id))
return parser.get_spec(process_id)
minor_version.insert(0, major_version) # Add major version to beginning.
spec = parser.get_spec(process_id)
spec.description = ".".join(str(x) for x in minor_version)
return spec
@classmethod
@ -144,6 +157,19 @@ class WorkflowProcessor(object):
session.commit()
return processor
def restart_with_current_task_data(self):
"""Recreate this workflow, but keep the data from the last completed task and add it back into the first task.
This may be useful when a workflow specification changes, and users need to review all the
prior steps, but don't need to reenter all the previous data. """
spec = WorkflowProcessor.get_spec(self.workflow_spec_id)
bpmn_workflow = BpmnWorkflow(spec, script_engine=self._script_engine)
bpmn_workflow.data = self.bpmn_workflow.data
for task in bpmn_workflow.get_tasks(SpiffTask.READY):
task.data = self.bpmn_workflow.last_task.data
bpmn_workflow.do_engine_steps()
self.bpmn_workflow = bpmn_workflow
def get_status(self):
if self.bpmn_workflow.is_completed():
return WorkflowStatus.complete
@ -153,6 +179,10 @@ class WorkflowProcessor(object):
else:
return WorkflowStatus.waiting
def get_spec_version(self):
"""We use the spec's descrption field to store the version information"""
return self.bpmn_workflow.spec.description
def do_engine_steps(self):
self.bpmn_workflow.do_engine_steps()

View File

@ -203,6 +203,3 @@ class TestTasksApi(BaseTest):
self.assertIsNotNone(workflow_api.next_task['documentation'])
self.assertTrue("norris" in workflow_api.next_task['documentation'])
# response = ProtocolBuilderService.get_study_details(self.test_study_id)

View File

@ -1,14 +1,16 @@
import os
import string
import random
from unittest.mock import patch
from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
from crc import session
from crc import session, db, app
from crc.api.common import ApiError
from crc.models.file import FileModel, FileDataModel
from crc.models.study import StudyModel
from crc.models.workflow import WorkflowSpecModel, WorkflowStatus
from crc.models.workflow import WorkflowSpecModel, WorkflowStatus, WorkflowModel
from crc.services.file_service import FileService
from tests.base_test import BaseTest
from crc.services.workflow_processor import WorkflowProcessor
@ -214,3 +216,41 @@ class TestWorkflowProcessor(BaseTest):
self.assertIn("last_updated", task.data["study"]["info"])
self.assertIn("sponsor", task.data["study"]["info"])
def test_spec_versioning(self):
self.load_example_data()
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("decision_table")
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
self.assertEquals("1.1", processor.get_spec_version())
file_service = FileService()
file_service.add_workflow_spec_file(workflow_spec_model, "new_file.txt", "txt", b'blahblah')
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
self.assertEquals("1.1.1", processor.get_spec_version())
file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'docx', 'docx.bpmn')
file = open(file_path, "rb")
data = file.read()
file_model = db.session.query(FileModel).filter(FileModel.name == "decision_table.bpmn").first()
file_service.update_file(file_model, data, "txt")
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
self.assertEquals("2.1.1", processor.get_spec_version())
def test_restart_workflow(self):
self.load_example_data()
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("two_forms")
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.study_id == study.id).first()
self.assertEqual(workflow_model.workflow_spec_id, workflow_spec_model.id)
task = processor.next_task()
task.data = {"key": "Value"}
processor.complete_task(task)
task_before_restart = processor.next_task()
processor.restart_with_current_task_data()
task_after_restart = processor.next_task()
self.assertNotEqual(task.get_name(), task_before_restart.get_name())
self.assertEqual(task.get_name(), task_after_restart.get_name())
self.assertEqual(task.data, task_after_restart.data)