Squashed 'SpiffWorkflow/' changes from 580939cc..cd4da465

cd4da465 Merge pull request #264 from sartography/bugfix/dmn-equality-with-boolean
414a59eb disambiguate DMN expressions
eea53c91 Merge pull request #263 from sartography/feature/cleanup-task-completion
d248d5b1 execute postscript before other complete hook tasks
c09f1a90 streamline predict & remove some duplicated calls to it
64c21791 remove duplicate calls to update
4ca1076d move task update to _on_complete to ensure data is copied consistently after task related activities are done
d037a7eb small changes for readability
025bc30f Quick patch -- is_executable needs to be accurate immediately.
14d3d8c3 Merge pull request #262 from sartography/feature/parser_info_features
849c223e We are jumping through a lot of complex xml parsing in SpiffWorkflow-Backend because we need to know some basic information about a BPMN process at the moment it is saved.  Rather than do that work in the backend, it seems better to have SpiffWorkflow handle parsing the xml and providing a bit of metadata, including:

git-subtree-dir: SpiffWorkflow
git-subtree-split: cd4da465e125ca1ae1b57d227bfa324d9d4c507c
This commit is contained in:
burnettk 2022-11-18 10:03:32 -05:00
parent 5d972b7be5
commit 1bed0fb3ee
11 changed files with 60 additions and 32 deletions

View File

@ -19,11 +19,15 @@ class BpmnWorkflowTestCase(unittest.TestCase):
serializer = BpmnWorkflowSerializer(wf_spec_converter)
def load_workflow_spec(self, filename, process_name, validate=True):
def get_parser(self, filename, validate=True):
f = os.path.join(os.path.dirname(__file__), 'data', filename)
validator = BpmnValidator() if validate else None
parser = TestBpmnParser(validator=validator)
parser.add_bpmn_files_by_glob(f)
return parser
def load_workflow_spec(self, filename, process_name, validate=True):
parser = self.get_parser(filename, validate)
top_level_spec = parser.get_spec(process_name)
subprocesses = parser.get_subprocess_specs(process_name)
return top_level_spec, subprocesses

View File

@ -6,10 +6,22 @@ from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
class CollaborationTest(BpmnWorkflowTestCase):
def testParserProvidesInfoOnMessagesAndCorrelations(self):
parser = self.get_parser('collaboration.bpmn')
self.assertEqual(list(parser.messages.keys()), ['love_letter', 'love_letter_response'])
self.assertEqual(parser.correlations,
{'lover_name': {'name': "Lover's Name",
'retrieval_expressions': [
{'expression': 'lover_name',
'messageRef': 'love_letter'},
{'expression': 'from_name',
'messageRef': 'love_letter_response'}]}}
)
def testCollaboration(self):
spec, subprocesses = self.load_collaboration('collaboration.bpmn', 'my_collaboration')
# Only executable processes should be started
self.assertIn('process_buddy', subprocesses)
self.assertNotIn('random_person_process', subprocesses)
@ -122,4 +134,4 @@ class CollaborationTest(BpmnWorkflowTestCase):
start = self.workflow.get_tasks_from_spec_name('Start')[0]
start.data['lover_name'] = 'Peggy'
self.workflow.do_engine_steps()
self.save_restore()
self.save_restore()

View File

@ -19,6 +19,12 @@ class SwimLaneTest(BpmnWorkflowTestCase):
spec, subprocesses = self.load_workflow_spec('lanes.bpmn','lanes')
self.workflow = BpmnWorkflow(spec, subprocesses)
def testBpmnParserKnowsLanesExist(self):
parser = self.get_parser('lanes.bpmn')
self.assertTrue(parser.get_process_parser('lanes').has_lanes())
parser = self.get_parser('random_fact.bpmn')
self.assertFalse(parser.get_process_parser('random_fact').has_lanes())
def testRunThroughHappy(self):
self.workflow.do_engine_steps()

View File

@ -23,13 +23,17 @@ class BaseTestCase(BpmnWorkflowTestCase):
serializer = BpmnWorkflowSerializer(wf_spec_converter)
def load_workflow_spec(self, filename, process_name, dmn_filename=None):
bpmn = os.path.join(os.path.dirname(__file__), 'data', filename)
def get_parser(self, filename, dmn_filename=None):
f = os.path.join(os.path.dirname(__file__), 'data', filename)
parser = CamundaParser()
parser.add_bpmn_files_by_glob(bpmn)
parser.add_bpmn_files_by_glob(f)
if dmn_filename is not None:
dmn = os.path.join(os.path.dirname(__file__), 'data', 'dmn', dmn_filename)
parser.add_dmn_files_by_glob(dmn)
return parser
def load_workflow_spec(self, filename, process_name, dmn_filename=None):
parser = self.get_parser(filename, dmn_filename)
top_level_spec = parser.get_spec(process_name)
subprocesses = parser.get_subprocess_specs(process_name)
return top_level_spec, subprocesses

View File

@ -70,11 +70,9 @@ class MultiInstanceParallelArrayTest(BaseTestCase):
{"CurrentFamilyMember": {"Birthdate": "10/05/1985" + str(x)}})
self.workflow.do_engine_steps()
self.workflow.complete_task_from_id(task.id)
# The data should still be available on the current task.
self.assertEqual({'FirstName': "The Funk #%i" % x,
'Birthdate': '10/05/1985' + str(x)},
self.workflow.get_task(task.id)
.data['CurrentFamilyMember'])
# We used to check that the current data variable was available in the task,
# but there's no reason to preserve it after the task completes. We removed it
# in some cases and left it in others, which just adds to the confusion.
self.workflow.do_engine_steps()
if save_restore:
self.reload_save_restore()

View File

@ -44,8 +44,7 @@ class ResetTokenTestMIParallel(BaseTestCase):
self.workflow.do_engine_steps()
if save_restore: self.save_restore()
self.assertEqual({'current': {'A': 'y'},
'do_step': 'Yes',
self.assertEqual({'do_step': 'Yes',
'output': {'1': {'A': 'x'}, '2': {'A': 'y'}, '3': {'A': 'z'}}},
self.workflow.last_task.data)
@ -66,8 +65,7 @@ class ResetTokenTestMIParallel(BaseTestCase):
self.assertTrue(self.workflow.is_completed())
self.assertEqual({'current': {'A': 'x'},
'do_step': 'Yes',
self.assertEqual({'do_step': 'Yes',
'C': 'c',
'output': {'1': {'A': 'a1'},
'2': {'A': 'y'},
@ -75,11 +73,6 @@ class ResetTokenTestMIParallel(BaseTestCase):
self.workflow.last_task.data)
def suite():
return unittest.TestLoader().loadTestsFromTestCase(ResetTokenTestMIParallel)

View File

@ -14,6 +14,16 @@ class StartMessageTest(BaseTestCase):
self.spec, self.subprocesses = self.load_workflow_spec('message_test.bpmn', 'ThrowCatch')
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
def testParserCanReturnStartMessages(self):
parser = self.get_parser('message_test.bpmn')
self.assertEqual(
parser.process_parsers['ThrowCatch'].start_messages(), ['Message_1rkbi27'])
parser = self.get_parser('random_fact.bpmn')
self.assertEqual(
parser.process_parsers['random_fact'].start_messages(), [])
def testRunThroughHappy(self):
self.actual_test(save_restore=False)

View File

@ -1,11 +1,11 @@
Start
first
excl_choice_1
sub_workflow_1
Start
first
excl_choice_1
last
End
sub_workflow_1
last
End

View File

@ -1,10 +1,10 @@
Start
first
sub_workflow_1
Start
first
last
End
sub_workflow_1
second
join
last

View File

@ -38,6 +38,8 @@ class TaskSpecTest(unittest.TestCase):
def do_next_unique_task(self, name):
# This method asserts that there is only one ready task! The specified
# one - and then completes it
for task in self.workflow.get_tasks(TaskState.WAITING):
task.task_spec._update(task)
ready_tasks = self.workflow.get_tasks(TaskState.READY)
self.assertEqual(1, len(ready_tasks))
task = ready_tasks[0]
@ -58,12 +60,13 @@ class TaskSpecTest(unittest.TestCase):
self.load_workflow_spec('data', 'block_to_subworkflow.xml')
self.do_next_unique_task('Start')
self.do_next_unique_task('first')
self.do_next_unique_task('sub_workflow_1')
# Inner:
# Inner. The subworkflow task will complete automatically after the subworkflow completes
self.do_next_unique_task('Start')
self.do_next_unique_task('first')
self.do_next_unique_task('last')
self.do_next_unique_task('End')
# Back to outer:
self.do_next_unique_task('last')
self.do_next_unique_task('End')
@ -72,7 +75,7 @@ class TaskSpecTest(unittest.TestCase):
self.load_workflow_spec('data', 'subworkflow_to_block.xml')
self.do_next_unique_task('Start')
self.do_next_unique_task('first')
self.do_next_unique_task('sub_workflow_1')
# Inner:
self.do_next_unique_task('Start')
self.do_next_unique_task('first')
@ -86,8 +89,9 @@ class TaskSpecTest(unittest.TestCase):
self.load_workflow_spec('control-flow', 'subworkflow_to_join.xml')
self.do_next_unique_task('Start')
self.do_next_unique_task('first')
self.do_next_named_step('second', ['sub_workflow_1'])
self.do_next_unique_task('sub_workflow_1')
# The subworkflow task now sets its child tasks to READY and waits
self.do_next_named_step('second', ['Start'])
# Inner:
self.do_next_unique_task('Start')
self.do_next_unique_task('first')
@ -102,8 +106,8 @@ class TaskSpecTest(unittest.TestCase):
self.load_workflow_spec('control-flow', 'subworkflow_to_join.xml')
self.do_next_unique_task('Start')
self.do_next_unique_task('first')
self.do_next_named_step('second', ['sub_workflow_1'])
self.do_next_unique_task('sub_workflow_1')
self.do_next_named_step('second', ['Start'])
# Inner:
self.do_next_unique_task('Start')
self.do_next_unique_task('first')

View File

@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
from builtins import str
from builtins import range
import time
from SpiffWorkflow.task import Task, TaskState
from SpiffWorkflow.workflow import Workflow
@ -33,7 +31,6 @@ def on_reached_cb(workflow, task, taken_path):
props = []
for key, value in list(task.task_spec.data.items()):
props.append('='.join((key, str(value))))
# print "REACHED:", task.get_name(), atts, props
# Store the list of data in the workflow.
atts = ';'.join(atts)