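"""Helpers for migrating serialized SpiffWorkflow BPMN workflows to a newer serializer version.

Each function below takes the serialization dictionary (``dct``) and rewrites it
in place: timer event definitions are converted to their newer typenames and
ISO 8601 expressions, exclusive gateways gain an explicit default condition,
data objects and task data references receive typenames and an io_specification,
and workflows containing legacy multiinstance tasks are rejected.
"""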

from datetime import datetime, timedelta

from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.specs.events.event_definitions import LOCALTZ

from .exceptions import VersionMigrationError


def td_to_iso(td):
    """Convert a timedelta to an ISO 8601 duration string."""
    total = td.total_seconds()
    v1, seconds = total // 60, total % 60
    v2, minutes = v1 // 60, v1 % 60
    days, hours = v2 // 24, v2 % 24
    return f"P{days:.0f}DT{hours:.0f}H{minutes:.0f}M{seconds}S"
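
# Illustrative check, not part of the original module: days, hours, and minutes
# are rendered as whole numbers while seconds remain a float, e.g.
#   td_to_iso(timedelta(days=1, hours=2, minutes=3, seconds=4)) == 'P1DT2H3M4.0S'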


def convert_timer_expressions(dct):
    """Rewrite old-style timer event definitions as their newer typenames and ISO 8601 expressions."""

    message = "Unable to convert time specifications for {spec}. This is most likely because the values are set during workflow execution."

    has_timer = lambda ts: 'event_definition' in ts and ts['event_definition']['typename'] in ['CycleTimerEventDefinition', 'TimerEventDefinition']
    for spec in [ts for ts in dct['spec']['task_specs'].values() if has_timer(ts)]:
        spec['event_definition']['name'] = spec['event_definition'].pop('label')

        if spec['event_definition']['typename'] == 'TimerEventDefinition':
            expr = spec['event_definition'].pop('dateTime')
            try:
                dt = eval(expr)
                if isinstance(dt, datetime):
                    spec['event_definition']['expression'] = f"'{dt.isoformat()}'"
                    spec['event_definition']['typename'] = 'TimeDateEventDefinition'
                elif isinstance(dt, timedelta):
                    spec['event_definition']['expression'] = f"'{td_to_iso(dt)}'"
                    spec['event_definition']['typename'] = 'DurationTimerEventDefinition'
            except Exception:
                raise VersionMigrationError(message.format(spec=spec['name']))

        if spec['event_definition']['typename'] == 'CycleTimerEventDefinition':

            tasks = [t for t in dct['tasks'].values() if t['task_spec'] == spec['name']]
            task = tasks[0] if len(tasks) > 0 else None

            expr = spec['event_definition'].pop('cycle_definition')
            try:
                repeat, duration = eval(expr)
                spec['event_definition']['expression'] = f"'R{repeat}/{td_to_iso(duration)}'"
                if task is not None:
                    cycles_complete = task['data'].pop('repeat_count', 0)
                    start_time = task['internal_data'].pop('start_time', None)
                    if start_time is not None:
                        dt = datetime.fromisoformat(start_time)
                        task['internal_data']['event_value'] = {
                            'cycles': repeat - cycles_complete,
                            'next': datetime.combine(dt.date(), dt.time(), LOCALTZ).isoformat(),
                            'duration': duration.total_seconds(),
                        }
            except Exception:
                raise VersionMigrationError(message.format(spec=spec['name']))

            if spec['typename'] == 'StartEvent':
                spec['outputs'].remove(spec['name'])
                if task is not None:
                    children = [dct['tasks'][c] for c in task['children']]
                    # Formerly cycles were handled by looping back and reusing the tasks, so this removes the extra tasks
                    remove = [c for c in children if c['task_spec'] == task['task_spec']][0]
                    for task_id in remove['children']:
                        child = dct['tasks'][task_id]
                        if child['task_spec'].startswith('return') or child['state'] != TaskState.COMPLETED:
                            dct['tasks'].pop(task_id)
                        else:
                            task['children'].append(task_id)
                    task['children'].remove(remove['id'])
                    dct['tasks'].pop(remove['id'])
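
# Illustrative example of the conversion above (hypothetical serialized fragment,
# not taken from a real workflow): an old event definition such as
#   {'typename': 'TimerEventDefinition', 'label': 'wait', 'dateTime': 'timedelta(minutes=5)'}
# becomes
#   {'typename': 'DurationTimerEventDefinition', 'name': 'wait', 'expression': "'P0DT0H5M0.0S'"}
# while a datetime-valued expression is rewritten as a TimeDateEventDefinition instead.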


def add_default_condition_to_cond_task_specs(dct):
    """Give each exclusive gateway an explicit entry for its default path."""
    for spec in [ts for ts in dct['spec']['task_specs'].values() if ts['typename'] == 'ExclusiveGateway']:
        if (None, spec['default_task_spec']) not in spec['cond_task_specs']:
            spec['cond_task_specs'].append({'condition': None, 'task_spec': spec['default_task_spec']})
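
# Illustrative effect (hypothetical values): a gateway with
#   spec['default_task_spec'] == 'task_b' and
#   spec['cond_task_specs'] == [{'condition': 'x > 0', 'task_spec': 'task_a'}]
# gains the entry {'condition': None, 'task_spec': 'task_b'}, so the default
# path is represented the same way as the conditioned ones.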


def create_data_objects_and_io_specs(dct):
    """Add typenames to data objects and references and move task data inputs/outputs into an io_specification."""

    def update_data_specs(spec):
        for obj in spec.get('data_objects', {}).values():
            obj['typename'] = 'DataObject'
        data_inputs = spec.pop('data_inputs', [])
        data_outputs = spec.pop('data_outputs', [])
        if len(data_inputs) > 0 or len(data_outputs) > 0:
            for item in data_inputs:
                item['typename'] = 'TaskDataReference'
            for item in data_outputs:
                item['typename'] = 'TaskDataReference'
            io_spec = {
                'typename': 'BpmnIoSpecification',
                'data_inputs': data_inputs,
                'data_outputs': data_outputs,
            }
            spec['io_specification'] = io_spec
        else:
            spec['io_specification'] = None

    update_data_specs(dct['spec'])
    for sp in dct['subprocess_specs'].values():
        update_data_specs(sp)

    for spec in dct['spec']['task_specs'].values():
        for item in spec.get('data_input_associations', {}):
            item['typename'] = 'DataObject'
        for item in spec.get('data_output_associations', {}):
            item['typename'] = 'DataObject'
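
# Illustrative shape of the result (hypothetical field values): a spec that
# previously carried data_inputs/data_outputs ends up with
#   spec['io_specification'] == {
#       'typename': 'BpmnIoSpecification',
#       'data_inputs': [{'name': 'in_1', 'typename': 'TaskDataReference', ...}],
#       'data_outputs': [{'name': 'out_1', 'typename': 'TaskDataReference', ...}],
#   }
# while a spec with neither gets io_specification set to None.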


def check_multiinstance(dct):
    """Refuse to migrate workflows that contain legacy multiinstance tasks."""
    specs = [spec for spec in dct['spec']['task_specs'].values() if 'prevtaskclass' in spec]
    if len(specs) > 0:
        raise VersionMigrationError("This workflow cannot be migrated because it contains MultiInstance Tasks")
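

# Hypothetical usage sketch, not part of the original module: the real migration
# driver lives elsewhere in the serializer package, and the function name and
# call order below are assumptions for illustration only.
def _migrate_sketch(dct):
    convert_timer_expressions(dct)
    add_default_condition_to_cond_task_specs(dct)
    create_data_objects_and_io_specs(dct)
    check_multiinstance(dct)
    return dct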