diff --git a/Pipfile b/Pipfile
index 5f12da0b..d76c4bd4 100644
--- a/Pipfile
+++ b/Pipfile
@@ -53,7 +53,7 @@ dateparser = "*"
 # pipenv install --editable ../SpiffWorkflow (but fix things back before commiting!)
 # Merged Commit https://github.com/sartography/SpiffWorkflow/pull/178 broke usage of SpiffWorkflow
 # References to task states will need to be updated to allow using newest version
-spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", ref = "e47072ff88d7121c01861b08851e5b6f6fbf6609"}
+spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow"}
 
 [requires]
 python_version = "3.9"
diff --git a/Pipfile.lock b/Pipfile.lock
index 56ad9c1c..00f82b67 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "9f340919423c7c2d75664173de3132b3925a0e0bc28216951f188010e3a1e1f1"
+            "sha256": "c2fd582e7167dd91efba5115ea49717313e8bc5cf0589e5469b6e641326cfc8b"
         },
         "pipfile-spec": 6,
         "requires": {
@@ -70,6 +70,28 @@
             "markers": "python_version >= '3.6'",
             "version": "==2.10.1"
         },
+        "backports.zoneinfo": {
+            "hashes": [
+                "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf",
+                "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328",
+                "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546",
+                "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6",
+                "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570",
+                "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9",
+                "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7",
+                "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987",
+                "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722",
+                "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582",
+                "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc",
+                "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b",
+                "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1",
+                "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08",
+                "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac",
+                "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"
+            ],
+            "markers": "python_version >= '3.6' and python_version < '3.9'",
+            "version": "==0.2.1"
+        },
         "bcrypt": {
             "hashes": [
                 "sha256:2b02d6bfc6336d1094276f3f588aa1225a598e27f8e3388f4db9948cb707b521",
@@ -199,7 +221,7 @@
                 "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667",
                 "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"
             ],
-            "markers": "python_full_version >= '3.6.2' and python_full_version < '4.0.0'",
+            "markers": "python_version < '4' and python_full_version >= '3.6.2'",
             "version": "==0.3.0"
         },
         "click-plugins": {
@@ -528,6 +550,14 @@
             "markers": "python_version < '3.10'",
             "version": "==4.11.3"
         },
+        "importlib-resources": {
+            "hashes": [
+                "sha256:b6062987dfc51f0fcb809187cffbd60f35df7acb4589091f154214af6d0d49d3",
+                "sha256:e447dc01619b1e951286f3929be820029d48c75eb25d265c28b92a16548212b8"
+            ],
+            "markers": "python_version < '3.9'",
+            "version": "==5.7.1"
+        },
         "inflection": {
             "hashes": [
                 "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417",
@@ -1147,14 +1177,6 @@
             "index": "pypi",
             "version": "==0.14.4"
         },
-        "setuptools": {
-            "hashes": [
-                "sha256:26ead7d1f93efc0f8c804d9fafafbe4a44b179580a7105754b245155f9af05a8",
-                "sha256:47c7b0c0f8fc10eec4cf1e71c6fdadf8decaa74ffa087e68cd1c20db7ad6a592"
-            ],
-            "markers": "python_version >= '3.7'",
-            "version": "==62.1.0"
-        },
         "six": {
             "hashes": [
                 "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
@@ -1244,7 +1266,7 @@
         },
         "spiffworkflow": {
             "git": "https://github.com/sartography/SpiffWorkflow",
-            "ref": "e47072ff88d7121c01861b08851e5b6f6fbf6609"
+            "ref": "2617ef4cb82b85db9c756a38632c66290f3e1007"
         },
         "sqlalchemy": {
             "hashes": [
@@ -1317,7 +1339,7 @@
                 "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14",
                 "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"
             ],
-            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_full_version < '4.0.0'",
+            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
             "version": "==1.26.9"
         },
         "vine": {
@@ -1466,7 +1488,7 @@
                 "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad",
                 "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"
             ],
-            "markers": "python_version >= '3.7'",
+            "markers": "python_version < '3.10'",
             "version": "==3.8.0"
         }
     },
diff --git a/crc/api/workflow.py b/crc/api/workflow.py
index 6c081c8f..344449a3 100644
--- a/crc/api/workflow.py
+++ b/crc/api/workflow.py
@@ -1,5 +1,6 @@
 import uuid
 
+from SpiffWorkflow import TaskState
 from flask import g
 
 from crc import session
@@ -16,7 +17,6 @@ from crc.services.workflow_processor import WorkflowProcessor
 from crc.services.workflow_service import WorkflowService
 from crc.services.workflow_spec_service import WorkflowSpecService
 
-
 def all_specifications(libraries=False,standalone=False):
     spec_service = WorkflowSpecService()
     if libraries and standalone:
@@ -255,14 +255,15 @@ def set_current_task(workflow_id, task_id):
     processor = WorkflowProcessor(workflow_model)
     task_id = uuid.UUID(task_id)
     spiff_task = processor.bpmn_workflow.get_task(task_id)
-    cancel_notify = (spiff_task.state == spiff_task.COMPLETED and
+    cancel_notify = (spiff_task.state == TaskState.COMPLETED and
                      spiff_task.task_spec.__class__.__name__ != 'EndEvent')
     if not spiff_task:
         # An invalid task_id was requested.
         raise ApiError("invalid_task", "The Task you requested no longer exists as a part of this workflow.")
     _verify_user_and_role(processor, spiff_task)
     user_uid = UserService.current_user(allow_admin_impersonate=True).uid
-    if spiff_task.state != spiff_task.COMPLETED and spiff_task.state != spiff_task.READY:
+
+    if spiff_task.state != TaskState.COMPLETED and spiff_task.state != TaskState.READY:
         raise ApiError("invalid_state", "You may not move the token to a task who's state is not "
                                         "currently set to COMPLETE or READY.")
@@ -295,7 +296,7 @@ def update_task(workflow_id, task_id, body, terminate_loop=None, update_all=Fals
     if not spiff_task:
         raise ApiError("empty_task", "Processor failed to obtain task.", status_code=404)
-    if spiff_task.state != spiff_task.READY:
+    if spiff_task.state != TaskState.READY:
         raise ApiError("invalid_state", "You may not update a task unless it is in the READY state. "
                                         "Consider calling a token reset to make this task Ready.")
diff --git a/crc/services/workflow_processor.py b/crc/services/workflow_processor.py
index 22a94c38..33699c53 100644
--- a/crc/services/workflow_processor.py
+++ b/crc/services/workflow_processor.py
@@ -10,7 +10,7 @@ from SpiffWorkflow.serializer.exceptions import MissingSpecError
 from lxml import etree
 from datetime import datetime
 
-from SpiffWorkflow import Task as SpiffTask, WorkflowException, Task
+from SpiffWorkflow import Task as SpiffTask, WorkflowException, Task, TaskState
 from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
 from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
@@ -172,7 +172,7 @@
         if UserService.has_user():
             current_user = UserService.current_user(allow_admin_impersonate=True)
             current_user_data = UserModelSchema().dump(current_user)
-            tasks = bpmn_workflow.get_tasks(SpiffTask.READY)
+            tasks = bpmn_workflow.get_tasks(TaskState.READY)
             for task in tasks:
                 task.data['current_user'] = current_user_data
@@ -236,7 +236,7 @@
     def save(self):
         """Saves the current state of this processor to the database """
         self.workflow_model.bpmn_workflow_json = self.serialize()
-        complete_states = [SpiffTask.CANCELLED, SpiffTask.COMPLETED]
+        complete_states = [TaskState.CANCELLED, TaskState.COMPLETED]
         tasks = list(self.get_all_user_tasks())
         self.workflow_model.status = self.get_status()
         self.workflow_model.total_tasks = len(tasks)
@@ -304,7 +304,7 @@
         if bpmn_workflow.is_completed():
             return WorkflowStatus.complete
         user_tasks = bpmn_workflow.get_ready_user_tasks()
-        waiting_tasks = bpmn_workflow.get_tasks(Task.WAITING)
+        waiting_tasks = bpmn_workflow.get_tasks(TaskState.WAITING)
         if len(waiting_tasks) > 0:
             return WorkflowStatus.waiting
         if len(user_tasks) > 0:
@@ -355,7 +355,7 @@
 
         endtasks = []
         if self.bpmn_workflow.is_completed():
-            for task in SpiffTask.Iterator(self.bpmn_workflow.task_tree, SpiffTask.ANY_MASK):
+            for task in SpiffTask.Iterator(self.bpmn_workflow.task_tree, TaskState.ANY_MASK):
                 # Assure that we find the end event for this workflow, and not for any sub-workflows.
                 if isinstance(task.task_spec, EndEvent) and task.workflow == self.bpmn_workflow:
                     endtasks.append(task)
@@ -366,11 +366,11 @@
         # a parallel gateway with multiple tasks, so prefer ones that share a parent.
 
         # Get a list of all ready tasks
-        ready_tasks = self.bpmn_workflow.get_tasks(SpiffTask.READY)
+        ready_tasks = self.bpmn_workflow.get_tasks(TaskState.READY)
 
         if len(ready_tasks) == 0:
             # If no ready tasks exist, check for a waiting task.
-            waiting_tasks = self.bpmn_workflow.get_tasks(SpiffTask.WAITING)
+            waiting_tasks = self.bpmn_workflow.get_tasks(TaskState.WAITING)
             if len(waiting_tasks) > 0:
                 return waiting_tasks[0]
             else:
@@ -398,12 +398,12 @@
         # If there are no ready tasks, but the thing isn't complete yet, find the first non-complete task
         # and return that
         next_task = None
-        for task in SpiffTask.Iterator(self.bpmn_workflow.task_tree, SpiffTask.NOT_FINISHED_MASK):
+        for task in SpiffTask.Iterator(self.bpmn_workflow.task_tree, TaskState.NOT_FINISHED_MASK):
             next_task = task
         return next_task
 
     def completed_user_tasks(self):
-        completed_user_tasks = self.bpmn_workflow.get_tasks(SpiffTask.COMPLETED)
+        completed_user_tasks = self.bpmn_workflow.get_tasks(TaskState.COMPLETED)
         completed_user_tasks.reverse()
         completed_user_tasks = list(
             filter(lambda task: not self.bpmn_workflow._is_engine_task(task.task_spec), completed_user_tasks))
@@ -445,18 +445,19 @@
         additional_tasks = []
         if len(ready_tasks) > 0:
             for child in ready_tasks[0].parent.children:
-                if child.state == SpiffTask.COMPLETED:
+                if child.state == TaskState.COMPLETED:
                     additional_tasks.append(child)
         return ready_tasks + additional_tasks
 
     def get_all_user_tasks(self):
-        all_tasks = self.bpmn_workflow.get_tasks(SpiffTask.ANY_MASK)
+        all_tasks = self.bpmn_workflow.get_tasks(TaskState.ANY_MASK)
         return [t for t in all_tasks if not self.bpmn_workflow._is_engine_task(t.task_spec)]
 
     def get_all_completed_tasks(self):
-        all_tasks = self.bpmn_workflow.get_tasks(SpiffTask.ANY_MASK)
+        all_tasks = self.bpmn_workflow.get_tasks(TaskState.ANY_MASK)
         return [t for t in all_tasks
-                if not self.bpmn_workflow._is_engine_task(t.task_spec) and t.state in [t.COMPLETED, t.CANCELLED]]
+                if not self.bpmn_workflow._is_engine_task(t.task_spec) and
+                t.state in [TaskState.COMPLETED, TaskState.CANCELLED]]
 
     def get_nav_item(self, task):
         for nav_item in self.bpmn_workflow.get_nav_list():
diff --git a/crc/services/workflow_service.py b/crc/services/workflow_service.py
index b1cc53b1..fb1e99c7 100755
--- a/crc/services/workflow_service.py
+++ b/crc/services/workflow_service.py
@@ -8,7 +8,7 @@ from datetime import datetime
 from typing import List
 
 import jinja2
-from SpiffWorkflow import Task as SpiffTask, WorkflowException, NavItem
+from SpiffWorkflow import Task as SpiffTask, WorkflowException, NavItem, TaskState
 from SpiffWorkflow.bpmn.PythonScriptEngine import Box
 from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask
 from SpiffWorkflow.bpmn.specs.ScriptTask import ScriptTask
@@ -188,7 +188,7 @@
                                          f"The validation has been exited early on task '{exit_task.task_spec.id}' "
                                          f"and was parented by ", exit_task.parent)
 
-        tasks = processor.bpmn_workflow.get_tasks(SpiffTask.READY)
+        tasks = processor.bpmn_workflow.get_tasks(TaskState.READY)
         for task in tasks:
             if task.task_spec.lane is not None and task.task_spec.lane not in task.data:
                 raise ApiError.from_task("invalid_role",
@@ -805,7 +805,7 @@
         # All ready tasks should have a valid name, and this can be computed for
        # some tasks, particularly multi-instance tasks that all have the same spec
        # but need different labels.
-        if spiff_task.state == SpiffTask.READY:
+        if spiff_task.state == TaskState.READY:
             task.properties = WorkflowService._process_properties(spiff_task, props)
 
         task.title = WorkflowService.__calculate_title(spiff_task)
@@ -838,7 +838,7 @@
             # if the task is ready, we should raise an error, but if it is in the future or the past, we may not
             # have the information we need to properly set the title, so don't error out, and just use what is
             # provided.
-            if spiff_task.state == spiff_task.READY:
+            if spiff_task.state == TaskState.READY:
                 raise ApiError.from_task(code="task_title_error",
                                          message="Could not set task title on task %s with '%s' property because %s" %
                                                  (spiff_task.task_spec.name, Task.PROP_EXTENSIONS_TITLE, str(e)),
diff --git a/tests/workflow/test_workflow_processor.py b/tests/workflow/test_workflow_processor.py
index b37f09f9..e8d43307 100644
--- a/tests/workflow/test_workflow_processor.py
+++ b/tests/workflow/test_workflow_processor.py
@@ -1,5 +1,6 @@
 import os
 
+from SpiffWorkflow import TaskState
 from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
 
 from tests.base_test import BaseTest
@@ -177,7 +178,7 @@ class TestWorkflowProcessor(BaseTest):
         processor.do_engine_steps()
         task = processor.next_task()
         self.assertIsNotNone(task)
-        self.assertEqual(task.state, task.WAITING)
+        self.assertEqual(task.state, TaskState.WAITING)
 
     def test_workflow_validation_error_is_properly_raised(self):
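Note: the change repeated across crc/api/workflow.py, crc/services/workflow_processor.py, crc/services/workflow_service.py and the test above is the same migration. Newer SpiffWorkflow exposes the task-state constants (READY, COMPLETED, CANCELLED, WAITING, ANY_MASK, NOT_FINISHED_MASK) on a dedicated TaskState class rather than as attributes of Task/SpiffTask or of individual task instances, which is what the Pipfile comment about updating task-state references is pointing at. A minimal sketch of the pattern follows, assuming a SpiffWorkflow checkout that exports TaskState from the package root (as the ref now pinned in Pipfile.lock does); the helper names and the bpmn_workflow argument are illustrative, not part of this codebase.

from SpiffWorkflow import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow


def open_tasks(bpmn_workflow: BpmnWorkflow):
    # Old style removed by this PR:
    #   bpmn_workflow.get_tasks(SpiffTask.READY)
    #   spiff_task.state == spiff_task.COMPLETED
    # New style: always reference the TaskState constants.
    ready = bpmn_workflow.get_tasks(TaskState.READY)
    waiting = bpmn_workflow.get_tasks(TaskState.WAITING)
    return ready + waiting


def is_finished(spiff_task) -> bool:
    # The constants are bitmask values, so the combined masks used in
    # workflow_processor.py (TaskState.ANY_MASK, TaskState.NOT_FINISHED_MASK)
    # work the same way when passed to get_tasks() or Task.Iterator().
    return spiff_task.state in (TaskState.COMPLETED, TaskState.CANCELLED)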