From 7070503239299244c2b08d9fc62a527e0ddb9bb7 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 6 Feb 2023 15:59:26 -0500 Subject: [PATCH 1/5] POC for saving some data about each step --- .../services/process_instance_processor.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 63c5a25ee..006cf8897 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -30,6 +30,7 @@ from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine # type: ig from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment # type: ignore from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment +from SpiffWorkflow.bpmn.serializer.helpers.registry import DefaultRegistry # type: ignore from SpiffWorkflow.bpmn.serializer.task_spec import ( # type: ignore EventBasedGatewayConverter, ) @@ -688,14 +689,24 @@ class ProcessInstanceProcessor: "lane_assignment_id": lane_assignment_id, } - def spiff_step_details_mapping(self) -> dict: + def spiff_step_details_mapping(self, spiff_task: Optional[SpiffTask]=None) -> dict: """SaveSpiffStepDetails.""" # bpmn_json = self.serialize() # wf_json = json.loads(bpmn_json) + default_registry = DefaultRegistry() + + if spiff_task is None: + # TODO: safer to pass in task vs use last task? + spiff_task = self.bpmn_process_instance.last_task + + task_data = default_registry.convert(spiff_task.data) + python_env = default_registry.convert(self._script_engine.environment.last_result()) + task_json: Dict[str, Any] = { # "tasks": wf_json["tasks"], # "subprocesses": wf_json["subprocesses"], - # "python_env": self._script_engine.environment.last_result(), + "task_data": task_data, + "python_env": python_env, } return { @@ -1521,7 +1532,7 @@ class ProcessInstanceProcessor: def did_complete_task(task: SpiffTask) -> None: if should_log(task): self._script_engine.environment.revise_state_with_task_data(task) - step_details.append(self.spiff_step_details_mapping()) + step_details.append(self.spiff_step_details_mapping(task)) try: self.bpmn_process_instance.refresh_waiting_tasks() From 1d4a7a9b94cd2e2df11a71ba34b6427b6753e931 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 8 Feb 2023 12:43:30 -0500 Subject: [PATCH 2/5] added additional columns to spiff_step_details w/ burnettk jbirddog --- spiffworkflow-backend/bin/recreate_db | 2 ++ .../migrations/versions/e05ca5cdc312_.py | 36 +++++++++++++++++++ .../models/spiff_step_details.py | 7 +++- .../services/process_instance_processor.py | 18 +++++++--- 4 files changed, 57 insertions(+), 6 deletions(-) create mode 100644 spiffworkflow-backend/migrations/versions/e05ca5cdc312_.py diff --git a/spiffworkflow-backend/bin/recreate_db b/spiffworkflow-backend/bin/recreate_db index 39a074071..8a4e4b5a9 100755 --- a/spiffworkflow-backend/bin/recreate_db +++ b/spiffworkflow-backend/bin/recreate_db @@ -50,6 +50,8 @@ if [[ "${1:-}" == "clean" ]]; then docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "create database spiffworkflow_backend_local_development;" fi fi +elif [[ "${1:-}" == "migrate" ]]; then + tasks="$tasks migrate" fi 
tasks="$tasks upgrade" diff --git a/spiffworkflow-backend/migrations/versions/e05ca5cdc312_.py b/spiffworkflow-backend/migrations/versions/e05ca5cdc312_.py new file mode 100644 index 000000000..8e9313244 --- /dev/null +++ b/spiffworkflow-backend/migrations/versions/e05ca5cdc312_.py @@ -0,0 +1,36 @@ +"""empty message + +Revision ID: e05ca5cdc312 +Revises: ca9b79dde5cc +Create Date: 2023-02-08 12:21:41.722774 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = 'e05ca5cdc312' +down_revision = 'ca9b79dde5cc' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('spiff_step_details', sa.Column('task_id', sa.String(length=50), nullable=False)) + op.add_column('spiff_step_details', sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False)) + op.add_column('spiff_step_details', sa.Column('engine_step_start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True)) + op.add_column('spiff_step_details', sa.Column('engine_step_end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True)) + op.drop_column('spiff_step_details', 'timestamp') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('spiff_step_details', sa.Column('timestamp', mysql.DECIMAL(precision=17, scale=6), nullable=False)) + op.drop_column('spiff_step_details', 'engine_step_end_in_seconds') + op.drop_column('spiff_step_details', 'engine_step_start_in_seconds') + op.drop_column('spiff_step_details', 'bpmn_task_identifier') + op.drop_column('spiff_step_details', 'task_id') + # ### end Alembic commands ### diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py index 0d0e7b447..5a844329b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py @@ -27,4 +27,9 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel): ) spiff_step: int = db.Column(db.Integer, nullable=False) task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore - timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False) + task_id: str | None = db.Column(db.String(50), nullable=False) + bpmn_task_identifier: str | None = db.Column(db.String(255), nullable=False) + + # timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False) + engine_step_start_in_seconds: float | None = db.Column(db.DECIMAL(17, 6)) + engine_step_end_in_seconds: float | None = db.Column(db.DECIMAL(17, 6)) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index d2745bf9c..1641f213c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -689,7 +689,7 @@ class ProcessInstanceProcessor: "lane_assignment_id": lane_assignment_id, } - def spiff_step_details_mapping(self, spiff_task: Optional[SpiffTask]=None) -> dict: + def spiff_step_details_mapping(self, spiff_task: Optional[SpiffTask]=None, start_in_seconds: Optional[float] = 0, end_in_seconds: Optional[float] = 0) -> dict: 
"""SaveSpiffStepDetails.""" # bpmn_json = self.serialize() # wf_json = json.loads(bpmn_json) @@ -699,6 +699,9 @@ class ProcessInstanceProcessor: # TODO: safer to pass in task vs use last task? spiff_task = self.bpmn_process_instance.last_task + if spiff_task is None: + return {} + task_data = default_registry.convert(spiff_task.data) python_env = default_registry.convert(self._script_engine.environment.last_result()) @@ -708,13 +711,14 @@ class ProcessInstanceProcessor: "task_data": task_data, "python_env": python_env, } - return { "process_instance_id": self.process_instance_model.id, "spiff_step": self.process_instance_model.spiff_step or 1, "task_json": task_json, - "timestamp": round(time.time()), - # "completed_by_user_id": self.current_user().id, + "task_id": str(spiff_task.id), + "bpmn_task_identifier": spiff_task.task_spec.id, + "engine_step_start_in_seconds": start_in_seconds, + "engine_step_end_in_seconds": end_in_seconds, } def spiff_step_details(self) -> SpiffStepDetailsModel: @@ -1517,6 +1521,9 @@ class ProcessInstanceProcessor: # "Subprocess" } + # making a dictionary to ensure we are not shadowing variables in the other methods + current_task_start_in_seconds = {} + def should_log(task: SpiffTask) -> bool: if ( task.task_spec.spec_type in tasks_to_log @@ -1527,12 +1534,13 @@ class ProcessInstanceProcessor: def will_complete_task(task: SpiffTask) -> None: if should_log(task): + current_task_start_in_seconds['time'] = time.time() self.increment_spiff_step() def did_complete_task(task: SpiffTask) -> None: if should_log(task): self._script_engine.environment.revise_state_with_task_data(task) - step_details.append(self.spiff_step_details_mapping(task)) + step_details.append(self.spiff_step_details_mapping(task, current_task_start_in_seconds['time'], time.time())) try: self.bpmn_process_instance.refresh_waiting_tasks() From df5451685f65ae85d3f6aff9fb5984b25aa5d51d Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 8 Feb 2023 17:31:20 -0500 Subject: [PATCH 3/5] updated controller to use spiff step details to find correct task data to show w/ burnettk --- .../migrations/versions/e05ca5cdc312_.py | 2 + .../models/spiff_step_details.py | 5 ++- .../routes/process_instances_controller.py | 37 ++++++++++++------- .../services/process_instance_processor.py | 3 +- 4 files changed, 31 insertions(+), 16 deletions(-) diff --git a/spiffworkflow-backend/migrations/versions/e05ca5cdc312_.py b/spiffworkflow-backend/migrations/versions/e05ca5cdc312_.py index 8e9313244..ab6d1b643 100644 --- a/spiffworkflow-backend/migrations/versions/e05ca5cdc312_.py +++ b/spiffworkflow-backend/migrations/versions/e05ca5cdc312_.py @@ -18,6 +18,7 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### + op.add_column('spiff_step_details', sa.Column('task_state', sa.String(length=50), nullable=False)) op.add_column('spiff_step_details', sa.Column('task_id', sa.String(length=50), nullable=False)) op.add_column('spiff_step_details', sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False)) op.add_column('spiff_step_details', sa.Column('engine_step_start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True)) @@ -33,4 +34,5 @@ def downgrade(): op.drop_column('spiff_step_details', 'engine_step_start_in_seconds') op.drop_column('spiff_step_details', 'bpmn_task_identifier') op.drop_column('spiff_step_details', 'task_id') + op.drop_column('spiff_step_details', 'task_state') # ### end Alembic commands ### diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py index 5a844329b..6f9761816 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py @@ -27,8 +27,9 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel): ) spiff_step: int = db.Column(db.Integer, nullable=False) task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore - task_id: str | None = db.Column(db.String(50), nullable=False) - bpmn_task_identifier: str | None = db.Column(db.String(255), nullable=False) + task_id: str = db.Column(db.String(50), nullable=False) + task_state: str = db.Column(db.String(50), nullable=False) + bpmn_task_identifier: str = db.Column(db.String(255), nullable=False) # timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False) engine_step_start_in_seconds: float | None = db.Column(db.DECIMAL(17, 6)) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index a2e68bf3d..0ca0ada1f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -556,20 +556,31 @@ def process_instance_task_list( get_task_data: bool = False, ) -> flask.wrappers.Response: """Process_instance_task_list.""" - if spiff_step > 0: - step_detail = ( - db.session.query(SpiffStepDetailsModel) - .filter( - SpiffStepDetailsModel.process_instance_id == process_instance.id, - SpiffStepDetailsModel.spiff_step == spiff_step, - ) - .first() + step_detail_query = ( + db.session.query(SpiffStepDetailsModel) + .filter( + SpiffStepDetailsModel.process_instance_id == process_instance.id, ) - if step_detail is not None and process_instance.bpmn_json is not None: - bpmn_json = json.loads(process_instance.bpmn_json) - bpmn_json["tasks"] = step_detail.task_json["tasks"] - bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"] - process_instance.bpmn_json = json.dumps(bpmn_json) + ) + + if spiff_step > 0: + step_detail_query = step_detail_query.filter(SpiffStepDetailsModel.spiff_step <= spiff_step) + + step_details = step_detail_query.all() + bpmn_json = json.loads(process_instance.bpmn_json) + tasks = bpmn_json['tasks'] + + # if step_detail is not None and process_instance.bpmn_json is not None: + for step_detail in step_details: + print(f"step_detail.task_id: {step_detail.task_id}") + print(f"step_detail.bpmn_task_identifier: {step_detail.bpmn_task_identifier}") + if step_detail.task_id in tasks: + 
task_data = step_detail.task_json['task_data'] | step_detail.task_json['python_env'] + if task_data is None: + task_data = {} + tasks[step_detail.task_id]['data'] = task_data + + process_instance.bpmn_json = json.dumps(bpmn_json) processor = ProcessInstanceProcessor(process_instance) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 1641f213c..d8bfeba78 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -716,7 +716,8 @@ class ProcessInstanceProcessor: "spiff_step": self.process_instance_model.spiff_step or 1, "task_json": task_json, "task_id": str(spiff_task.id), - "bpmn_task_identifier": spiff_task.task_spec.id, + "task_state": spiff_task.state, + "bpmn_task_identifier": spiff_task.task_spec.name, "engine_step_start_in_seconds": start_in_seconds, "engine_step_end_in_seconds": end_in_seconds, } From a5dc669509b1b37b00ffccd9c7a2c31420b7ced2 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Thu, 9 Feb 2023 09:45:31 -0500 Subject: [PATCH 4/5] Fix bug where deletes from the environment were not removed from the task data as well --- .../services/process_instance_processor.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index d8bfeba78..977bd61bd 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -207,6 +207,13 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment) self.state.update(context) exec(script, self.state) # noqa + # since the task data is not directly mutated when the script executes, need to determine which keys + # have been deleted from the environment and remove them from task data if present. + context_keys_to_drop = context.keys() - self.state.keys() + + for key_to_drop in context_keys_to_drop: + context.pop(key_to_drop) + self.state = self._user_defined_state(external_methods) # the task data needs to be updated with the current state so data references can be resolved properly. 
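The fix in PATCH 4/5 keeps the task data in sync with the script-engine environment: after a script runs, any key the script deleted from the environment is also popped from the task-data context, computed immediately after exec and before the environment prunes itself back to user-defined state. A minimal standalone sketch of that idea, assuming a plain-dict environment; the function name and the copy-back step at the end are illustrative, not the actual NonTaskDataBasedScriptEngineEnvironment API:

    # Sketch of the deletion-sync logic: run a script against a merged state,
    # then mirror any deletions back onto the task-data context.
    def run_script_and_sync(context: dict, script: str, external_methods: dict | None = None) -> None:
        state = dict(external_methods or {})
        state.update(context)
        exec(script, state)  # mirrors the engine's exec-based script evaluation

        # keys present in the task data but gone from the post-exec state were
        # deleted by the script; drop them from the task data too
        for key_to_drop in context.keys() - state.keys():
            context.pop(key_to_drop)

        # copy surviving values back so new and updated keys are visible
        # (illustrative; the real environment keeps its own persistent state)
        for key, value in state.items():
            if not key.startswith("__") and key not in (external_methods or {}):
                context[key] = value

    task_data = {"a": 1, "b": 2}
    run_script_and_sync(task_data, "c = a + b\ndel b")
    assert task_data == {"a": 1, "c": 3}

The set difference is taken right after exec, which matches where the patch places the loop relative to the existing state handling.
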
From 7347c73d6a3d4bcfc0ab55febbbb3034e44a88ec Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 9 Feb 2023 10:28:36 -0500 Subject: [PATCH 5/5] pyl w/ burnettk --- .../routes/process_instances_controller.py | 23 +++++++++---------- .../services/process_instance_processor.py | 21 +++++++++++++---- 2 files changed, 27 insertions(+), 17 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 0ca0ada1f..7c23f250d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -556,29 +556,28 @@ def process_instance_task_list( get_task_data: bool = False, ) -> flask.wrappers.Response: """Process_instance_task_list.""" - step_detail_query = ( - db.session.query(SpiffStepDetailsModel) - .filter( - SpiffStepDetailsModel.process_instance_id == process_instance.id, - ) + step_detail_query = db.session.query(SpiffStepDetailsModel).filter( + SpiffStepDetailsModel.process_instance_id == process_instance.id, ) if spiff_step > 0: - step_detail_query = step_detail_query.filter(SpiffStepDetailsModel.spiff_step <= spiff_step) + step_detail_query = step_detail_query.filter( + SpiffStepDetailsModel.spiff_step <= spiff_step + ) step_details = step_detail_query.all() - bpmn_json = json.loads(process_instance.bpmn_json) - tasks = bpmn_json['tasks'] + bpmn_json = json.loads(process_instance.bpmn_json or "{}") + tasks = bpmn_json["tasks"] # if step_detail is not None and process_instance.bpmn_json is not None: for step_detail in step_details: - print(f"step_detail.task_id: {step_detail.task_id}") - print(f"step_detail.bpmn_task_identifier: {step_detail.bpmn_task_identifier}") if step_detail.task_id in tasks: - task_data = step_detail.task_json['task_data'] | step_detail.task_json['python_env'] + task_data = ( + step_detail.task_json["task_data"] | step_detail.task_json["python_env"] + ) if task_data is None: task_data = {} - tasks[step_detail.task_id]['data'] = task_data + tasks[step_detail.task_id]["data"] = task_data process_instance.bpmn_json = json.dumps(bpmn_json) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index d011c9788..dc75ac6ae 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -682,7 +682,12 @@ class ProcessInstanceProcessor: "lane_assignment_id": lane_assignment_id, } - def spiff_step_details_mapping(self, spiff_task: Optional[SpiffTask]=None, start_in_seconds: Optional[float] = 0, end_in_seconds: Optional[float] = 0) -> dict: + def spiff_step_details_mapping( + self, + spiff_task: Optional[SpiffTask] = None, + start_in_seconds: Optional[float] = 0, + end_in_seconds: Optional[float] = 0, + ) -> dict: """SaveSpiffStepDetails.""" # bpmn_json = self.serialize() # wf_json = json.loads(bpmn_json) @@ -696,8 +701,10 @@ class ProcessInstanceProcessor: return {} task_data = default_registry.convert(spiff_task.data) - python_env = default_registry.convert(self._script_engine.environment.last_result()) - + python_env = default_registry.convert( + self._script_engine.environment.last_result() + ) + task_json: Dict[str, Any] = { # "tasks": 
wf_json["tasks"], # "subprocesses": wf_json["subprocesses"], @@ -1528,13 +1535,17 @@ class ProcessInstanceProcessor: def will_complete_task(task: SpiffTask) -> None: if should_log(task): - current_task_start_in_seconds['time'] = time.time() + current_task_start_in_seconds["time"] = time.time() self.increment_spiff_step() def did_complete_task(task: SpiffTask) -> None: if should_log(task): self._script_engine.environment.revise_state_with_task_data(task) - step_details.append(self.spiff_step_details_mapping(task, current_task_start_in_seconds['time'], time.time())) + step_details.append( + self.spiff_step_details_mapping( + task, current_task_start_in_seconds["time"], time.time() + ) + ) try: self.bpmn_process_instance.refresh_waiting_tasks()