tasks and subprocesses are now setting the task states properly when getting task data w/ burnettk jbirddog
This commit is contained in:
parent 4791bda56d
commit d0aa6b1cf5
@@ -0,0 +1,34 @@
"""empty message

Revision ID: 63fc8d693b9f
Revises: e05ca5cdc312
Create Date: 2023-02-09 11:54:34.935801

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# revision identifiers, used by Alembic.
revision = '63fc8d693b9f'
down_revision = 'e05ca5cdc312'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('spiff_step_details', sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=False))
    op.add_column('spiff_step_details', sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True))
    op.drop_column('spiff_step_details', 'engine_step_end_in_seconds')
    op.drop_column('spiff_step_details', 'engine_step_start_in_seconds')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('spiff_step_details', sa.Column('engine_step_start_in_seconds', mysql.DECIMAL(precision=17, scale=6), nullable=True))
    op.add_column('spiff_step_details', sa.Column('engine_step_end_in_seconds', mysql.DECIMAL(precision=17, scale=6), nullable=True))
    op.drop_column('spiff_step_details', 'end_in_seconds')
    op.drop_column('spiff_step_details', 'start_in_seconds')
    # ### end Alembic commands ###
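Worth noting about the migration above: dropping engine_step_*_in_seconds and adding fresh start_in_seconds/end_in_seconds columns discards whatever timing data the old columns held. If keeping that data mattered, an in-place rename would be an alternative; here is a minimal sketch of that variant using Alembic's op.alter_column (an assumption about intent, not what the commit does):

def upgrade():
    # Hypothetical alternative: rename the columns instead of dropping and
    # re-adding them, so existing rows keep their recorded times.
    op.alter_column(
        'spiff_step_details',
        'engine_step_start_in_seconds',
        new_column_name='start_in_seconds',
        existing_type=sa.DECIMAL(precision=17, scale=6),
        nullable=False,
    )
    op.alter_column(
        'spiff_step_details',
        'engine_step_end_in_seconds',
        new_column_name='end_in_seconds',
        existing_type=sa.DECIMAL(precision=17, scale=6),
        nullable=True,
    )

(The nullable=False change would still fail on rows where the old start column is NULL, so a backfill step would be needed first.)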
@@ -4,6 +4,7 @@ from __future__ import annotations
from dataclasses import dataclass

from sqlalchemy import ForeignKey
+from sqlalchemy.orm import relationship

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
@@ -30,3 +31,5 @@ class HumanTaskUserModel(SpiffworkflowBaseDBModel):
        ForeignKey(HumanTaskModel.id), nullable=False, index=True  # type: ignore
    )
    user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)  # type: ignore
+
+    human_task = relationship(HumanTaskModel)
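The new relationship is what lets the processor change further down build HumanTaskUserModel(user_id=..., human_task=human_task) instead of passing human_task_id: SQLAlchemy resolves the foreign key from the related object when the session flushes, so the parent row's id does not have to exist yet. A rough sketch of the difference, with made-up constructor values:

# With only the foreign key column, human_task.id must already be assigned:
#   HumanTaskUserModel(user_id=42, human_task_id=human_task.id)
# With the relationship, the FK is filled in at flush/commit time:
human_task = HumanTaskModel(process_instance_id=1)  # hypothetical kwargs
db.session.add(human_task)
human_task_user = HumanTaskUserModel(user_id=42, human_task=human_task)
db.session.add(human_task_user)
db.session.commit()  # human_task_user.human_task_id is now populated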
@@ -31,6 +31,5 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
    task_state: str = db.Column(db.String(50), nullable=False)
    bpmn_task_identifier: str = db.Column(db.String(255), nullable=False)

-    # timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
-    engine_step_start_in_seconds: float | None = db.Column(db.DECIMAL(17, 6))
-    engine_step_end_in_seconds: float | None = db.Column(db.DECIMAL(17, 6))
+    start_in_seconds: float = db.Column(db.DECIMAL(17, 6), nullable=False)
+    end_in_seconds: float | None = db.Column(db.DECIMAL(17, 6))
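Like the columns they replace, start_in_seconds and end_in_seconds hold Unix epoch seconds in DECIMAL(17, 6): six fractional digits for microsecond-level resolution, with eleven integer digits, so current ten-digit epoch values fit with room to spare. For illustration:

import time
from decimal import Decimal

now = time.time()               # e.g. 1675957474.935801
stored = Decimal(f"{now:.6f}")  # the exact value a DECIMAL(17, 6) column keeps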
@@ -1,5 +1,6 @@
"""Task."""
import enum
+from SpiffWorkflow.task import TaskStateNames  # type: ignore
from typing import Any
from typing import Optional
from typing import Union
@@ -212,6 +213,12 @@ class Task:
            value for name, value in vars(cls).items() if name.startswith("FIELD_TYPE")
        ]

+    @classmethod
+    def task_state_name_to_int(cls, task_state_name: str) -> int:
+        task_state_integers = {v: k for k, v in TaskStateNames.items()}
+        task_state_int: int = task_state_integers[task_state_name]
+        return task_state_int


class OptionSchema(Schema):
    """OptionSchema."""
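SpiffWorkflow's TaskStateNames maps the integer task states to their display names (TaskState.COMPLETED to "COMPLETED" and so on), so the new classmethod is simply the inverse lookup: it lets a state name that was persisted in spiff_step_details.task_state be written back into the serialized bpmn_json, which stores states as integers. Roughly, as an illustrative round trip rather than code from the repo:

from SpiffWorkflow.task import TaskState, TaskStateNames

TaskStateNames[TaskState.COMPLETED]        # "COMPLETED"
Task.task_state_name_to_int("COMPLETED")   # == TaskState.COMPLETED (the integer)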
@@ -11,7 +11,7 @@ from flask import jsonify
from flask import make_response
from flask import request
from flask.wrappers import Response
-from SpiffWorkflow.task import TaskState  # type: ignore
+from SpiffWorkflow.task import TaskState, TaskStateNames  # type: ignore
from sqlalchemy import and_
from sqlalchemy import or_
@@ -20,6 +20,7 @@ from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
+from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.process_instance import (
    ProcessInstanceCannotBeDeletedError,
)
@@ -568,20 +569,52 @@ def process_instance_task_list(
    step_details = step_detail_query.all()
    bpmn_json = json.loads(process_instance.bpmn_json or "{}")
    tasks = bpmn_json["tasks"]
    subprocesses = bpmn_json["subprocesses"]

    # if step_detail is not None and process_instance.bpmn_json is not None:
    steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details}

    subprocesses_to_set_to_waiting = []
    for step_detail in step_details:
        if step_detail.task_id in tasks:
            # task_ids_in_use.append(step_detail.task_id)
            task_data = (
                step_detail.task_json["task_data"] | step_detail.task_json["python_env"]
            )
            if task_data is None:
                task_data = {}
            tasks[step_detail.task_id]["data"] = task_data
            tasks[step_detail.task_id]['state'] = Task.task_state_name_to_int(step_detail.task_state)
        else:
            for subprocess_id, subprocess_info in subprocesses.items():
                if step_detail.task_id in subprocess_info['tasks']:
                    task_data = (
                        step_detail.task_json["task_data"] | step_detail.task_json["python_env"]
                    )
                    if task_data is None:
                        task_data = {}
                    subprocess_info['tasks'][step_detail.task_id]["data"] = task_data
                    subprocess_info['tasks'][step_detail.task_id]['state'] = Task.task_state_name_to_int(step_detail.task_state)
                    subprocesses_to_set_to_waiting.append(subprocess_id)

    for subprocess_info in subprocesses.values():
        for spiff_task_id in subprocess_info['tasks']:
            if spiff_task_id not in steps_by_id:
                subprocess_info['tasks'][spiff_task_id]['data'] = {}
                subprocess_info['tasks'][spiff_task_id]['state'] = TaskState.FUTURE
    for spiff_task_id in tasks:
        if spiff_task_id not in steps_by_id:
            tasks[spiff_task_id]['data'] = {}
            if spiff_task_id in subprocesses_to_set_to_waiting:
                tasks[spiff_task_id]['state'] = TaskState.WAITING
            else:
                tasks[spiff_task_id]['state'] = TaskState.FUTURE

    process_instance.bpmn_json = json.dumps(bpmn_json)

    processor = ProcessInstanceProcessor(process_instance)
    spiff_task = processor.__class__.get_task_by_bpmn_identifier(step_details[-1].bpmn_task_identifier, processor.bpmn_process_instance)
    if spiff_task is not None:
        spiff_task.complete()

    spiff_tasks = None
    if all_tasks:
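In the block above, step_detail.task_json keeps the boxed task data and the script engine's python environment as separate dictionaries; the | between them is the Python 3.9+ dict union, so the data surfaced in the task list is the merge of the two with python_env winning on duplicate keys, and the state integer comes from the new Task.task_state_name_to_int helper. A stripped-down sketch with toy stand-in structures (names are illustrative only):

bpmn_json = {"tasks": {"task-1": {}}, "subprocesses": {}}
step_detail_task_json = {
    "task_data": {"x": 1},
    "python_env": {"x": 2, "y": 3},
}

merged = step_detail_task_json["task_data"] | step_detail_task_json["python_env"]
bpmn_json["tasks"]["task-1"]["data"] = merged    # {'x': 2, 'y': 3}
bpmn_json["tasks"]["task-1"]["state"] = Task.task_state_name_to_int("COMPLETED")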
@@ -606,6 +639,16 @@ def process_instance_task_list(
                processor, spiff_task, calling_subprocess_task_id=calling_subprocess_task_id
            )
            if get_task_data:
+                # if str(spiff_task.id) in steps_by_id:
+                #     spiff_step_detail = steps_by_id[str(spiff_task.id)]
+                #     task_data = (
+                #         spiff_step_detail.task_json["task_data"] | spiff_step_detail.task_json["python_env"]
+                #     )
+                #     task.data = task_data
+                #     task.state = spiff_step_detail.task_state
+                # else:
+                #     task.data = {}
+                #     task.state = TaskStateNames[TaskState.FUTURE]
                task.data = spiff_task.data
            tasks.append(task)
@@ -133,6 +133,10 @@ class ProcessInstanceLockedBySomethingElseError(Exception):
    pass


+class SpiffStepDetailIsMissingError(Exception):
+    pass


class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment):  # type: ignore
    def __init__(self, environment_globals: Dict[str, Any]):
        """BoxedTaskDataBasedScriptEngineEnvironment."""
@@ -685,8 +689,8 @@ class ProcessInstanceProcessor:
    def spiff_step_details_mapping(
        self,
        spiff_task: Optional[SpiffTask] = None,
-        start_in_seconds: Optional[float] = 0,
-        end_in_seconds: Optional[float] = 0,
+        start_in_seconds: Optional[float] = None,
+        end_in_seconds: Optional[float] = None,
    ) -> dict:
        """SaveSpiffStepDetails."""
        # bpmn_json = self.serialize()
@@ -700,6 +704,10 @@ class ProcessInstanceProcessor:
        if spiff_task is None:
            return {}

+        # it's only None when we're starting a human task (it's not complete yet)
+        if start_in_seconds is None:
+            start_in_seconds = time.time()

        task_data = default_registry.convert(spiff_task.data)
        python_env = default_registry.convert(
            self._script_engine.environment.last_result()
@@ -716,15 +724,15 @@ class ProcessInstanceProcessor:
            "spiff_step": self.process_instance_model.spiff_step or 1,
            "task_json": task_json,
            "task_id": str(spiff_task.id),
-            "task_state": spiff_task.state,
+            "task_state": spiff_task.get_state_name(),
            "bpmn_task_identifier": spiff_task.task_spec.name,
-            "engine_step_start_in_seconds": start_in_seconds,
-            "engine_step_end_in_seconds": end_in_seconds,
+            "start_in_seconds": start_in_seconds,
+            "end_in_seconds": end_in_seconds,
        }

-    def spiff_step_details(self) -> SpiffStepDetailsModel:
+    def spiff_step_details(self, spiff_task: Optional[SpiffTask] = None) -> SpiffStepDetailsModel:
        """SaveSpiffStepDetails."""
-        details_mapping = self.spiff_step_details_mapping()
+        details_mapping = self.spiff_step_details_mapping(spiff_task=spiff_task)
        details_model = SpiffStepDetailsModel(**details_mapping)
        return details_model
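The "task_state" switch in the mapping above is the heart of the fix: spiff_task.state is SpiffWorkflow's integer state value, while the task_state column (and the READY lookup in complete_task further down) expects the state name. Illustratively:

# spiff_task.state is the raw integer; get_state_name() is the label that
# spiff_step_details.task_state stores and that complete_task filters on.
spiff_task.state             # e.g. TaskState.READY, an int
spiff_task.get_state_name()  # "READY"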
@@ -934,7 +942,7 @@ class ProcessInstanceProcessor:
            potential_owner_hash = self.get_potential_owner_ids_from_task(
                ready_or_waiting_task
            )
-            extensions = ready_or_waiting_task.task_spec.extensions
+            extensions = task_spec.extensions

            form_file_name = None
            ui_form_file_name = None
@@ -965,15 +973,19 @@ class ProcessInstanceProcessor:
                    lane_assignment_id=potential_owner_hash["lane_assignment_id"],
                )
                db.session.add(human_task)
                db.session.commit()

                for potential_owner_id in potential_owner_hash[
                    "potential_owner_ids"
                ]:
                    human_task_user = HumanTaskUserModel(
-                        user_id=potential_owner_id, human_task_id=human_task.id
+                        user_id=potential_owner_id, human_task=human_task
                    )
                    db.session.add(human_task_user)

                self.increment_spiff_step()
                spiff_step_detail_mapping = self.spiff_step_details_mapping(spiff_task=ready_or_waiting_task, start_in_seconds=time.time())
                spiff_step_detail = SpiffStepDetailsModel(**spiff_step_detail_mapping)
                db.session.add(spiff_step_detail)
                db.session.commit()

        if len(human_tasks) > 0:
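With this change the processor records a spiff_step_details row the moment a human task becomes ready: spiff_step_details_mapping is called with the ready task and start_in_seconds=time.time(), and since end_in_seconds stays None the row marks work that is still in flight. complete_task (in the last hunk below) later looks up that same READY row and closes it out. A compressed sketch of the lifecycle, reusing names from the hunks themselves:

# 1. Human task becomes ready: persist a "READY" step with a start time.
mapping = processor.spiff_step_details_mapping(
    spiff_task=ready_or_waiting_task, start_in_seconds=time.time()
)  # mapping["task_state"] == "READY", mapping["end_in_seconds"] is None

# 2. User completes the task: complete_task finds that row and finishes it.
#    details_model.task_state = task.get_state_name()   # e.g. "COMPLETED"
#    details_model.end_in_seconds = time.time()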
@@ -1512,14 +1524,13 @@ class ProcessInstanceProcessor:
        tasks_to_log = {
            "BPMN Task",
            "Script Task",
-            "Service Task"
-            # "End Event",
-            # "Default Start Event",
-            # "Exclusive Gateway",
+            "Service Task",
+            "Default Start Event",
+            "Exclusive Gateway",
            # "End Join",
-            # "End Event",
-            # "Default Throwing Event",
-            # "Subprocess"
+            "End Event",
+            "Default Throwing Event",
+            "Subprocess"
        }

        # making a dictionary to ensure we are not shadowing variables in the other methods
@@ -1714,12 +1725,19 @@ class ProcessInstanceProcessor:
        self, task: SpiffTask, human_task: HumanTaskModel, user: UserModel
    ) -> None:
        """Complete_task."""
        self.increment_spiff_step()
        self.bpmn_process_instance.complete_task_from_id(task.id)
        human_task.completed_by_user_id = user.id
        human_task.completed = True
        db.session.add(human_task)
-        details_model = self.spiff_step_details()
+        details_model = SpiffStepDetailsModel.query.filter_by(process_instance_id=self.process_instance_model.id, task_id=str(task.id), task_state="READY").order_by(SpiffStepDetailsModel.id.desc()).first()
        if details_model is None:
            raise SpiffStepDetailIsMissingError(
                f"Cannot find a ready spiff_step_detail entry for process instance {self.process_instance_model.id} "
                f"and task_id is {task.id}"
            )

        details_model.task_state = task.get_state_name()
        details_model.end_in_seconds = time.time()
        db.session.add(details_model)

        # this is the thing that actually commits the db transaction (on behalf of the other updates above as well)
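The READY filter is why the state name (rather than the integer) has to be stored: only the row written when the human task became ready is still open, so filtering on task_state="READY" and taking the newest id finds exactly that row to finalize. The same query reads more easily split across lines; this is a purely reformatted equivalent, not a behavior change:

details_model = (
    SpiffStepDetailsModel.query.filter_by(
        process_instance_id=self.process_instance_model.id,
        task_id=str(task.id),
        task_state="READY",
    )
    .order_by(SpiffStepDetailsModel.id.desc())
    .first()
)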