added additional columns to spiff_step_details w/ burnettk jbirddog

This commit is contained in:
jasquat 2023-02-08 12:43:30 -05:00
parent 2dc1e00d69
commit 1d4a7a9b94
4 changed files with 57 additions and 6 deletions

View File

@ -50,6 +50,8 @@ if [[ "${1:-}" == "clean" ]]; then
docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "create database spiffworkflow_backend_local_development;"
fi
fi
elif [[ "${1:-}" == "migrate" ]]; then
tasks="$tasks migrate"
fi
tasks="$tasks upgrade"

View File

@ -0,0 +1,36 @@
"""empty message
Revision ID: e05ca5cdc312
Revises: ca9b79dde5cc
Create Date: 2023-02-08 12:21:41.722774
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'e05ca5cdc312'
down_revision = 'ca9b79dde5cc'
branch_labels = None
depends_on = None
def upgrade():
    """Add per-step task-identification and timing columns to spiff_step_details.

    Replaces the single ``timestamp`` column with an explicit
    ``engine_step_start_in_seconds`` / ``engine_step_end_in_seconds`` pair,
    and records which SpiffWorkflow task the step belongs to.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): task_id and bpmn_task_identifier are nullable=False with no
    # server_default — this ALTER will fail if spiff_step_details already
    # contains rows (strict-mode MySQL/Postgres). Confirm the table is empty at
    # deploy time, or add a default / backfill step.
    op.add_column('spiff_step_details', sa.Column('task_id', sa.String(length=50), nullable=False))
    op.add_column('spiff_step_details', sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False))
    # Start/end times are nullable: a step may be recorded before it finishes.
    op.add_column('spiff_step_details', sa.Column('engine_step_start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True))
    op.add_column('spiff_step_details', sa.Column('engine_step_end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True))
    # The old single timestamp is superseded by the start/end pair above.
    op.drop_column('spiff_step_details', 'timestamp')
    # ### end Alembic commands ###
def downgrade():
    """Revert :func:`upgrade`: restore ``timestamp``, drop the four added columns.

    NOTE(review): restoring ``timestamp`` as nullable=False with no
    server_default will fail if the table holds rows at downgrade time.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # mysql.DECIMAL ties this downgrade to the MySQL dialect; other backends
    # would need the generic sa.DECIMAL — presumably only MySQL is targeted
    # here (autogenerated against a MySQL database). TODO confirm.
    op.add_column('spiff_step_details', sa.Column('timestamp', mysql.DECIMAL(precision=17, scale=6), nullable=False))
    # Drop in reverse order of creation in upgrade().
    op.drop_column('spiff_step_details', 'engine_step_end_in_seconds')
    op.drop_column('spiff_step_details', 'engine_step_start_in_seconds')
    op.drop_column('spiff_step_details', 'bpmn_task_identifier')
    op.drop_column('spiff_step_details', 'task_id')
    # ### end Alembic commands ###

View File

@ -27,4 +27,9 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
) )
spiff_step: int = db.Column(db.Integer, nullable=False) spiff_step: int = db.Column(db.Integer, nullable=False)
task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore
timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False) task_id: str | None = db.Column(db.String(50), nullable=False)
bpmn_task_identifier: str | None = db.Column(db.String(255), nullable=False)
# timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
engine_step_start_in_seconds: float | None = db.Column(db.DECIMAL(17, 6))
engine_step_end_in_seconds: float | None = db.Column(db.DECIMAL(17, 6))

View File

@ -689,7 +689,7 @@ class ProcessInstanceProcessor:
"lane_assignment_id": lane_assignment_id, "lane_assignment_id": lane_assignment_id,
} }
def spiff_step_details_mapping(self, spiff_task: Optional[SpiffTask]=None) -> dict: def spiff_step_details_mapping(self, spiff_task: Optional[SpiffTask]=None, start_in_seconds: Optional[float] = 0, end_in_seconds: Optional[float] = 0) -> dict:
"""SaveSpiffStepDetails.""" """SaveSpiffStepDetails."""
# bpmn_json = self.serialize() # bpmn_json = self.serialize()
# wf_json = json.loads(bpmn_json) # wf_json = json.loads(bpmn_json)
@ -699,6 +699,9 @@ class ProcessInstanceProcessor:
# TODO: safer to pass in task vs use last task? # TODO: safer to pass in task vs use last task?
spiff_task = self.bpmn_process_instance.last_task spiff_task = self.bpmn_process_instance.last_task
if spiff_task is None:
return {}
task_data = default_registry.convert(spiff_task.data) task_data = default_registry.convert(spiff_task.data)
python_env = default_registry.convert(self._script_engine.environment.last_result()) python_env = default_registry.convert(self._script_engine.environment.last_result())
@ -708,13 +711,14 @@ class ProcessInstanceProcessor:
"task_data": task_data, "task_data": task_data,
"python_env": python_env, "python_env": python_env,
} }
return { return {
"process_instance_id": self.process_instance_model.id, "process_instance_id": self.process_instance_model.id,
"spiff_step": self.process_instance_model.spiff_step or 1, "spiff_step": self.process_instance_model.spiff_step or 1,
"task_json": task_json, "task_json": task_json,
"timestamp": round(time.time()), "task_id": str(spiff_task.id),
# "completed_by_user_id": self.current_user().id, "bpmn_task_identifier": spiff_task.task_spec.id,
"engine_step_start_in_seconds": start_in_seconds,
"engine_step_end_in_seconds": end_in_seconds,
} }
def spiff_step_details(self) -> SpiffStepDetailsModel: def spiff_step_details(self) -> SpiffStepDetailsModel:
@ -1517,6 +1521,9 @@ class ProcessInstanceProcessor:
# "Subprocess" # "Subprocess"
} }
# making a dictionary to ensure we are not shadowing variables in the other methods
current_task_start_in_seconds = {}
def should_log(task: SpiffTask) -> bool: def should_log(task: SpiffTask) -> bool:
if ( if (
task.task_spec.spec_type in tasks_to_log task.task_spec.spec_type in tasks_to_log
@ -1527,12 +1534,13 @@ class ProcessInstanceProcessor:
def will_complete_task(task: SpiffTask) -> None: def will_complete_task(task: SpiffTask) -> None:
if should_log(task): if should_log(task):
current_task_start_in_seconds['time'] = time.time()
self.increment_spiff_step() self.increment_spiff_step()
def did_complete_task(task: SpiffTask) -> None: def did_complete_task(task: SpiffTask) -> None:
if should_log(task): if should_log(task):
self._script_engine.environment.revise_state_with_task_data(task) self._script_engine.environment.revise_state_with_task_data(task)
step_details.append(self.spiff_step_details_mapping(task)) step_details.append(self.spiff_step_details_mapping(task, current_task_start_in_seconds['time'], time.time()))
try: try:
self.bpmn_process_instance.refresh_waiting_tasks() self.bpmn_process_instance.refresh_waiting_tasks()