Merge pull request #193 from sartography/feature/remove_spif_step_details
Feature/remove spif step details
commit a13c623053

@@ -47,7 +47,8 @@ def app() -> Flask:
def with_db_and_bpmn_file_cleanup() -> None:
"""Do it cleanly!"""
meta = db.metadata
db.session.execute(db.update(BpmnProcessModel, values={"parent_process_id": None}))
db.session.execute(db.update(BpmnProcessModel, values={"top_level_process_id": None}))
db.session.execute(db.update(BpmnProcessModel, values={"direct_parent_process_id": None}))

for table in reversed(meta.sorted_tables):
db.session.execute(table.delete())

@@ -1,8 +1,8 @@
"""empty message

Revision ID: b652c232839f
Revision ID: 0b5dd14bfbac
Revises:
Create Date: 2023-03-17 16:50:32.774216
Create Date: 2023-03-23 16:25:33.288500

"""
from alembic import op

@@ -10,7 +10,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# revision identifiers, used by Alembic.
revision = 'b652c232839f'
revision = '0b5dd14bfbac'
down_revision = None
branch_labels = None
depends_on = None

@@ -115,19 +115,22 @@ def upgrade():
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('guid', sa.String(length=36), nullable=True),
sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=False),
sa.Column('parent_process_id', sa.Integer(), nullable=True),
sa.Column('top_level_process_id', sa.Integer(), nullable=True),
sa.Column('direct_parent_process_id', sa.Integer(), nullable=True),
sa.Column('properties_json', sa.JSON(), nullable=False),
sa.Column('json_data_hash', sa.String(length=255), nullable=False),
sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ),
sa.ForeignKeyConstraint(['parent_process_id'], ['bpmn_process.id'], ),
sa.ForeignKeyConstraint(['direct_parent_process_id'], ['bpmn_process.id'], ),
sa.ForeignKeyConstraint(['top_level_process_id'], ['bpmn_process.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('guid')
)
op.create_index(op.f('ix_bpmn_process_bpmn_process_definition_id'), 'bpmn_process', ['bpmn_process_definition_id'], unique=False)
op.create_index(op.f('ix_bpmn_process_direct_parent_process_id'), 'bpmn_process', ['direct_parent_process_id'], unique=False)
op.create_index(op.f('ix_bpmn_process_json_data_hash'), 'bpmn_process', ['json_data_hash'], unique=False)
op.create_index(op.f('ix_bpmn_process_parent_process_id'), 'bpmn_process', ['parent_process_id'], unique=False)
op.create_index(op.f('ix_bpmn_process_top_level_process_id'), 'bpmn_process', ['top_level_process_id'], unique=False)
op.create_table('bpmn_process_definition_relationship',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('bpmn_process_definition_parent_id', sa.Integer(), nullable=False),

@@ -248,7 +251,6 @@ def upgrade():
sa.Column('status', sa.String(length=50), nullable=True),
sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True),
sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True),
sa.Column('spiff_step', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ),
sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ),
sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ),

@@ -344,22 +346,6 @@ def upgrade():
op.create_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), 'process_instance_queue', ['locked_at_in_seconds'], unique=False)
op.create_index(op.f('ix_process_instance_queue_locked_by'), 'process_instance_queue', ['locked_by'], unique=False)
op.create_index(op.f('ix_process_instance_queue_status'), 'process_instance_queue', ['status'], unique=False)
op.create_table('spiff_step_details',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('spiff_step', sa.Integer(), nullable=False),
sa.Column('task_json', sa.JSON(), nullable=False),
sa.Column('task_id', sa.String(length=50), nullable=False),
sa.Column('task_state', sa.String(length=50), nullable=False),
sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False),
sa.Column('delta_json', sa.JSON(), nullable=True),
sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=False),
sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('process_instance_id', 'spiff_step', name='process_instance_id_spiff_step')
)
op.create_index(op.f('ix_spiff_step_details_process_instance_id'), 'spiff_step_details', ['process_instance_id'], unique=False)
op.create_table('task',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('guid', sa.String(length=36), nullable=False),

@@ -465,8 +451,6 @@ def downgrade():
op.drop_index(op.f('ix_task_json_data_hash'), table_name='task')
op.drop_index(op.f('ix_task_bpmn_process_id'), table_name='task')
op.drop_table('task')
op.drop_index(op.f('ix_spiff_step_details_process_instance_id'), table_name='spiff_step_details')
op.drop_table('spiff_step_details')
op.drop_index(op.f('ix_process_instance_queue_status'), table_name='process_instance_queue')
op.drop_index(op.f('ix_process_instance_queue_locked_by'), table_name='process_instance_queue')
op.drop_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), table_name='process_instance_queue')

@@ -519,8 +503,9 @@ def downgrade():
op.drop_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_child_id'), table_name='bpmn_process_definition_relationship')
op.drop_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_parent_id'), table_name='bpmn_process_definition_relationship')
op.drop_table('bpmn_process_definition_relationship')
op.drop_index(op.f('ix_bpmn_process_parent_process_id'), table_name='bpmn_process')
op.drop_index(op.f('ix_bpmn_process_top_level_process_id'), table_name='bpmn_process')
op.drop_index(op.f('ix_bpmn_process_json_data_hash'), table_name='bpmn_process')
op.drop_index(op.f('ix_bpmn_process_direct_parent_process_id'), table_name='bpmn_process')
op.drop_index(op.f('ix_bpmn_process_bpmn_process_definition_id'), table_name='bpmn_process')
op.drop_table('bpmn_process')
op.drop_index(op.f('ix_user_service_id'), table_name='user')

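Note: with spiff_step_details gone from this consolidated initial revision, per-step history now lives in the task table. A minimal sketch (not part of this commit; the helper name and ordering choice are illustrative) of reading completed tasks for an instance through the new schema, assuming an application context with a live db session:

    from spiffworkflow_backend.models.db import db
    from spiffworkflow_backend.models.task import TaskModel


    def completed_task_guids(process_instance_id: int) -> list[str]:
        # completed tasks for one process instance, oldest completion first
        rows = (
            db.session.query(TaskModel)
            .filter(
                TaskModel.process_instance_id == process_instance_id,
                TaskModel.state == "COMPLETED",
            )
            .order_by(TaskModel.end_in_seconds)
            .all()
        )
        return [row.guid for row in rows]
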
@@ -901,24 +901,24 @@ paths:
description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity.
schema:
type: string
- name: all_tasks
in: query
required: false
description: If true, this will return all tasks associated with the process instance and not just user tasks.
schema:
type: boolean
- name: spiff_step
in: query
required: false
description: If set will return the tasks as they were during a specific step of execution.
schema:
type: integer
- name: most_recent_tasks_only
in: query
required: false
description: If true, this will return only the most recent tasks.
schema:
type: boolean
- name: bpmn_process_guid
in: query
required: false
description: The guid of the bpmn process to get the tasks for.
schema:
type: string
- name: to_task_guid
in: query
required: false
description: Get the tasks only up to the given guid.
schema:
type: string
get:
tags:
- Process Instances

@@ -954,24 +954,24 @@ paths:
description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity.
schema:
type: string
- name: all_tasks
in: query
required: false
description: If true, this will return all tasks associated with the process instance and not just user tasks.
schema:
type: boolean
- name: spiff_step
in: query
required: false
description: If set will return the tasks as they were during a specific step of execution.
schema:
type: integer
- name: most_recent_tasks_only
in: query
required: false
description: If true, this will return only the most recent tasks.
schema:
type: boolean
- name: bpmn_process_guid
in: query
required: false
description: The guid of the bpmn process to get the tasks for.
schema:
type: string
- name: to_task_guid
in: query
required: false
description: Get the tasks only up to the given guid.
schema:
type: string
get:
tags:
- Process Instances

@@ -1176,7 +1176,7 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"

/process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}:
/process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{to_task_guid}:
parameters:
- name: modified_process_model_identifier
in: path

@@ -1190,12 +1190,12 @@ paths:
description: The unique id of an existing process instance.
schema:
type: integer
- name: spiff_step
in: query
required: false
description: Reset the process to this state
- name: to_task_guid
in: path
required: true
description: Get the tasks only up to the given guid.
schema:
type: integer
type: string
post:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_reset
summary: Reset a process instance to an earlier step

@@ -1573,7 +1573,7 @@ paths:
items:
$ref: "#/components/schemas/Task"

/task-data/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}:
/task-data/{modified_process_model_identifier}/{process_instance_id}/{task_guid}:
parameters:
- name: modified_process_model_identifier
in: path

@@ -1587,15 +1587,15 @@ paths:
description: The unique id of an existing process instance.
schema:
type: integer
- name: spiff_step
- name: task_guid
in: path
required: true
description: If set will return the tasks as they were during a specific step of execution.
description: The unique id of the task.
schema:
type: integer
type: string
get:
operationId: spiffworkflow_backend.routes.tasks_controller.task_data_show
summary: Get task data for a single task in a spiff step.
summary: Get task data for a single task.
tags:
- Process Instances
responses:

@@ -1605,35 +1605,8 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/Task"

/task-data/{modified_process_model_identifier}/{process_instance_id}/{task_id}:
parameters:
- name: modified_process_model_identifier
in: path
required: true
description: The modified id of an existing process model
schema:
type: string
- name: process_instance_id
in: path
required: true
description: The unique id of an existing process instance.
schema:
type: integer
- name: task_id
in: path
required: true
description: The unique id of the task.
schema:
type: string
- name: spiff_step
in: query
required: false
description: If set will return the tasks as they were during a specific step of execution.
schema:
type: integer
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update
operationId: spiffworkflow_backend.routes.tasks_controller.task_data_update
summary: Update the task data for requested instance and task
tags:
- Process Instances

@@ -1738,7 +1711,7 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"

/task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_id}:
/task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_guid}:
parameters:
- name: modified_process_model_identifier
in: path

@@ -1752,14 +1725,14 @@ paths:
description: The unique id of the process instance
schema:
type: string
- name: task_id
- name: task_guid
in: path
required: true
description: The unique id of the task.
schema:
type: string
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.manual_complete_task
operationId: spiffworkflow_backend.routes.tasks_controller.manual_complete_task
summary: Mark a task complete without executing it
tags:
- Process Instances

@@ -1838,9 +1811,9 @@ paths:
schema:
$ref: "#/components/schemas/ServiceTask"

/tasks/{process_instance_id}/{task_id}:
/tasks/{process_instance_id}/{task_guid}:
parameters:
- name: task_id
- name: task_guid
in: path
required: true
description: The unique id of an existing process group.

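Note: the task-data, task-complete, and tasks paths above now key on a task guid, and the task-list endpoints accept to_task_guid, instead of a spiff_step number. A hedged client sketch using the requests library; the base URL, port, and /v1.0 prefix are assumptions and auth headers are omitted, only the path shape comes from the spec changes:

    import requests

    BASE_URL = "http://localhost:7000/v1.0"  # assumed local backend address and API prefix


    def get_task_data(modified_process_model_identifier: str, process_instance_id: int, task_guid: str) -> dict:
        # formerly .../{spiff_step}; the final path segment is now the task guid
        url = f"{BASE_URL}/task-data/{modified_process_model_identifier}/{process_instance_id}/{task_guid}"
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        return response.json()
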
@@ -41,9 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import (
) # noqa: F401
from spiffworkflow_backend.models.refresh_token import RefreshTokenModel # noqa: F401
from spiffworkflow_backend.models.secret_model import SecretModel # noqa: F401
from spiffworkflow_backend.models.spiff_step_details import (
SpiffStepDetailsModel,
) # noqa: F401
from spiffworkflow_backend.models.user import UserModel # noqa: F401
from spiffworkflow_backend.models.group import GroupModel # noqa: F401
from spiffworkflow_backend.models.process_instance_metadata import (

@@ -1,5 +1,7 @@
from __future__ import annotations

from dataclasses import dataclass

from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship

@@ -8,12 +10,17 @@ from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel


class BpmnProcessNotFoundError(Exception):
pass


# properties_json attributes:
# "last_task", # guid generated by spiff
# "root", # guid generated by spiff
# "success", # boolean
# "bpmn_messages", # if top-level process
# "correlations", # if top-level process
@dataclass
class BpmnProcessModel(SpiffworkflowBaseDBModel):
__tablename__ = "bpmn_process"
id: int = db.Column(db.Integer, primary_key=True)

@@ -24,7 +31,8 @@ class BpmnProcessModel(SpiffworkflowBaseDBModel):
)
bpmn_process_definition = relationship(BpmnProcessDefinitionModel)

parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True)
top_level_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True)
direct_parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True)

properties_json: dict = db.Column(db.JSON, nullable=False)
json_data_hash: str = db.Column(db.String(255), nullable=False, index=True)

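Note: parent_process_id, top_level_process_id, and direct_parent_process_id are all self-referential foreign keys onto bpmn_process. A sketch of walking the direct-parent chain; the helper itself is illustrative and not part of this commit:

    from typing import Optional

    from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel


    def walk_to_top_level(bpmn_process: BpmnProcessModel) -> BpmnProcessModel:
        # follow direct_parent_process_id until there is no parent left
        current = bpmn_process
        while current.direct_parent_process_id is not None:
            parent: Optional[BpmnProcessModel] = BpmnProcessModel.query.get(current.direct_parent_process_id)
            if parent is None:
                break
            current = parent
        return current
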
@@ -1,5 +1,7 @@
from __future__ import annotations

from dataclasses import dataclass

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel

@@ -10,6 +12,7 @@ from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
#
# each subprocess will have its own row in this table.
# there is a join table to link them together: bpmn_process_definition_relationship
@dataclass
class BpmnProcessDefinitionModel(SpiffworkflowBaseDBModel):
__tablename__ = "bpmn_process_definition"
id: int = db.Column(db.Integer, primary_key=True)

@@ -1,5 +1,7 @@
from __future__ import annotations

from dataclasses import dataclass

from sqlalchemy import ForeignKey
from sqlalchemy import UniqueConstraint

@@ -10,6 +12,7 @@ from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel


@dataclass
class BpmnProcessDefinitionRelationshipModel(SpiffworkflowBaseDBModel):
__tablename__ = "bpmn_process_definition_relationship"
__table_args__ = (

@@ -87,6 +87,10 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
"ProcessInstanceMetadataModel",
cascade="delete",
) # type: ignore
process_instance_queue = relationship(
"ProcessInstanceQueueModel",
cascade="delete",
) # type: ignore

start_in_seconds: int | None = db.Column(db.Integer, index=True)
end_in_seconds: int | None = db.Column(db.Integer, index=True)

@@ -96,7 +100,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):

bpmn_version_control_type: str = db.Column(db.String(50))
bpmn_version_control_identifier: str = db.Column(db.String(255))
spiff_step: int = db.Column(db.Integer)

bpmn_xml_file_contents: str | None = None
process_model_with_diagram_identifier: str | None = None

@@ -117,7 +120,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
"bpmn_xml_file_contents": self.bpmn_xml_file_contents,
"bpmn_version_control_identifier": self.bpmn_version_control_identifier,
"bpmn_version_control_type": self.bpmn_version_control_type,
"spiff_step": self.spiff_step,
"process_initiator_username": self.process_initiator.username,
}

@@ -1,37 +0,0 @@
"""Spiff_step_details."""
from dataclasses import dataclass
from typing import Union

from sqlalchemy import ForeignKey
from sqlalchemy import UniqueConstraint
from sqlalchemy.orm import deferred

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel


@dataclass
class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
"""SpiffStepDetailsModel."""

__tablename__ = "spiff_step_details"
__table_args__ = (UniqueConstraint("process_instance_id", "spiff_step", name="process_instance_id_spiff_step"),)

id: int = db.Column(db.Integer, primary_key=True)
process_instance_id: int = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False, index=True # type: ignore
)
spiff_step: int = db.Column(db.Integer, nullable=False)
task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore
task_id: str = db.Column(db.String(50), nullable=False)
task_state: str = db.Column(db.String(50), nullable=False)
bpmn_task_identifier: str = db.Column(db.String(255), nullable=False)
delta_json: list = deferred(db.Column(db.JSON)) # type: ignore

start_in_seconds: float = db.Column(db.DECIMAL(17, 6), nullable=False)

# to fix mypy in 3.9 - not sure why syntax like:
# float | None
# works in other dataclass db models
end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6))

@@ -63,9 +63,11 @@ class TaskModel(SpiffworkflowBaseDBModel):
json_data_hash: str = db.Column(db.String(255), nullable=False, index=True)
python_env_data_hash: str = db.Column(db.String(255), nullable=False, index=True)

start_in_seconds: float = db.Column(db.DECIMAL(17, 6))
start_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6))
end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6))

data: Optional[dict] = None

def python_env_data(self) -> dict:
return JsonDataModel.find_data_dict_by_hash(self.python_env_data_hash)

@@ -106,7 +108,6 @@ class Task:
event_definition: Union[dict[str, Any], None] = None,
call_activity_process_identifier: Optional[str] = None,
calling_subprocess_task_id: Optional[str] = None,
task_spiff_step: Optional[int] = None,
):
"""__init__."""
self.id = id

@@ -121,7 +122,6 @@ class Task:
self.event_definition = event_definition
self.call_activity_process_identifier = call_activity_process_identifier
self.calling_subprocess_task_id = calling_subprocess_task_id
self.task_spiff_step = task_spiff_step

self.data = data
if self.data is None:

@@ -179,7 +179,6 @@ class Task:
"event_definition": self.event_definition,
"call_activity_process_identifier": self.call_activity_process_identifier,
"calling_subprocess_task_id": self.calling_subprocess_task_id,
"task_spiff_step": self.task_spiff_step,
}

@classmethod

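Note: TaskModel keeps its payloads behind json_data_hash and python_env_data_hash rather than in per-step delta rows. A hedged sketch of reading both, mirroring what the reworked task_data_show handler does further down; the wrapper function is illustrative:

    from typing import Optional

    from spiffworkflow_backend.models.task import TaskModel


    def load_task_payloads(task_guid: str, process_instance_id: int) -> Optional[tuple[dict, dict]]:
        # returns (task data, script-engine environment) for one task, or None if it does not exist
        task_model = TaskModel.query.filter_by(guid=task_guid, process_instance_id=process_instance_id).first()
        if task_model is None:
            return None
        return task_model.json_data(), task_model.python_env_data()
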
@@ -1,5 +1,7 @@
from __future__ import annotations

from dataclasses import dataclass

from sqlalchemy import ForeignKey
from sqlalchemy import UniqueConstraint
from sqlalchemy.orm import relationship

@@ -11,6 +13,7 @@ from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel


@dataclass
class TaskDefinitionModel(SpiffworkflowBaseDBModel):
__tablename__ = "task_definition"
__table_args__ = (

@@ -16,15 +16,9 @@ from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
ProcessEntityNotFoundError,
)
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.json_data import JsonDataModel
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance import (
ProcessInstanceTaskDataCannotBeUpdatedError,
)
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.process_instance_file_data import (
ProcessInstanceFileDataModel,
)

@@ -38,7 +32,6 @@ from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.task_service import TaskService


process_api_blueprint = Blueprint("process_api", __name__)

@@ -169,60 +162,6 @@ def github_webhook_receive(body: Dict) -> Response:
return Response(json.dumps({"git_pull": result}), status=200, mimetype="application/json")


def task_data_update(
process_instance_id: str,
modified_process_model_identifier: str,
task_id: str,
body: Dict,
) -> Response:
"""Update task data."""
process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
if process_instance:
if process_instance.status != "suspended":
raise ProcessInstanceTaskDataCannotBeUpdatedError(
"The process instance needs to be suspended to update the task-data."
f" It is currently: {process_instance.status}"
)

task_model = TaskModel.query.filter_by(guid=task_id).first()
if task_model is None:
raise ApiError(
error_code="update_task_data_error",
message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.",
)

if "new_task_data" in body:
new_task_data_str: str = body["new_task_data"]
new_task_data_dict = json.loads(new_task_data_str)
json_data_dict = TaskService.update_task_data_on_task_model(
task_model, new_task_data_dict, "json_data_hash"
)
if json_data_dict is not None:
json_data = JsonDataModel(**json_data_dict)
db.session.add(json_data)
ProcessInstanceProcessor.add_event_to_process_instance(
process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_id
)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
raise ApiError(
error_code="update_task_data_error",
message=f"Could not update the Instance. Original error is {e}",
) from e
else:
raise ApiError(
error_code="update_task_data_error",
message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.",
)
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,
mimetype="application/json",
)


def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any:
"""Get_required_parameter_or_raise."""
return_value = None

@@ -263,30 +202,6 @@ def send_bpmn_event(
)


def manual_complete_task(
modified_process_model_identifier: str,
process_instance_id: str,
task_id: str,
body: Dict,
) -> Response:
"""Mark a task complete without executing it."""
execute = body.get("execute", True)
process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
if process_instance:
processor = ProcessInstanceProcessor(process_instance)
processor.manual_complete_task(task_id, execute)
else:
raise ApiError(
error_code="complete_task",
message=f"Could not complete Task {task_id} in Instance {process_instance_id}",
)
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,
mimetype="application/json",
)


def _commit_and_push_to_git(message: str) -> None:
"""Commit_and_push_to_git."""
if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE"]:

@@ -4,7 +4,6 @@ import json
from typing import Any
from typing import Dict
from typing import Optional
from uuid import UUID

import flask.wrappers
from flask import current_app

@@ -13,12 +12,12 @@ from flask import jsonify
from flask import make_response
from flask import request
from flask.wrappers import Response
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from sqlalchemy import and_
from sqlalchemy import or_
from sqlalchemy.orm import aliased

from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.human_task import HumanTaskModel

@@ -42,8 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import (
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.task import TaskModel
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from spiffworkflow_backend.models.user import UserModel

@@ -85,6 +82,7 @@ from spiffworkflow_backend.services.process_instance_service import (
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.task_service import TaskService


def process_instance_create(

@@ -449,7 +447,6 @@ def process_instance_delete(

# (Pdb) db.session.delete
# <bound method delete of <sqlalchemy.orm.scoping.scoped_session object at 0x103eaab30>>
db.session.query(SpiffStepDetailsModel).filter_by(process_instance_id=process_instance.id).delete()
db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete()
db.session.delete(process_instance)
db.session.commit()

@@ -556,142 +553,157 @@ def process_instance_report_show(
def process_instance_task_list_without_task_data_for_me(
modified_process_model_identifier: str,
process_instance_id: int,
all_tasks: bool = False,
spiff_step: int = 0,
most_recent_tasks_only: bool = False,
bpmn_process_guid: Optional[str] = None,
to_task_guid: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_task_list_without_task_data_for_me."""
process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
return process_instance_task_list(
modified_process_model_identifier,
process_instance,
all_tasks,
spiff_step,
most_recent_tasks_only,
_modified_process_model_identifier=modified_process_model_identifier,
process_instance=process_instance,
most_recent_tasks_only=most_recent_tasks_only,
bpmn_process_guid=bpmn_process_guid,
to_task_guid=to_task_guid,
)


def process_instance_task_list_without_task_data(
modified_process_model_identifier: str,
process_instance_id: int,
all_tasks: bool = False,
spiff_step: int = 0,
most_recent_tasks_only: bool = False,
bpmn_process_guid: Optional[str] = None,
to_task_guid: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_task_list_without_task_data."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
return process_instance_task_list(
modified_process_model_identifier,
process_instance,
all_tasks,
spiff_step,
most_recent_tasks_only,
_modified_process_model_identifier=modified_process_model_identifier,
process_instance=process_instance,
most_recent_tasks_only=most_recent_tasks_only,
bpmn_process_guid=bpmn_process_guid,
to_task_guid=to_task_guid,
)


def process_instance_task_list(
_modified_process_model_identifier: str,
process_instance: ProcessInstanceModel,
all_tasks: bool = False,
spiff_step: int = 0,
bpmn_process_guid: Optional[str] = None,
to_task_guid: Optional[str] = None,
most_recent_tasks_only: bool = False,
) -> flask.wrappers.Response:
"""Process_instance_task_list."""
step_detail_query = db.session.query(SpiffStepDetailsModel).filter(
SpiffStepDetailsModel.process_instance_id == process_instance.id,
bpmn_process_ids = []
if bpmn_process_guid:
bpmn_process = BpmnProcessModel.query.filter_by(guid=bpmn_process_guid).first()
bpmn_processes = TaskService.bpmn_process_and_descendants([bpmn_process])
bpmn_process_ids = [p.id for p in bpmn_processes]

task_model_query = db.session.query(TaskModel).filter(
TaskModel.process_instance_id == process_instance.id,
)

if spiff_step > 0:
step_detail_query = step_detail_query.filter(SpiffStepDetailsModel.spiff_step <= spiff_step)

step_details = step_detail_query.all()

processor = ProcessInstanceProcessor(process_instance)
full_bpmn_process_dict = processor.full_bpmn_process_dict
tasks = full_bpmn_process_dict["tasks"]
subprocesses = full_bpmn_process_dict["subprocesses"]

steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details}

def restore_task(spiff_task: dict[str, Any], step_ended: float) -> None:
if spiff_task["last_state_change"] > step_ended:
spiff_task["state"] = Task.task_state_name_to_int("FUTURE")
spiff_task["data"] = {}

if spiff_step > 0:
last_change = step_details[-1].end_in_seconds or 0
for spiff_task in tasks.values():
restore_task(spiff_task, last_change)
for subprocess in subprocesses.values():
for spiff_task in subprocess["tasks"].values():
restore_task(spiff_task, last_change)

bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict)
if spiff_step > 0:
bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id))
for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items():
if not subprocess.is_completed():
task = bpmn_process_instance.get_task(subprocess_id)
task._set_state(TaskState.WAITING)

spiff_tasks = None
if all_tasks:
spiff_tasks = bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
else:
spiff_tasks = processor.get_all_user_tasks()

(
subprocesses_by_child_task_ids,
task_typename_by_task_id,
) = processor.get_subprocesses_by_child_task_ids()
processor.get_highest_level_calling_subprocesses_by_child_task_ids(
subprocesses_by_child_task_ids, task_typename_by_task_id
to_task_model: Optional[TaskModel] = None
task_models_of_parent_bpmn_processes_guids: list[str] = []
if to_task_guid is not None:
to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first()
if to_task_model is None:
raise ApiError(
error_code="task_not_found",
message=f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'",
status_code=400,
)

spiff_tasks_to_process = spiff_tasks
if most_recent_tasks_only:
spiff_tasks_by_process_id_and_task_name: dict[str, SpiffTask] = {}
current_tasks = {}
for spiff_task in spiff_tasks_to_process:
row_id = f"{spiff_task.task_spec._wf_spec.name}:{spiff_task.task_spec.name}"
if spiff_task.state in [TaskState.READY, TaskState.WAITING]:
current_tasks[row_id] = spiff_task
if (
row_id not in spiff_tasks_by_process_id_and_task_name
or spiff_task.state > spiff_tasks_by_process_id_and_task_name[row_id].state
):
spiff_tasks_by_process_id_and_task_name[row_id] = spiff_task
spiff_tasks_by_process_id_and_task_name.update(current_tasks)
spiff_tasks_to_process = spiff_tasks_by_process_id_and_task_name.values()

response = []
for spiff_task in spiff_tasks_to_process:
task_spiff_step: Optional[int] = None
if str(spiff_task.id) in steps_by_id:
task_spiff_step = steps_by_id[str(spiff_task.id)].spiff_step
calling_subprocess_task_id = subprocesses_by_child_task_ids.get(str(spiff_task.id), None)
task = ProcessInstanceService.spiff_task_to_api_task(
processor,
spiff_task,
calling_subprocess_task_id=calling_subprocess_task_id,
task_spiff_step=task_spiff_step,
if to_task_model.state != "COMPLETED":
# TODO: find a better term for viewing at task state
raise ApiError(
error_code="task_cannot_be_viewed_at",
message=(
f"Desired task with guid '{to_task_guid}' for process instance '{process_instance.id}' was never"
" completed and therefore cannot be viewed at."
),
status_code=400,
)
if task.state in ["MAYBE", "LIKELY"]:
task.state = "FUTURE"
response.append(task)

return make_response(jsonify(response), 200)
_parent_bpmn_processes, task_models_of_parent_bpmn_processes = (
TaskService.task_models_of_parent_bpmn_processes(to_task_model)
)
task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid]
task_model_query = task_model_query.filter(
or_(
TaskModel.end_in_seconds <= to_task_model.end_in_seconds, # type: ignore
TaskModel.guid.in_(task_models_of_parent_bpmn_processes_guids), # type: ignore
)
)

bpmn_process_alias = aliased(BpmnProcessModel)
direct_parent_bpmn_process_alias = aliased(BpmnProcessModel)
direct_parent_bpmn_process_definition_alias = aliased(BpmnProcessDefinitionModel)

task_model_query = (
task_model_query.order_by(TaskModel.id.desc()) # type: ignore
.join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id)
.join(bpmn_process_alias, bpmn_process_alias.id == TaskModel.bpmn_process_id)
.outerjoin(
direct_parent_bpmn_process_alias,
direct_parent_bpmn_process_alias.id == bpmn_process_alias.direct_parent_process_id,
)
.outerjoin(
direct_parent_bpmn_process_definition_alias,
direct_parent_bpmn_process_definition_alias.id
== direct_parent_bpmn_process_alias.bpmn_process_definition_id,
)
.join(
BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id
)
.add_columns(
BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore
BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"), # type: ignore
direct_parent_bpmn_process_alias.guid.label("bpmn_process_direct_parent_guid"),
direct_parent_bpmn_process_definition_alias.bpmn_identifier.label(
"bpmn_process_direct_parent_bpmn_identifier"
),
TaskDefinitionModel.bpmn_identifier,
TaskDefinitionModel.bpmn_name,
TaskDefinitionModel.typename,
TaskDefinitionModel.properties_json.label("task_definition_properties_json"), # type: ignore
TaskModel.guid,
TaskModel.state,
TaskModel.end_in_seconds,
TaskModel.start_in_seconds,
)
)

if len(bpmn_process_ids) > 0:
task_model_query = task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids))

task_models = task_model_query.all()
if to_task_model is not None:
task_models_dict = json.loads(current_app.json.dumps(task_models))
for task_model in task_models_dict:
end_in_seconds = float(task_model["end_in_seconds"]) if task_model["end_in_seconds"] is not None else None
if to_task_model.guid == task_model["guid"] and task_model["state"] == "COMPLETED":
TaskService.reset_task_model_dict(task_model, state="READY")
elif (
end_in_seconds is None
or to_task_model.end_in_seconds is None
or to_task_model.end_in_seconds < end_in_seconds
) and task_model["guid"] in task_models_of_parent_bpmn_processes_guids:
TaskService.reset_task_model_dict(task_model, state="WAITING")
return make_response(jsonify(task_models_dict), 200)

return make_response(jsonify(task_models), 200)


def process_instance_reset(
process_instance_id: int,
modified_process_model_identifier: str,
spiff_step: int = 0,
to_task_guid: str,
) -> flask.wrappers.Response:
"""Reset a process instance to a particular step."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
processor = ProcessInstanceProcessor(process_instance)
processor.reset_process(spiff_step)
ProcessInstanceProcessor.reset_process(process_instance, to_task_guid, commit=True)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

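Note: process_instance_reset now takes the guid of the task to rewind to and delegates to ProcessInstanceProcessor.reset_process(process_instance, to_task_guid, commit=True). A hedged client-side sketch of the corresponding endpoint; the base URL and /v1.0 prefix are the same assumptions as in the api.yml note above, and auth is omitted:

    import requests

    BASE_URL = "http://localhost:7000/v1.0"  # assumed local backend address and API prefix


    def reset_to_task(modified_process_model_identifier: str, process_instance_id: int, to_task_guid: str) -> None:
        # POST /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{to_task_guid}
        url = f"{BASE_URL}/process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{to_task_guid}"
        requests.post(url, timeout=10).raise_for_status()
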
@@ -102,7 +102,6 @@ def script_unit_test_run(
"""Script_unit_test_run."""
# FIXME: We should probably clear this somewhere else but this works
current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
current_app.config["THREAD_LOCAL_DATA"].spiff_step = None

python_script = _get_required_parameter_or_raise("python_script", body)
input_json = _get_required_parameter_or_raise("input_json", body)

@@ -34,10 +34,15 @@ from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance import (
ProcessInstanceTaskDataCannotBeUpdatedError,
)
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import (
_find_principal_or_raise,

@@ -56,6 +61,7 @@ from spiffworkflow_backend.services.process_instance_service import (
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.task_service import TaskService


class TaskDataSelectOption(TypedDict):

@@ -169,58 +175,99 @@ def task_list_for_my_groups(
def task_data_show(
modified_process_model_identifier: str,
process_instance_id: int,
spiff_step: int = 0,
task_guid: int = 0,
) -> flask.wrappers.Response:
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
step_detail = (
db.session.query(SpiffStepDetailsModel)
.filter(
SpiffStepDetailsModel.process_instance_id == process_instance.id,
SpiffStepDetailsModel.spiff_step == spiff_step,
)
.first()
)

if step_detail is None:
task_model = TaskModel.query.filter_by(guid=task_guid, process_instance_id=process_instance_id).first()
if task_model is None:
raise ApiError(
error_code="spiff_step_for_proces_instance_not_found",
message="The given spiff step for the given process instance could not be found.",
error_code="task_not_found",
message=f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'",
status_code=400,
)
task_model.data = task_model.json_data()
return make_response(jsonify(task_model), 200)


def task_data_update(
process_instance_id: str,
modified_process_model_identifier: str,
task_guid: str,
body: Dict,
) -> Response:
"""Update task data."""
process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
if process_instance:
if process_instance.status != "suspended":
raise ProcessInstanceTaskDataCannotBeUpdatedError(
"The process instance needs to be suspended to update the task-data."
f" It is currently: {process_instance.status}"
)

task_model = TaskModel.query.filter_by(guid=task_guid).first()
if task_model is None:
raise ApiError(
error_code="update_task_data_error",
message=f"Could not find Task: {task_guid} in Instance: {process_instance_id}.",
)

if "new_task_data" in body:
new_task_data_str: str = body["new_task_data"]
new_task_data_dict = json.loads(new_task_data_str)
json_data_dict = TaskService.update_task_data_on_task_model(
task_model, new_task_data_dict, "json_data_hash"
)
if json_data_dict is not None:
TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict})
# json_data = JsonDataModel(**json_data_dict)
# db.session.add(json_data)
ProcessInstanceProcessor.add_event_to_process_instance(
process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid
)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
raise ApiError(
error_code="update_task_data_error",
message=f"Could not update the Instance. Original error is {e}",
) from e
else:
raise ApiError(
error_code="update_task_data_error",
message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_guid}.",
)
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,
mimetype="application/json",
)


def manual_complete_task(
modified_process_model_identifier: str,
process_instance_id: str,
task_guid: str,
body: Dict,
) -> Response:
"""Mark a task complete without executing it."""
execute = body.get("execute", True)
process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
if process_instance:
processor = ProcessInstanceProcessor(process_instance)
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
step_detail.bpmn_task_identifier, processor.bpmn_process_instance
processor.manual_complete_task(task_guid, execute)
else:
raise ApiError(
error_code="complete_task",
message=f"Could not complete Task {task_guid} in Instance {process_instance_id}",
)
task_data = step_detail.task_json["task_data"] | step_detail.task_json["python_env"]
task = ProcessInstanceService.spiff_task_to_api_task(
processor,
spiff_task,
task_spiff_step=spiff_step,
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,
mimetype="application/json",
)
task.data = task_data

return make_response(jsonify(task), 200)


def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None:
if task.form_ui_schema is None:
task.form_ui_schema = {}

if task.data and "form_ui_hidden_fields" in task.data:
hidden_fields = task.data["form_ui_hidden_fields"]
for hidden_field in hidden_fields:
hidden_field_parts = hidden_field.split(".")
relevant_depth_of_ui_schema = task.form_ui_schema
for ii, hidden_field_part in enumerate(hidden_field_parts):
if hidden_field_part not in relevant_depth_of_ui_schema:
relevant_depth_of_ui_schema[hidden_field_part] = {}
relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part]
if len(hidden_field_parts) == ii + 1:
relevant_depth_of_ui_schema["ui:widget"] = "hidden"


def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response:
def task_show(process_instance_id: int, task_guid: str) -> flask.wrappers.Response:
"""Task_show."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

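Note: task_data_update above still expects new_task_data as a JSON-encoded string in the request body and only runs against a suspended process instance. A hedged example payload for the PUT endpoint; the URL, identifiers, and values are placeholders:

    import json

    import requests

    # "new_task_data" must be a JSON-encoded string; the handler json.loads it itself
    payload = {"new_task_data": json.dumps({"approved": True, "reviewer": "alice"})}
    url = (
        "http://localhost:7000/v1.0/task-data/"  # assumed base URL and prefix
        "example-group:example-model/42/0a1b2c3d-0000-4000-8000-000000000000"  # placeholder identifiers
    )
    requests.put(url, json=payload, timeout=10).raise_for_status()
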
@@ -235,12 +282,12 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
process_instance.process_model_identifier,
)

_find_human_task_or_raise(process_instance_id, task_id)
_find_human_task_or_raise(process_instance_id, task_guid)

form_schema_file_name = ""
form_ui_schema_file_name = ""
processor = ProcessInstanceProcessor(process_instance)
spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance, processor=processor)
spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor)
extensions = spiff_task.task_spec.extensions

if "properties" in extensions:

@@ -273,7 +320,8 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
ApiError(
error_code="missing_form_file",
message=(
f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}"
f"Cannot find a form file for process_instance_id: {process_instance_id}, task_guid:"
f" {task_guid}"
),
status_code=400,
)

@@ -340,7 +388,7 @@ def process_data_show(

def task_submit_shared(
process_instance_id: int,
task_id: str,
task_guid: str,
body: Dict[str, Any],
terminate_loop: bool = False,
) -> flask.wrappers.Response:

@@ -357,7 +405,7 @@ def task_submit_shared(
)

processor = ProcessInstanceProcessor(process_instance)
spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance, processor=processor)
spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor)
AuthorizationService.assert_user_can_complete_spiff_task(process_instance.id, spiff_task, principal.user)

if spiff_task.state != TaskState.READY:

@@ -374,7 +422,7 @@ def task_submit_shared(

human_task = _find_human_task_or_raise(
process_instance_id=process_instance_id,
task_id=task_id,
task_guid=task_guid,
only_tasks_that_can_be_completed=True,
)

@@ -419,13 +467,13 @@ def task_submit_shared(

def task_submit(
process_instance_id: int,
task_id: str,
task_guid: str,
body: Dict[str, Any],
terminate_loop: bool = False,
) -> flask.wrappers.Response:
"""Task_submit_user_data."""
with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"):
return task_submit_shared(process_instance_id, task_id, body, terminate_loop)
return task_submit_shared(process_instance_id, task_guid, body, terminate_loop)


def _get_tasks(

@@ -580,14 +628,14 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) ->


def _get_spiff_task_from_process_instance(
task_id: str,
task_guid: str,
process_instance: ProcessInstanceModel,
processor: Union[ProcessInstanceProcessor, None] = None,
) -> SpiffTask:
"""Get_spiff_task_from_process_instance."""
if processor is None:
processor = ProcessInstanceProcessor(process_instance)
task_uuid = uuid.UUID(task_id)
task_uuid = uuid.UUID(task_guid)
spiff_task = processor.bpmn_process_instance.get_task(task_uuid)

if spiff_task is None:

@@ -679,15 +727,15 @@ def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any:

def _find_human_task_or_raise(
process_instance_id: int,
task_id: str,
task_guid: str,
only_tasks_that_can_be_completed: bool = False,
) -> HumanTaskModel:
if only_tasks_that_can_be_completed:
human_task_query = HumanTaskModel.query.filter_by(
process_instance_id=process_instance_id, task_id=task_id, completed=False
process_instance_id=process_instance_id, task_id=task_guid, completed=False
)
else:
human_task_query = HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, task_id=task_id)
human_task_query = HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, task_id=task_guid)

human_task: HumanTaskModel = human_task_query.first()
if human_task is None:

@@ -695,10 +743,27 @@ def _find_human_task_or_raise(
ApiError(
error_code="no_human_task",
message=(
f"Cannot find a task to complete for task id '{task_id}' and"
f"Cannot find a task to complete for task id '{task_guid}' and"
f" process instance {process_instance_id}."
),
status_code=500,
)
)
return human_task


def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None:
if task.form_ui_schema is None:
task.form_ui_schema = {}

if task.data and "form_ui_hidden_fields" in task.data:
hidden_fields = task.data["form_ui_hidden_fields"]
for hidden_field in hidden_fields:
hidden_field_parts = hidden_field.split(".")
relevant_depth_of_ui_schema = task.form_ui_schema
for ii, hidden_field_part in enumerate(hidden_field_parts):
if hidden_field_part not in relevant_depth_of_ui_schema:
relevant_depth_of_ui_schema[hidden_field_part] = {}
relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part]
if len(hidden_field_parts) == ii + 1:
relevant_depth_of_ui_schema["ui:widget"] = "hidden"

@@ -9,7 +9,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.scripts.script import Script


@@ -43,14 +42,6 @@ class DeleteProcessInstancesWithCriteria(Script):
rows_affected = len(results)

if rows_affected > 0:
ids_to_delete = list(map(lambda r: r.id, results)) # type: ignore

step_details = SpiffStepDetailsModel.query.filter(
SpiffStepDetailsModel.process_instance_id.in_(ids_to_delete) # type: ignore
).all()

for deletion in step_details:
db.session.delete(deletion)
for deletion in results:
db.session.delete(deletion)
db.session.commit()

@ -6,7 +6,6 @@ import sys
|
|||
from typing import Any
|
||||
from typing import Optional
|
||||
|
||||
from flask import g
|
||||
from flask.app import Flask
|
||||
|
||||
|
||||
|
@ -88,28 +87,6 @@ class JsonFormatter(logging.Formatter):
|
|||
return json.dumps(message_dict, default=str)
|
||||
|
||||
|
||||
class SpiffFilter(logging.Filter):
    """SpiffFilter."""

    def __init__(self, app: Flask):
        """__init__."""
        self.app = app
        super().__init__()

    def filter(self, record: logging.LogRecord) -> bool:
        """Filter."""
        tld = self.app.config["THREAD_LOCAL_DATA"]
        process_instance_id = ""
        if hasattr(tld, "process_instance_id"):
            process_instance_id = tld.process_instance_id
        setattr(record, "process_instance_id", process_instance_id)  # noqa: B010
        if hasattr(tld, "spiff_step"):
            setattr(record, "spiff_step", tld.spiff_step)  # noqa: B010
        if hasattr(g, "user") and g.user:
            setattr(record, "current_user_id", g.user.id)  # noqa: B010
        return True
|
||||
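A hedged sketch of how a logging.Filter like the SpiffFilter above is typically attached; the handler, format string, and logger name here are illustrative and not taken from this PR:

import logging

# Assumes `app` is the Flask app and SpiffFilter is the class shown above.
spiff_logger = logging.getLogger("spiff")
handler = logging.StreamHandler()
handler.addFilter(SpiffFilter(app))  # every record now gets process_instance_id set
handler.setFormatter(logging.Formatter("%(levelname)s %(process_instance_id)s %(message)s"))
spiff_logger.addHandler(handler)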
|
||||
|
||||
def setup_logger(app: Flask) -> None:
|
||||
"""Setup_logger."""
|
||||
upper_log_level_string = app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"].upper()
|
||||
|
|
|
@ -39,7 +39,6 @@ from SpiffWorkflow.bpmn.serializer.task_spec import ( # type: ignore
|
|||
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore
|
||||
from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore
|
||||
from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent # type: ignore
|
||||
from SpiffWorkflow.bpmn.specs.events.event_definitions import CancelEventDefinition # type: ignore
|
||||
from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent # type: ignore
|
||||
from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ignore
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
|
||||
|
@ -84,16 +83,13 @@ from spiffworkflow_backend.models.script_attributes_context import (
|
|||
ScriptAttributesContext,
|
||||
)
|
||||
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
|
||||
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
|
||||
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
|
||||
from spiffworkflow_backend.models.task import TaskModel
|
||||
from spiffworkflow_backend.models.task import TaskNotFoundError
|
||||
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
from spiffworkflow_backend.scripts.script import Script
|
||||
from spiffworkflow_backend.services.custom_parser import MyCustomParser
|
||||
from spiffworkflow_backend.services.file_system_service import FileSystemService
|
||||
from spiffworkflow_backend.services.process_instance_lock_service import (
|
||||
ProcessInstanceLockService,
|
||||
)
|
||||
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError
|
||||
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
|
||||
from spiffworkflow_backend.services.process_model_service import ProcessModelService
|
||||
|
@ -104,9 +100,6 @@ from spiffworkflow_backend.services.user_service import UserService
|
|||
from spiffworkflow_backend.services.workflow_execution_service import (
|
||||
execution_strategy_named,
|
||||
)
|
||||
from spiffworkflow_backend.services.workflow_execution_service import (
|
||||
StepDetailLoggingDelegate,
|
||||
)
|
||||
from spiffworkflow_backend.services.workflow_execution_service import (
|
||||
TaskModelSavingDelegate,
|
||||
)
|
||||
|
@ -150,14 +143,6 @@ class MissingProcessInfoError(Exception):
|
|||
"""MissingProcessInfoError."""
|
||||
|
||||
|
||||
class SpiffStepDetailIsMissingError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class TaskNotFoundError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # type: ignore
|
||||
def __init__(self, environment_globals: Dict[str, Any]):
|
||||
"""BoxedTaskDataBasedScriptEngineEnvironment."""
|
||||
|
@ -436,7 +421,6 @@ class ProcessInstanceProcessor:
|
|||
"""Create a Workflow Processor based on the serialized information available in the process_instance model."""
|
||||
tld = current_app.config["THREAD_LOCAL_DATA"]
|
||||
tld.process_instance_id = process_instance_model.id
|
||||
tld.spiff_step = process_instance_model.spiff_step
|
||||
|
||||
# we want this to be the fully qualified path to the process model including all group subcomponents
|
||||
current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
|
||||
|
@ -687,7 +671,7 @@ class ProcessInstanceProcessor:
|
|||
single_bpmn_process_dict = cls._get_bpmn_process_dict(bpmn_process, get_tasks=True)
|
||||
spiff_bpmn_process_dict.update(single_bpmn_process_dict)
|
||||
|
||||
bpmn_subprocesses = BpmnProcessModel.query.filter_by(parent_process_id=bpmn_process.id).all()
|
||||
bpmn_subprocesses = BpmnProcessModel.query.filter_by(top_level_process_id=bpmn_process.id).all()
|
||||
bpmn_subprocess_id_to_guid_mappings = {}
|
||||
for bpmn_subprocess in bpmn_subprocesses:
|
||||
bpmn_subprocess_id_to_guid_mappings[bpmn_subprocess.id] = bpmn_subprocess.guid
|
||||
|
@ -817,37 +801,6 @@ class ProcessInstanceProcessor:
|
|||
"lane_assignment_id": lane_assignment_id,
|
||||
}
|
||||
|
||||
def spiff_step_details_mapping(
|
||||
self,
|
||||
spiff_task: Optional[SpiffTask] = None,
|
||||
start_in_seconds: Optional[float] = None,
|
||||
end_in_seconds: Optional[float] = None,
|
||||
) -> dict:
|
||||
"""SaveSpiffStepDetails."""
|
||||
if spiff_task is None:
|
||||
# TODO: safer to pass in task vs use last task?
|
||||
spiff_task = self.bpmn_process_instance.last_task
|
||||
|
||||
if spiff_task is None:
|
||||
return {}
|
||||
|
||||
# it's only None when we're starting a human task (it's not complete yet)
|
||||
if start_in_seconds is None:
|
||||
start_in_seconds = time.time()
|
||||
|
||||
task_json = self.get_task_dict_from_spiff_task(spiff_task)
|
||||
|
||||
return {
|
||||
"process_instance_id": self.process_instance_model.id,
|
||||
"spiff_step": self.process_instance_model.spiff_step or 1,
|
||||
"task_json": task_json,
|
||||
"task_id": str(spiff_task.id),
|
||||
"task_state": spiff_task.get_state_name(),
|
||||
"bpmn_task_identifier": spiff_task.task_spec.name,
|
||||
"start_in_seconds": start_in_seconds,
|
||||
"end_in_seconds": end_in_seconds,
|
||||
}
|
||||
|
||||
def extract_metadata(self, process_model_info: ProcessModelInfo) -> None:
|
||||
"""Extract_metadata."""
|
||||
metadata_extraction_paths = process_model_info.metadata_extraction_paths
|
||||
|
@ -1185,14 +1138,7 @@ class ProcessInstanceProcessor:
|
|||
human_task_user = HumanTaskUserModel(user_id=potential_owner_id, human_task=human_task)
|
||||
db.session.add(human_task_user)
|
||||
|
||||
self.increment_spiff_step()
|
||||
spiff_step_detail_mapping = self.spiff_step_details_mapping(
|
||||
spiff_task=ready_or_waiting_task, start_in_seconds=time.time()
|
||||
)
|
||||
spiff_step_detail = SpiffStepDetailsModel(**spiff_step_detail_mapping)
|
||||
db.session.add(spiff_step_detail)
|
||||
db.session.commit()
|
||||
# self.log_spiff_step_details(spiff_step_detail_mapping)
|
||||
|
||||
if len(human_tasks) > 0:
|
||||
for at in human_tasks:
|
||||
|
@ -1223,18 +1169,10 @@ class ProcessInstanceProcessor:
|
|||
# TODO: do_engine_steps without a lock
|
||||
self.do_engine_steps(save=True)
|
||||
|
||||
def add_step(self, step: Union[dict, None] = None) -> None:
|
||||
"""Add a spiff step."""
|
||||
if step is None:
|
||||
step = self.spiff_step_details_mapping()
|
||||
spiff_step_detail = SpiffStepDetailsModel(**step)
|
||||
db.session.add(spiff_step_detail)
|
||||
db.session.commit()
|
||||
# self.log_spiff_step_details(step)
|
||||
|
||||
def manual_complete_task(self, task_id: str, execute: bool) -> None:
|
||||
"""Mark the task complete optionally executing it."""
|
||||
spiff_tasks_updated = {}
|
||||
start_in_seconds = time.time()
|
||||
spiff_task = self.bpmn_process_instance.get_task(UUID(task_id))
|
||||
event_type = ProcessInstanceEventType.task_skipped.value
|
||||
if execute:
|
||||
|
@ -1267,6 +1205,8 @@ class ProcessInstanceProcessor:
|
|||
spiff_task.workflow.last_task = spiff_task
|
||||
spiff_tasks_updated[spiff_task.id] = spiff_task
|
||||
|
||||
end_in_seconds = time.time()
|
||||
|
||||
if isinstance(spiff_task.task_spec, EndEvent):
|
||||
for task in self.bpmn_process_instance.get_tasks(TaskState.DEFINITE_MASK, workflow=spiff_task.workflow):
|
||||
task.complete()
|
||||
|
@ -1279,9 +1219,6 @@ class ProcessInstanceProcessor:
|
|||
task.complete()
|
||||
spiff_tasks_updated[task.id] = task
|
||||
|
||||
self.increment_spiff_step()
|
||||
self.add_step()
|
||||
|
||||
for updated_spiff_task in spiff_tasks_updated.values():
|
||||
bpmn_process, task_model, new_task_models, new_json_data_dicts = (
|
||||
TaskService.find_or_create_task_model_from_spiff_task(
|
||||
|
@ -1303,6 +1240,11 @@ class ProcessInstanceProcessor:
|
|||
if bpmn_process_json_data is not None:
|
||||
new_json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data
|
||||
|
||||
# spiff_task should be the main task we are completing and only it should get the timestamps
|
||||
if task_model.guid == str(spiff_task.id):
|
||||
task_model.start_in_seconds = start_in_seconds
|
||||
task_model.end_in_seconds = end_in_seconds
|
||||
|
||||
new_task_models[task_model.guid] = task_model
|
||||
db.session.bulk_save_objects(new_task_models.values())
|
||||
TaskService.insert_or_update_json_data_records(new_json_data_dicts)
|
||||
|
@ -1312,48 +1254,118 @@ class ProcessInstanceProcessor:
|
|||
# Saving the workflow seems to reset the status
|
||||
self.suspend()
|
||||
|
||||
def reset_process(self, spiff_step: int) -> None:
|
||||
# FIXME: this currently cannot work for multi-instance tasks and loopback. It can somewhat work for other tasks
|
||||
# if we can properly handle resetting children tasks. Right now if we set them all to FUTURE then
|
||||
# they never get picked up by spiff and processed. The process instance just stops after the to_task_guid
|
||||
# and marks itself complete without processing any of the children.
|
||||
@classmethod
|
||||
def reset_process(
|
||||
cls, process_instance: ProcessInstanceModel, to_task_guid: str, commit: Optional[bool] = False
|
||||
) -> None:
|
||||
"""Reset a process to an earlier state."""
|
||||
spiff_logger = logging.getLogger("spiff")
|
||||
spiff_logger.info(
|
||||
f"Process reset from step {spiff_step}",
|
||||
extra=self.bpmn_process_instance.log_info(),
|
||||
)
|
||||
|
||||
step_detail = (
|
||||
db.session.query(SpiffStepDetailsModel)
|
||||
.filter(
|
||||
SpiffStepDetailsModel.process_instance_id == self.process_instance_model.id,
|
||||
SpiffStepDetailsModel.spiff_step == spiff_step,
|
||||
)
|
||||
.first()
|
||||
)
|
||||
if step_detail is not None:
|
||||
self.increment_spiff_step()
|
||||
self.add_step(
|
||||
{
|
||||
"process_instance_id": self.process_instance_model.id,
|
||||
"spiff_step": self.process_instance_model.spiff_step or 1,
|
||||
"task_json": step_detail.task_json,
|
||||
"timestamp": round(time.time()),
|
||||
}
|
||||
)
|
||||
|
||||
dct = self._serializer.workflow_to_dict(self.bpmn_process_instance)
|
||||
dct["tasks"] = step_detail.task_json["tasks"]
|
||||
dct["subprocesses"] = step_detail.task_json["subprocesses"]
|
||||
self.bpmn_process_instance = self._serializer.workflow_from_dict(dct)
|
||||
|
||||
# Cascade does not seem to work on filters, only directly through the session
|
||||
tasks = self.bpmn_process_instance.get_tasks(TaskState.NOT_FINISHED_MASK)
|
||||
rows = HumanTaskModel.query.filter(
|
||||
HumanTaskModel.task_id.in_(str(t.id) for t in tasks) # type: ignore
|
||||
).all()
|
||||
for row in rows:
|
||||
db.session.delete(row)
|
||||
|
||||
self.save()
|
||||
self.suspend()
|
||||
raise Exception("This feature to reset a process instance to a given task is currently unavaiable")
|
||||
# cls.add_event_to_process_instance(
|
||||
# process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid
|
||||
# )
|
||||
#
|
||||
# to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first()
|
||||
# if to_task_model is None:
|
||||
# raise TaskNotFoundError(
|
||||
# f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'"
|
||||
# )
|
||||
#
|
||||
# parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes(
|
||||
# to_task_model
|
||||
# )
|
||||
# [p.guid for p in task_models_of_parent_bpmn_processes if p.guid]
|
||||
# [p.id for p in parent_bpmn_processes]
|
||||
# tasks_to_update_query = db.session.query(TaskModel).filter(
|
||||
# and_(
|
||||
# or_(
|
||||
# TaskModel.end_in_seconds > to_task_model.end_in_seconds,
|
||||
# TaskModel.end_in_seconds.is_(None), # type: ignore
|
||||
# ),
|
||||
# TaskModel.process_instance_id == process_instance.id,
|
||||
# # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore
|
||||
# )
|
||||
# )
|
||||
# tasks_to_update = tasks_to_update_query.all()
|
||||
#
|
||||
# # run all queries before making changes to task_model
|
||||
# if commit:
|
||||
# # tasks_to_delete_query = db.session.query(TaskModel).filter(
|
||||
# # and_(
|
||||
# # or_(
|
||||
# # TaskModel.end_in_seconds > to_task_model.end_in_seconds,
|
||||
# # TaskModel.end_in_seconds.is_not(None), # type: ignore
|
||||
# # ),
|
||||
# # TaskModel.process_instance_id == process_instance.id,
|
||||
# # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore
|
||||
# # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore
|
||||
# # )
|
||||
# # )
|
||||
# #
|
||||
# # tasks_to_delete = tasks_to_delete_query.all()
|
||||
# #
|
||||
# # # delete any later tasks from to_task_model and delete bpmn processes that may be
|
||||
# # # linked directly to one of those tasks.
|
||||
# # tasks_to_delete_guids = [t.guid for t in tasks_to_delete]
|
||||
# # tasks_to_delete_ids = [t.id for t in tasks_to_delete]
|
||||
# # bpmn_processes_to_delete = BpmnProcessModel.query.filter(
|
||||
# # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore
|
||||
# # ).order_by(BpmnProcessModel.id.desc()).all()
|
||||
# # human_tasks_to_delete = HumanTaskModel.query.filter(
|
||||
# # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore
|
||||
# # ).all()
|
||||
# #
|
||||
# #
|
||||
# # import pdb; pdb.set_trace()
|
||||
# # # ensure the correct order for foreign keys
|
||||
# # for human_task_to_delete in human_tasks_to_delete:
|
||||
# # db.session.delete(human_task_to_delete)
|
||||
# # db.session.commit()
|
||||
# # for task_to_delete in tasks_to_delete:
|
||||
# # db.session.delete(task_to_delete)
|
||||
# # db.session.commit()
|
||||
# # for bpmn_process_to_delete in bpmn_processes_to_delete:
|
||||
# # db.session.delete(bpmn_process_to_delete)
|
||||
# # db.session.commit()
|
||||
#
|
||||
# related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first()
|
||||
# if related_human_task is not None:
|
||||
# db.session.delete(related_human_task)
|
||||
#
|
||||
# tasks_to_update_ids = [t.id for t in tasks_to_update]
|
||||
# human_tasks_to_delete = HumanTaskModel.query.filter(
|
||||
# HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore
|
||||
# ).all()
|
||||
# for human_task_to_delete in human_tasks_to_delete:
|
||||
# db.session.delete(human_task_to_delete)
|
||||
# db.session.commit()
|
||||
#
|
||||
# for task_to_update in tasks_to_update:
|
||||
# TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit)
|
||||
#
|
||||
# parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first()
|
||||
# if parent_task_model is None:
|
||||
# raise TaskNotFoundError(
|
||||
# f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'"
|
||||
# )
|
||||
#
|
||||
# TaskService.reset_task_model(
|
||||
# to_task_model,
|
||||
# state="READY",
|
||||
# json_data_hash=parent_task_model.json_data_hash,
|
||||
# python_env_data_hash=parent_task_model.python_env_data_hash,
|
||||
# commit=commit,
|
||||
# )
|
||||
# for task_model in task_models_of_parent_bpmn_processes:
|
||||
# TaskService.reset_task_model(task_model, state="WAITING", commit=commit)
|
||||
#
|
||||
# if commit:
|
||||
# processor = ProcessInstanceProcessor(process_instance)
|
||||
# processor.save()
|
||||
# processor.suspend()
|
||||
|
||||
@staticmethod
|
||||
def get_parser() -> MyCustomParser:
|
||||
|
@ -1591,31 +1603,15 @@ class ProcessInstanceProcessor:
|
|||
db.session.add(message_instance)
|
||||
db.session.commit()
|
||||
|
||||
def increment_spiff_step(self) -> None:
|
||||
"""Spiff_step++."""
|
||||
spiff_step = self.process_instance_model.spiff_step or 0
|
||||
spiff_step += 1
|
||||
self.process_instance_model.spiff_step = spiff_step
|
||||
current_app.config["THREAD_LOCAL_DATA"].spiff_step = spiff_step
|
||||
db.session.add(self.process_instance_model)
|
||||
|
||||
def do_engine_steps(
|
||||
self,
|
||||
exit_at: None = None,
|
||||
save: bool = False,
|
||||
execution_strategy_name: Optional[str] = None,
|
||||
) -> None:
|
||||
# NOTE: To avoid saving spiff step details, just comment out this function and the step_delegate and
|
||||
# set the TaskModelSavingDelegate's secondary_engine_step_delegate to None.
|
||||
def spiff_step_details_mapping_builder(task: SpiffTask, start: float, end: float) -> dict:
|
||||
self._script_engine.environment.revise_state_with_task_data(task)
|
||||
return self.spiff_step_details_mapping(task, start, end)
|
||||
|
||||
self._add_bpmn_process_definitions()
|
||||
|
||||
step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder)
|
||||
task_model_delegate = TaskModelSavingDelegate(
|
||||
secondary_engine_step_delegate=step_delegate,
|
||||
serializer=self._serializer,
|
||||
process_instance=self.process_instance_model,
|
||||
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
|
||||
|
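A hedged sketch of the wiring the NOTE above describes, i.e. running the engine without any step-detail delegate; the argument names mirror the TaskModelSavingDelegate call shown above, everything else is illustrative:

# Inside do_engine_steps, skip StepDetailLoggingDelegate entirely and let the
# task-model delegate run alone (secondary_engine_step_delegate=None).
task_model_delegate = TaskModelSavingDelegate(
    secondary_engine_step_delegate=None,
    serializer=self._serializer,
    process_instance=self.process_instance_model,
    bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
)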
@ -1643,31 +1639,6 @@ class ProcessInstanceProcessor:
|
|||
):
|
||||
self._script_engine.failing_spiff_task = None
|
||||
|
||||
# log the spiff step details so we know what is processing the process
|
||||
# instance when a human task has a timer event.
|
||||
def log_spiff_step_details(self, step_details: Any) -> None:
|
||||
if ProcessInstanceLockService.has_lock(self.process_instance_model.id):
|
||||
locked_by = ProcessInstanceLockService.locked_by()
|
||||
message = f"ADDING SPIFF BULK STEP DETAILS: {locked_by}: {step_details}"
|
||||
current_app.logger.debug(message)
|
||||
|
||||
def cancel_notify(self) -> None:
|
||||
"""Cancel_notify."""
|
||||
self.__cancel_notify(self.bpmn_process_instance)
|
||||
|
||||
@staticmethod
|
||||
def __cancel_notify(bpmn_process_instance: BpmnWorkflow) -> None:
|
||||
"""__cancel_notify."""
|
||||
try:
|
||||
# A little hacky, but make the bpmn_process_instance catch a cancel event.
|
||||
bpmn_process_instance.signal("cancel") # generate a cancel signal.
|
||||
bpmn_process_instance.catch(CancelEventDefinition())
|
||||
# Due to this being static, can't save granular step details in this case
|
||||
# TODO: do_engine_steps without a lock
|
||||
bpmn_process_instance.do_engine_steps()
|
||||
except WorkflowTaskException as we:
|
||||
raise ApiError.from_workflow_exception("task_error", str(we), we) from we
|
||||
|
||||
@classmethod
|
||||
def get_tasks_with_data(cls, bpmn_process_instance: BpmnWorkflow) -> List[SpiffTask]:
|
||||
return [task for task in bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK) if len(task.data) > 0]
|
||||
|
@ -1816,28 +1787,6 @@ class ProcessInstanceProcessor:
|
|||
human_task.task_status = spiff_task.get_state_name()
|
||||
db.session.add(human_task)
|
||||
|
||||
# FIXME: remove when we switch over to using tasks only
|
||||
details_model = (
|
||||
SpiffStepDetailsModel.query.filter_by(
|
||||
process_instance_id=self.process_instance_model.id,
|
||||
task_id=str(spiff_task.id),
|
||||
task_state="READY",
|
||||
)
|
||||
.order_by(SpiffStepDetailsModel.id.desc()) # type: ignore
|
||||
.first()
|
||||
)
|
||||
if details_model is None:
|
||||
raise SpiffStepDetailIsMissingError(
|
||||
"Cannot find a ready spiff_step_detail entry for process instance"
|
||||
f" {self.process_instance_model.id} and task_id is {spiff_task.id}"
|
||||
)
|
||||
|
||||
details_model.task_state = spiff_task.get_state_name()
|
||||
details_model.end_in_seconds = time.time()
|
||||
details_model.task_json = self.get_task_dict_from_spiff_task(spiff_task)
|
||||
db.session.add(details_model)
|
||||
# #######
|
||||
|
||||
json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self._serializer)
|
||||
for json_data_dict in json_data_dict_list:
|
||||
if json_data_dict is not None:
|
||||
|
|
|
@ -404,7 +404,6 @@ class ProcessInstanceService:
|
|||
spiff_task: SpiffTask,
|
||||
add_docs_and_forms: bool = False,
|
||||
calling_subprocess_task_id: Optional[str] = None,
|
||||
task_spiff_step: Optional[int] = None,
|
||||
) -> Task:
|
||||
"""Spiff_task_to_api_task."""
|
||||
task_type = spiff_task.task_spec.spec_type
|
||||
|
@ -443,7 +442,6 @@ class ProcessInstanceService:
|
|||
event_definition=serialized_task_spec.get("event_definition"),
|
||||
call_activity_process_identifier=call_activity_process_identifier,
|
||||
calling_subprocess_task_id=calling_subprocess_task_id,
|
||||
task_spiff_step=task_spiff_step,
|
||||
)
|
||||
|
||||
return task
|
||||
|
|
|
@ -9,11 +9,13 @@ from flask import current_app
|
|||
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore
|
||||
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
|
||||
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
|
||||
from SpiffWorkflow.task import TaskState
|
||||
from SpiffWorkflow.task import TaskStateNames
|
||||
from sqlalchemy.dialects.mysql import insert as mysql_insert
|
||||
from sqlalchemy.dialects.postgresql import insert as postgres_insert
|
||||
|
||||
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
|
||||
from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError
|
||||
from spiffworkflow_backend.models.db import db
|
||||
from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
|
@ -144,7 +146,7 @@ class TaskService:
|
|||
bpmn_process, new_task_models, new_json_data_dicts = cls.add_bpmn_process(
|
||||
bpmn_process_dict=serializer.workflow_to_dict(subprocess),
|
||||
process_instance=process_instance,
|
||||
bpmn_process_parent=process_instance.bpmn_process,
|
||||
top_level_process=process_instance.bpmn_process,
|
||||
bpmn_process_guid=subprocess_guid,
|
||||
bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings,
|
||||
spiff_workflow=spiff_workflow,
|
||||
|
@ -160,7 +162,7 @@ class TaskService:
|
|||
bpmn_definition_to_task_definitions_mappings: dict,
|
||||
spiff_workflow: BpmnWorkflow,
|
||||
serializer: BpmnWorkflowSerializer,
|
||||
bpmn_process_parent: Optional[BpmnProcessModel] = None,
|
||||
top_level_process: Optional[BpmnProcessModel] = None,
|
||||
bpmn_process_guid: Optional[str] = None,
|
||||
) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]:
|
||||
"""This creates and adds a bpmn_process to the Db session.
|
||||
|
@ -182,9 +184,9 @@ class TaskService:
|
|||
new_json_data_dicts: dict[str, JsonDataDict] = {}
|
||||
|
||||
bpmn_process = None
|
||||
if bpmn_process_parent is not None:
|
||||
if top_level_process is not None:
|
||||
bpmn_process = BpmnProcessModel.query.filter_by(
|
||||
parent_process_id=bpmn_process_parent.id, guid=bpmn_process_guid
|
||||
top_level_process_id=top_level_process.id, guid=bpmn_process_guid
|
||||
).first()
|
||||
elif process_instance.bpmn_process_id is not None:
|
||||
bpmn_process = process_instance.bpmn_process
|
||||
|
@ -194,6 +196,32 @@ class TaskService:
|
|||
bpmn_process_is_new = True
|
||||
bpmn_process = BpmnProcessModel(guid=bpmn_process_guid)
|
||||
|
||||
bpmn_process_definition = bpmn_definition_to_task_definitions_mappings[spiff_workflow.spec.name][
|
||||
"bpmn_process_definition"
|
||||
]
|
||||
bpmn_process.bpmn_process_definition = bpmn_process_definition
|
||||
|
||||
if top_level_process is not None:
|
||||
subprocesses = spiff_workflow._get_outermost_workflow().subprocesses
|
||||
direct_bpmn_process_parent = top_level_process
|
||||
for subprocess_guid, subprocess in subprocesses.items():
|
||||
if subprocess == spiff_workflow.outer_workflow:
|
||||
direct_bpmn_process_parent = BpmnProcessModel.query.filter_by(
|
||||
guid=str(subprocess_guid)
|
||||
).first()
|
||||
if direct_bpmn_process_parent is None:
|
||||
raise BpmnProcessNotFoundError(
|
||||
f"Could not find bpmn process with guid: {str(subprocess_guid)} "
|
||||
f"while searching for direct parent process of {bpmn_process_guid}."
|
||||
)
|
||||
|
||||
if direct_bpmn_process_parent is None:
|
||||
raise BpmnProcessNotFoundError(
|
||||
f"Could not find a direct bpmn process parent for guid: {bpmn_process_guid}"
|
||||
)
|
||||
|
||||
bpmn_process.direct_parent_process_id = direct_bpmn_process_parent.id
|
||||
|
||||
# Point the root id to the Start task instead of the Root task
|
||||
# since we are ignoring the Root task.
|
||||
for task_id, task_properties in tasks.items():
|
||||
|
@ -206,15 +234,10 @@ class TaskService:
|
|||
if bpmn_process_json_data is not None:
|
||||
new_json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data
|
||||
|
||||
if bpmn_process_parent is None:
|
||||
if top_level_process is None:
|
||||
process_instance.bpmn_process = bpmn_process
|
||||
elif bpmn_process.parent_process_id is None:
|
||||
bpmn_process.parent_process_id = bpmn_process_parent.id
|
||||
|
||||
bpmn_process_definition = bpmn_definition_to_task_definitions_mappings[spiff_workflow.spec.name][
|
||||
"bpmn_process_definition"
|
||||
]
|
||||
bpmn_process.bpmn_process_definition = bpmn_process_definition
|
||||
elif bpmn_process.top_level_process_id is None:
|
||||
bpmn_process.top_level_process_id = top_level_process.id
|
||||
|
||||
# Since we bulk insert tasks later we need to add the bpmn_process to the session
|
||||
# to ensure we have an id.
|
||||
|
@ -285,6 +308,76 @@ class TaskService:
|
|||
setattr(task_model, task_model_data_column, task_data_hash)
|
||||
return json_data_dict
|
||||
|
||||
@classmethod
def bpmn_process_and_descendants(cls, bpmn_processes: list[BpmnProcessModel]) -> list[BpmnProcessModel]:
    bpmn_process_ids = [p.id for p in bpmn_processes]
    direct_children = BpmnProcessModel.query.filter(
        BpmnProcessModel.direct_parent_process_id.in_(bpmn_process_ids)  # type: ignore
    ).all()
    if len(direct_children) > 0:
        return bpmn_processes + cls.bpmn_process_and_descendants(direct_children)
    return bpmn_processes
|
||||
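An illustrative use of the recursive helper above, assuming a process instance whose top-level bpmn_process is already loaded; the variable names are hypothetical:

# Collect the top-level process plus every nested subprocess/call-activity process
# by repeatedly following direct_parent_process_id.
top_level_process = process_instance.bpmn_process
all_bpmn_processes = TaskService.bpmn_process_and_descendants([top_level_process])
all_bpmn_process_ids = [p.id for p in all_bpmn_processes]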
|
||||
@classmethod
|
||||
def task_models_of_parent_bpmn_processes(
|
||||
cls, task_model: TaskModel
|
||||
) -> Tuple[list[BpmnProcessModel], list[TaskModel]]:
|
||||
bpmn_process = task_model.bpmn_process
|
||||
task_models: list[TaskModel] = []
|
||||
bpmn_processes: list[BpmnProcessModel] = [bpmn_process]
|
||||
if bpmn_process.guid is not None:
|
||||
parent_task_model = TaskModel.query.filter_by(guid=bpmn_process.guid).first()
|
||||
if parent_task_model is not None:
|
||||
b, t = cls.task_models_of_parent_bpmn_processes(parent_task_model)
|
||||
return (bpmn_processes + b, [parent_task_model] + t)
|
||||
return (bpmn_processes, task_models)
|
||||
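An illustrative call to the helper above; `some_task_model` is a hypothetical TaskModel, not a name from this PR:

# Walks upward from a task's bpmn_process: each parent process's guid points at the
# subprocess/call-activity TaskModel that owns it, and both chains are returned.
parent_bpmn_processes, owning_task_models = TaskService.task_models_of_parent_bpmn_processes(some_task_model)
parent_task_guids = [t.guid for t in owning_task_models if t.guid]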
|
||||
@classmethod
|
||||
def reset_task_model_dict(
|
||||
cls,
|
||||
task_model: dict,
|
||||
state: str,
|
||||
) -> None:
|
||||
task_model["state"] = state
|
||||
task_model["start_in_seconds"] = None
|
||||
task_model["end_in_seconds"] = None
|
||||
|
||||
@classmethod
|
||||
def reset_task_model(
|
||||
cls,
|
||||
task_model: TaskModel,
|
||||
state: str,
|
||||
commit: Optional[bool] = True,
|
||||
json_data_hash: Optional[str] = None,
|
||||
python_env_data_hash: Optional[str] = None,
|
||||
) -> None:
|
||||
if json_data_hash is None:
|
||||
cls.update_task_data_on_task_model(task_model, {}, "json_data_hash")
|
||||
else:
|
||||
task_model.json_data_hash = json_data_hash
|
||||
if python_env_data_hash is None:
|
||||
cls.update_task_data_on_task_model(task_model, {}, "python_env_data")
|
||||
else:
|
||||
task_model.python_env_data_hash = python_env_data_hash
|
||||
|
||||
new_properties_json = task_model.properties_json
|
||||
task_model.state = state
|
||||
task_model.start_in_seconds = None
|
||||
task_model.end_in_seconds = None
|
||||
|
||||
if commit:
|
||||
db.session.add(task_model)
|
||||
db.session.commit()
|
||||
|
||||
new_properties_json["state"] = getattr(TaskState, state)
|
||||
task_model.properties_json = new_properties_json
|
||||
|
||||
if commit:
|
||||
# if we commit the properties json at the same time as the other items
|
||||
# the json gets reset for some reason.
|
||||
db.session.add(task_model)
|
||||
db.session.commit()
|
||||
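A hedged example of how reset_task_model is meant to be used, mirroring the commented-out reset flow earlier in this diff; `to_task_model` and `parent_task_model` are assumed to exist:

# Put the target task back to READY, reusing its parent's data hashes so the task
# resumes from the data that was in scope when it first became ready.
TaskService.reset_task_model(
    to_task_model,
    state="READY",
    json_data_hash=parent_task_model.json_data_hash,
    python_env_data_hash=parent_task_model.python_env_data_hash,
    commit=True,
)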
|
||||
@classmethod
|
||||
def _create_task(
|
||||
cls,
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
import logging
|
||||
import time
|
||||
from typing import Callable
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
|
||||
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore
|
||||
|
@ -19,7 +18,6 @@ from spiffworkflow_backend.models.message_instance_correlation import (
|
|||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
|
||||
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
|
||||
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
|
||||
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
|
||||
from spiffworkflow_backend.services.assertion_service import safe_assertion
|
||||
from spiffworkflow_backend.services.process_instance_lock_service import (
|
||||
|
@ -45,10 +43,6 @@ class EngineStepDelegate:
|
|||
pass
|
||||
|
||||
|
||||
SpiffStepIncrementer = Callable[[], None]
|
||||
SpiffStepDetailsMappingBuilder = Callable[[SpiffTask, float, float], dict]
|
||||
|
||||
|
||||
class TaskModelSavingDelegate(EngineStepDelegate):
|
||||
"""Engine step delegate that takes care of saving a task model to the database.
|
||||
|
||||
|
@ -107,6 +101,8 @@ class TaskModelSavingDelegate(EngineStepDelegate):
|
|||
|
||||
def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None:
|
||||
if self._should_update_task_model():
|
||||
# TODO: also include children of the last task processed. This may help with task resets
|
||||
# if we have to set their states to FUTURE.
|
||||
# excludes FUTURE and COMPLETED. The others were required to get PP1 to go to completion.
|
||||
for waiting_spiff_task in bpmn_process_instance.get_tasks(
|
||||
TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY
|
||||
|
@ -165,58 +161,6 @@ class TaskModelSavingDelegate(EngineStepDelegate):
|
|||
return task_model
|
||||
|
||||
|
||||
class StepDetailLoggingDelegate(EngineStepDelegate):
|
||||
"""Engine step delegate that takes care of logging spiff step details.
|
||||
|
||||
This separates the concerns of step execution and step logging.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
increment_spiff_step: SpiffStepIncrementer,
|
||||
spiff_step_details_mapping: SpiffStepDetailsMappingBuilder,
|
||||
):
|
||||
"""__init__."""
|
||||
self.increment_spiff_step = increment_spiff_step
|
||||
self.spiff_step_details_mapping = spiff_step_details_mapping
|
||||
self.step_details: List[dict] = []
|
||||
self.current_task_start_in_seconds = 0.0
|
||||
self.tasks_to_log = {
|
||||
"BPMN Task",
|
||||
"Script Task",
|
||||
"Service Task",
|
||||
"Default Start Event",
|
||||
"Exclusive Gateway",
|
||||
"Call Activity",
|
||||
# "End Join",
|
||||
"End Event",
|
||||
"Default Throwing Event",
|
||||
"Subprocess",
|
||||
"Transactional Subprocess",
|
||||
}
|
||||
|
||||
def should_log(self, spiff_task: SpiffTask) -> bool:
|
||||
return spiff_task.task_spec.spec_type in self.tasks_to_log and not spiff_task.task_spec.name.endswith(
|
||||
".EndJoin"
|
||||
)
|
||||
|
||||
def will_complete_task(self, spiff_task: SpiffTask) -> None:
|
||||
if self.should_log(spiff_task):
|
||||
self.current_task_start_in_seconds = time.time()
|
||||
self.increment_spiff_step()
|
||||
|
||||
def did_complete_task(self, spiff_task: SpiffTask) -> None:
|
||||
if self.should_log(spiff_task):
|
||||
self.step_details.append(
|
||||
self.spiff_step_details_mapping(spiff_task, self.current_task_start_in_seconds, time.time())
|
||||
)
|
||||
|
||||
def save(self, _bpmn_process_instance: BpmnWorkflow, commit: bool = True) -> None:
|
||||
db.session.bulk_insert_mappings(SpiffStepDetailsModel, self.step_details)
|
||||
if commit:
|
||||
db.session.commit()
|
||||
|
||||
|
||||
class ExecutionStrategy:
|
||||
"""Interface of sorts for a concrete execution strategy."""
|
||||
|
||||
|
|
|
@ -22,11 +22,10 @@
|
|||
<bpmn:script>set_in_top_level_script = 1</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="Flow_1fktmf7" sourceRef="top_level_script" targetRef="manual_task" />
|
||||
<bpmn:sequenceFlow id="Flow_1i7syph" sourceRef="top_level_subprocess" targetRef="top_level_call_activity" />
|
||||
<bpmn:sequenceFlow id="Flow_09gjylo" sourceRef="manual_task" targetRef="top_level_subprocess" />
|
||||
<bpmn:subProcess id="top_level_subprocess">
|
||||
<bpmn:incoming>Flow_09gjylo</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1i7syph</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_0yxus36</bpmn:outgoing>
|
||||
<bpmn:startEvent id="Event_0g7txdo">
|
||||
<bpmn:outgoing>Flow_00k1tii</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
|
@ -48,7 +47,7 @@ except:
|
|||
</bpmn:scriptTask>
|
||||
</bpmn:subProcess>
|
||||
<bpmn:callActivity id="top_level_call_activity" calledElement="test_process_to_call">
|
||||
<bpmn:incoming>Flow_1i7syph</bpmn:incoming>
|
||||
<bpmn:incoming>Flow_0yxus36</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_187mcqe</bpmn:outgoing>
|
||||
</bpmn:callActivity>
|
||||
<bpmn:exclusiveGateway id="Gateway_0p8naw0" default="Flow_1t9ywmr">
|
||||
|
@ -67,6 +66,7 @@ except:
|
|||
<bpmn:script>set_top_level_process_script_after_gate = 1</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="Flow_1t9ywmr" sourceRef="Gateway_0p8naw0" targetRef="manual_task" />
|
||||
<bpmn:sequenceFlow id="Flow_0yxus36" sourceRef="top_level_subprocess" targetRef="top_level_call_activity" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="top_level_process">
|
||||
|
@ -102,10 +102,6 @@ except:
|
|||
<di:waypoint x="370" y="177" />
|
||||
<di:waypoint x="400" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1i7syph_di" bpmnElement="Flow_1i7syph">
|
||||
<di:waypoint x="630" y="177" />
|
||||
<di:waypoint x="680" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_09gjylo_di" bpmnElement="Flow_09gjylo">
|
||||
<di:waypoint x="500" y="177" />
|
||||
<di:waypoint x="530" y="177" />
|
||||
|
@ -128,6 +124,10 @@ except:
|
|||
<di:waypoint x="450" y="100" />
|
||||
<di:waypoint x="450" y="137" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0yxus36_di" bpmnElement="Flow_0yxus36">
|
||||
<di:waypoint x="630" y="177" />
|
||||
<di:waypoint x="680" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_01cbxj3">
|
||||
|
|
|
@ -1,38 +1,132 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
|
||||
<bpmn:process id="test_process_to_call" name="Test Process To Call" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_06g687y</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_06g687y" sourceRef="StartEvent_1" targetRef="test_process_to_call_script" />
|
||||
<bpmn:endEvent id="Event_1nn875f">
|
||||
<bpmn:incoming>Flow_01e21r0</bpmn:incoming>
|
||||
<bpmn:endEvent id="Event_03zsjvn">
|
||||
<bpmn:incoming>Flow_089aeua</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_01e21r0" sourceRef="test_process_to_call_script" targetRef="Event_1nn875f" />
|
||||
<bpmn:startEvent id="Event_0pp84tn">
|
||||
<bpmn:outgoing>Flow_1qsx5et</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:subProcess id="test_process_to_call_subprocess">
|
||||
<bpmn:incoming>Flow_1qsx5et</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0zedtvv</bpmn:outgoing>
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0bkk554</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:scriptTask id="test_process_to_call_subprocess_script" name="Test Process To Call Subprocess Script">
|
||||
<bpmn:incoming>Flow_1cnuh2a</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_17hgw9g</bpmn:outgoing>
|
||||
<bpmn:script>set_in_test_process_to_call_subprocess_script = 1</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:endEvent id="Event_1nn875f">
|
||||
<bpmn:incoming>Flow_17hgw9g</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_17hgw9g" sourceRef="test_process_to_call_subprocess_script" targetRef="Event_1nn875f" />
|
||||
<bpmn:sequenceFlow id="Flow_0bkk554" sourceRef="StartEvent_1" targetRef="test_process_to_call_subprocess_subprocess" />
|
||||
<bpmn:sequenceFlow id="Flow_1cnuh2a" sourceRef="test_process_to_call_subprocess_subprocess" targetRef="test_process_to_call_subprocess_script" />
|
||||
<bpmn:subProcess id="test_process_to_call_subprocess_subprocess">
|
||||
<bpmn:incoming>Flow_0bkk554</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1cnuh2a</bpmn:outgoing>
|
||||
<bpmn:startEvent id="Event_17bk1sd">
|
||||
<bpmn:outgoing>Flow_1nri60d</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1nri60d" sourceRef="Event_17bk1sd" targetRef="test_process_to_call_subprocess_subprocess_script" />
|
||||
<bpmn:endEvent id="Event_1sec2vg">
|
||||
<bpmn:incoming>Flow_1bfzrzu</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1bfzrzu" sourceRef="test_process_to_call_subprocess_subprocess_script" targetRef="Event_1sec2vg" />
|
||||
<bpmn:scriptTask id="test_process_to_call_subprocess_subprocess_script" name="Test Process To Call Subprocess Subprocess Script">
|
||||
<bpmn:incoming>Flow_1nri60d</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1bfzrzu</bpmn:outgoing>
|
||||
<bpmn:script>set_in_test_process_to_call_subprocess_subprocess_script = 1</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
</bpmn:subProcess>
|
||||
</bpmn:subProcess>
|
||||
<bpmn:sequenceFlow id="Flow_1qsx5et" sourceRef="Event_0pp84tn" targetRef="test_process_to_call_subprocess" />
|
||||
<bpmn:sequenceFlow id="Flow_0zedtvv" sourceRef="test_process_to_call_subprocess" targetRef="test_process_to_call_script" />
|
||||
<bpmn:sequenceFlow id="Flow_089aeua" sourceRef="test_process_to_call_script" targetRef="Event_03zsjvn" />
|
||||
<bpmn:scriptTask id="test_process_to_call_script" name="Test Process To Call Script">
|
||||
<bpmn:incoming>Flow_06g687y</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_01e21r0</bpmn:outgoing>
|
||||
<bpmn:incoming>Flow_0zedtvv</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_089aeua</bpmn:outgoing>
|
||||
<bpmn:script>set_in_test_process_to_call_script = 1</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="test_process_to_call">
|
||||
<bpmndi:BPMNShape id="Event_03zsjvn_di" bpmnElement="Event_03zsjvn">
|
||||
<dc:Bounds x="612" y="132" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0pp84tn_di" bpmnElement="Event_0pp84tn">
|
||||
<dc:Bounds x="162.33333333333334" y="132" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_14ywg4w_di" bpmnElement="test_process_to_call_subprocess" isExpanded="false">
|
||||
<dc:Bounds x="270" y="110" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_17algy3_di" bpmnElement="test_process_to_call_script">
|
||||
<dc:Bounds x="450" y="110" width="100" height="80" />
|
||||
<bpmndi:BPMNLabel />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_1qsx5et_di" bpmnElement="Flow_1qsx5et">
|
||||
<di:waypoint x="198" y="150" />
|
||||
<di:waypoint x="270" y="150" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0zedtvv_di" bpmnElement="Flow_0zedtvv">
|
||||
<di:waypoint x="370" y="150" />
|
||||
<di:waypoint x="450" y="150" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_089aeua_di" bpmnElement="Flow_089aeua">
|
||||
<di:waypoint x="550" y="150" />
|
||||
<di:waypoint x="612" y="150" />
|
||||
</bpmndi:BPMNEdge>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1ikntvh">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_18euprj" bpmnElement="test_process_to_call_subprocess">
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="159" width="36" height="36" />
|
||||
<dc:Bounds x="180" y="182" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_059upl6_di" bpmnElement="test_process_to_call_subprocess_script">
|
||||
<dc:Bounds x="420" y="160" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1nn875f_di" bpmnElement="Event_1nn875f">
|
||||
<dc:Bounds x="432" y="159" width="36" height="36" />
|
||||
<dc:Bounds x="562" y="182" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_059upl6_di" bpmnElement="test_process_to_call_script">
|
||||
<dc:Bounds x="270" y="137" width="100" height="80" />
|
||||
<bpmndi:BPMNShape id="Activity_0hixo7p_di" bpmnElement="test_process_to_call_subprocess_subprocess">
|
||||
<dc:Bounds x="270" y="160" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_06g687y_di" bpmnElement="Flow_06g687y">
|
||||
<di:waypoint x="215" y="177" />
|
||||
<di:waypoint x="270" y="177" />
|
||||
<bpmndi:BPMNEdge id="Flow_17hgw9g_di" bpmnElement="Flow_17hgw9g">
|
||||
<di:waypoint x="520" y="200" />
|
||||
<di:waypoint x="562" y="200" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_01e21r0_di" bpmnElement="Flow_01e21r0">
|
||||
<di:waypoint x="370" y="177" />
|
||||
<di:waypoint x="432" y="177" />
|
||||
<bpmndi:BPMNEdge id="Flow_0bkk554_di" bpmnElement="Flow_0bkk554">
|
||||
<di:waypoint x="216" y="200" />
|
||||
<di:waypoint x="270" y="200" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1cnuh2a_di" bpmnElement="Flow_1cnuh2a">
|
||||
<di:waypoint x="370" y="200" />
|
||||
<di:waypoint x="420" y="200" />
|
||||
</bpmndi:BPMNEdge>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_0x7adrd">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_17ay7ws" bpmnElement="test_process_to_call_subprocess_subprocess">
|
||||
<bpmndi:BPMNShape id="Event_17bk1sd_di" bpmnElement="Event_17bk1sd">
|
||||
<dc:Bounds x="262" y="172" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1sec2vg_di" bpmnElement="Event_1sec2vg">
|
||||
<dc:Bounds x="502" y="172" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1kjd6i3_di" bpmnElement="test_process_to_call_subprocess_subprocess_script">
|
||||
<dc:Bounds x="350" y="150" width="100" height="80" />
|
||||
<bpmndi:BPMNLabel />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_1nri60d_di" bpmnElement="Flow_1nri60d">
|
||||
<di:waypoint x="298" y="190" />
|
||||
<di:waypoint x="350" y="190" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1bfzrzu_di" bpmnElement="Flow_1bfzrzu">
|
||||
<di:waypoint x="450" y="190" />
|
||||
<di:waypoint x="502" y="190" />
|
||||
</bpmndi:BPMNEdge>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
|
|
|
@ -27,7 +27,7 @@
|
|||
</bpmn:intermediateCatchEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0tzaigt" sourceRef="Gateway_0n53kj7" targetRef="Event_0himdx6" />
|
||||
<bpmn:sequenceFlow id="Flow_1vld4r2" sourceRef="Gateway_0n53kj7" targetRef="Event_0e4owa3" />
|
||||
<bpmn:sequenceFlow id="Flow_13ai5vv" sourceRef="Event_0e4owa3" targetRef="Activity_0uum4kq" />
|
||||
<bpmn:sequenceFlow id="Flow_13ai5vv" sourceRef="Event_0e4owa3" targetRef="manual_task_one" />
|
||||
<bpmn:endEvent id="Event_0vmxgb9">
|
||||
<bpmn:incoming>Flow_1q47ol8</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
|
@ -36,7 +36,7 @@
|
|||
<bpmn:endEvent id="Event_174a838">
|
||||
<bpmn:incoming>Flow_1w3n49n</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1vwnf3n" sourceRef="Activity_0uum4kq" targetRef="Event_1ixib8a" />
|
||||
<bpmn:sequenceFlow id="Flow_1vwnf3n" sourceRef="manual_task_one" targetRef="Event_1ixib8a" />
|
||||
<bpmn:intermediateCatchEvent id="Event_0e4owa3">
|
||||
<bpmn:incoming>Flow_1vld4r2</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_13ai5vv</bpmn:outgoing>
|
||||
|
@ -44,7 +44,7 @@
|
|||
<bpmn:timeDuration xsi:type="bpmn:tFormalExpression">"PT1H"</bpmn:timeDuration>
|
||||
</bpmn:timerEventDefinition>
|
||||
</bpmn:intermediateCatchEvent>
|
||||
<bpmn:manualTask id="Activity_0uum4kq" name="Any Task">
|
||||
<bpmn:manualTask id="manual_task_one" name="Any Task">
|
||||
<bpmn:extensionElements>
|
||||
<spiffworkflow:instructionsForEndUser>Click the button.</spiffworkflow:instructionsForEndUser>
|
||||
</bpmn:extensionElements>
|
||||
|
@ -91,7 +91,7 @@
|
|||
<bpmndi:BPMNShape id="Event_0e4owa3_di" bpmnElement="Event_0e4owa3">
|
||||
<dc:Bounds x="392" y="272" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_104jmxm_di" bpmnElement="Activity_0uum4kq">
|
||||
<bpmndi:BPMNShape id="Activity_104jmxm_di" bpmnElement="manual_task_one">
|
||||
<dc:Bounds x="480" y="250" width="100" height="80" />
|
||||
<bpmndi:BPMNLabel />
|
||||
</bpmndi:BPMNShape>
|
||||
|
|
|
@ -2619,9 +2619,9 @@ class TestProcessApi(BaseTest):
|
|||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
)
|
||||
assert response.status_code == 200
|
||||
end_task = next(task for task in response.json if task["type"] == "End Event")
|
||||
end_task = next(task for task in response.json if task["bpmn_identifier"] == "Event_174a838")
|
||||
response = client.get(
|
||||
f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{end_task['task_spiff_step']}",
|
||||
f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{end_task['guid']}",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
@ -2688,17 +2688,17 @@ class TestProcessApi(BaseTest):
|
|||
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/task-info",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
)
|
||||
assert len(response.json) == 1
|
||||
task = response.json[0]
|
||||
assert len(response.json) == 9
|
||||
human_task = next(task for task in response.json if task["bpmn_identifier"] == "manual_task_one")
|
||||
|
||||
response = client.post(
|
||||
f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{task['id']}",
|
||||
f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{human_task['guid']}",
|
||||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
content_type="application/json",
|
||||
data=json.dumps({"execute": False}),
|
||||
)
|
||||
assert response.json["status"] == "suspended"
|
||||
task_model = TaskModel.query.filter_by(guid=task["id"]).first()
|
||||
task_model = TaskModel.query.filter_by(guid=human_task["guid"]).first()
|
||||
assert task_model is not None
|
||||
assert task_model.state == "COMPLETED"
|
||||
|
||||
|
@ -2707,7 +2707,7 @@ class TestProcessApi(BaseTest):
|
|||
headers=self.logged_in_headers(with_super_admin_user),
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert len(response.json) == 1
|
||||
assert len(response.json) == 9
|
||||
|
||||
def setup_initial_groups_for_move_tests(self, client: FlaskClient, with_super_admin_user: UserModel) -> None:
|
||||
"""Setup_initial_groups_for_move_tests."""
|
||||
|
|
|
@ -91,7 +91,6 @@ class TestErrorHandlingService(BaseTest):
|
|||
# Both send and receive messages should be generated, matched
|
||||
# and considered complete.
|
||||
messages = db.session.query(MessageInstanceModel).all()
|
||||
# import pdb; pdb.set_trace()
|
||||
assert 2 == len(messages)
|
||||
assert "completed" == messages[0].status
|
||||
assert "completed" == messages[1].status
|
||||
|
|
|
@ -256,6 +256,61 @@ class TestProcessInstanceProcessor(BaseTest):
|
|||
assert spiff_task is not None
|
||||
assert spiff_task.state == TaskState.COMPLETED
|
||||
|
||||
# TODO: FIX resetting a process instance to a task
|
||||
# def test_properly_resets_process_to_given_task(
|
||||
# self,
|
||||
# app: Flask,
|
||||
# client: FlaskClient,
|
||||
# with_db_and_bpmn_file_cleanup: None,
|
||||
# with_super_admin_user: UserModel,
|
||||
# ) -> None:
|
||||
# self.create_process_group(client, with_super_admin_user, "test_group", "test_group")
|
||||
# initiator_user = self.find_or_create_user("initiator_user")
|
||||
# finance_user_three = self.find_or_create_user("testuser3")
|
||||
# assert initiator_user.principal is not None
|
||||
# assert finance_user_three.principal is not None
|
||||
# AuthorizationService.import_permissions_from_yaml_file()
|
||||
#
|
||||
# finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
|
||||
# assert finance_group is not None
|
||||
#
|
||||
# process_model = load_test_spec(
|
||||
# process_model_id="test_group/manual_task_with_subprocesses",
|
||||
# process_model_source_directory="manual_task_with_subprocesses",
|
||||
# )
|
||||
# process_instance = self.create_process_instance_from_process_model(
|
||||
# process_model=process_model, user=initiator_user
|
||||
# )
|
||||
# processor = ProcessInstanceProcessor(process_instance)
|
||||
# processor.do_engine_steps(save=True)
|
||||
# assert len(process_instance.active_human_tasks) == 1
|
||||
# initial_human_task_id = process_instance.active_human_tasks[0].id
|
||||
#
|
||||
# # save again to ensure we attempt to process the human tasks again
|
||||
# processor.save()
|
||||
#
|
||||
# assert len(process_instance.active_human_tasks) == 1
|
||||
# assert initial_human_task_id == process_instance.active_human_tasks[0].id
|
||||
#
|
||||
# processor = ProcessInstanceProcessor(process_instance)
|
||||
# human_task_one = process_instance.active_human_tasks[0]
|
||||
# spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier(
|
||||
# human_task_one.task_name, processor.bpmn_process_instance
|
||||
# )
|
||||
# ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
|
||||
#
|
||||
# processor.suspend()
|
||||
# ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), commit=True)
|
||||
#
|
||||
# process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
|
||||
# processor = ProcessInstanceProcessor(process_instance)
|
||||
# human_task_one = process_instance.active_human_tasks[0]
|
||||
# spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id))
|
||||
# ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
|
||||
# human_task_one = process_instance.active_human_tasks[0]
|
||||
# spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id))
|
||||
# ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
|
||||
|
||||
def test_properly_saves_tasks_when_running(
|
||||
self,
|
||||
app: Flask,
|
||||
|
@ -263,7 +318,6 @@ class TestProcessInstanceProcessor(BaseTest):
|
|||
with_db_and_bpmn_file_cleanup: None,
|
||||
with_super_admin_user: UserModel,
|
||||
) -> None:
|
||||
"""Test_does_not_recreate_human_tasks_on_multiple_saves."""
|
||||
self.create_process_group(client, with_super_admin_user, "test_group", "test_group")
|
||||
initiator_user = self.find_or_create_user("initiator_user")
|
||||
finance_user_three = self.find_or_create_user("testuser3")
|
||||
|
@ -317,7 +371,11 @@ class TestProcessInstanceProcessor(BaseTest):
|
|||
}
|
||||
        third_data_set = {
            **second_data_set,
            **{"set_in_test_process_to_call_script": 1},
            **{
                "set_in_test_process_to_call_script": 1,
                "set_in_test_process_to_call_subprocess_subprocess_script": 1,
                "set_in_test_process_to_call_subprocess_script": 1,
            },
        }
        fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}}
        fifth_data_set = {**fourth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}}

@@ -326,11 +384,11 @@ class TestProcessInstanceProcessor(BaseTest):
            "manual_task": first_data_set,
            "top_level_subprocess_script": second_data_set,
            "top_level_subprocess": second_data_set,
            "test_process_to_call_script": third_data_set,
            "test_process_to_call_subprocess_script": third_data_set,
            "top_level_call_activity": third_data_set,
            "end_event_of_manual_task_model": third_data_set,
            "top_level_subprocess_script_second": fourth_data_set,
            "test_process_to_call_script_second": fourth_data_set,
            "test_process_to_call_subprocess_script_second": fourth_data_set,
        }

        spiff_tasks_checked_once: list = []

@@ -338,10 +396,16 @@ class TestProcessInstanceProcessor(BaseTest):
        # TODO: also check task data here from the spiff_task directly to ensure we hydrated spiff correctly
        def assert_spiff_task_is_in_process(spiff_task_identifier: str, bpmn_process_identifier: str) -> None:
            if spiff_task.task_spec.name == spiff_task_identifier:
                base_failure_message = f"Failed on {bpmn_process_identifier} - {spiff_task_identifier}."
                expected_python_env_data = expected_task_data[spiff_task.task_spec.name]
                expected_task_data_key = spiff_task.task_spec.name
                if spiff_task.task_spec.name in spiff_tasks_checked_once:
                    expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"]
                    expected_task_data_key = f"{spiff_task.task_spec.name}_second"

                expected_python_env_data = expected_task_data[expected_task_data_key]

                base_failure_message = (
                    f"Failed on {bpmn_process_identifier} - {spiff_task_identifier} - task data key"
                    f" {expected_task_data_key}."
                )
                task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()

                assert task_model.start_in_seconds is not None

@@ -354,7 +418,8 @@ class TestProcessInstanceProcessor(BaseTest):
                assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier

                message = (
                    f"{base_failure_message} Expected: {expected_python_env_data}. Received: {task_model.json_data()}"
                    f"{base_failure_message} Expected: {sorted(expected_python_env_data)}. Received:"
                    f" {sorted(task_model.json_data())}"
                )
                # TODO: if we split out env data again we will need to use it here instead of json_data
                # assert task_model.python_env_data() == expected_python_env_data, message

@@ -365,7 +430,9 @@ class TestProcessInstanceProcessor(BaseTest):
        assert len(all_spiff_tasks) > 1
        for spiff_task in all_spiff_tasks:
            assert spiff_task.state == TaskState.COMPLETED
            assert_spiff_task_is_in_process("test_process_to_call_script", "test_process_to_call")
            assert_spiff_task_is_in_process(
                "test_process_to_call_subprocess_script", "test_process_to_call_subprocess"
            )
            assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess")
            assert_spiff_task_is_in_process("top_level_script", "top_level_process")

@@ -378,6 +445,23 @@ class TestProcessInstanceProcessor(BaseTest):
                assert bpmn_process_definition.bpmn_identifier == "test_process_to_call"
                assert bpmn_process_definition.bpmn_name == "Test Process To Call"

            # Check that the direct parent of the called activity subprocess task is the
            # name of the process that was called from the activity.
            if spiff_task.task_spec.name == "test_process_to_call_subprocess_script":
                task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
                assert task_model is not None
                bpmn_process = task_model.bpmn_process
                assert bpmn_process is not None
                bpmn_process_definition = bpmn_process.bpmn_process_definition
                assert bpmn_process_definition is not None
                assert bpmn_process_definition.bpmn_identifier == "test_process_to_call_subprocess"
                assert bpmn_process.direct_parent_process_id is not None
                direct_parent_process = BpmnProcessModel.query.filter_by(
                    id=bpmn_process.direct_parent_process_id
                ).first()
                assert direct_parent_process is not None
                assert direct_parent_process.bpmn_process_definition.bpmn_identifier == "test_process_to_call"

        assert processor.get_data() == fifth_data_set

    def test_does_not_recreate_human_tasks_on_multiple_saves(

@@ -162,7 +162,7 @@ describe('process-instances', () => {
    cy.getBySel('process-instance-log-list-link').click();
    cy.getBySel('process-instance-log-detailed').click();
    cy.contains('process_model_one');
    cy.contains('State change to COMPLETED');
    cy.contains('task_completed');
    cy.basicPaginationTest();
  });

@@ -182,6 +182,9 @@ describe('process-instances', () => {
    cy.url().should('include', `status=${processStatus}`);
    cy.assertAtLeastOneItemInPaginatedResults();
    cy.getBySel(`process-instance-status-${processStatus}`);

    // maybe waiting a bit before trying to click makes this work consistently?
    cy.wait(1000);
    // there should really only be one, but in CI there are sometimes more
    cy.get('div[aria-label="Clear all selected items"]:first').click();
    cy.get('div[aria-label="Clear all selected items"]').should(

@@ -154,6 +154,10 @@ Cypress.Commands.add(
      .then(($element) => {
        const oldId = $element.text().trim();
        cy.get('.cds--pagination__button--forward').click();
        cy.contains(
          `[data-qa=${dataQaTagToUseToEnsureTableHasLoaded}]`,
          oldId
        ).should('not.exist');
        cy.contains(/\b3–4 of \d+/);
        cy.get('.cds--pagination__button--backward').click();
        cy.contains(/\b1–2 of \d+/);

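The pagination assertions above hard-code the Carbon label text ("3–4 of N" after paging forward, "1–2 of N" after paging back). As a rough sketch only, and not part of this commit, the same check could be pulled into a small typed helper; assertPaginationRange is a hypothetical name, the regex simply mirrors the literals used above, and Cypress type definitions are assumed to be available.

/// <reference types="cypress" />

// Hypothetical helper; illustrative sketch only, not code from this commit.
function assertPaginationRange(start: number, end: number) {
  // Matches Carbon pagination labels such as "3–4 of 27".
  cy.contains(new RegExp(`\\b${start}–${end} of \\d+`));
}

// Example usage inside a spec:
// cy.get('.cds--pagination__button--forward').click();
// assertPaginationRange(3, 4);
// cy.get('.cds--pagination__button--backward').click();
// assertPaginationRange(1, 2);
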
@@ -1295,7 +1295,6 @@ export default function ProcessInstanceListTable({
    end_in_seconds: 'End Time',
    status: 'Status',
    process_initiator_username: 'Started By',
    spiff_step: 'SpiffWorkflow Step',
  };
  const getHeaderLabel = (header: string) => {
    return headerLabels[header] ?? header;

@@ -60,14 +60,14 @@ import HttpService from '../services/HttpService';
import ButtonWithConfirmation from './ButtonWithConfirmation';
import { getBpmnProcessIdentifiers, makeid } from '../helpers';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import { PermissionsToCheck, ProcessInstanceTask } from '../interfaces';
import { PermissionsToCheck, Task } from '../interfaces';
import { usePermissionFetcher } from '../hooks/PermissionService';

type OwnProps = {
  processModelId: string;
  diagramType: string;
  readyOrWaitingProcessInstanceTasks?: ProcessInstanceTask[] | null;
  completedProcessInstanceTasks?: ProcessInstanceTask[] | null;
  readyOrWaitingProcessInstanceTasks?: Task[] | null;
  completedProcessInstanceTasks?: Task[] | null;
  saveDiagram?: (..._args: any[]) => any;
  onDeleteFile?: (..._args: any[]) => any;
  isPrimaryFile?: boolean;

@@ -364,18 +364,18 @@ export default function ReactDiagramEditor({

  function highlightBpmnIoElement(
    canvas: any,
    processInstanceTask: ProcessInstanceTask,
    task: Task,
    bpmnIoClassName: string,
    bpmnProcessIdentifiers: string[]
  ) {
    if (checkTaskCanBeHighlighted(processInstanceTask.name)) {
    if (checkTaskCanBeHighlighted(task.bpmn_identifier)) {
      try {
        if (
          bpmnProcessIdentifiers.includes(
            processInstanceTask.process_identifier
            task.bpmn_process_definition_identifier
          )
        ) {
          canvas.addMarker(processInstanceTask.name, bpmnIoClassName);
          canvas.addMarker(task.bpmn_identifier, bpmnIoClassName);
        }
      } catch (bpmnIoError: any) {
        // the task list also contains task for processes called from call activities which will

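highlightBpmnIoElement now keys off Task.bpmn_identifier and Task.bpmn_process_definition_identifier instead of the old ProcessInstanceTask fields. A minimal standalone sketch of that guard follows; markTaskIfInDiagram and the pared-down TaskLike type are illustrative only, and the canvas parameter is assumed to be a bpmn-js canvas exposing addMarker.

// Illustrative sketch only; not code from this commit.
type TaskLike = {
  bpmn_identifier: string;
  bpmn_process_definition_identifier: string;
};

function markTaskIfInDiagram(
  canvas: { addMarker: (elementId: string, className: string) => void },
  task: TaskLike,
  bpmnIoClassName: string,
  bpmnProcessIdentifiers: string[]
) {
  // Only mark elements whose owning process is actually rendered in this diagram;
  // tasks belonging to processes reached via call activities are skipped.
  if (bpmnProcessIdentifiers.includes(task.bpmn_process_definition_identifier)) {
    canvas.addMarker(task.bpmn_identifier, bpmnIoClassName);
  }
}
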
@@ -21,8 +21,47 @@ export interface RecentProcessModel {
  processModelDisplayName: string;
}

export interface ProcessInstanceTask {
export interface TaskPropertiesJson {
  parent: string;
}

export interface TaskDefinitionPropertiesJson {
  spec: string;
}

export interface EventDefinition {
  typename: string;
  payload: any;
  event_definitions: [EventDefinition];

  message_var?: string;
}

export interface Task {
  id: number;
  guid: string;
  bpmn_identifier: string;

  bpmn_name?: string;

  bpmn_process_direct_parent_guid: string;
  bpmn_process_definition_identifier: string;
  data: any;
  state: string;
  typename: string;
  properties_json: TaskPropertiesJson;
  task_definition_properties_json: TaskDefinitionPropertiesJson;

  event_definition?: EventDefinition;
}

export interface TaskIds {
  completed: Task[];
  readyOrWaiting: Task[];
}

export interface ProcessInstanceTask {
  id: string;
  task_id: string;

  calling_subprocess_task_id: string;

@@ -46,7 +85,6 @@ export interface ProcessInstanceTask {
  type: string;
  updated_at_in_seconds: number;

  task_spiff_step?: number;
  potential_owner_usernames?: string;
  assigned_user_group_identifier?: string;
}

@@ -90,7 +128,6 @@ export interface ProcessInstance {
  end_in_seconds: number | null;
  process_initiator_username: string;
  bpmn_xml_file_contents?: string;
  spiff_step?: number;
  created_at_in_seconds: number;
  updated_at_in_seconds: number;
  bpmn_version_control_identifier: string;

@@ -258,3 +295,20 @@ export interface JsonSchemaForm {
  process_model_id: string;
  required: string[];
}

export interface ProcessInstanceLogEntry {
  bpmn_process_definition_identifier: string;
  bpmn_process_definition_name: string;
  bpmn_task_type: string;
  event_type: string;
  spiff_task_guid: string;
  task_definition_identifier: string;
  task_guid: string;
  timestamp: number;
  id: number;
  process_instance_id: number;

  task_definition_name?: string;
  user_id?: number;
  username?: string;
}

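The new Task, TaskIds, and ProcessInstanceLogEntry interfaces replace most uses of the looser ProcessInstanceTask shape on the frontend. A minimal sketch of how a consumer might bucket tasks by state with these types follows; partitionTasksByState is a hypothetical helper (the real bucketing happens in getTaskIds in ProcessInstanceShow further down), and the import path is an assumption about where the interfaces module sits relative to the caller.

// Illustrative sketch only, assuming the interfaces declared above.
import { Task, TaskIds } from './interfaces';

function partitionTasksByState(tasks: Task[]): TaskIds {
  const taskIds: TaskIds = { completed: [], readyOrWaiting: [] };
  tasks.forEach((task: Task) => {
    // state mirrors the SpiffWorkflow task state serialized by the backend.
    if (task.state === 'COMPLETED') {
      taskIds.completed.push(task);
    }
    if (task.state === 'READY' || task.state === 'WAITING') {
      taskIds.readyOrWaiting.push(task);
    }
  });
  return taskIds;
}
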
@@ -73,7 +73,7 @@ export default function AdminRoutes() {
        element={<ProcessInstanceShow variant="for-me" />}
      />
      <Route
        path="process-instances/for-me/:process_model_id/:process_instance_id/:spiff_step"
        path="process-instances/for-me/:process_model_id/:process_instance_id/:to_task_guid"
        element={<ProcessInstanceShow variant="for-me" />}
      />
      <Route

@@ -81,7 +81,7 @@ export default function AdminRoutes() {
        element={<ProcessInstanceShow variant="all" />}
      />
      <Route
        path="process-instances/:process_model_id/:process_instance_id/:spiff_step"
        path="process-instances/:process_model_id/:process_instance_id/:to_task_guid"
        element={<ProcessInstanceShow variant="all" />}
      />
      <Route

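Both process instance show routes now end in :to_task_guid rather than :spiff_step, so components resolve the "time travel" target from a task guid. A minimal sketch of reading that parameter follows; useToTaskGuid is a hypothetical hook name, and it assumes react-router-dom v6 as used elsewhere in this diff.

// Illustrative sketch only.
import { useParams } from 'react-router-dom';

// Returns the guid of the task being viewed, or null on the plain process instance route.
function useToTaskGuid(): string | null {
  const params = useParams();
  return params.to_task_guid ?? null;
}
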
@@ -1,7 +1,7 @@
import { useEffect, useState } from 'react';
// @ts-ignore
import { Table, Tabs, TabList, Tab } from '@carbon/react';
import { useParams, useSearchParams } from 'react-router-dom';
import { Link, useParams, useSearchParams } from 'react-router-dom';
import PaginationForTable from '../components/PaginationForTable';
import ProcessBreadcrumb from '../components/ProcessBreadcrumb';
import {

@@ -10,6 +10,7 @@ import {
} from '../helpers';
import HttpService from '../services/HttpService';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import { ProcessInstanceLogEntry } from '../interfaces';

type OwnProps = {
  variant: string;

@@ -50,25 +51,25 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) {
    isDetailedView,
  ]);

  const getTableRow = (row: any) => {
  const getTableRow = (logEntry: ProcessInstanceLogEntry) => {
    const tableRow = [];
    const taskNameCell = (
      <td>
        {row.task_definition_name ||
          (row.bpmn_task_type === 'StartEvent' ? 'Process Started' : '') ||
          (row.bpmn_task_type === 'EndEvent' ? 'Process Ended' : '')}
        {logEntry.task_definition_name ||
          (logEntry.bpmn_task_type === 'StartEvent' ? 'Process Started' : '') ||
          (logEntry.bpmn_task_type === 'EndEvent' ? 'Process Ended' : '')}
      </td>
    );
    const bpmnProcessCell = (
      <td>
        {row.bpmn_process_definition_name ||
          row.bpmn_process_definition_identifier}
        {logEntry.bpmn_process_definition_name ||
          logEntry.bpmn_process_definition_identifier}
      </td>
    );
    if (isDetailedView) {
      tableRow.push(
        <>
          <td data-qa="paginated-entity-id">{row.id}</td>
          <td data-qa="paginated-entity-id">{logEntry.id}</td>
          {bpmnProcessCell}
          {taskNameCell}
        </>

@@ -84,24 +85,44 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) {
    if (isDetailedView) {
      tableRow.push(
        <>
          <td>{row.bpmn_task_type}</td>
          <td>{row.event_type}</td>
          <td>{logEntry.bpmn_task_type}</td>
          <td>{logEntry.event_type}</td>
          <td>
            {row.username || (
            {logEntry.username || (
              <span className="system-user-log-entry">system</span>
            )}
          </td>
        </>
      );
    }
    tableRow.push(<td>{convertSecondsToFormattedDateTime(row.timestamp)}</td>);
    return <tr key={row.id}>{tableRow}</tr>;

    let timestampComponent = (
      <td>{convertSecondsToFormattedDateTime(logEntry.timestamp)}</td>
    );
    if (logEntry.spiff_task_guid) {
      timestampComponent = (
        <td>
          <Link
            data-qa="process-instance-show-link"
            to={`${processInstanceShowPageBaseUrl}/${logEntry.process_instance_id}/${logEntry.spiff_task_guid}`}
            title="View state when task was completed"
          >
            {convertSecondsToFormattedDateTime(logEntry.timestamp)}
          </Link>
        </td>
      );
    }
    tableRow.push(timestampComponent);

    return <tr key={logEntry.id}>{tableRow}</tr>;
  };

  const buildTable = () => {
    const rows = processInstanceLogs.map((row) => {
      return getTableRow(row);
    });
    const rows = processInstanceLogs.map(
      (logEntry: ProcessInstanceLogEntry) => {
        return getTableRow(logEntry);
      }
    );

    const tableHeaders = [];
    if (isDetailedView) {

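In the detailed log view, the timestamp cell now links to the new task-scoped process instance view whenever the entry carries a spiff_task_guid. The URL shape can be isolated as below; taskCompletionViewUrl is a hypothetical helper, and the base URL is passed in explicitly rather than read from component scope.

// Illustrative sketch only, assuming the ProcessInstanceLogEntry interface added earlier in this diff.
import { ProcessInstanceLogEntry } from './interfaces';

function taskCompletionViewUrl(
  processInstanceShowPageBaseUrl: string,
  logEntry: ProcessInstanceLogEntry
): string | null {
  // Only log entries tied to a spiff task can be "time travelled" to.
  if (!logEntry.spiff_task_guid) {
    return null;
  }
  return `${processInstanceShowPageBaseUrl}/${logEntry.process_instance_id}/${logEntry.spiff_task_guid}`;
}
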
@@ -7,12 +7,10 @@ import {
  useSearchParams,
} from 'react-router-dom';
import {
  CaretRight,
  TrashCan,
  StopOutline,
  PauseOutline,
  PlayOutline,
  CaretLeft,
  InProgress,
  Checkmark,
  Warning,

@@ -42,11 +40,14 @@ import {
import ButtonWithConfirmation from '../components/ButtonWithConfirmation';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import {
  EventDefinition,
  PermissionsToCheck,
  ProcessData,
  ProcessInstance,
  ProcessInstanceMetadata,
  ProcessInstanceTask,
  Task,
  TaskDefinitionPropertiesJson,
  TaskIds,
} from '../interfaces';
import { usePermissionFetcher } from '../hooks/PermissionService';
import ProcessInstanceClass from '../classes/ProcessInstanceClass';

@@ -64,10 +65,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {

  const [processInstance, setProcessInstance] =
    useState<ProcessInstance | null>(null);
  const [tasks, setTasks] = useState<ProcessInstanceTask[] | null>(null);
  const [tasks, setTasks] = useState<Task[] | null>(null);
  const [tasksCallHadError, setTasksCallHadError] = useState<boolean>(false);
  const [taskToDisplay, setTaskToDisplay] =
    useState<ProcessInstanceTask | null>(null);
  const [taskToDisplay, setTaskToDisplay] = useState<Task | null>(null);
  const [taskToTimeTravelTo, setTaskToTimeTravelTo] = useState<Task | null>(
    null
  );
  const [taskDataToDisplay, setTaskDataToDisplay] = useState<string>('');
  const [showTaskDataLoading, setShowTaskDataLoading] =
    useState<boolean>(false);

@@ -127,10 +130,23 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
  }

  useEffect(() => {
    if (permissionsLoaded) {
    if (!permissionsLoaded) {
      return undefined;
    }
    const processTaskFailure = () => {
      setTasksCallHadError(true);
    };
    const processTasksSuccess = (results: Task[]) => {
      if (params.to_task_guid) {
        const matchingTask = results.find(
          (task: Task) => task.guid === params.to_task_guid
        );
        if (matchingTask) {
          setTaskToTimeTravelTo(matchingTask);
        }
      }
      setTasks(results);
    };
    let queryParams = '';
    const processIdentifier = searchParams.get('process_identifier');
    if (processIdentifier) {

@@ -144,9 +160,13 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
      path: `${apiPath}/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`,
      successCallback: setProcessInstance,
    });
    let taskParams = '?all_tasks=true&most_recent_tasks_only=true';
    if (typeof params.spiff_step !== 'undefined') {
      taskParams = `${taskParams}&spiff_step=${params.spiff_step}`;
    let taskParams = '?most_recent_tasks_only=true';
    if (typeof params.to_task_guid !== 'undefined') {
      taskParams = `${taskParams}&to_task_guid=${params.to_task_guid}`;
    }
    const bpmnProcessGuid = searchParams.get('bpmn_process_guid');
    if (bpmnProcessGuid) {
      taskParams = `${taskParams}&bpmn_process_guid=${bpmnProcessGuid}`;
    }
    let taskPath = '';
    if (ability.can('GET', taskListPath)) {

@@ -155,13 +175,13 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
    if (taskPath) {
      HttpService.makeCallToBackend({
        path: taskPath,
        successCallback: setTasks,
        successCallback: processTasksSuccess,
        failureCallback: processTaskFailure,
      });
    } else {
      setTasksCallHadError(true);
    }
  }
    return undefined;
  }, [
    targetUris,
    params,

@@ -211,21 +231,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
  };

  const getTaskIds = () => {
    const taskIds = { completed: [], readyOrWaiting: [] };
    const taskIds: TaskIds = { completed: [], readyOrWaiting: [] };
    if (tasks) {
      const callingSubprocessId = searchParams.get('call_activity_task_id');
      tasks.forEach(function getUserTasksElement(task: ProcessInstanceTask) {
        if (
          callingSubprocessId &&
          callingSubprocessId !== task.calling_subprocess_task_id
        ) {
          return null;
        }
      tasks.forEach(function getUserTasksElement(task: Task) {
        if (task.state === 'COMPLETED') {
          (taskIds.completed as any).push(task);
          taskIds.completed.push(task);
        }
        if (task.state === 'READY' || task.state === 'WAITING') {
          (taskIds.readyOrWaiting as any).push(task);
          taskIds.readyOrWaiting.push(task);
        }
        return null;
      });

@@ -233,31 +246,29 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
    return taskIds;
  };

  const currentSpiffStep = () => {
    if (processInstance && typeof params.spiff_step === 'undefined') {
      return processInstance.spiff_step || 0;
  const currentToTaskGuid = () => {
    if (taskToTimeTravelTo) {
      return taskToTimeTravelTo.guid;
    }

    return Number(params.spiff_step);
    return null;
  };

  const showingFirstSpiffStep = () => {
    return currentSpiffStep() === 1;
  // right now this just assume if taskToTimeTravelTo was passed in then
  // this cannot be the active task.
  // we may need a better way to figure this out.
  const showingActiveTask = () => {
    return !taskToTimeTravelTo;
  };

  const showingLastSpiffStep = () => {
    return processInstance && currentSpiffStep() === processInstance.spiff_step;
  };

  const spiffStepLink = (label: any, spiffStep: number) => {
  const completionViewLink = (label: any, taskGuid: string) => {
    const processIdentifier = searchParams.get('process_identifier');
    const callActivityTaskId = searchParams.get('call_activity_task_id');
    const callActivityTaskId = searchParams.get('bpmn_process_guid');
    const queryParamArray = [];
    if (processIdentifier) {
      queryParamArray.push(`process_identifier=${processIdentifier}`);
    }
    if (callActivityTaskId) {
      queryParamArray.push(`call_activity_task_id=${callActivityTaskId}`);
      queryParamArray.push(`bpmn_process_guid=${callActivityTaskId}`);
    }
    let queryParams = '';
    if (queryParamArray.length > 0) {

@@ -268,37 +279,21 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
      <Link
        reloadDocument
        data-qa="process-instance-step-link"
        to={`${processInstanceShowPageBaseUrl}/${spiffStep}${queryParams}`}
        to={`${processInstanceShowPageBaseUrl}/${taskGuid}${queryParams}`}
      >
        {label}
      </Link>
    );
  };

  const previousStepLink = () => {
    if (showingFirstSpiffStep()) {
      return null;
    }

    return spiffStepLink(<CaretLeft />, currentSpiffStep() - 1);
  };

  const nextStepLink = () => {
    if (showingLastSpiffStep()) {
      return null;
    }

    return spiffStepLink(<CaretRight />, currentSpiffStep() + 1);
  };

  const returnToLastSpiffStep = () => {
  const returnToProcessInstance = () => {
    window.location.href = processInstanceShowPageBaseUrl;
  };

  const resetProcessInstance = () => {
    HttpService.makeCallToBackend({
      path: `${targetUris.processInstanceResetPath}/${currentSpiffStep()}`,
      successCallback: returnToLastSpiffStep,
      path: `${targetUris.processInstanceResetPath}/${currentToTaskGuid()}`,
      successCallback: returnToProcessInstance,
      httpMethod: 'POST',
    });
  };

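resetProcessInstance now posts to the reset path with the guid returned by currentToTaskGuid() instead of a spiff step number. A minimal sketch of that call shape, using the same HttpService wrapper seen throughout this file, follows; resetToTask and its parameters are illustrative names only, and the import path is an assumption.

// Illustrative sketch only.
import HttpService from '../services/HttpService';

function resetToTask(
  processInstanceResetPath: string,
  taskGuid: string,
  onSuccess: () => void
) {
  HttpService.makeCallToBackend({
    // The reset endpoint now takes the guid of the task to rewind to.
    path: `${processInstanceResetPath}/${taskGuid}`,
    httpMethod: 'POST',
    successCallback: onSuccess,
  });
}
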
@@ -509,7 +504,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
    return <div />;
  };

  const processTaskResult = (result: ProcessInstanceTask) => {
  const processTaskResult = (result: Task) => {
    if (result == null) {
      setTaskDataToDisplay('');
    } else {

@@ -518,15 +513,15 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
    setShowTaskDataLoading(false);
  };

  const initializeTaskDataToDisplay = (task: ProcessInstanceTask | null) => {
  const initializeTaskDataToDisplay = (task: Task | null) => {
    if (
      task &&
      task.state === 'COMPLETED' &&
      (task.state === 'COMPLETED' || task.state === 'READY') &&
      ability.can('GET', targetUris.processInstanceTaskDataPath)
    ) {
      setShowTaskDataLoading(true);
      HttpService.makeCallToBackend({
        path: `${targetUris.processInstanceTaskDataPath}/${task.task_spiff_step}`,
        path: `${targetUris.processInstanceTaskDataPath}/${task.guid}`,
        httpMethod: 'GET',
        successCallback: processTaskResult,
        failureCallback: (error: any) => {

@@ -577,13 +572,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
      successCallback: handleProcessDataShowResponse,
    });
  } else if (tasks) {
    const matchingTask: any = tasks.find((task: any) => {
      const callingSubprocessId = searchParams.get('call_activity_task_id');
    const matchingTask: Task | undefined = tasks.find((task: Task) => {
      return (
        (!callingSubprocessId ||
          callingSubprocessId === task.calling_subprocess_task_id) &&
        task.name === shapeElement.id &&
        bpmnProcessIdentifiers.includes(task.process_identifier)
        task.bpmn_identifier === shapeElement.id &&
        bpmnProcessIdentifiers.includes(
          task.bpmn_process_definition_identifier
        )
      );
    });
    if (matchingTask) {

@@ -600,7 +594,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {

  const getTaskById = (taskId: string) => {
    if (tasks !== null) {
      return tasks.find((task: any) => task.id === taskId);
      return tasks.find((task: Task) => task.guid === taskId) || null;
    }
    return null;
  };

@@ -609,81 +603,88 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
    console.log('result', result);
  };

  const getParentTaskFromTask = (task: Task) => {
    return task.properties_json.parent;
  };

  const createScriptUnitTest = () => {
    if (taskToDisplay) {
      const taskToUse: any = taskToDisplay;
      const previousTask: any = getTaskById(taskToUse.parent);
      const previousTask: Task | null = getTaskById(
        getParentTaskFromTask(taskToDisplay)
      );
      HttpService.makeCallToBackend({
        path: `/process-models/${modifiedProcessModelId}/script-unit-tests`,
        httpMethod: 'POST',
        successCallback: processScriptUnitTestCreateResult,
        postBody: {
          bpmn_task_identifier: taskToUse.name,
          input_json: previousTask.data,
          expected_output_json: taskToUse.data,
          bpmn_task_identifier: taskToDisplay.bpmn_identifier,
          input_json: previousTask ? previousTask.data : '',
          expected_output_json: taskToDisplay.data,
        },
      });
    }
  };

  const isCurrentTask = (task: any) => {
  const isActiveTask = (task: Task) => {
    const subprocessTypes = [
      'Subprocess',
      'Call Activity',
      'CallActivity',
      'Transactional Subprocess',
    ];
    return (
      (task.state === 'WAITING' &&
        subprocessTypes.filter((t) => t === task.type).length > 0) ||
        subprocessTypes.filter((t) => t === task.typename).length > 0) ||
      task.state === 'READY'
    );
  };

  const canEditTaskData = (task: any) => {
  const canEditTaskData = (task: Task) => {
    return (
      processInstance &&
      ability.can('PUT', targetUris.processInstanceTaskDataPath) &&
      isCurrentTask(task) &&
      isActiveTask(task) &&
      processInstance.status === 'suspended' &&
      showingLastSpiffStep()
      showingActiveTask()
    );
  };

  const canSendEvent = (task: any) => {
  const canSendEvent = (task: Task) => {
    // We actually could allow this for any waiting events
    const taskTypes = ['Event Based Gateway'];
    return (
      processInstance &&
      processInstance.status === 'waiting' &&
      ability.can('POST', targetUris.processInstanceSendEventPath) &&
      taskTypes.filter((t) => t === task.type).length > 0 &&
      taskTypes.filter((t) => t === task.typename).length > 0 &&
      task.state === 'WAITING' &&
      showingLastSpiffStep()
      showingActiveTask()
    );
  };

  const canCompleteTask = (task: any) => {
  const canCompleteTask = (task: Task) => {
    return (
      processInstance &&
      processInstance.status === 'suspended' &&
      ability.can('POST', targetUris.processInstanceCompleteTaskPath) &&
      isCurrentTask(task) &&
      showingLastSpiffStep()
      isActiveTask(task) &&
      showingActiveTask()
    );
  };

  const canResetProcess = (task: any) => {
    return (
      ability.can('POST', targetUris.processInstanceResetPath) &&
      processInstance &&
      processInstance.status === 'suspended' &&
      task.state === 'READY' &&
      !showingLastSpiffStep()
    );
  const canResetProcess = (_task: Task) => {
    // disabling this feature for now
    return false;
    // return (
    //   ability.can('POST', targetUris.processInstanceResetPath) &&
    //   processInstance &&
    //   processInstance.status === 'suspended' &&
    //   task.state === 'READY' &&
    //   !showingActiveTask()
    // );
  };

  const getEvents = (task: any) => {
    const handleMessage = (eventDefinition: any) => {
  const getEvents = (task: Task) => {
    const handleMessage = (eventDefinition: EventDefinition) => {
      if (eventDefinition.typename === 'MessageEventDefinition') {
        const newEvent = eventDefinition;
        delete newEvent.message_var;

@@ -693,7 +694,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
      return eventDefinition;
    };
    if (task.event_definition && task.event_definition.event_definitions)
      return task.event_definition.event_definitions.map((e: any) =>
      return task.event_definition.event_definitions.map((e: EventDefinition) =>
        handleMessage(e)
      );
    if (task.event_definition) return [handleMessage(task.event_definition)];

@@ -717,7 +718,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
    setEditingTaskData(false);
    const dataObject = taskDataStringToObject(taskDataToDisplay);
    if (taskToDisplay) {
      const taskToDisplayCopy: ProcessInstanceTask = {
      const taskToDisplayCopy: Task = {
        ...taskToDisplay,
        data: dataObject,
      }; // spread operator

@@ -730,13 +731,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
    if (!taskToDisplay) {
      return;
    }
    console.log('saveTaskData');
    removeError();

    // taskToUse is copy of taskToDisplay, with taskDataToDisplay in data attribute
    const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay };
    const taskToUse: Task = { ...taskToDisplay, data: taskDataToDisplay };
    HttpService.makeCallToBackend({
      path: `${targetUris.processInstanceTaskDataPath}/${taskToUse.id}`,
      path: `${targetUris.processInstanceTaskDataPath}/${taskToUse.guid}`,
      httpMethod: 'PUT',
      successCallback: saveTaskDataResult,
      failureCallback: addError,

@@ -759,20 +759,21 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
  };

  const completeTask = (execute: boolean) => {
    const taskToUse: any = taskToDisplay;
    if (taskToDisplay) {
      HttpService.makeCallToBackend({
        path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToUse.id}`,
        path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToDisplay.guid}`,
        httpMethod: 'POST',
        successCallback: returnToLastSpiffStep,
        successCallback: returnToProcessInstance,
        postBody: { execute },
      });
    }
  };

  const taskDisplayButtons = (task: any) => {
  const taskDisplayButtons = (task: Task) => {
    const buttons = [];

    if (
      task.type === 'Script Task' &&
      task.typename === 'Script Task' &&
      ability.can('PUT', targetUris.processModelShowPath)
    ) {
      buttons.push(

@@ -785,11 +786,15 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
      );
    }

    if (task.type === 'Call Activity') {
    if (task.typename === 'CallActivity') {
      console.log('task', task);
      const taskDefinitionPropertiesJson: TaskDefinitionPropertiesJson =
        task.task_definition_properties_json;
      console.log('taskDefinitionPropertiesJson', taskDefinitionPropertiesJson);
      buttons.push(
        <Link
          data-qa="go-to-call-activity-result"
          to={`${window.location.pathname}?process_identifier=${task.call_activity_process_identifier}&call_activity_task_id=${task.id}`}
          to={`${window.location.pathname}?process_identifier=${taskDefinitionPropertiesJson.spec}&bpmn_process_guid=${task.guid}`}
          target="_blank"
        >
          View Call Activity Diagram

@@ -971,12 +976,15 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
  };

  const taskUpdateDisplayArea = () => {
    const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay };
    if (!taskToDisplay) {
      return null;
    }
    const taskToUse: Task = { ...taskToDisplay, data: taskDataToDisplay };
    const candidateEvents: any = getEvents(taskToUse);
    if (taskToDisplay) {
      let taskTitleText = taskToUse.id;
      if (taskToUse.title) {
        taskTitleText += ` (${taskToUse.title})`;
    let taskTitleText = taskToUse.guid;
    if (taskToUse.bpmn_name) {
      taskTitleText += ` (${taskToUse.bpmn_name})`;
    }
    return (
      <Modal

@@ -985,18 +993,17 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
        onRequestClose={handleTaskDataDisplayClose}
      >
        <Stack orientation="horizontal" gap={2}>
          <span title={taskTitleText}>{taskToUse.name}</span> (
          {taskToUse.type}
          <span title={taskTitleText}>{taskToUse.bpmn_identifier}</span> (
          {taskToUse.typename}
          ): {taskToUse.state}
          {taskDisplayButtons(taskToUse)}
        </Stack>
        {taskToUse.task_spiff_step ? (
        {taskToUse.state === 'COMPLETED' ? (
          <div>
            <Stack orientation="horizontal" gap={2}>
              Task completed at step:{' '}
              {spiffStepLink(
                `${taskToUse.task_spiff_step}`,
                taskToUse.task_spiff_step
              {completionViewLink(
                'View process instance at the time when this task was active.',
                taskToUse.guid
              )}
            </Stack>
            <br />

@@ -1012,23 +1019,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
    return null;
  };

  const stepsElement = () => {
    if (!processInstance) {
      return null;
    }
    return (
      <Grid condensed fullWidth>
        <Column sm={3} md={3} lg={3}>
          <Stack orientation="horizontal" gap={3} className="smaller-text">
            {previousStepLink()}
            Step {currentSpiffStep()} of {processInstance.spiff_step}
            {nextStepLink()}
          </Stack>
        </Column>
      </Grid>
    );
  };

  const buttonIcons = () => {
    if (!processInstance) {
      return null;

@@ -1063,6 +1053,39 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
    return elements;
  };

  const viewMostRecentStateComponent = () => {
    if (!taskToTimeTravelTo) {
      return null;
    }
    const title = `${taskToTimeTravelTo.id}: ${taskToTimeTravelTo.guid}: ${taskToTimeTravelTo.bpmn_identifier}`;
    return (
      <>
        <Grid condensed fullWidth>
          <Column md={8} lg={16} sm={4}>
            <p>
              Viewing process instance at the time when{' '}
              <span title={title}>
                <strong>
                  {taskToTimeTravelTo.bpmn_name ||
                    taskToTimeTravelTo.bpmn_identifier}
                </strong>
              </span>{' '}
              was active.{' '}
              <Link
                reloadDocument
                data-qa="process-instance-view-active-task-link"
                to={processInstanceShowPageBaseUrl}
              >
                View current process instance state.
              </Link>
            </p>
          </Column>
        </Grid>
        <br />
      </>
    );
  };

  if (processInstance && (tasks || tasksCallHadError)) {
    const taskIds = getTaskIds();
    const processModelId = unModifyProcessIdentifierForPathParam(

@@ -1116,8 +1139,8 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
        {taskUpdateDisplayArea()}
        {processDataDisplayArea()}
        {processInstanceMetadataArea()}
        {stepsElement()}
        <br />
        {viewMostRecentStateComponent()}
        <ReactDiagramEditor
          processModelId={processModelId || ''}
          diagramXML={processInstance.bpmn_xml_file_contents || ''}