resolved merge conflicts

jasquat 2023-03-28 08:24:53 -04:00
commit fdd6d92506
7 changed files with 14 additions and 12 deletions

View File

@@ -1895,7 +1895,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "f162aac43af3af18d1a55186aeccea154fb8b05d"
resolved_reference = "3c3345c85dd7f3b7112ad04aaa6487abbd2e9414"
[[package]]
name = "SQLAlchemy"

View File

@@ -175,7 +175,7 @@ def task_list_for_my_groups(
def task_data_show(
modified_process_model_identifier: str,
process_instance_id: int,
-task_guid: int = 0,
+task_guid: str,
) -> flask.wrappers.Response:
task_model = TaskModel.query.filter_by(guid=task_guid, process_instance_id=process_instance_id).first()
if task_model is None:
@@ -636,7 +636,7 @@ def _get_spiff_task_from_process_instance(
if processor is None:
processor = ProcessInstanceProcessor(process_instance)
task_uuid = uuid.UUID(task_guid)
-spiff_task = processor.bpmn_process_instance.get_task(task_uuid)
+spiff_task = processor.bpmn_process_instance.get_task_from_id(task_uuid)
if spiff_task is None:
raise (

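The hunks above switch the task lookup from get_task(uuid) to SpiffWorkflow's get_task_from_id(uuid) and declare the route's task_guid parameter as a required str instead of int = 0. A minimal sketch of the resulting lookup pattern; the helper name and the bpmn_process_instance argument are illustrative assumptions, not part of the diff:

from uuid import UUID


def get_spiff_task_by_guid(bpmn_process_instance, task_guid: str):
    # Task guids arrive as strings; convert to a UUID before asking
    # SpiffWorkflow for the task, using get_task_from_id as in the hunks above.
    task_uuid = UUID(task_guid)
    return bpmn_process_instance.get_task_from_id(task_uuid)
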
View File

@@ -1,8 +1,6 @@
"""Process_instance_processor."""
import copy
import _strptime # type: ignore
-from sqlalchemy import or_
-from sqlalchemy import and_
import decimal
import json
import logging
@@ -54,6 +52,8 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ign
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
+from sqlalchemy import and_
+from sqlalchemy import or_
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
@@ -1176,7 +1176,7 @@ class ProcessInstanceProcessor:
"""Mark the task complete optionally executing it."""
spiff_tasks_updated = {}
start_in_seconds = time.time()
-spiff_task = self.bpmn_process_instance.get_task(UUID(task_id))
+spiff_task = self.bpmn_process_instance.get_task_from_id(UUID(task_id))
event_type = ProcessInstanceEventType.task_skipped.value
if execute:
current_app.logger.info(

View File

@@ -256,7 +256,7 @@ class TaskService:
task_data_dict = task_properties.pop("data")
state_int = task_properties["state"]
-spiff_task = spiff_workflow.get_task(UUID(task_id))
+spiff_task = spiff_workflow.get_task_from_id(UUID(task_id))
task_model = TaskModel.query.filter_by(guid=task_id).first()
if task_model is None:

View File

@@ -1,8 +1,8 @@
import logging
import time
-from uuid import UUID
from typing import Callable
from typing import Optional
+from uuid import UUID
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
@@ -83,7 +83,7 @@ class TaskModelSavingDelegate(EngineStepDelegate):
raise Exception("Could not find cached current_task_start_in_seconds. This should never have happend")
task_model.start_in_seconds = self.current_task_start_in_seconds
task_model.end_in_seconds = time.time()
-self.last_completed_spiff_task= spiff_task
+self.last_completed_spiff_task = spiff_task
if self.secondary_engine_step_delegate:
self.secondary_engine_step_delegate.did_complete_task(spiff_task)
@@ -123,7 +123,9 @@ class TaskModelSavingDelegate(EngineStepDelegate):
def _process_spiff_task_parents(self, spiff_task: SpiffTask) -> None:
(parent_subprocess_guid, _parent_subprocess) = TaskService.task_subprocess(spiff_task)
if parent_subprocess_guid is not None:
-spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task(UUID(parent_subprocess_guid))
+spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task(
+UUID(parent_subprocess_guid)
+)
if spiff_task_of_parent_subprocess is not None:
self._update_task_model_with_spiff_task(spiff_task_of_parent_subprocess)

View File

@@ -114,7 +114,7 @@ class TestLoggingService(BaseTest):
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
processor = ProcessInstanceProcessor(process_instance)
human_task_one = process_instance.active_human_tasks[0]
-spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id))
+spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
headers = self.logged_in_headers(with_super_admin_user)

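Both test hunks (this one and the one that follows) resolve the active human task's stored task_id to a SpiffWorkflow task with get_task_from_id before completing it. A condensed sketch of that pattern; the names come from the diff, while the import path and helper name are assumptions:

from uuid import UUID

from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService


def complete_first_human_task(processor, process_instance, user):
    # Look up the SpiffWorkflow task for the first active human task by its
    # stored guid, then complete it with empty form data, as the tests do here.
    human_task = process_instance.active_human_tasks[0]
    spiff_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task.task_id))
    ProcessInstanceService.complete_form_task(processor, spiff_task, {}, user, human_task)
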
View File

@@ -410,7 +410,7 @@ class TestProcessInstanceProcessor(BaseTest):
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
processor = ProcessInstanceProcessor(process_instance)
human_task_one = process_instance.active_human_tasks[0]
-spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id))
+spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
# recreate variables to ensure all bpmn json was recreated from scratch from the db