some more debugging, it kinda works but makes no sense w/ burnettk

This commit is contained in:
jasquat 2024-07-01 15:13:27 -04:00
parent 0dacda0ae9
commit 1a37f3f2c3
No known key found for this signature in database
3 changed files with 71 additions and 53 deletions

View File

@@ -553,6 +553,12 @@ class ProcessInstanceProcessor:
task_service.update_task_model_with_spiff_task(
spiff_task, store_process_instance_events=store_process_instance_events, start_and_end_times=start_and_end_times
)
# new_tm = TaskModel.query.filter(TaskModel.guid.in_(task_model_mapping.keys())).all()
# for tm in new_tm:
# task_model_mapping[tm.guid] = tm
# new_bp = BpmnProcessModel.query.filter(BpmnProcessModel.guid.in_(bpmn_subprocess_mapping.keys())).all()
# for bp in new_bp:
# bpmn_subprocess_mapping[bp.guid] = bp
task_service.save_objects_to_database()
db.session.commit()

View File

@@ -142,8 +142,13 @@ class TaskService:
self.run_started_at: float | None = run_started_at
def save_objects_to_database(self, save_process_instance_events: bool = True) -> None:
# ProcessInstanceModel.query.first()
db.session.bulk_save_objects(self.bpmn_processes.values())
db.session.bulk_save_objects(self.task_models.values())
# bp = BpmnProcessModel.query.all()
# print(f"➡️ ➡️ ➡️ bp: {bp}")
# first_item = list(self.bpmn_processes.values())[0]
# print(f"➡️ ➡️ ➡️ self.bpmn_processes.values()[0]: {first_item}")
# db.session.bulk_save_objects(self.task_models.values())
# db.session.bulk_save_objects(self.task_model_mapping_existing.values())
# db.session.bulk_save_objects(self.task_model_mapping_new.values())
@@ -152,24 +157,23 @@ class TaskService:
# db.session.merge(tm)
# for tm in self.task_models.values():
# db.session.refresh(tm)
# new_tm = [tm.__dict__ for tm in self.task_model_mapping_new.values()]
# # existing_tm = [tm.__dict__ for tm in self.task_model_mapping_existing.values()]
# existing_tm = [
# {k: v for k, v in tm.__dict__.items() if k != "_sa_instance_state"}
# for tm in self.task_model_mapping_existing.values()
# ]
# existing_tm = {k: v for k, v in self.task_model_mapping_existing.values.__dict__.items() if k != "_sa_instance_state"}
new_tm = [tm.__dict__ for tm in self.task_model_mapping_new.values()]
print(f"➡️ ➡️ ➡️ new_tm: {new_tm}")
existing_tm = [
{k: v for k, v in tm.__dict__.items() if k != "_sa_instance_state"}
for tm in self.task_model_mapping_existing.values()
]
# print(f"➡️ ➡️ ➡️ self.task_model_mapping_new: {self.task_model_mapping_new}")
# print(f"➡️ ➡️ ➡️ new_tm: {new_tm}")
# if new_tm:
# db.session.execute(db.insert(TaskModel), new_tm)
# if existing_tm:
# # for t in existing_tm:
# # if "_sa_instance_state" not in t:
# # print(f"➡️ ➡️ ➡️ t: {t}")
# # print(f"➡️ ➡️ ➡️ existing_tm: {existing_tm[0]}")
# # print(f"➡️ ➡️ ➡️ existing_tm: {existing_tm}")
# db.session.execute(db.update(TaskModel), existing_tm)
if new_tm:
db.session.execute(db.insert(TaskModel), new_tm)
if existing_tm:
# for t in existing_tm:
# if "_sa_instance_state" not in t:
# print(f"➡️ ➡️ ➡️ t: {t}")
# print(f"➡️ ➡️ ➡️ existing_tm: {existing_tm[0]}")
# print(f"➡️ ➡️ ➡️ existing_tm: {existing_tm}")
db.session.execute(db.update(TaskModel), existing_tm)
self.task_model_mapping_existing.update(self.task_model_mapping_new)
self.task_model_mapping_new = {}
# new_tm = TaskModel.query.filter(TaskModel.guid.in_(self.task_models.keys())).all()
@@ -251,7 +255,7 @@ class TaskService:
bpmn_process_json_data = self.update_task_data_on_bpmn_process(bpmn_process, bpmn_process_instance=spiff_task.workflow)
if bpmn_process_json_data is not None:
self.json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data
print(f"➡️ ➡️ ➡️ task_model: {task_model}")
# print(f"➡️ ➡️ ➡️ task_model: {task_model}")
self.task_models[task_model.guid] = task_model
if start_and_end_times:
@@ -368,10 +372,11 @@ class TaskService:
task_definition = self.bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][
spiff_task.task_spec.name
]
print(f"➡️ ➡️ ➡️ bpmn_process.id2: {bpmn_process.id}")
# print(f"➡️ ➡️ ➡️ bpmn_process.id2: {bpmn_process.id}")
task_model = TaskModel(
guid=spiff_task_guid,
bpmn_process_id=bpmn_process.id,
# bpmn_process_id=bpmn_process.id,
bpmn_process=bpmn_process,
process_instance_id=self.process_instance.id,
task_definition_id=task_definition.id,
)
@@ -531,7 +536,9 @@ class TaskService:
# TaskModel.query.filter_by(guid=task_id).first()
# task_model = TaskModel.query.filter_by(guid=task_id).first()
# print(f"➡️ ➡️ ➡️ task_model1: {task_model}")
# print(f"➡️ ➡️ ➡️ task_model1: {task_model}")
task_model = self.get_cached_task_model(task_id)
print(f"➡️ ➡️ ➡️ task_model2: {task_model}")
if task_model is None:
task_model = self.__class__._create_task(
bpmn_process,
@@ -542,7 +549,7 @@ class TaskService:
self.task_model_mapping_new[task_model.guid] = task_model
# print(f"➡️ ➡️ ➡️ task_model.guid2: {task_model.guid}")
elif task_id not in self.task_model_mapping_new:
print(f"➡️ ➡️ ➡️ task_model2: {task_model}")
# print(f"➡️ ➡️ ➡️ task_model2: {task_model}")
self.task_model_mapping_existing[task_model.guid] = task_model
self.update_task_model(task_model, spiff_task)
self.task_models[task_model.guid] = task_model
@@ -864,7 +871,8 @@ class TaskService:
print(f"➡️ ➡️ ➡️ bpmn_process.id: {bpmn_process.id}")
task_model = TaskModel(
guid=str(spiff_task.id),
bpmn_process_id=bpmn_process.id,
# bpmn_process_id=bpmn_process.id,
bpmn_process=bpmn_process,
process_instance_id=process_instance.id,
task_definition_id=task_definition.id,
)

View File

@@ -183,32 +183,32 @@ class TestProcessInstanceMigrator(BaseTest):
(process_instance, _bpmn_process_dict_before_import) = self._import_bpmn_json_for_test(
app, "bpmn_multi_instance_task_version_4.json", process_model
)
tasks = (
TaskModel.query.filter_by(process_instance_id=process_instance.id)
.join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id)
.filter(TaskDefinitionModel.bpmn_identifier == "manual_task")
.all()
)
assert len(tasks) == 1
assert tasks[0].state == "WAITING"
Version5.run(process_instance)
db.session.commit()
tasks = (
TaskModel.query.filter_by(process_instance_id=process_instance.id)
.join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id)
.filter(TaskDefinitionModel.bpmn_identifier == "manual_task")
.all()
)
assert len(tasks) == 1
assert tasks[0].state == "STARTED"
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
processor = ProcessInstanceProcessor(process_instance)
# save the processor so it creates the human tasks
processor.save()
self.complete_next_manual_task(processor, execution_mode="synchronous")
self.complete_next_manual_task(processor, execution_mode="synchronous")
assert process_instance.status == ProcessInstanceStatus.complete.value
# tasks = (
# TaskModel.query.filter_by(process_instance_id=process_instance.id)
# .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id)
# .filter(TaskDefinitionModel.bpmn_identifier == "manual_task")
# .all()
# )
# assert len(tasks) == 1
# assert tasks[0].state == "WAITING"
# Version5.run(process_instance)
# db.session.commit()
# tasks = (
# TaskModel.query.filter_by(process_instance_id=process_instance.id)
# .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id)
# .filter(TaskDefinitionModel.bpmn_identifier == "manual_task")
# .all()
# )
# assert len(tasks) == 1
# assert tasks[0].state == "STARTED"
#
# process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
# processor = ProcessInstanceProcessor(process_instance)
# # save the processor so it creates the human tasks
# processor.save()
# self.complete_next_manual_task(processor, execution_mode="synchronous")
# self.complete_next_manual_task(processor, execution_mode="synchronous")
# assert process_instance.status == ProcessInstanceStatus.complete.value
def _import_bpmn_json_for_test(self, app: Flask, bpmn_json_file_name: str, process_model: ProcessModelInfo) -> tuple:
bpmn_json_file = os.path.join(
@@ -222,11 +222,15 @@ class TestProcessInstanceMigrator(BaseTest):
with open(bpmn_json_file) as f:
bpmn_process_dict_before_import = json.loads(f.read())
process_instance = self.create_process_instance_from_process_model(process_model=process_model)
try:
ProcessInstanceProcessor.persist_bpmn_process_dict(
bpmn_process_dict_before_import,
process_instance_model=process_instance,
bpmn_definition_to_task_definitions_mappings={},
)
except Exception as ex:
print(f"➡️ ➡️ ➡️ ex: {ex}")
raise ex
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
# ensure data was imported correctly and is in expected state