some more test stuff w/ burnettk

jasquat 2023-03-16 10:29:15 -04:00
parent bbdac3c586
commit e3513073e9
4 changed files with 48 additions and 42 deletions

poetry.lock

@@ -1894,8 +1894,8 @@ lxml = "*"
 [package.source]
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
-reference = "main"
-resolved_reference = "f162aac43af3af18d1a55186aeccea154fb8b05d"
+reference = "6cad2981712bb61eca23af1adfafce02d3277cb9"
+resolved_reference = "6cad2981712bb61eca23af1adfafce02d3277cb9"

 [[package]]
 name = "SQLAlchemy"

@@ -2274,7 +2274,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.9,<3.12"
-content-hash = "b9ea32912509637f1378d060771de7548d93953aa3db12d6a48098f7dc15205f"
+content-hash = "253dc24203f175f363158329b0303c11044bc1bb400b17189658251cb37029f7"

 [metadata.files]
 alabaster = [

pyproject.toml

@@ -27,7 +27,8 @@ flask-marshmallow = "*"
 flask-migrate = "*"
 flask-restful = "*"
 werkzeug = "*"
-SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
+# SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "f162aac43af3af18d1a55186aeccea154fb8b05d"}
+SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"}
 # SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
 sentry-sdk = "^1.10"
 sphinx-autoapi = "^2.0"
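Note: the dependency pin moves from the floating main branch to an exact SpiffWorkflow commit, which is why reference, resolved_reference, and the lock file's content-hash all change together above. A minimal consistency check, as a sketch: it assumes pyproject.toml and poetry.lock sit in the current directory, and that tomllib is available (stdlib on Python 3.11+; older interpreters can substitute the tomli package).

    import tomllib  # on Python < 3.11: `import tomli as tomllib`

    with open("pyproject.toml", "rb") as f:
        pinned = tomllib.load(f)["tool"]["poetry"]["dependencies"]["SpiffWorkflow"]["rev"]

    with open("poetry.lock", "rb") as f:
        packages = tomllib.load(f)["package"]  # one [[package]] table per dependency

    # the git source in the lock file records the commit poetry actually resolved to
    locked = next(
        p["source"]["resolved_reference"]
        for p in packages
        if p["name"].lower() == "spiffworkflow"
    )
    assert pinned == locked, f"pyproject pins {pinned} but the lock resolved {locked}"

If the assert fires, regenerating the lock file with `poetry lock` brings the two back in sync.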

Python source: NonTaskDataBasedScriptEngineEnvironment / ProcessInstanceProcessor

@ -225,6 +225,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
) -> None: ) -> None:
# TODO: once integrated look at the tests that fail without Box # TODO: once integrated look at the tests that fail without Box
# context is task.data # context is task.data
# import pdb; pdb.set_trace()
Box.convert_to_box(context) Box.convert_to_box(context)
self.state.update(self.globals) self.state.update(self.globals)
self.state.update(external_methods or {}) self.state.update(external_methods or {})
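Note: the environment converts the task-data dict with Box before running the script so BPMN script tasks can use attribute-style references into nested data. A rough illustration of that behavior using the python-box package (SpiffWorkflow bundles its own Box variant, so treat this as an approximation, not the exact API):

    from box import Box  # pip install python-box; stands in for SpiffWorkflow's Box

    context = {"customer": {"name": "Ada", "orders": [1, 2]}}
    boxed = Box(context)

    # scripts can write customer.name instead of context["customer"]["name"]
    assert boxed.customer.name == "Ada"
    boxed.customer.vip = True  # attribute assignment reaches the nested mapping
    assert boxed["customer"]["vip"] is True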
@@ -234,16 +235,20 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
         finally:
             # since the task data is not directly mutated when the script executes, need to determine which keys
             # have been deleted from the environment and remove them from task data if present.
-            context_keys_to_drop = context.keys() - self.state.keys()
             # import pdb; pdb.set_trace()
+            context_keys_to_drop = context.keys() - self.state.keys()
+            # import pdb; pdb.set_trace()

             for key_to_drop in context_keys_to_drop:
                 context.pop(key_to_drop)
+            # import pdb; pdb.set_trace()

             self.state = self.user_defined_state(external_methods)
+            # import pdb; pdb.set_trace()

             # the task data needs to be updated with the current state so data references can be resolved properly.
             # the state will be removed later once the task is completed.
+            import pdb; pdb.set_trace()
             context.update(self.state)

     def user_defined_state(self, external_methods: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
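Note: stripped of the pdb probes, the finally block reconciles two dicts. The script executed against the environment's state rather than directly against context (task.data), so keys the script deleted must also be dropped from the task data, and the surviving state is merged back in so later data references resolve. The core logic in isolation:

    context = {"a": 1, "b": 2, "stale": 3}   # task.data going in
    state = {"a": 1, "b": 2, "new_var": 99}  # environment state after the script ran

    # keys present in task data but deleted from the environment by the script
    for key_to_drop in context.keys() - state.keys():
        context.pop(key_to_drop)

    context.update(state)  # expose current state so data references resolve
    assert context == {"a": 1, "b": 2, "new_var": 99}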
@@ -1038,8 +1043,8 @@ class ProcessInstanceProcessor:
         Expects the save method to commit it.
         """
-        # if self.process_instance_model.bpmn_process_definition_id is not None:
-        #     return None
+        if self.process_instance_model.bpmn_process_definition_id is not None:
+            return None

         # we may have to already process bpmn_definitions if we ever care about the Root task again
         bpmn_dict = self.serialize()
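Note: enabling this guard makes _add_bpmn_json_records idempotent: once the instance already points at a persisted definition, the serialize-and-store path is skipped. The shape of the guard as a standalone sketch (dict stand-ins, not the real SQLAlchemy models):

    def add_bpmn_json_records(process_instance: dict) -> None:
        if process_instance.get("bpmn_process_definition_id") is not None:
            return None  # definitions already persisted; repeated calls are no-ops
        process_instance["bpmn_process_definition_id"] = "definition-1"  # the expensive work

    process_instance = {"bpmn_process_definition_id": None}
    add_bpmn_json_records(process_instance)  # does the work
    add_bpmn_json_records(process_instance)  # skipped by the guard
    assert process_instance["bpmn_process_definition_id"] == "definition-1"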
@@ -1052,40 +1057,40 @@ class ProcessInstanceProcessor:
         else:
             process_instance_data_dict[bpmn_key] = bpmn_dict[bpmn_key]

-        # if self.process_instance_model.bpmn_process_definition_id is not None:
+        # if self.process_instance_model.bpmn_process_definition_id is None:
         self._add_bpmn_process_definitions(bpmn_spec_dict)

-        subprocesses = process_instance_data_dict.pop("subprocesses")
-        bpmn_process_parent, new_task_models, new_json_data_dicts = TaskService.add_bpmn_process(
-            bpmn_process_dict=process_instance_data_dict,
-            process_instance=self.process_instance_model,
-            bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
-            spiff_workflow=self.bpmn_process_instance,
-            serializer=self._serializer,
-        )
-        for subprocess_task_id, subprocess_properties in subprocesses.items():
-            (
-                _bpmn_subprocess,
-                subprocess_new_task_models,
-                subprocess_new_json_data_models,
-            ) = TaskService.add_bpmn_process(
-                bpmn_process_dict=subprocess_properties,
-                process_instance=self.process_instance_model,
-                bpmn_process_parent=bpmn_process_parent,
-                bpmn_process_guid=subprocess_task_id,
-                bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
-                spiff_workflow=self.bpmn_process_instance,
-                serializer=self._serializer,
-            )
-            new_task_models.update(subprocess_new_task_models)
-            new_json_data_dicts.update(subprocess_new_json_data_models)
-        db.session.bulk_save_objects(new_task_models.values())
-
-        TaskService.insert_or_update_json_data_records(new_json_data_dicts)
+        # subprocesses = process_instance_data_dict.pop("subprocesses")
+        # bpmn_process_parent, new_task_models, new_json_data_dicts = TaskService.add_bpmn_process(
+        #     bpmn_process_dict=process_instance_data_dict,
+        #     process_instance=self.process_instance_model,
+        #     bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
+        #     spiff_workflow=self.bpmn_process_instance,
+        #     serializer=self._serializer,
+        # )
+        # for subprocess_task_id, subprocess_properties in subprocesses.items():
+        #     (
+        #         _bpmn_subprocess,
+        #         subprocess_new_task_models,
+        #         subprocess_new_json_data_models,
+        #     ) = TaskService.add_bpmn_process(
+        #         bpmn_process_dict=subprocess_properties,
+        #         process_instance=self.process_instance_model,
+        #         bpmn_process_parent=bpmn_process_parent,
+        #         bpmn_process_guid=subprocess_task_id,
+        #         bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
+        #         spiff_workflow=self.bpmn_process_instance,
+        #         serializer=self._serializer,
+        #     )
+        #     new_task_models.update(subprocess_new_task_models)
+        #     new_json_data_dicts.update(subprocess_new_json_data_models)
+        # db.session.bulk_save_objects(new_task_models.values())
+        #
+        # TaskService.insert_or_update_json_data_records(new_json_data_dicts)

     def save(self) -> None:
         """Saves the current state of this processor to the database."""
-        self._add_bpmn_json_records()
+        # self._add_bpmn_json_records()

         self.process_instance_model.spiff_serializer_version = self.SERIALIZER_VERSION
         complete_states = [TaskState.CANCELLED, TaskState.COMPLETED]
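Note: the block commented out above was the new task-model persistence path: pop "subprocesses" out of the serialized instance, store the top-level BPMN process, then store each subprocess against that parent while accumulating new task rows so everything flushes in one bulk write. A condensed sketch of that traversal with hypothetical stand-ins (the real TaskService.add_bpmn_process returns ORM models and takes the serializer and definition mappings shown above):

    from typing import Any, Dict, Optional, Tuple

    def add_bpmn_process(
        bpmn_process_dict: Dict[str, Any], bpmn_process_parent: Optional[str] = None
    ) -> Tuple[str, Dict[str, Dict[str, Any]]]:
        # stand-in: returns the new process row's guid plus its new task "models"
        guid = bpmn_process_dict["guid"]
        new_tasks = {
            task_guid: {"process": guid, "parent_process": bpmn_process_parent}
            for task_guid in bpmn_process_dict["tasks"]
        }
        return guid, new_tasks

    process_instance_data_dict = {
        "guid": "top",
        "tasks": ["t1"],
        "subprocesses": {"sub-1": {"guid": "sub-1", "tasks": ["t2", "t3"]}},
    }

    # parent first, then each subprocess pointing back at it
    subprocesses = process_instance_data_dict.pop("subprocesses")
    bpmn_process_parent, new_task_models = add_bpmn_process(process_instance_data_dict)
    for subprocess_task_id, subprocess_properties in subprocesses.items():
        _guid, subprocess_new_task_models = add_bpmn_process(subprocess_properties, bpmn_process_parent)
        new_task_models.update(subprocess_new_task_models)  # accumulate for one flush

    # db.session.bulk_save_objects(new_task_models.values())  # single bulk write in the real code
    assert sorted(new_task_models) == ["t1", "t2", "t3"]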
@@ -1546,7 +1551,7 @@ class ProcessInstanceProcessor:
             self._script_engine.environment.revise_state_with_task_data(task)
             return self.spiff_step_details_mapping(task, start, end)

-        # self._add_bpmn_json_records()
+        self._add_bpmn_json_records()

         step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder)
         task_model_delegate = TaskModelSavingDelegate(
task_model_delegate = TaskModelSavingDelegate( task_model_delegate = TaskModelSavingDelegate(

Python source: TaskModelSavingDelegate

@@ -77,8 +77,8 @@ class TaskModelSavingDelegate(EngineStepDelegate):
         Use the bpmn_process_id to do this.
         """
-        return self.process_instance.bpmn_process_id is not None
-        # return True
+        # return self.process_instance.bpmn_process_id is not None
+        return True

     def _update_json_data_dicts_using_list(self, json_data_dict_list: list[Optional[JsonDataDict]]) -> None:
         for json_data_dict in json_data_dict_list:
@@ -106,9 +106,9 @@ class TaskModelSavingDelegate(EngineStepDelegate):
     def did_complete_task(self, spiff_task: SpiffTask) -> None:
         # if self.current_task_model and self.should_update_task_model():
         if self.should_update_task_model():
-            # if spiff_task.task_spec.name == 'top_level_script':
-            #     import pdb; pdb.set_trace()
-            spiff_task.workflow.script_engine.environment.revise_state_with_task_data(spiff_task)
+            if spiff_task.task_spec.name == 'top_level_script':
+                import pdb; pdb.set_trace()
+            # spiff_task.workflow.script_engine.environment.revise_state_with_task_data(spiff_task)
             _bpmn_process, task_model, new_task_models, new_json_data_dicts = (
                 TaskService.find_or_create_task_model_from_spiff_task(
                     spiff_task,
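Note: the debugging change above swaps the unconditional revise_state_with_task_data call for a breakpoint that only fires for the task being investigated. The same conditional-breakpoint trick in isolation, stdlib only (breakpoint() is equivalent to import pdb; pdb.set_trace() and honors the PYTHONBREAKPOINT environment variable):

    from types import SimpleNamespace

    def did_complete_task(spiff_task) -> None:
        if spiff_task.task_spec.name == "top_level_script":
            breakpoint()  # stops only for the task under investigation
        print(f"completed {spiff_task.task_spec.name}")

    # any other task runs through without stopping
    did_complete_task(SimpleNamespace(task_spec=SimpleNamespace(name="manual_task_1")))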