fallout from recent changes to Main.

Dan 2023-04-20 16:55:13 -04:00
parent a6adb98a19
commit 259290846d
7 changed files with 34 additions and 31 deletions

View File

@@ -18,13 +18,13 @@ def setup_database_uri(app: Flask) -> None:
     if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
         database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
         if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "sqlite":
-            app.config["SQLALCHEMY_DATABASE_URI"] = (
-                f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
-            )
+            app.config[
+                "SQLALCHEMY_DATABASE_URI"
+            ] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
         elif app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "postgres":
-            app.config["SQLALCHEMY_DATABASE_URI"] = (
-                f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
-            )
+            app.config[
+                "SQLALCHEMY_DATABASE_URI"
+            ] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
         else:
             # use pswd to trick flake8 with hardcoded passwords
             db_pswd = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD")
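
For orientation only (not part of the commit), a minimal sketch of the URIs this reformatted block produces, assuming ENV_IDENTIFIER is "local_development", no explicit SPIFFWORKFLOW_BACKEND_DATABASE_URI is set, and a placeholder instance path:

```python
# Illustrative only: the URI shapes computed by setup_database_uri above.
env_identifier = "local_development"  # stand-in for app.config["ENV_IDENTIFIER"]
instance_path = "/app/instance"       # stand-in for Flask's app.instance_path
database_name = f"spiffworkflow_backend_{env_identifier}"

sqlite_uri = f"sqlite:///{instance_path}/db_{env_identifier}.sqlite3"
postgres_uri = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"

print(sqlite_uri)    # sqlite:////app/instance/db_local_development.sqlite3
print(postgres_uri)  # postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/spiffworkflow_backend_local_development
```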

View File

@@ -127,9 +127,9 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     def serialized_with_metadata(self) -> dict[str, Any]:
         process_instance_attributes = self.serialized
         process_instance_attributes["process_metadata"] = self.process_metadata
-        process_instance_attributes["process_model_with_diagram_identifier"] = (
-            self.process_model_with_diagram_identifier
-        )
+        process_instance_attributes[
+            "process_model_with_diagram_identifier"
+        ] = self.process_model_with_diagram_identifier
         return process_instance_attributes
 
     @property

View File

@@ -124,7 +124,7 @@ def process_instance_run(
     processor = None
     try:
-        processor = ProcessInstanceService.run_process_intance_with_processor(process_instance)
+        processor = ProcessInstanceService.run_process_instance_with_processor(process_instance)
     except (
         ApiError,
         ProcessInstanceIsNotEnqueuedError,

View File

@@ -423,9 +423,9 @@ class ProcessInstanceProcessor:
         tld.process_instance_id = process_instance_model.id
 
         # we want this to be the fully qualified path to the process model including all group subcomponents
-        current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
-            f"{process_instance_model.process_model_identifier}"
-        )
+        current_app.config[
+            "THREAD_LOCAL_DATA"
+        ].process_model_identifier = f"{process_instance_model.process_model_identifier}"
 
         self.process_instance_model = process_instance_model
         self.process_model_service = ProcessModelService()
@@ -585,9 +585,9 @@ class ProcessInstanceProcessor:
                 bpmn_subprocess_definition.bpmn_identifier
             ] = bpmn_process_definition_dict
             spiff_bpmn_process_dict["subprocess_specs"][bpmn_subprocess_definition.bpmn_identifier]["task_specs"] = {}
-            bpmn_subprocess_definition_bpmn_identifiers[bpmn_subprocess_definition.id] = (
-                bpmn_subprocess_definition.bpmn_identifier
-            )
+            bpmn_subprocess_definition_bpmn_identifiers[
+                bpmn_subprocess_definition.id
+            ] = bpmn_subprocess_definition.bpmn_identifier
 
         task_definitions = TaskDefinitionModel.query.filter(
             TaskDefinitionModel.bpmn_process_definition_id.in_(  # type: ignore

View File

@@ -115,10 +115,14 @@ class ProcessInstanceService:
             .filter(ProcessInstanceModel.id.in_(process_instance_ids_to_check))  # type: ignore
             .all()
         )
+        execution_strategy_name = current_app.config[
+            "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND"
+        ]
         for process_instance in records:
             current_app.logger.info(f"Processing process_instance {process_instance.id}")
             try:
-                cls.run_process_intance_with_processor(process_instance, status_value=status_value)
+                cls.run_process_instance_with_processor(process_instance, status_value=status_value,
+                    execution_strategy_name=execution_strategy_name)
             except ProcessInstanceIsAlreadyLockedError:
                 continue
             except Exception as e:
@@ -130,8 +134,9 @@ class ProcessInstanceService:
                 current_app.logger.error(error_message)
 
     @classmethod
-    def run_process_intance_with_processor(
-        cls, process_instance: ProcessInstanceModel, status_value: Optional[str] = None
+    def run_process_instance_with_processor(
+        cls, process_instance: ProcessInstanceModel, status_value: Optional[str] = None,
+        execution_strategy_name = None
     ) -> Optional[ProcessInstanceProcessor]:
         processor = None
         with ProcessInstanceQueueService.dequeued(process_instance):
@@ -142,9 +147,6 @@ class ProcessInstanceService:
             db.session.refresh(process_instance)
             if status_value is None or process_instance.status == status_value:
-                execution_strategy_name = current_app.config[
-                    "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND"
-                ]
                 processor.do_engine_steps(save=True, execution_strategy_name=execution_strategy_name)
 
         return processor
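
Taken together, the three hunks in this file rename run_process_intance_with_processor to run_process_instance_with_processor and hoist the execution-strategy lookup out of that method into the waiting-instance loop, which now passes the strategy in explicitly. A rough, self-contained sketch of the resulting call shape; only the config key and the method and parameter names come from the diff, everything else is a stand-in:

```python
# Illustrative sketch of the parameter flow after this commit (not the real service classes).
from typing import Any
from typing import Optional

CONFIG = {"SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND": "greedy"}  # value is a placeholder


def run_process_instance_with_processor(
    process_instance: Any,
    status_value: Optional[str] = None,
    execution_strategy_name: Optional[str] = None,
) -> None:
    # The method no longer reads the strategy from app config itself; it simply
    # forwards whatever the caller selected into the engine-step call.
    print(f"running {process_instance} (status_value={status_value}) with strategy {execution_strategy_name}")


def do_waiting(records: list[Any], status_value: str = "waiting") -> None:
    # The caller resolves the configured default background strategy once...
    execution_strategy_name = CONFIG["SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND"]
    for process_instance in records:
        # ...and threads it through every per-instance run.
        run_process_instance_with_processor(
            process_instance, status_value=status_value, execution_strategy_name=execution_strategy_name
        )


do_waiting(["process-instance-1", "process-instance-2"])
```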

View File

@@ -4,9 +4,6 @@ import copy
 import time
 from abc import abstractmethod
 from typing import Callable
-from typing import List
-from typing import Optional
-from typing import Set
 from uuid import UUID
 
 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
@@ -93,7 +90,7 @@ class ExecutionStrategy:
     def save(self, bpmn_process_instance: BpmnWorkflow) -> None:
         self.delegate.save(bpmn_process_instance)
 
-    def get_ready_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> List[SpiffTask]:
+    def get_ready_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> list[SpiffTask]:
         return list(
             [
                 t
@@ -102,6 +99,7 @@ class ExecutionStrategy:
             ]
         )
 
+
 class TaskModelSavingDelegate(EngineStepDelegate):
     """Engine step delegate that takes care of saving a task model to the database.
 
@@ -302,7 +300,7 @@ class RunUntilServiceTaskExecutionStrategy(ExecutionStrategy):
 class RunUntilUserTaskOrMessageExecutionStrategy(ExecutionStrategy):
     """When you want to run tasks until you hit something to report to the end user."""
 
-    def get_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> List[SpiffTask]:
+    def get_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> list[SpiffTask]:
         return list(
             [
                 t
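
Context for the List to list changes in this file, which pair with the dropped typing imports above: builtin collection types have been usable directly in annotations since Python 3.9 (PEP 585). A generic illustration, not taken from the repo:

```python
# PEP 585: builtin generics replace typing.List / typing.Set in annotations (Python 3.9+).
def ready_task_names(tasks: list[str], seen: set[str]) -> list[str]:
    # Keep only tasks that have not been reported yet; purely illustrative.
    return [t for t in tasks if t not in seen]


print(ready_task_names(["manual_task", "service_task"], {"service_task"}))
```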

View File

@@ -1,4 +1,4 @@
-import React, { useEffect, useMemo, useState } from 'react';
+import React, { useCallback, useEffect, useMemo, useState } from 'react';
 import { useNavigate, useParams } from 'react-router-dom';
 import { fetchEventSource } from '@microsoft/fetch-event-source';
 // @ts-ignore
@@ -40,9 +40,12 @@ export default function ProcessInterstitial() {
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, []); // it is critical to only run this once.
 
-  const shouldRedirect = (myTask: ProcessInstanceTask): boolean => {
-    return myTask && myTask.can_complete && userTasks.includes(myTask.type);
-  };
+  const shouldRedirect = useCallback(
+    (myTask: ProcessInstanceTask): boolean => {
+      return myTask && myTask.can_complete && userTasks.includes(myTask.type);
+    },
+    [userTasks]
+  );
 
   useEffect(() => {
     // Added this seperate use effect so that the timer interval will be cleared if