ensure we delete related items when deleting a process instance w/ burnettk

jasquat 2023-05-26 10:51:24 -04:00
parent 97b22d2930
commit a58a9532fc
9 changed files with 1947 additions and 1947 deletions

File diff suppressed because it is too large

View File

@@ -63,7 +63,9 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
     failure_cause: str = db.Column(db.Text())
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)
-    correlation_rules = relationship("MessageInstanceCorrelationRuleModel", back_populates="message_instance")
+    correlation_rules = relationship(
+        "MessageInstanceCorrelationRuleModel", back_populates="message_instance", cascade="delete"
+    )
     @validates("message_type")
     def validate_message_type(self, key: str, value: Any) -> Any:
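
As a reading aid, here is a minimal standalone sketch (generic SQLAlchemy models and names, not the project's Flask-SQLAlchemy classes) of what cascade="delete" on the relationship buys: deleting a message instance now also deletes its correlation rule rows instead of merely detaching them.

# Illustrative sketch only, not code from this commit: standalone SQLAlchemy
# models showing the effect of cascade="delete" on a relationship.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, relationship, sessionmaker

Base = declarative_base()


class MessageInstance(Base):
    __tablename__ = "message_instance"
    id = Column(Integer, primary_key=True)
    # With cascade="delete", deleting a MessageInstance also deletes its rules;
    # without it, the ORM would only detach them (null out the foreign key).
    correlation_rules = relationship(
        "CorrelationRule", back_populates="message_instance", cascade="delete"
    )


class CorrelationRule(Base):
    __tablename__ = "correlation_rule"
    id = Column(Integer, primary_key=True)
    message_instance_id = Column(Integer, ForeignKey("message_instance.id"))
    name = Column(String(50))
    message_instance = relationship("MessageInstance", back_populates="correlation_rules")


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with sessionmaker(bind=engine)() as session:
    message_instance = MessageInstance()
    rule = CorrelationRule(name="po_number", message_instance=message_instance)
    session.add_all([message_instance, rule])
    session.commit()

    session.delete(message_instance)  # cascades a DELETE to the rule as well
    session.commit()
    assert session.query(CorrelationRule).count() == 0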

View File

@@ -58,19 +58,16 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     process_model_identifier: str = db.Column(db.String(255), nullable=False, index=True)
     process_model_display_name: str = db.Column(db.String(255), nullable=False, index=True)
     process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)  # type: ignore
-    process_initiator = relationship("UserModel")
     bpmn_process_definition_id: int | None = db.Column(
         ForeignKey(BpmnProcessDefinitionModel.id), nullable=True, index=True  # type: ignore
     )
-    bpmn_process_definition = relationship(BpmnProcessDefinitionModel)
     bpmn_process_id: int | None = db.Column(ForeignKey(BpmnProcessModel.id), nullable=True, index=True)  # type: ignore
-    bpmn_process = relationship(BpmnProcessModel, cascade="delete")
-    tasks = relationship("TaskModel", cascade="delete")  # type: ignore
-    process_instance_events = relationship("ProcessInstanceEventModel", cascade="delete")  # type: ignore
     spiff_serializer_version = db.Column(db.String(50), nullable=True)
+    process_initiator = relationship("UserModel")
+    bpmn_process_definition = relationship(BpmnProcessDefinitionModel)
     active_human_tasks = relationship(
         "HumanTaskModel",
         primaryjoin=(
@@ -78,6 +75,10 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
         ),
     )  # type: ignore
+    bpmn_process = relationship(BpmnProcessModel, cascade="delete")
+    tasks = relationship("TaskModel", cascade="delete")  # type: ignore
+    process_instance_events = relationship("ProcessInstanceEventModel", cascade="delete")  # type: ignore
+    process_instance_file_data = relationship("ProcessInstanceFileDataModel", cascade="delete")  # type: ignore
     human_tasks = relationship(
         "HumanTaskModel",
         cascade="delete",
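
With bpmn_process, tasks, process_instance_events, process_instance_file_data, and human_tasks all declaring cascade="delete", deleting a process instance row can lean on the ORM rather than hand-deleting each dependent table. A hypothetical helper along these lines (illustrative only, not code from this commit; the db import path is assumed):

# Hypothetical sketch, not code from this commit. Import paths follow the
# pattern used elsewhere in this diff; the db module path is an assumption.
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel


def delete_process_instance(process_instance_id: int) -> None:
    process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
    if process_instance is None:
        return
    # bpmn_process, tasks, process_instance_events, process_instance_file_data,
    # and human_tasks all declare cascade="delete", so the ORM deletes them too.
    db.session.delete(process_instance)
    db.session.commit()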

View File

@@ -12,8 +12,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 @dataclass
 class ProcessInstanceFileDataModel(SpiffworkflowBaseDBModel):
-    """ProcessInstanceFileDataModel."""
     __tablename__ = "process_instance_file_data"
     id: int = db.Column(db.Integer, primary_key=True)

View File

@@ -1,4 +1,3 @@
-"""Script_attributes_context."""
 from dataclasses import dataclass
 from typing import Optional
@@ -7,8 +6,6 @@ from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 @dataclass
 class ScriptAttributesContext:
-    """ScriptAttributesContext."""
     task: Optional[SpiffTask]
     environment_identifier: str
     process_instance_id: Optional[int]

View File

@@ -1,4 +1,3 @@
-"""Authentication_service."""
 import base64
 import enum
 import json
@@ -16,7 +15,7 @@ from spiffworkflow_backend.models.refresh_token import RefreshTokenModel
 class MissingAccessTokenError(Exception):
-    """MissingAccessTokenError."""
+    pass
 class NotAuthorizedError(Exception):
@@ -35,20 +34,22 @@ class UserNotLoggedInError(Exception):
 class TokenExpiredError(Exception):
-    """TokenExpiredError."""
+    pass
 class TokenInvalidError(Exception):
-    """TokenInvalidError."""
+    pass
 class TokenNotProvidedError(Exception):
     pass
-class AuthenticationProviderTypes(enum.Enum):
-    """AuthenticationServiceProviders."""
+class OpenIdConnectionError(Exception):
+    pass
+class AuthenticationProviderTypes(enum.Enum):
     open_id = "open_id"
     internal = "internal"
@@ -78,8 +79,11 @@ class AuthenticationService:
         """All openid systems provide a mapping of static names to the full path of that endpoint."""
         openid_config_url = f"{cls.server_url()}/.well-known/openid-configuration"
         if name not in AuthenticationService.ENDPOINT_CACHE:
-            response = requests.get(openid_config_url)
-            AuthenticationService.ENDPOINT_CACHE = response.json()
+            try:
+                response = requests.get(openid_config_url)
+                AuthenticationService.ENDPOINT_CACHE = response.json()
+            except requests.exceptions.ConnectionError as ce:
+                raise OpenIdConnectionError(f"Cannot connect to given open id url: {openid_config_url}") from ce
         if name not in AuthenticationService.ENDPOINT_CACHE:
             raise Exception(f"Unknown OpenID Endpoint: {name}. Tried to get from {openid_config_url}")
         return AuthenticationService.ENDPOINT_CACHE.get(name, "")
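
Reduced to a standalone sketch (simplified names and a module-level cache, not the project's AuthenticationService), the pattern above re-raises the network failure as a domain-specific error while keeping the second membership check, since the cache can still lack the requested endpoint after a successful fetch:

# Standalone sketch of the pattern above; names and the module-level cache are
# simplifications, not the project's actual class attributes.
from typing import Any

import requests


class OpenIdConnectionError(Exception):
    pass


ENDPOINT_CACHE: dict[str, Any] = {}


def open_id_endpoint(server_url: str, name: str) -> str:
    openid_config_url = f"{server_url}/.well-known/openid-configuration"
    if not ENDPOINT_CACHE:
        try:
            # A network failure here is re-raised as a domain-specific error so
            # callers do not have to know the fetch uses requests.
            ENDPOINT_CACHE.update(requests.get(openid_config_url).json())
        except requests.exceptions.ConnectionError as ce:
            raise OpenIdConnectionError(f"Cannot connect to given open id url: {openid_config_url}") from ce
    if name not in ENDPOINT_CACHE:
        raise Exception(f"Unknown OpenID Endpoint: {name}. Tried to get from {openid_config_url}")
    return ENDPOINT_CACHE.get(name, "")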

View File

@@ -134,7 +134,6 @@ class MessageService:
     def get_process_instance_for_message_instance(
         message_instance_receive: MessageInstanceModel,
     ) -> ProcessInstanceModel:
-        """Process_message_receive."""
         process_instance_receive: ProcessInstanceModel = ProcessInstanceModel.query.filter_by(
             id=message_instance_receive.process_instance_id
         ).first()
@@ -157,7 +156,6 @@ class MessageService:
         message_model_name: str,
         message_payload: dict,
     ) -> None:
-        """process_message_receive."""
         processor_receive = ProcessInstanceProcessor(process_instance_receive)
         processor_receive.bpmn_process_instance.catch_bpmn_message(message_model_name, message_payload)
         processor_receive.do_engine_steps(save=True)

View File

@@ -484,11 +484,11 @@ class WorkflowExecutionService:
             )
             for correlation_property in event["value"]:
                 message_correlation = MessageInstanceCorrelationRuleModel(
-                    message_instance_id=message_instance.id,
+                    message_instance=message_instance,
                     name=correlation_property.name,
                     retrieval_expression=correlation_property.retrieval_expression,
                 )
-                message_instance.correlation_rules.append(message_correlation)
+                db.session.add(message_correlation)
             db.session.add(message_instance)
             bpmn_process = self.process_instance_model.bpmn_process
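
The constructor now receives the related object rather than its id; because back_populates is declared on both sides, that single assignment also appends the new rule to message_instance.correlation_rules. A minimal standalone sketch of that two-way sync (generic names, not the project's models):

# Minimal standalone sketch (generic names, not the project's models) of the
# two-way sync that back_populates provides: assigning the parent on the child
# also appends the child to the parent's collection, with no explicit append.
from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class Parent(Base):
    __tablename__ = "parent"
    id = Column(Integer, primary_key=True)
    children = relationship("Child", back_populates="parent")


class Child(Base):
    __tablename__ = "child"
    id = Column(Integer, primary_key=True)
    parent_id = Column(Integer, ForeignKey("parent.id"))
    parent = relationship("Parent", back_populates="children")


parent = Parent()
child = Child(parent=parent)     # no parent.children.append(child) needed
assert child in parent.children  # back_populates synced the collection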

View File

@@ -34,8 +34,8 @@ class TestMessageService(BaseTest):
             "amount": "100.00",
         }
-        # Load up the definition for the receiving process (it has a message start event that should cause it to
-        # fire when a unique message comes through.
+        # Load up the definition for the receiving process
+        # It has a message start event that should cause it to fire when a unique message comes through
         # Fire up the first process
         load_test_spec(
             "test_group/message_receive",