run_pyl
parent 1db065fb65
commit 1d37eed860

@@ -18,13 +18,13 @@ def setup_database_uri(app: Flask) -> None:
     if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
         database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
         if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "sqlite":
-            app.config["SQLALCHEMY_DATABASE_URI"] = (
-                f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
-            )
+            app.config[
+                "SQLALCHEMY_DATABASE_URI"
+            ] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
         elif app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "postgres":
-            app.config["SQLALCHEMY_DATABASE_URI"] = (
-                f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
-            )
+            app.config[
+                "SQLALCHEMY_DATABASE_URI"
+            ] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
         else:
             # use pswd to trick flake8 with hardcoded passwords
             db_pswd = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD")

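Both sides of this hunk compute the same value; only the wrapping of the long assignment changes. As a condensed, standalone sketch of the logic being reformatted (the helper name and the simplified fallback branch are illustrative, not the project's actual code):

def database_uri(database_type: str, instance_path: str, env: str) -> str:
    database_name = f"spiffworkflow_backend_{env}"
    if database_type == "sqlite":
        # sqlite keeps the database in a file under the Flask instance path
        return f"sqlite:///{instance_path}/db_{env}.sqlite3"
    if database_type == "postgres":
        # credentials mirror the defaults hard-coded in the hunk above
        return f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
    # the real code instead reads SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD
    # here and builds a URI for the remaining database types
    raise ValueError(f"unhandled database type: {database_type}")

print(database_uri("sqlite", "/tmp/instance", "local_development"))
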
@@ -3,7 +3,7 @@ from __future__ import annotations
 from dataclasses import dataclass

 from sqlalchemy import ForeignKey
-from sqlalchemy.orm import relationship, backref
+from sqlalchemy.orm import relationship

 from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
 from spiffworkflow_backend.models.db import db

@@ -38,8 +38,7 @@ class BpmnProcessModel(SpiffworkflowBaseDBModel):
     json_data_hash: str = db.Column(db.String(255), nullable=False, index=True)

     tasks = relationship("TaskModel", back_populates="bpmn_process", cascade="delete")  # type: ignore
-    child_processes = relationship("BpmnProcessModel", foreign_keys=[direct_parent_process_id],
-                                   cascade="all")  # type: ignore
+    child_processes = relationship("BpmnProcessModel", foreign_keys=[direct_parent_process_id], cascade="all")

     # FIXME: find out how to set this but it'd be cool
     start_in_seconds: float = db.Column(db.DECIMAL(17, 6))

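The collapsed child_processes line above is a self-referential SQLAlchemy relationship: a bpmn_process row points at its parent row in the same table, so foreign_keys= is needed to disambiguate the join. A minimal, runnable sketch of the pattern (hypothetical Process model, plain SQLAlchemy in place of the project's Flask-SQLAlchemy db object):

from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()


class Process(Base):
    __tablename__ = "process"
    id = Column(Integer, primary_key=True)
    # a foreign key onto the same table makes the relationship self-referential
    direct_parent_process_id = Column(Integer, ForeignKey("process.id"))
    # foreign_keys= picks the join column; cascade="all" mirrors the diff
    child_processes = relationship(
        "Process", foreign_keys=[direct_parent_process_id], cascade="all"
    )


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    parent = Process()
    parent.child_processes.append(Process())
    session.add(parent)
    session.commit()
    assert session.query(Process).count() == 2
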
@@ -129,9 +129,9 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     def serialized_with_metadata(self) -> dict[str, Any]:
         process_instance_attributes = self.serialized
         process_instance_attributes["process_metadata"] = self.process_metadata
-        process_instance_attributes["process_model_with_diagram_identifier"] = (
-            self.process_model_with_diagram_identifier
-        )
+        process_instance_attributes[
+            "process_model_with_diagram_identifier"
+        ] = self.process_model_with_diagram_identifier
         return process_instance_attributes

     @property

@@ -403,7 +403,6 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
         return_hash[return_type] = entity
         return f"data: {current_app.json.dumps(return_hash)} \n\n"

-
     processor = ProcessInstanceProcessor(process_instance)
     reported_ids = []  # A list of all the ids reported by this endpoint so far.
     tasks = get_reportable_tasks()

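The string returned here is one frame of the server-sent events (SSE) wire format that this streaming endpoint emits: a data: line carrying a JSON payload, terminated by a blank line. A minimal sketch of that framing (hypothetical payloads, stdlib json standing in for Flask's current_app.json):

import json
from typing import Any, Generator


def sse_frame(payload: dict[str, Any]) -> str:
    # one SSE event: "data: <json>" plus the blank line that ends the event
    return f"data: {json.dumps(payload)} \n\n"


def stream() -> Generator[str, None, None]:
    # a client reading the response sees one JSON document per event
    for task_id in ["task_1", "task_2"]:
        yield sse_frame({"type": "task", "id": task_id})


for frame in stream():
    print(repr(frame))
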
@@ -24,7 +24,8 @@ from uuid import UUID

 import dateparser
 import pytz
-from flask import current_app, g
+from flask import current_app
+from flask import g
 from lxml import etree  # type: ignore
 from lxml.etree import XMLSyntaxError  # type: ignore
 from RestrictedPython import safe_globals  # type: ignore

@@ -40,7 +41,6 @@ from SpiffWorkflow.bpmn.serializer.task_spec import (  # type: ignore
 )
 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
 from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec  # type: ignore
-from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask  # type: ignore
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
 from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore

@@ -1111,7 +1111,7 @@ class ProcessInstanceProcessor:

     def manual_complete_task(self, task_id: str, execute: bool) -> None:
         """Mark the task complete optionally executing it."""
-        start_in_seconds = time.time()
+        time.time()
         spiff_task = self.bpmn_process_instance.get_task_from_id(UUID(task_id))
         event_type = ProcessInstanceEventType.task_skipped.value
         start_time = time.time()

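The replacement line calls time.time() without binding the result, which silences flake8's F841 (local variable assigned but never used) but leaves a no-op call behind; deleting the line outright would have the same effect. For contrast, the pattern the surrounding start_time variable follows (illustrative stand-in function):

import time


def do_work() -> None:
    # hypothetical stand-in for the task-completion logic
    time.sleep(0.01)


start_time = time.time()
do_work()
duration = time.time() - start_time
print(f"took {duration:.3f}s")
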
@@ -339,6 +339,7 @@ class OneAtATimeExecutionStrategy(ExecutionStrategy):
         self.delegate.did_complete_task(spiff_task)
         self.delegate.after_engine_steps(bpmn_process_instance)

+
 class SkipOneExecutionStrategy(ExecutionStrategy):
     """When you want to to skip over the next task, rather than execute it."""

@@ -351,6 +352,7 @@ class SkipOneExecutionStrategy(ExecutionStrategy):
         self.delegate.did_complete_task(spiff_task)
         self.delegate.after_engine_steps(bpmn_process_instance)

+
 def execution_strategy_named(
     name: str, delegate: EngineStepDelegate, spec_loader: SubprocessSpecLoader
 ) -> ExecutionStrategy:

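execution_strategy_named is a factory that maps a strategy name onto one of the ExecutionStrategy subclasses above. A plausible sketch of that dispatch, with stand-in classes and guessed registry keys (the real function also wires in the EngineStepDelegate and SubprocessSpecLoader arguments):

class ExecutionStrategy:
    """Base: subclasses decide how many engine steps to run per pass."""


class OneAtATimeExecutionStrategy(ExecutionStrategy):
    pass


class SkipOneExecutionStrategy(ExecutionStrategy):
    pass


def execution_strategy_named(name: str) -> ExecutionStrategy:
    # the keys here are guesses; the project defines its own registry
    strategies: dict[str, type[ExecutionStrategy]] = {
        "one_at_a_time": OneAtATimeExecutionStrategy,
        "skip_one": SkipOneExecutionStrategy,
    }
    return strategies[name]()


assert isinstance(execution_strategy_named("skip_one"), SkipOneExecutionStrategy)
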
@@ -504,18 +504,27 @@ class TestProcessInstanceProcessor(BaseTest):
         processor.do_engine_steps(save=True)
         assert len(process_instance.active_human_tasks) == 1
         human_task_one = process_instance.active_human_tasks[0]
-        spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
+        processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
         processor.manual_complete_task(str(human_task_one.task_id), execute=True)
         processor.save()
         processor = ProcessInstanceProcessor(process_instance)
-        assert processor.get_task_by_bpmn_identifier('step_1', processor.bpmn_process_instance).state == TaskState.COMPLETED
-        assert processor.get_task_by_bpmn_identifier('Gateway_Open', processor.bpmn_process_instance).state == TaskState.READY
+        assert (
+            processor.get_task_by_bpmn_identifier("step_1", processor.bpmn_process_instance).state  # type: ignore
+            == TaskState.COMPLETED
+        )
+        assert (
+            processor.get_task_by_bpmn_identifier("Gateway_Open", processor.bpmn_process_instance).state  # type: ignore
+            == TaskState.READY
+        )

         gateway_task = processor.bpmn_process_instance.get_tasks(TaskState.READY)[0]
         processor.manual_complete_task(str(gateway_task.id), execute=True)
         processor.save()
         processor = ProcessInstanceProcessor(process_instance)
-        assert processor.get_task_by_bpmn_identifier('Gateway_Open', processor.bpmn_process_instance).state == TaskState.COMPLETED
+        assert (
+            processor.get_task_by_bpmn_identifier("Gateway_Open", processor.bpmn_process_instance).state  # type: ignore
+            == TaskState.COMPLETED
+        )
         print(processor)

     def test_properly_saves_tasks_when_running(

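The reformatted asserts follow black's style for conditions that overflow the line limit: the whole expression is wrapped in parentheses so the comparison can break cleanly, and each # type: ignore stays attached to the sub-expression it suppresses (presumably needed because get_task_by_bpmn_identifier can return None). On a generic example (illustrative only):

task_state = 32  # stand-in for a TaskState value

assert (
    task_state  # a trailing comment can annotate just this operand
    == 32
)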