use latest spiffworkflow in main
danfunk 2023-05-10 15:42:07 -04:00
parent 775a9ee860
commit ea7b693b57
6 changed files with 19 additions and 28 deletions

View File

@@ -3120,8 +3120,8 @@ lxml = "*"
[package.source]
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "3a6abe1"
resolved_reference = "3a6abe157e2953e19d761477370016fa83f466cd"
reference = "main"
resolved_reference = "0a455cdd221137ec52d39801dd5ad6dabad4ed4f"
[[package]]
name = "sqlalchemy"
@@ -3734,4 +3734,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.12"
content-hash = "e06ab07948340458ffb4331534061ac2a34c6921159aff0bf77ea3dada8835c8"
content-hash = "bc4e0dabc679dd84e7803930d043a934276298715657bf85226ec9541eeebfb9"

View File

@@ -27,10 +27,9 @@ flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
-#SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
+SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
# SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"}
# SpiffWorkflow = {develop = true, path = "../../spiffworkflow/" }
-SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "309937362638ccc"}
sentry-sdk = "^1.10"
sphinx-autoapi = "^2.0"
flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}

View File

@@ -18,13 +18,13 @@ def setup_database_uri(app: Flask) -> None:
if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "sqlite":
app.config["SQLALCHEMY_DATABASE_URI"] = (
f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
)
app.config[
"SQLALCHEMY_DATABASE_URI"
] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
elif app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "postgres":
app.config["SQLALCHEMY_DATABASE_URI"] = (
f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
)
app.config[
"SQLALCHEMY_DATABASE_URI"
] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
else:
# use pswd to trick flake8 with hardcoded passwords
db_pswd = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD")

View File

@@ -129,9 +129,9 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
def serialized_with_metadata(self) -> dict[str, Any]:
process_instance_attributes = self.serialized
process_instance_attributes["process_metadata"] = self.process_metadata
process_instance_attributes["process_model_with_diagram_identifier"] = (
self.process_model_with_diagram_identifier
)
process_instance_attributes[
"process_model_with_diagram_identifier"
] = self.process_model_with_diagram_identifier
return process_instance_attributes
@property

View File

@@ -2,7 +2,6 @@
# TODO: clean up this service for a clear distinction between it and the process_instance_service
# where this points to the pi service
import _strptime # type: ignore
-import copy
import decimal
import json
import logging
@@ -52,8 +51,6 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ign
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
-from sqlalchemy import and_
-from sqlalchemy import or_
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
@@ -423,9 +420,9 @@ class ProcessInstanceProcessor:
tld.process_instance_id = process_instance_model.id
# we want this to be the fully qualified path to the process model including all group subcomponents
-current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
-f"{process_instance_model.process_model_identifier}"
-)
+current_app.config[
+"THREAD_LOCAL_DATA"
+].process_model_identifier = f"{process_instance_model.process_model_identifier}"
self.process_instance_model = process_instance_model
self.process_model_service = ProcessModelService()
@@ -585,9 +582,9 @@ class ProcessInstanceProcessor:
bpmn_subprocess_definition.bpmn_identifier
] = bpmn_process_definition_dict
spiff_bpmn_process_dict["subprocess_specs"][bpmn_subprocess_definition.bpmn_identifier]["task_specs"] = {}
-bpmn_subprocess_definition_bpmn_identifiers[bpmn_subprocess_definition.id] = (
-bpmn_subprocess_definition.bpmn_identifier
-)
+bpmn_subprocess_definition_bpmn_identifiers[
+bpmn_subprocess_definition.id
+] = bpmn_subprocess_definition.bpmn_identifier
task_definitions = TaskDefinitionModel.query.filter(
TaskDefinitionModel.bpmn_process_definition_id.in_( # type: ignore
@@ -1199,7 +1196,6 @@ class ProcessInstanceProcessor:
# Saving the workflow seems to reset the status
self.suspend()
@classmethod
def reset_process(cls, process_instance: ProcessInstanceModel, to_task_guid: str) -> None:
"""Reset a process to an earlier state."""
@@ -1224,8 +1220,6 @@
processor.save()
processor.suspend()
@staticmethod
def get_parser() -> MyCustomParser:
"""Get_parser."""

View File

@@ -1,6 +1,5 @@
import copy
import json
from spiffworkflow_backend.models.human_task import HumanTaskModel
import time
from hashlib import sha256
from typing import List
@@ -599,7 +598,6 @@ class TaskService:
task_model["start_in_seconds"] = None
task_model["end_in_seconds"] = None
@classmethod
def get_extensions_from_task_model(cls, task_model: TaskModel) -> dict:
task_definition = task_model.task_definition