Squashed 'spiffworkflow-backend/' changes from 52fad891c..c1ecc9035

c1ecc9035 Merge commit '9275b67b0dddac9628342741278c89f6f2bd3620'
82388b66e pyl is passing w/ burnettk cullerton
3f6df855b Merge branch 'main' into update-workflow-json
3ff8820f0 Insert every spiff log, see if it helps the deadlock issue on demo (#51)
8aa126e75 precommit
8580205f7 Merge branch 'update-workflow-json' of https://github.com/sartography/spiff-arena into update-workflow-json
44e1df0ec mypy
d3bd96a16 ran some pyl w/ burnettk cullerton
bd0cb3c9f Tweak temp report metadata
4752ac46c Get ./bin/pyl to pass
fb5e0a411 Accept report_identifier
4736fe2cd Don't save `id` in json files Added helper method `write_json_file`
3e655885b No need for script.
d6f8fddb5 WIP
23436331a display groups as tiles on list page w/ burnettk
35448b632 Merge branch 'main' into update-workflow-json
36369bfd6 rename workflow.json -> process_model.json
d694fe14a Don't save `id` in the process_model json file This allows us to move models around on the file system Add `id` back in when instantiating ProcessModelInfo from json file data
4f898e1a6 simplified
d8898d51e Cleanup - renaming frenzy, use os.path.abspath
eba3c096d Rename CAT_JSON_FILE, WF_JSON_FILE, add_spec, update_spec

git-subtree-dir: spiffworkflow-backend
git-subtree-split: c1ecc903521c045d45626a24950504998f0585a8
burnettk 2022-11-20 19:57:16 -05:00
parent 9275b67b0d
commit c9766981fb
19 changed files with 184 additions and 106 deletions

View File

@@ -1,22 +0,0 @@
-"""Updates all JSON files, based on the current state of BPMN_SPEC_ABSOLUTE_DIR."""
-from spiffworkflow_backend import get_hacked_up_app_for_script
-from spiffworkflow_backend.services.process_model_service import ProcessModelService
-
-
-def main() -> None:
-    """Main."""
-    app = get_hacked_up_app_for_script()
-    with app.app_context():
-        groups = ProcessModelService().get_process_groups()
-        for group in groups:
-            for process_model in group.process_models:
-                update_items = {
-                    "process_group_id": "",
-                    "id": f"{group.id}/{process_model.id}",
-                }
-                ProcessModelService().update_spec(process_model, update_items)
-
-
-if __name__ == "__main__":
-    main()

View File

@@ -1,3 +1,5 @@
+from __future__ import with_statement
+
 import logging
 from logging.config import fileConfig

View File

@@ -1,8 +1,8 @@
 """empty message
-Revision ID: b7790c9c8174
+Revision ID: 70223f5c7b98
 Revises:
-Create Date: 2022-11-15 14:11:47.309399
+Create Date: 2022-11-20 19:54:45.061376
 """
 from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
 # revision identifiers, used by Alembic.
-revision = 'b7790c9c8174'
+revision = '70223f5c7b98'
 down_revision = None
 branch_labels = None
 depends_on = None

View File

@@ -375,7 +375,8 @@ paths:
   /processes:
     get:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.process_list
-      summary: Return a list of all processes (not just primary process of a process model)
+      summary:
+        Return a list of all processes (not just primary process of a process model)
         useful for finding processes for call activites.
       tags:
         - Process Models
@@ -445,7 +446,12 @@
         description: For filtering - indicates the user has manually entered a query
         schema:
           type: boolean
-      # process_instance_list
+      - name: report_identifier
+        in: query
+        required: false
+        description: Specifies the identifier of a report to use, if any
+        schema:
+          type: string
     get:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list
      summary: Returns a list of process instances for a given process model
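A client opts into one of the named system reports by passing report_identifier as a query string value. The sketch below is illustrative only; the host, port, mount path, auth header, and response handling are assumptions, not values from this commit. Only the parameter name and the report identifier come from the changes in this diff.

```python
# Hypothetical request against the process-instance list endpoint.
import requests

response = requests.get(
    "http://localhost:7000/v1.0/process-instances",  # assumed base URL and path
    params={"report_identifier": "system_report_instances_initiated_by_me"},
    headers={"Authorization": "Bearer <access_token>"},  # placeholder token
)
response.raise_for_status()
print(response.json())  # columns and filters come from the selected report
```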

View File

@@ -55,13 +55,24 @@ class ProcessGroupSchema(Schema):
         """Meta."""
         model = ProcessGroup
-        fields = ["id", "display_name", "display_order", "admin", "process_models"]
+        fields = [
+            "id",
+            "display_name",
+            "display_order",
+            "admin",
+            "process_models",
+            "description",
+            "process_groups",
+        ]
     process_models = marshmallow.fields.List(
         marshmallow.fields.Nested(
             "ProcessModelInfoSchema", dump_only=True, required=False
         )
     )
+    process_groups = marshmallow.fields.List(
+        marshmallow.fields.Nested("ProcessGroupSchema", dump_only=True, required=False)
+    )
     @post_load
     def make_process_group(
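The schema change above makes ProcessGroupSchema nest itself, so a group can serialize its sub-groups (used by the new tiled group list page). A minimal, self-contained sketch of the same marshmallow pattern; GroupSchema and the sample data below are stand-ins, not the real backend classes:

```python
from marshmallow import Schema, fields

class GroupSchema(Schema):
    id = fields.String()
    display_name = fields.String()
    # a list of nested schemas referring back to this schema by name
    process_groups = fields.List(
        fields.Nested("GroupSchema", dump_only=True, required=False)
    )

child = {"id": "finance/expenses", "display_name": "Expenses", "process_groups": []}
parent = {"id": "finance", "display_name": "Finance", "process_groups": [child]}
# dumps the parent with its sub-groups serialized recursively
print(GroupSchema().dump(parent))
```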

View File

@@ -79,6 +79,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
             identifier=identifier, created_by_id=user.id
         ).first()
+        # TODO replace with system report that is loaded on launch (or similar)
         if process_instance_report is None:
             report_metadata = {
                 "columns": [

View File

@@ -1,6 +1,5 @@
 """APIs for dealing with process groups, process models, and process instances."""
 import json
-import os
 import random
 import string
 import uuid
@@ -253,16 +252,11 @@ def process_model_create(
             status_code=400,
         )
-    modified_process_model_id = process_model_info.id
-    unmodified_process_model_id = un_modify_modified_process_model_id(
-        modified_process_model_id
+    unmodified_process_group_id = un_modify_modified_process_model_id(
+        modified_process_group_id
     )
-    process_model_info.id = unmodified_process_model_id
-    process_group_id, _ = os.path.split(process_model_info.id)
     process_model_service = ProcessModelService()
-    process_group = process_model_service.get_process_group(
-        un_modify_modified_process_model_id(process_group_id)
-    )
+    process_group = process_model_service.get_process_group(unmodified_process_group_id)
     if process_group is None:
         raise ApiError(
             error_code="process_model_could_not_be_created",
@@ -270,7 +264,7 @@
             status_code=400,
         )
-    process_model_service.add_spec(process_model_info)
+    process_model_service.add_process_model(process_model_info)
     return Response(
         json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
         status=201,
@@ -307,7 +301,7 @@ def process_model_update(
     # process_model_identifier = f"{process_group_id}/{process_model_id}"
     process_model = get_process_model(process_model_identifier)
-    ProcessModelService().update_spec(process_model, body_filtered)
+    ProcessModelService().update_process_model(process_model, body_filtered)
     return ProcessModelInfoSchema().dump(process_model)
@@ -736,9 +730,12 @@ def process_instance_list(
     end_to: Optional[int] = None,
     process_status: Optional[str] = None,
     user_filter: Optional[bool] = False,
+    report_identifier: Optional[str] = None,
 ) -> flask.wrappers.Response:
     """Process_instance_list."""
-    process_instance_report = ProcessInstanceReportModel.default_report(g.user)
+    process_instance_report = ProcessInstanceReportService.report_with_identifier(
+        g.user, report_identifier
+    )
     if user_filter:
         report_filter = ProcessInstanceReportFilter(
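The process_model_create change above works with "modified" identifiers, the URL-safe form in which "/" is replaced by ":" (the same convention appears in the BaseTest change later in this commit). A tiny sketch of that convention, assuming un_modify_modified_process_model_id simply reverses the replacement; both helpers here are illustrative stand-ins, not the real route helpers:

```python
def modify_identifier_for_path_param(identifier: str) -> str:
    # "finance/expense-report" -> "finance:expense-report"
    return identifier.replace("/", ":")

def un_modify_modified_process_model_id(modified_identifier: str) -> str:
    # "finance:expense-report" -> "finance/expense-report"
    return modified_identifier.replace(":", "/")

assert un_modify_modified_process_model_id(
    modify_identifier_for_path_param("finance/expense-report")
) == "finance/expense-report"
```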

View File

@@ -20,8 +20,8 @@ class FileSystemService:
     """ Simple Service meant for extension that provides some useful
     methods for dealing with the File system.
     """
-    CAT_JSON_FILE = "process_group.json"
-    WF_JSON_FILE = "workflow.json"
+    PROCESS_GROUP_JSON_FILE = "process_group.json"
+    PROCESS_MODEL_JSON_FILE = "process_model.json"
     @staticmethod
     def root_path() -> str:
@@ -135,7 +135,7 @@ class FileSystemService:
             if item.is_file():
                 if item.name.startswith("."):
                     continue  # Ignore hidden files
-                if item.name == FileSystemService.WF_JSON_FILE:
+                if item.name == FileSystemService.PROCESS_MODEL_JSON_FILE:
                     continue  # Ignore the json files.
                 if file_name is not None and item.name != file_name:
                     continue

View File

@@ -236,5 +236,5 @@ class DBHandler(logging.Handler):
                     "spiff_step": spiff_step,
                 }
             )
-            if len(self.logs) % 1000 == 0:
+            if len(self.logs) % 1 == 0:
                 self.bulk_insert_logs()
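The change above drops the flush threshold from 1000 to 1, so every record is inserted as soon as it is emitted (per the commit "Insert every spiff log, see if it helps the deadlock issue on demo (#51)"). Below is a standalone sketch of the batching pattern with the threshold made configurable; it is illustrative, not the project's actual DBHandler:

```python
import logging

class BatchingDBHandler(logging.Handler):
    """Buffers log records and flushes them in batches.

    With flush_threshold=1, batching is effectively disabled and every
    record is written immediately, as in the diff above.
    """

    def __init__(self, flush_threshold: int = 1000) -> None:
        super().__init__()
        self.flush_threshold = flush_threshold
        self.logs: list[dict] = []

    def bulk_insert_logs(self) -> None:
        # placeholder for a single bulk INSERT of self.logs into the database
        self.logs = []

    def emit(self, record: logging.LogRecord) -> None:
        self.logs.append({"message": record.getMessage(), "level": record.levelname})
        if len(self.logs) >= self.flush_threshold:
            self.bulk_insert_logs()
```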

View File

@@ -5,6 +5,7 @@ from typing import Optional
 from spiffworkflow_backend.models.process_instance_report import (
     ProcessInstanceReportModel,
 )
+from spiffworkflow_backend.models.user import UserModel
 @dataclass
@@ -41,6 +42,62 @@ class ProcessInstanceReportFilter:
 class ProcessInstanceReportService:
     """ProcessInstanceReportService."""
+    @classmethod
+    def report_with_identifier(
+        cls, user: UserModel, report_identifier: Optional[str] = None
+    ) -> ProcessInstanceReportModel:
+        """Report_with_filter."""
+        if report_identifier is None:
+            return ProcessInstanceReportModel.default_report(user)
+        # TODO replace with system reports that are loaded on launch (or similar)
+        temp_system_metadata_map = {
+            "system_report_instances_initiated_by_me": {
+                "columns": [
+                    {
+                        "Header": "process_model_identifier",
+                        "accessor": "process_model_identifier",
+                    },
+                    {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
+                    {"Header": "id", "accessor": "id"},
+                    {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
+                    {"Header": "status", "accessor": "status"},
+                ],
+            },
+            "system_report_instances_with_tasks_completed_by_me": {
+                "columns": [
+                    {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
+                    {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
+                    {"Header": "status", "accessor": "status"},
+                    {"Header": "id", "accessor": "id"},
+                    {
+                        "Header": "process_model_identifier",
+                        "accessor": "process_model_identifier",
+                    },
+                ],
+            },
+            "system_report_instances_with_tasks_completed_by_my_groups": {
+                "columns": [
+                    {
+                        "Header": "process_model_identifier",
+                        "accessor": "process_model_identifier",
+                    },
+                    {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
+                    {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
+                    {"Header": "status", "accessor": "status"},
+                    {"Header": "id", "accessor": "id"},
+                ],
+            },
+        }
+        process_instance_report = ProcessInstanceReportModel(
+            identifier=report_identifier,
+            created_by_id=user.id,
+            report_metadata=temp_system_metadata_map[report_identifier],
+        )
+        return process_instance_report
     @classmethod
     def filter_by_to_dict(
         cls, process_instance_report: ProcessInstanceReportModel
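The new classmethod resolves a named system report from the in-code map above, falls back to the user's default report when no identifier is given, and raises a KeyError for an unknown identifier. A self-contained sketch of that lookup pattern under stand-in names (ReportStub and SYSTEM_REPORTS are illustrative, not the real model or map):

```python
from dataclasses import dataclass, field
from typing import Optional

@dataclass
class ReportStub:
    identifier: str
    created_by_id: int
    report_metadata: dict = field(default_factory=dict)

SYSTEM_REPORTS = {
    "system_report_instances_initiated_by_me": {
        "columns": [{"Header": "id", "accessor": "id"}],
    },
}

def report_with_identifier(user_id: int, report_identifier: Optional[str] = None) -> ReportStub:
    if report_identifier is None:
        # the real code returns ProcessInstanceReportModel.default_report(user)
        return ReportStub(identifier="default", created_by_id=user_id)
    # an unknown identifier raises KeyError, mirroring the temporary map above
    return ReportStub(
        identifier=report_identifier,
        created_by_id=user_id,
        report_metadata=SYSTEM_REPORTS[report_identifier],
    )

print(report_with_identifier(1, "system_report_instances_initiated_by_me"))
```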

View File

@@ -320,7 +320,7 @@ class ProcessInstanceService:
     def serialize_flat_with_task_data(
         process_instance: ProcessInstanceModel,
     ) -> dict[str, Any]:
-        """serialize_flat_with_task_data."""
+        """Serialize_flat_with_task_data."""
         results = {}
         try:
             original_status = process_instance.status

View File

@@ -32,22 +32,30 @@ class ProcessModelService(FileSystemService):
     the workflow process_models at once, or manage those file in a git repository. """
     GROUP_SCHEMA = ProcessGroupSchema()
-    WF_SCHEMA = ProcessModelInfoSchema()
+    PROCESS_MODEL_SCHEMA = ProcessModelInfoSchema()
     def is_group(self, path: str) -> bool:
         """Is_group."""
-        group_json_path = os.path.join(path, self.CAT_JSON_FILE)
+        group_json_path = os.path.join(path, self.PROCESS_GROUP_JSON_FILE)
         if os.path.exists(group_json_path):
             return True
         return False
     def is_model(self, path: str) -> bool:
         """Is_model."""
-        model_json_path = os.path.join(path, self.WF_JSON_FILE)
+        model_json_path = os.path.join(path, self.PROCESS_MODEL_JSON_FILE)
         if os.path.exists(model_json_path):
             return True
         return False
+    @staticmethod
+    def write_json_file(
+        file_path: str, json_data: dict, indent: int = 4, sort_keys: bool = True
+    ) -> None:
+        """Write json file."""
+        with open(file_path, "w") as h_open:
+            json.dump(json_data, h_open, indent=indent, sort_keys=sort_keys)
     @staticmethod
     def get_batch(
         items: list[T],
@@ -59,13 +67,13 @@ class ProcessModelService(FileSystemService):
         end = start + per_page
         return items[start:end]
-    def add_spec(self, process_model: ProcessModelInfo) -> None:
+    def add_process_model(self, process_model: ProcessModelInfo) -> None:
         """Add_spec."""
         display_order = self.next_display_order(process_model)
         process_model.display_order = display_order
         self.save_process_model(process_model)
-    def update_spec(
+    def update_process_model(
         self, process_model: ProcessModelInfo, attributes_to_update: dict
     ) -> None:
         """Update_spec."""
@@ -76,13 +84,21 @@ class ProcessModelService(FileSystemService):
     def save_process_model(self, process_model: ProcessModelInfo) -> None:
         """Save_process_model."""
-        spec_path = os.path.join(FileSystemService.root_path(), process_model.id)
-        os.makedirs(spec_path, exist_ok=True)
-        json_path = os.path.join(spec_path, self.WF_JSON_FILE)
-        with open(json_path, "w") as wf_json:
-            json.dump(
-                self.WF_SCHEMA.dump(process_model), wf_json, indent=4, sort_keys=True
-            )
+        process_model_path = os.path.abspath(
+            os.path.join(FileSystemService.root_path(), process_model.id)
+        )
+        os.makedirs(process_model_path, exist_ok=True)
+        json_path = os.path.abspath(
+            os.path.join(process_model_path, self.PROCESS_MODEL_JSON_FILE)
+        )
+        process_model_id = process_model.id
+        # we don't save id in the json file
+        # this allows us to move models around on the filesystem
+        # the id is determined by its location on the filesystem
+        delattr(process_model, "id")
+        json_data = self.PROCESS_MODEL_SCHEMA.dump(process_model)
+        self.write_json_file(json_path, json_data)
+        process_model.id = process_model_id
     def process_model_delete(self, process_model_id: str) -> None:
         """Delete Procecss Model."""
@@ -107,7 +123,7 @@
         process_group_identifier, _ = os.path.split(relative_path)
         process_group = cls().get_process_group(process_group_identifier)
         path = os.path.join(FileSystemService.root_path(), relative_path)
-        return cls().__scan_spec(path, process_group=process_group)
+        return cls().__scan_process_model(path, process_group=process_group)
     def get_process_model(self, process_model_id: str) -> ProcessModelInfo:
         """Get a process model from a model and group id.
@@ -117,7 +133,9 @@
         if not os.path.exists(FileSystemService.root_path()):
             raise ProcessEntityNotFoundError("process_model_root_not_found")
-        model_path = os.path.join(FileSystemService.root_path(), process_model_id)
+        model_path = os.path.abspath(
+            os.path.join(FileSystemService.root_path(), process_model_id)
+        )
         if self.is_model(model_path):
             process_model = self.get_process_model_from_relative_path(process_model_id)
             return process_model
@@ -140,7 +158,7 @@
         # process_group = self.__scan_process_group(
         #     process_group_dir
         # )
-        # return self.__scan_spec(sd.path, sd.name, process_group)
+        # return self.__scan_process_model(sd.path, sd.name, process_group)
         raise ProcessEntityNotFoundError("process_model_not_found")
     def get_process_models(
@@ -172,8 +190,8 @@
     def get_process_group(self, process_group_id: str) -> ProcessGroup:
         """Look for a given process_group, and return it."""
         if os.path.exists(FileSystemService.root_path()):
-            process_group_path = os.path.join(
-                FileSystemService.root_path(), process_group_id
+            process_group_path = os.path.abspath(
+                os.path.join(FileSystemService.root_path(), process_group_id)
             )
             if self.is_group(process_group_path):
                 return self.__scan_process_group(process_group_path)
@@ -205,14 +223,12 @@
         """Update_process_group."""
         cat_path = self.process_group_path(process_group.id)
         os.makedirs(cat_path, exist_ok=True)
-        json_path = os.path.join(cat_path, self.CAT_JSON_FILE)
-        with open(json_path, "w") as cat_json:
-            json.dump(
-                process_group.serialized,
-                cat_json,
-                indent=4,
-                sort_keys=True,
-            )
+        json_path = os.path.join(cat_path, self.PROCESS_GROUP_JSON_FILE)
+        serialized_process_group = process_group.serialized
+        # we don't store `id` in the json files
+        # this allows us to move groups around on the filesystem
+        del serialized_process_group["id"]
+        self.write_json_file(json_path, serialized_process_group)
         return process_group
     def __get_all_nested_models(self, group_path: str) -> list:
@@ -279,10 +295,13 @@
     def __scan_process_group(self, dir_path: str) -> ProcessGroup:
         """Reads the process_group.json file, and any nested directories."""
-        cat_path = os.path.join(dir_path, self.CAT_JSON_FILE)
+        cat_path = os.path.join(dir_path, self.PROCESS_GROUP_JSON_FILE)
         if os.path.exists(cat_path):
             with open(cat_path) as cat_json:
                 data = json.load(cat_json)
+                # we don't store `id` in the json files, so we add it back in here
+                relative_path = os.path.relpath(dir_path, FileSystemService.root_path())
+                data["id"] = relative_path
                 process_group = ProcessGroup(**data)
                 if process_group is None:
                     raise ApiError(
@@ -292,13 +311,14 @@
         else:
             process_group_id = dir_path.replace(FileSystemService.root_path(), "")
             process_group = ProcessGroup(
-                id=process_group_id,
+                id="",
                 display_name=process_group_id,
                 display_order=10000,
                 admin=False,
             )
-            with open(cat_path, "w") as wf_json:
-                json.dump(self.GROUP_SCHEMA.dump(process_group), wf_json, indent=4)
+            self.write_json_file(cat_path, self.GROUP_SCHEMA.dump(process_group))
+            # we don't store `id` in the json files, so we add it in here
+            process_group.id = process_group_id
         with os.scandir(dir_path) as nested_items:
             process_group.process_models = []
             process_group.process_groups = []
@@ -312,7 +332,7 @@
                         )
                     elif self.is_model(nested_item.path):
                         process_group.process_models.append(
-                            self.__scan_spec(
+                            self.__scan_process_model(
                                 nested_item.path,
                                 nested_item.name,
                                 process_group=process_group,
@@ -322,22 +342,25 @@
             # process_group.process_groups.sort()
         return process_group
-    def __scan_spec(
+    def __scan_process_model(
         self,
         path: str,
         name: Optional[str] = None,
         process_group: Optional[ProcessGroup] = None,
     ) -> ProcessModelInfo:
-        """__scan_spec."""
-        spec_path = os.path.join(path, self.WF_JSON_FILE)
-        if os.path.exists(spec_path):
-            with open(spec_path) as wf_json:
+        """__scan_process_model."""
+        json_file_path = os.path.join(path, self.PROCESS_MODEL_JSON_FILE)
+        if os.path.exists(json_file_path):
+            with open(json_file_path) as wf_json:
                 data = json.load(wf_json)
                 if "process_group_id" in data:
                     data.pop("process_group_id")
-                spec = ProcessModelInfo(**data)
-                if spec is None:
+                # we don't save `id` in the json file, so we add it back in here.
+                relative_path = os.path.relpath(path, FileSystemService.root_path())
+                data["id"] = relative_path
+                process_model_info = ProcessModelInfo(**data)
+                if process_model_info is None:
                     raise ApiError(
                         error_code="process_model_could_not_be_loaded_from_disk",
                         message=f"We could not load the process_model from disk with data: {data}",
@@ -349,15 +372,18 @@
                     message="Missing name of process model. It should be given",
                 )
-            spec = ProcessModelInfo(
-                id=name,
+            process_model_info = ProcessModelInfo(
+                id="",
                 display_name=name,
                 description="",
                 display_order=0,
                 is_review=False,
             )
-            with open(spec_path, "w") as wf_json:
-                json.dump(self.WF_SCHEMA.dump(spec), wf_json, indent=4)
+            self.write_json_file(
+                json_file_path, self.PROCESS_MODEL_SCHEMA.dump(process_model_info)
+            )
+            # we don't store `id` in the json files, so we add it in here
+            process_model_info.id = name
         if process_group:
-            spec.process_group = process_group.id
-        return spec
+            process_model_info.process_group = process_group.id
+        return process_model_info
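Taken together, the save_process_model, update_process_group, and __scan_* changes implement the pattern described in the commit messages: id is never persisted in process_model.json or process_group.json, and it is re-derived from the file's location under BPMN_SPEC_ABSOLUTE_DIR when the data is read back, so models and groups can be moved on disk freely. A self-contained sketch of that round trip; only write_json_file and the process_model.json filename mirror the diff, while ModelStub, save_model, and load_model are illustrative stand-ins:

```python
import json
import os
from dataclasses import asdict, dataclass

@dataclass
class ModelStub:
    """Stand-in for ProcessModelInfo with just the fields this sketch needs."""
    id: str
    display_name: str

def write_json_file(
    file_path: str, json_data: dict, indent: int = 4, sort_keys: bool = True
) -> None:
    with open(file_path, "w") as file_handle:
        json.dump(json_data, file_handle, indent=indent, sort_keys=sort_keys)

def save_model(root: str, model: ModelStub) -> None:
    model_path = os.path.abspath(os.path.join(root, model.id))
    os.makedirs(model_path, exist_ok=True)
    data = asdict(model)
    # the id is determined by the model's location on disk, so don't persist it
    del data["id"]
    write_json_file(os.path.join(model_path, "process_model.json"), data)

def load_model(root: str, relative_path: str) -> ModelStub:
    with open(os.path.join(root, relative_path, "process_model.json")) as file_handle:
        data = json.load(file_handle)
    # re-derive the id from where the file lives, as __scan_process_model does
    data["id"] = relative_path
    return ModelStub(**data)

save_model("/tmp/bpmn-specs", ModelStub(id="finance/expense-report", display_name="Expense Report"))
print(load_model("/tmp/bpmn-specs", "finance/expense-report"))
```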

View File

@@ -171,7 +171,7 @@ class SpecFileService(FileSystemService):
             ref.is_primary = True
         if ref.is_primary:
-            ProcessModelService().update_spec(
+            ProcessModelService().update_process_model(
                 process_model_info,
                 {
                     "primary_process_id": ref.identifier,
@@ -197,7 +199,9 @@
     @staticmethod
     def full_file_path(spec: ProcessModelInfo, file_name: str) -> str:
         """File_path."""
-        return os.path.join(SpecFileService.workflow_path(spec), file_name)
+        return os.path.abspath(
+            os.path.join(SpecFileService.workflow_path(spec), file_name)
+        )
     @staticmethod
     def last_modified(spec: ProcessModelInfo, file_name: str) -> datetime:

View File

@@ -2,9 +2,7 @@
   "title": "Simple form",
   "description": "A simple form example.",
   "type": "object",
-  "required": [
-    "name"
-  ],
+  "required": ["name"],
   "properties": {
     "name": {
       "type": "string",
@@ -14,11 +12,7 @@
     "department": {
       "type": "string",
       "title": "Department",
-      "enum": [
-        "Finance",
-        "HR",
-        "IT"
-      ]
+      "enum": ["Finance", "HR", "IT"]
     }
   }
 }

View File

@@ -137,7 +137,9 @@ class BaseTest:
         # make sure we have a group
         process_group_id, _ = os.path.split(process_model_id)
         modified_process_group_id = process_group_id.replace("/", ":")
-        process_group_path = f"{FileSystemService.root_path()}/{process_group_id}"
+        process_group_path = os.path.abspath(
+            os.path.join(FileSystemService.root_path(), process_group_id)
+        )
         if ProcessModelService().is_group(process_group_path):
             if exception_notification_addresses is None:

View File

@@ -39,7 +39,7 @@ class ExampleDataLoader:
             is_review=False,
         )
         workflow_spec_service = ProcessModelService()
-        workflow_spec_service.add_spec(spec)
+        workflow_spec_service.add_process_model(spec)
         bpmn_file_name_with_extension = bpmn_file_name
         if not bpmn_file_name_with_extension:

View File

@@ -1830,7 +1830,7 @@ class TestProcessApi(BaseTest):
         process_model = ProcessModelService().get_process_model(
             process_model_identifier
         )
-        ProcessModelService().update_spec(
+        ProcessModelService().update_process_model(
            process_model,
            {"fault_or_suspend_on_exception": NotificationType.suspend.value},
        )
@@ -1885,7 +1885,7 @@
         process_model = ProcessModelService().get_process_model(
             process_model_identifier
         )
-        ProcessModelService().update_spec(
+        ProcessModelService().update_process_model(
             process_model,
             {"exception_notification_addresses": ["with_super_admin_user@example.com"]},
         )

View File

@@ -32,7 +32,9 @@ class TestProcessModelService(BaseTest):
         primary_process_id = process_model.primary_process_id
         assert primary_process_id == "Process_HelloWorld"
-        ProcessModelService().update_spec(process_model, {"display_name": "new_name"})
+        ProcessModelService().update_process_model(
+            process_model, {"display_name": "new_name"}
+        )
         assert process_model.display_name == "new_name"
         assert process_model.primary_process_id == primary_process_id