do not save unwanted fields to process json files on disk w/ burnettk

jasquat 2023-06-01 15:10:01 -04:00
parent b756e594ab
commit 470795a6e6
5 changed files with 46 additions and 73 deletions

View File

@@ -253,7 +253,7 @@ paths:
schema:
type: array
items:
$ref: "#/components/schemas/ProcessModelCategory"
$ref: "#/components/schemas/ProcessGroup"
post:
operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_create
summary: Add process group
@@ -263,14 +263,14 @@ paths:
content:
application/json:
schema:
$ref: "#/components/schemas/ProcessModelCategory"
$ref: "#/components/schemas/ProcessGroup"
responses:
"201":
description: Process Group
content:
application/json:
schema:
$ref: "#/components/schemas/ProcessModelCategory"
$ref: "#/components/schemas/ProcessGroup"
/process-groups/{modified_process_group_id}:
parameters:
@@ -292,7 +292,7 @@ paths:
content:
application/json:
schema:
$ref: "#/components/schemas/ProcessModelCategory"
$ref: "#/components/schemas/ProcessGroup"
delete:
operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_delete
summary: Deletes a single process group
@@ -310,14 +310,14 @@ paths:
content:
application/json:
schema:
$ref: "#/components/schemas/ProcessModelCategory"
$ref: "#/components/schemas/ProcessGroup"
responses:
"200":
description: Process group updated successfully
content:
application/json:
schema:
$ref: "#/components/schemas/ProcessModelCategory"
$ref: "#/components/schemas/ProcessGroup"
/process-groups/{modified_process_group_identifier}/move:
parameters:
@@ -344,7 +344,7 @@ paths:
content:
application/json:
schema:
$ref: "#/components/schemas/ProcessModelCategory"
$ref: "#/components/schemas/ProcessGroup"
/process-models:
parameters:
@@ -684,7 +684,7 @@ paths:
content:
application/json:
schema:
$ref: "#/components/schemas/ProcessModelCategory"
$ref: "#/components/schemas/ProcessGroup"
tags:
- git
responses:
@@ -1870,7 +1870,7 @@ paths:
content:
application/json:
schema:
$ref: "#/components/schemas/ProcessModelCategory"
$ref: "#/components/schemas/ProcessGroup"
responses:
"200":
description: One task
@@ -2492,15 +2492,7 @@ components:
primary_process_id:
type: string
nullable: true
category_id:
type: string
nullable: true
workflow_spec_category:
$ref: "#/components/schemas/ProcessModelCategory"
is_status:
type: boolean
nullable: true
ProcessModelCategory:
ProcessGroup:
properties:
id:
type: string
@@ -2508,12 +2500,6 @@ components:
type: string
display_name:
type: string
display_order:
type: integer
workflows:
type: array
items:
$ref: "#/components/schemas/Workflow"
File:
properties:
id:

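Every $ref that previously pointed at the deprecated ProcessModelCategory schema now points at ProcessGroup, and the schema itself is pared down: category_id, workflow_spec_category, is_status, display_order, and workflows are gone. As a rough illustration of what a client now sends against the trimmed schema, here is a minimal sketch; the base URL, the /v1.0 prefix, and the auth header are assumptions for the example, not something this commit defines.

# Minimal sketch of creating a process group against the trimmed ProcessGroup schema.
# BASE_URL, the /v1.0 prefix, and the bearer token are illustrative assumptions.
import requests

BASE_URL = "http://localhost:7000/v1.0"  # assumed local spiffworkflow-backend

payload = {
    "id": "my_group",
    "display_name": "My Group",
    "description": "Groups a few related process models",
    # "display_order" and "workflows" no longer exist on the schema, so they are not sent.
}
response = requests.post(
    f"{BASE_URL}/process-groups",
    json=payload,
    headers={"Authorization": "Bearer <token>"},  # placeholder credentials
)
response.raise_for_status()
print(response.json())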
View File

@@ -13,6 +13,12 @@ from marshmallow import post_load
from spiffworkflow_backend.interfaces import ProcessGroupLite
from spiffworkflow_backend.models.process_model import ProcessModelInfo
# we only want to save these items to the json file
PROCESS_GROUP_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION = [
"display_name",
"description",
]
@dataclass(order=True)
class ProcessGroup:
@@ -21,12 +27,15 @@ class ProcessGroup:
id: str  # A unique string name, lower case, with underscores (i.e., 'my_group')
display_name: str
description: str | None = None
display_order: int | None = 0
admin: bool | None = False
process_models: list[ProcessModelInfo] = field(default_factory=list[ProcessModelInfo])
process_groups: list[ProcessGroup] = field(default_factory=list["ProcessGroup"])
parent_groups: list[ProcessGroupLite] | None = None
# TODO: delete these once they are no longer mentioned in current
# process_group.json files
display_order: int | None = 0
admin: bool | None = False
def __post_init__(self) -> None:
self.sort_index = self.display_name
@@ -53,8 +62,6 @@ class ProcessGroupSchema(Schema):
fields = [
"id",
"display_name",
"display_order",
"admin",
"process_models",
"description",
"process_groups",

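The model now declares an explicit whitelist of keys worth persisting, while display_order and admin survive only as deprecated fields so that process_group.json files written before this change still load. A small sketch of that loading concern, assuming groups are built directly from the parsed JSON; the file contents below are hypothetical.

# Hypothetical legacy file still carrying the deprecated keys.
import json

from spiffworkflow_backend.models.process_group import ProcessGroup

legacy_json = '{"display_name": "My Group", "description": "legacy group", "display_order": 3, "admin": false}'
data = json.loads(legacy_json)

# Keeping display_order and admin on the dataclass means this constructor call
# does not fail with "unexpected keyword argument".
group = ProcessGroup(id="my_group", **data)

# New writes go through the whitelist, so the deprecated keys disappear from
# disk the next time the group is saved.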
View File

@@ -13,6 +13,17 @@ from marshmallow.decorators import post_load
from spiffworkflow_backend.interfaces import ProcessGroupLite
from spiffworkflow_backend.models.file import File
# we only want to save these items to the json file
PROCESS_MODEL_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION = [
"display_name",
"description",
"primary_file_name",
"primary_process_id",
"fault_or_suspend_on_exception",
"exception_notification_addresses",
"metadata_extraction_paths",
]
class NotificationType(enum.Enum):
fault = "fault"
@@ -26,19 +37,22 @@ class ProcessModelInfo:
id: str
display_name: str
description: str
process_group: Any | None = None
primary_file_name: str | None = None
primary_process_id: str | None = None
display_order: int | None = 0
files: list[File] | None = field(default_factory=list[File])
fault_or_suspend_on_exception: str = NotificationType.fault.value
exception_notification_addresses: list[str] = field(default_factory=list)
metadata_extraction_paths: list[dict[str, str]] | None = None
process_group: Any | None = None
files: list[File] | None = field(default_factory=list[File])
# just for the API
parent_groups: list[ProcessGroupLite] | None = None
bpmn_version_control_identifier: str | None = None
# TODO: delete these once they are no longer mentioned in current process_model.json files
display_order: int | None = 0
def __post_init__(self) -> None:
self.sort_index = self.id
@@ -71,7 +85,6 @@ class ProcessModelInfoSchema(Schema):
id = marshmallow.fields.String(required=True)
display_name = marshmallow.fields.String(required=True)
description = marshmallow.fields.String()
display_order = marshmallow.fields.Integer(allow_none=True)
primary_file_name = marshmallow.fields.String(allow_none=True)
primary_process_id = marshmallow.fields.String(allow_none=True)
files = marshmallow.fields.List(marshmallow.fields.Nested("File"))

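ProcessModelInfo gets the same treatment: a whitelist of keys that belong in process_model.json, API-only fields grouped at the bottom, and display_order kept only until old files stop mentioning it. Roughly, the whitelist applies like this; the sample values are made up, and the service shown later filters in place rather than with a comprehension.

from spiffworkflow_backend.models.process_model import (
    PROCESS_MODEL_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION,
)

# A made-up schema dump for one model; only the whitelisted keys should hit disk.
full_dump = {
    "id": "my_group/my_model",  # derived from the filesystem path, never written
    "display_name": "My Model",
    "description": "",
    "primary_file_name": "my_model.bpmn",
    "primary_process_id": "Process_my_model",
    "fault_or_suspend_on_exception": "fault",
    "exception_notification_addresses": [],
    "metadata_extraction_paths": None,
    "files": [],            # API-only
    "parent_groups": None,  # API-only
}
on_disk = {k: v for k, v in full_dump.items() if k in PROCESS_MODEL_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION}
assert set(on_disk) == set(PROCESS_MODEL_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION)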
View File

@@ -3,16 +3,17 @@ import os
import shutil
import uuid
from glob import glob
from typing import Any
from typing import TypeVar
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.exceptions.process_entity_not_found_error import ProcessEntityNotFoundError
from spiffworkflow_backend.interfaces import ProcessGroupLite
from spiffworkflow_backend.interfaces import ProcessGroupLitesWithCache
from spiffworkflow_backend.models.process_group import PROCESS_GROUP_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_model import PROCESS_MODEL_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.services.authorization_service import AuthorizationService
@@ -74,7 +75,6 @@ class ProcessModelService(FileSystemService):
@staticmethod
def write_json_file(file_path: str, json_data: dict, indent: int = 4, sort_keys: bool = True) -> None:
"""Write json file."""
with open(file_path, "w") as h_open:
json.dump(json_data, h_open, indent=indent, sort_keys=sort_keys)
@@ -106,14 +106,11 @@
)
os.makedirs(process_model_path, exist_ok=True)
json_path = os.path.abspath(os.path.join(process_model_path, cls.PROCESS_MODEL_JSON_FILE))
process_model_id = process_model.id
# we don't save id in the json file
# this allows us to move models around on the filesystem
# the id is determined by its location on the filesystem
delattr(process_model, "id")
json_data = cls.PROCESS_MODEL_SCHEMA.dump(process_model)
for key in list(json_data.keys()):
if key not in PROCESS_MODEL_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION:
del json_data[key]
cls.write_json_file(json_path, json_data)
process_model.id = process_model_id
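The save path keeps the existing trick of stripping id before the dump (the id is derived from the model's location on disk, then restored on the in-memory object) and now additionally drops every dumped key that is not in the whitelist. Condensed into a standalone helper purely for illustration; the try/finally is an addition here, while the real method simply reassigns the id after writing, as shown above.

# Condensed sketch of the dump-for-disk shape used above (not the real method).
def dump_for_disk(process_model, schema, supported_keys) -> dict:
    saved_id = process_model.id
    delattr(process_model, "id")  # id comes from the filesystem location, not the json file
    try:
        json_data = schema.dump(process_model)
        return {key: value for key, value in json_data.items() if key in supported_keys}
    finally:
        process_model.id = saved_id  # restore the in-memory id either way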
@classmethod
def process_model_delete(cls, process_model_id: str) -> None:
@@ -306,9 +303,9 @@
os.makedirs(cat_path, exist_ok=True)
json_path = os.path.join(cat_path, cls.PROCESS_GROUP_JSON_FILE)
serialized_process_group = process_group.serialized
# we don't store `id` in the json files
# this allows us to move groups around on the filesystem
del serialized_process_group["id"]
for key in list(serialized_process_group.keys()):
if key not in PROCESS_GROUP_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION:
del serialized_process_group[key]
cls.write_json_file(json_path, serialized_process_group)
return process_group
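For groups, the single deletion of the "id" key becomes the same whitelist loop, which also sweeps out the deprecated display_order and admin keys along with anything else the serializer happens to include. The exact output of .serialized is not shown in this diff, so the input below is illustrative.

from spiffworkflow_backend.models.process_group import (
    PROCESS_GROUP_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION,
)

# Illustrative serialized group; the real `.serialized` output may differ.
serialized_process_group = {
    "id": "my_group",          # never written: derived from the directory path
    "display_name": "My Group",
    "description": "A sample group",
    "display_order": 3,        # deprecated
    "admin": False,            # deprecated
    "process_models": [],
    "process_groups": [],
}
for key in list(serialized_process_group.keys()):
    if key not in PROCESS_GROUP_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION:
        del serialized_process_group[key]

assert serialized_process_group == {"display_name": "My Group", "description": "A sample group"}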
@@ -352,17 +349,6 @@
f" {problem_models}"
)
shutil.rmtree(path)
cls._cleanup_process_group_display_order()
@classmethod
def _cleanup_process_group_display_order(cls) -> list[Any]:
process_groups = cls.get_process_groups() # Returns an ordered list
index = 0
for process_group in process_groups:
process_group.display_order = index
cls.update_process_group(process_group)
index += 1
return process_groups
@classmethod
def __scan_process_groups(cls, process_group_id: str | None = None) -> list[ProcessGroup]:
@@ -403,8 +389,6 @@
process_group = ProcessGroup(
id="",
display_name=process_group_id,
display_order=10000,
admin=False,
)
cls.write_json_file(cat_path, cls.GROUP_SCHEMA.dump(process_group))
# we don't store `id` in the json files, so we add it in here
@@ -467,7 +451,6 @@
id="",
display_name=name,
description="",
display_order=0,
)
cls.write_json_file(json_file_path, cls.PROCESS_MODEL_SCHEMA.dump(process_model_info))
# we don't store `id` in the json files, so we add it in here

View File

@@ -241,7 +241,6 @@ class SpecFileService(FileSystemService):
def update_caches(ref: SpecReference) -> None:
SpecFileService.update_process_cache(ref)
SpecFileService.update_process_caller_cache(ref)
SpecFileService.update_message_cache(ref)
SpecFileService.update_message_trigger_cache(ref)
SpecFileService.update_correlation_cache(ref)
@@ -298,21 +297,6 @@ class SpecFileService(FileSystemService):
def update_process_caller_cache(ref: SpecReference) -> None:
ProcessCallerService.add_caller(ref.identifier, ref.called_element_ids)
@staticmethod
def update_message_cache(ref: SpecReference) -> None:
"""Assure we have a record in the database of all possible message ids and names."""
# for message_model_identifier in ref.messages.keys():
# message_model = MessageModel.query.filter_by(
# identifier=message_model_identifier
# ).first()
# if message_model is None:
# message_model = MessageModel(
# identifier=message_model_identifier,
# name=ref.messages[message_model_identifier],
# )
# db.session.add(message_model)
# db.session.commit()
@staticmethod
def update_message_trigger_cache(ref: SpecReference) -> None:
"""Assure we know which messages can trigger the start of a process."""