Ensure that the list of cached Spec References includes all processes and DMN decisions (even those that are not primary)

Dan 2022-11-14 20:31:20 -05:00
parent d740518e75
commit 88a40c73ea
3 changed files with 53 additions and 53 deletions

View File

@@ -9,6 +9,7 @@ from marshmallow import INCLUDE
 from marshmallow import Schema
 from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
+from spiffworkflow_backend.models.spec_reference import SpecReference


 class FileType(SpiffEnum):
@@ -62,25 +63,6 @@ CONTENT_TYPES = {
 }


-@dataclass()
-class FileReference:
-    """File Reference Information.
-
-    Includes items such as the process id and name for a BPMN,
-    or the Decision id and Decision name for a DMN file. There may be more than
-    one reference that points to a particular file.
-    """
-
-    id: str
-    name: str
-    type: str  # can be 'process', 'decision', or just 'file'
-    file_name: str
-    file_path: str
-    has_lanes: bool
-    executable: bool
-    messages: dict
-    correlations: dict
-    start_messages: list
 @dataclass(order=True)
 class File:
@@ -93,7 +75,7 @@ class File:
     type: str
     last_modified: datetime
     size: int
-    references: Optional[list[FileReference]] = None
+    references: Optional[list[SpecReference]] = None
     file_contents: Optional[bytes] = None
     process_model_id: Optional[str] = None
     process_group_id: Optional[str] = None
@@ -146,16 +128,8 @@ class FileSchema(Schema):
         ]
         unknown = INCLUDE

     references = marshmallow.fields.List(
-        marshmallow.fields.Nested("FileReferenceSchema")
+        marshmallow.fields.Nested("SpecReferenceSchema")
     )
-
-
-class FileReferenceSchema(Schema):
-    """FileSchema."""
-
-    class Meta:
-        """Meta."""
-
-        model = FileReference
-        fields = ["id", "name", "type"]
-        unknown = INCLUDE
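
Note: the FileReference dataclass and FileReferenceSchema removed above are replaced by SpecReference from spiffworkflow_backend.models.spec_reference, which is not shown in this diff. Below is a minimal sketch of what such a class plausibly looks like, inferred from the removed FileReference fields and from the attributes the service code below relies on (identifier, type, has_lanes, is_primary); the other field names are assumptions and the real model may differ.

    from dataclasses import dataclass, field


    @dataclass()
    class SpecReference:
        """Sketch only: one process or decision referenced by a spec file."""

        identifier: str       # process id (BPMN) or decision id (DMN)
        display_name: str     # assumed: human-readable name, replacing FileReference.name
        type: str             # "process" or "decision"
        file_name: str
        relative_path: str    # assumed: replaces FileReference.file_path
        has_lanes: bool
        is_executable: bool   # assumed: renamed from FileReference.executable
        is_primary: bool      # whether this is the process model's primary process
        messages: dict = field(default_factory=dict)
        correlations: dict = field(default_factory=dict)
        start_messages: list = field(default_factory=list)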

View File

@@ -144,36 +144,32 @@ class SpecFileService(FileSystemService):
     ) -> File:
         """Update_file."""
         SpecFileService.assert_valid_file_name(file_name)
-        # file_path = SpecFileService.file_path(process_model_info, file_name)
         file_path = os.path.join(
             FileSystemService.root_path(), process_model_info.id, file_name
         )
         SpecFileService.write_file_data_to_system(file_path, binary_data)
         file = SpecFileService.to_file_object(file_name, file_path)
-        if file.type == FileType.bpmn.value:
-            set_primary_file = False
-            if (
-                process_model_info.primary_file_name is None
-                or file_name == process_model_info.primary_file_name
-            ):
-                # If no primary process exists, make this primary process.
-                set_primary_file = True
-            references = SpecFileService.get_references_for_file(file, process_model_info)
-            for ref in references:
-                if ref.type == "process":
-                    ProcessModelService().update_spec(
-                        process_model_info, {
-                            "primary_process_id": ref.identifier,
-                            "primary_file_name": file_name,
-                            "is_review": ref.has_lanes,
-                        }
-                    )
-                    SpecFileService.update_process_cache(ref)
-                    SpecFileService.update_message_cache(ref)
-                    SpecFileService.update_message_trigger_cache(ref, process_model_info)
-                    SpecFileService.update_correlation_cache(ref)
-                    break
+        references = SpecFileService.get_references_for_file(file, process_model_info)
+        primary_process_ref = next((ref for ref in references if ref.is_primary), None)
+
+        for ref in references:
+            # If no valid primary process is defined, default to the first process in the
+            # updated file.
+            if not primary_process_ref and ref.type == "process":
+                ref.is_primary = True
+
+            if ref.is_primary:
+                ProcessModelService().update_spec(
+                    process_model_info, {
+                        "primary_process_id": ref.identifier,
+                        "primary_file_name": file_name,
+                        "is_review": ref.has_lanes,
+                    }
+                )
+            SpecFileService.update_process_cache(ref)
+            SpecFileService.update_message_cache(ref)
+            SpecFileService.update_message_trigger_cache(ref, process_model_info)
+            SpecFileService.update_correlation_cache(ref)
+
         return file
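
The heart of the new update_file body is the primary-selection rule: keep an existing primary reference if one is flagged, otherwise promote the first process found in the file, and update the cache for every reference either way. Below is a standalone sketch of just the selection step, using a hypothetical Ref stand-in rather than the real SpecReference.

    from dataclasses import dataclass
    from typing import Optional


    @dataclass
    class Ref:
        """Hypothetical stand-in for SpecReference."""

        identifier: str
        type: str            # "process" or "decision"
        is_primary: bool = False


    def pick_primary(references: list[Ref]) -> Optional[Ref]:
        """Return the flagged primary reference, else promote the first process."""
        primary = next((ref for ref in references if ref.is_primary), None)
        if primary is None:
            primary = next((ref for ref in references if ref.type == "process"), None)
            if primary is not None:
                primary.is_primary = True
        return primary


    refs = [Ref("Decision_a", "decision"), Ref("Process_a", "process"), Ref("Process_b", "process")]
    primary = pick_primary(refs)
    assert primary is not None and primary.identifier == "Process_a"  # first process wins when none is primary

In the diff itself, update_process_cache, update_message_cache, update_message_trigger_cache, and update_correlation_cache now run for every reference in the loop, not just the primary one, which is what lets non-primary processes and DMN decisions reach the cache.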

View File

@@ -9,6 +9,8 @@ import pytest
 from flask.app import Flask
 from flask.testing import FlaskClient
 from flask_bpmn.models.db import db
+from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@@ -439,6 +441,34 @@ class TestProcessApi(BaseTest):
         assert response.json["pagination"]["total"] == 5
         assert response.json["pagination"]["pages"] == 2

+    def test_process_list(self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ):
+        """It should be possible to get a list of all processes known to the system."""
+        load_test_spec(
+            "test_group_one/simple_form",
+            process_model_source_directory='simple_form',
+            bpmn_file_name='simple_form'
+        )
+        # When adding a process model with one process, no decisions, and some json
+        # files, only one process is recorded.
+        assert len(SpecReferenceCache.query.all()) == 1
+
+        self.create_group_and_model_with_bpmn(
+            client=client,
+            user=with_super_admin_user,
+            process_group_id='test_group_two',
+            process_model_id='call_activity_nested',
+            bpmn_file_location='call_activity_nested'
+        )
+        # When adding a process model with 4 processes and a decision, 5 new records
+        # will be in the cache.
+        assert len(SpecReferenceCache.query.all()) == 6
+
     def test_process_group_add(
         self,
         app: Flask,
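
The expected totals in the new test come from one process in simple_form, then four processes plus one DMN decision in call_activity_nested (1 + 5 == 6). Assuming SpecReferenceCache rows carry a type column with values like "process" and "decision" (an assumption, not shown in this diff), the same breakdown could be asserted directly inside the test:

    # Sketch only: assumes a `type` column on SpecReferenceCache; adjust if the
    # real model names it differently.
    process_rows = SpecReferenceCache.query.filter_by(type="process").all()
    decision_rows = SpecReferenceCache.query.filter_by(type="decision").all()

    assert len(process_rows) == 5   # 1 from simple_form + 4 from call_activity_nested
    assert len(decision_rows) == 1  # the DMN decision in call_activity_nested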