Called elements (#218)

jbirddog 2023-04-19 18:07:15 -04:00 committed by GitHub
parent eebe987337
commit ff0202f4d4
12 changed files with 2630 additions and 2309 deletions


@ -0,0 +1,39 @@
"""empty message
Revision ID: 36241ec6747b
Revises: 44a8f46cc508
Create Date: 2023-04-19 10:31:23.202482
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '36241ec6747b'
down_revision = '44a8f46cc508'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('process_caller_cache',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_identifier', sa.String(length=255), nullable=True),
sa.Column('calling_process_identifier', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
with op.batch_alter_table('process_caller_cache', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_process_caller_cache_process_identifier'), ['process_identifier'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('process_caller_cache', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_process_caller_cache_process_identifier'))
op.drop_table('process_caller_cache')
# ### end Alembic commands ###

File diff suppressed because it is too large.


@ -519,6 +519,31 @@ paths:
schema:
type: string
/processes/{bpmn_process_identifier}/callers:
parameters:
- name: bpmn_process_identifier
in: path
required: true
description: the bpmn process identifier
schema:
type: string
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_caller_lists
summary:
Return a list of information about all processes that call the provided process id
tags:
- Process Models
responses:
"200":
description: Successfully return the requested calling processes
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Process"
/processes:
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_list
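
Below is a minimal sketch of calling the new callers endpoint from a client, assuming a locally running backend at http://localhost:7000/v1.0 and an already-issued bearer token (both values are assumptions, not part of this change); each item in the response is a serialized process reference:

import requests

BASE_URL = "http://localhost:7000/v1.0"  # assumed local backend URL
ACCESS_TOKEN = "your-access-token"  # hypothetical bearer token

# Ask which processes call the BPMN process whose identifier is "Level2".
response = requests.get(
    f"{BASE_URL}/processes/Level2/callers",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
)
response.raise_for_status()

# Each item follows the Process schema referenced above.
for caller in response.json():
    print(caller["identifier"])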


@ -21,6 +21,9 @@ from spiffworkflow_backend.models.human_task import HumanTaskModel # noqa: F401
from spiffworkflow_backend.models.spec_reference import (
SpecReferenceCache,
) # noqa: F401
from spiffworkflow_backend.models.process_caller import (
ProcessCallerCacheModel,
) # noqa: F401
from spiffworkflow_backend.models.message_instance import (
MessageInstanceModel,
) # noqa: F401


@ -0,0 +1,12 @@
"""ProcessCaller_model."""
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
class ProcessCallerCacheModel(SpiffworkflowBaseDBModel):
"""A cache of calling process ids for all Processes defined in all files."""
__tablename__ = "process_caller_cache"
id = db.Column(db.Integer, primary_key=True)
process_identifier = db.Column(db.String(255), index=True)
calling_process_identifier = db.Column(db.String(255))
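
A small usage sketch for this model, assuming an active application and database session (the identifiers are illustrative only): each row records one caller/callee pair, so looking up the callers of a process is a simple filter on process_identifier.

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_caller import ProcessCallerCacheModel

# Record that the process "Level1" calls the process "Level2".
db.session.add(
    ProcessCallerCacheModel(process_identifier="Level2", calling_process_identifier="Level1")
)
db.session.commit()

# Find every recorded caller of "Level2".
rows = (
    db.session.query(ProcessCallerCacheModel)
    .filter(ProcessCallerCacheModel.process_identifier == "Level2")
    .all()
)
print([row.calling_process_identifier for row in rows])  # ["Level1"]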


@ -35,6 +35,7 @@ class SpecReference:
messages: dict # Any messages defined in the same file where this process is defined.
correlations: dict # Any correlations defined in the same file with this process.
start_messages: list # The names of any messages that would start this process.
called_element_ids: list # The element ids of any called elements
class SpecReferenceCache(SpiffworkflowBaseDBModel):


@ -28,6 +28,7 @@ from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.process_caller_service import ProcessCallerService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@ -77,6 +78,14 @@ def process_list() -> Any:
return SpecReferenceSchema(many=True).dump(references)
def process_caller_lists(bpmn_process_identifier: str) -> Any:
callers = ProcessCallerService.callers(bpmn_process_identifier)
references = (
SpecReferenceCache.query.filter_by(type="process").filter(SpecReferenceCache.identifier.in_(callers)).all()
)
return SpecReferenceSchema(many=True).dump(references)
def _process_data_fetcher(
process_instance_id: int,
process_data_identifier: str,


@ -0,0 +1,42 @@
from typing import List
from sqlalchemy import or_
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_caller import ProcessCallerCacheModel
class ProcessCallerService:
@staticmethod
def count() -> int:
return ProcessCallerCacheModel.query.count() # type: ignore
@staticmethod
def clear_cache() -> None:
db.session.query(ProcessCallerCacheModel).delete()
@staticmethod
def clear_cache_for_process_ids(process_ids: List[str]) -> None:
db.session.query(ProcessCallerCacheModel).filter(
or_(
ProcessCallerCacheModel.process_identifier.in_(process_ids),
ProcessCallerCacheModel.calling_process_identifier.in_(process_ids),
)
).delete()
@staticmethod
def add_caller(process_id: str, called_process_ids: List[str]) -> None:
for called_process_id in called_process_ids:
db.session.add(
ProcessCallerCacheModel(process_identifier=called_process_id, calling_process_identifier=process_id)
)
db.session.commit()
@staticmethod
def callers(process_id: str) -> List[str]:
records = (
db.session.query(ProcessCallerCacheModel)
.filter(ProcessCallerCacheModel.process_identifier == process_id)
.all()
)
return list(set(map(lambda r: r.calling_process_identifier, records))) # type: ignore
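
A brief sketch of how the service is meant to be used, assuming an app context with an empty process_caller_cache table and made-up process identifiers; note that callers() de-duplicates with set(), so the returned order is not guaranteed:

from spiffworkflow_backend.services.process_caller_service import ProcessCallerService

# "Level1" and "Level3" each contain a call activity whose calledElement is "Level2".
ProcessCallerService.add_caller("Level1", ["Level2"])
ProcessCallerService.add_caller("Level3", ["Level2"])

assert ProcessCallerService.count() == 2
assert sorted(ProcessCallerService.callers("Level2")) == ["Level1", "Level3"]

# Clearing by either side of the relationship removes the matching rows.
ProcessCallerService.clear_cache_for_process_ids(["Level2"])
assert ProcessCallerService.callers("Level2") == []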


@ -22,6 +22,7 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.services.custom_parser import MyCustomParser
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_caller_service import ProcessCallerService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@ -112,6 +113,7 @@ class SpecFileService(FileSystemService):
messages = {}
correlations = {}
start_messages = []
called_element_ids = []
if file_type.value == FileType.bpmn.value:
parser.add_bpmn_xml(cls.get_etree_from_xml_bytes(binary_data))
parser_type = "process"
@ -130,6 +132,7 @@ class SpecFileService(FileSystemService):
is_executable = sub_parser.process_executable
start_messages = sub_parser.start_messages()
is_primary = sub_parser.get_id() == process_model_info.primary_process_id
called_element_ids = sub_parser.called_element_ids()
references.append(
SpecReference(
@ -145,6 +148,7 @@ class SpecFileService(FileSystemService):
is_primary=is_primary,
correlations=correlations,
start_messages=start_messages,
called_element_ids=called_element_ids,
)
)
return references
@ -258,6 +262,7 @@ class SpecFileService(FileSystemService):
def update_caches(ref: SpecReference) -> None:
"""Update_caches."""
SpecFileService.update_process_cache(ref)
SpecFileService.update_process_caller_cache(ref)
SpecFileService.update_message_cache(ref)
SpecFileService.update_message_trigger_cache(ref)
SpecFileService.update_correlation_cache(ref)
@ -265,15 +270,27 @@ class SpecFileService(FileSystemService):
@staticmethod
def clear_caches_for_file(file_name: str, process_model_info: ProcessModelInfo) -> None:
"""Clear all caches related to a file."""
db.session.query(SpecReferenceCache).filter(SpecReferenceCache.file_name == file_name).filter(
SpecReferenceCache.process_model_id == process_model_info.id
).delete()
records = (
db.session.query(SpecReferenceCache)
.filter(SpecReferenceCache.file_name == file_name)
.filter(SpecReferenceCache.process_model_id == process_model_info.id)
.all()
)
process_ids = []
for record in records:
process_ids.append(record.identifier)
db.session.delete(record)
ProcessCallerService.clear_cache_for_process_ids(process_ids)
# fixme: likely the other caches should be cleared as well, but we don't have a clean way to do so yet.
@staticmethod
def clear_caches() -> None:
"""Clear_caches."""
db.session.query(SpecReferenceCache).delete()
ProcessCallerService.clear_cache()
# fixme: likely the other caches should be cleared as well, but we don't have a clean way to do so yet.
@staticmethod
@ -301,6 +318,10 @@ class SpecFileService(FileSystemService):
db.session.add(process_id_lookup)
db.session.commit()
@staticmethod
def update_process_caller_cache(ref: SpecReference) -> None:
ProcessCallerService.add_caller(ref.identifier, ref.called_element_ids)
@staticmethod
def update_message_cache(ref: SpecReference) -> None:
"""Assure we have a record in the database of all possible message ids and names."""


@ -35,6 +35,7 @@ from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_caller_service import ProcessCallerService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@ -557,6 +558,47 @@ class TestProcessApi(BaseTest):
assert simple_form["is_executable"] is True
assert simple_form["is_primary"] is True
def test_process_callers(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""It should be possible to get a list of all processes that call another process."""
load_test_spec(
"test_group_one/simple_form",
process_model_source_directory="simple_form",
bpmn_file_name="simple_form",
)
# When adding a process model with one Process, no decisions, and some json files, only one process is recorded.
assert len(SpecReferenceCache.query.all()) == 1
# but no callers are recorded
assert ProcessCallerService.count() == 0
self.create_group_and_model_with_bpmn(
client=client,
user=with_super_admin_user,
process_group_id="test_group_two",
process_model_id="call_activity_nested",
bpmn_file_location="call_activity_nested",
)
# When adding a process model with 4 processes and a decision, 5 new records will be in the Cache
assert len(SpecReferenceCache.query.all()) == 6
# and 4 callers recorded
assert ProcessCallerService.count() == 4
# get the results
response = client.get(
"/v1.0/processes/Level2/callers",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
# We should get 1 back, Level1 calls Level2
assert len(response.json) == 1
caller = response.json[0]
assert caller["identifier"] == "Level1"
def test_process_group_add(
self,
app: Flask,


@ -0,0 +1,128 @@
from typing import Generator
import pytest
from flask.app import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_caller import ProcessCallerCacheModel
from spiffworkflow_backend.services.process_caller_service import ProcessCallerService
@pytest.fixture()
def with_clean_cache(app: Flask) -> Generator[None, None, None]:
db.session.query(ProcessCallerCacheModel).delete()
db.session.commit()
yield
@pytest.fixture()
def with_no_process_callers(with_clean_cache: None) -> Generator[None, None, None]:
yield
@pytest.fixture()
def with_single_process_caller(with_clean_cache: None) -> Generator[None, None, None]:
db.session.add(ProcessCallerCacheModel(process_identifier="called_once", calling_process_identifier="one_caller"))
db.session.commit()
yield
@pytest.fixture()
def with_multiple_process_callers(with_clean_cache: None) -> Generator[None, None, None]:
db.session.add(ProcessCallerCacheModel(process_identifier="called_many", calling_process_identifier="one_caller"))
db.session.add(ProcessCallerCacheModel(process_identifier="called_many", calling_process_identifier="two_caller"))
db.session.add(
ProcessCallerCacheModel(process_identifier="called_many", calling_process_identifier="three_caller")
)
db.session.commit()
yield
class TestProcessCallerService(BaseTest):
"""Infer from class name."""
def test_has_zero_count_when_empty(self, with_no_process_callers: None) -> None:
assert ProcessCallerService.count() == 0
def test_has_expected_count_when_not_empty(self, with_multiple_process_callers: None) -> None:
assert ProcessCallerService.count() == 3
def test_can_clear_the_cache(self, with_multiple_process_callers: None) -> None:
ProcessCallerService.clear_cache()
assert ProcessCallerService.count() == 0
def test_can_clear_the_cache_when_empty(self, with_no_process_callers: None) -> None:
ProcessCallerService.clear_cache()
assert ProcessCallerService.count() == 0
def test_can_clear_the_cache_for_process_id(self, with_single_process_caller: None) -> None:
ProcessCallerService.clear_cache_for_process_ids(["called_once"])
assert ProcessCallerService.count() == 0
def test_can_clear_the_cache_for_calling_process_id(self, with_multiple_process_callers: None) -> None:
ProcessCallerService.clear_cache_for_process_ids(["one_caller"])
assert ProcessCallerService.count() == 2
def test_can_clear_the_cache_for_callee_caller_process_id(
self, with_single_process_caller: None, with_multiple_process_callers: None
) -> None:
ProcessCallerService.clear_cache_for_process_ids(["one_caller"])
assert ProcessCallerService.count() == 2
def test_can_clear_the_cache_for_process_id_and_leave_other_process_ids_alone(
self,
with_single_process_caller: None,
with_multiple_process_callers: None,
) -> None:
ProcessCallerService.clear_cache_for_process_ids(["called_many"])
assert ProcessCallerService.count() == 1
def test_can_clear_the_cache_for_process_id_when_it_doesnt_exist(
self,
with_multiple_process_callers: None,
) -> None:
ProcessCallerService.clear_cache_for_process_ids(["garbage"])
assert ProcessCallerService.count() == 3
def test_no_records_added_if_calling_process_ids_is_empty(self, with_no_process_callers: None) -> None:
ProcessCallerService.add_caller("bob", [])
assert ProcessCallerService.count() == 0
def test_can_add_caller_for_new_process(self, with_no_process_callers: None) -> None:
ProcessCallerService.add_caller("bob", ["new_caller"])
assert ProcessCallerService.count() == 1
def test_can_add_many_callers_for_new_process(self, with_no_process_callers: None) -> None:
ProcessCallerService.add_caller("bob", ["new_caller", "another_new_caller"])
assert ProcessCallerService.count() == 2
def test_can_add_caller_for_existing_process(self, with_multiple_process_callers: None) -> None:
ProcessCallerService.add_caller("called_many", ["new_caller"])
assert ProcessCallerService.count() == 4
def test_can_add_many_callers_for_existing_process(self, with_multiple_process_callers: None) -> None:
ProcessCallerService.add_caller("called_many", ["new_caller", "another_new_caller"])
assert ProcessCallerService.count() == 5
def test_can_track_duplicate_callers(self, with_no_process_callers: None) -> None:
ProcessCallerService.add_caller("bob", ["new_caller", "new_caller"])
assert ProcessCallerService.count() == 2
def test_can_return_no_callers_when_no_records(self, with_no_process_callers: None) -> None:
assert ProcessCallerService.callers("bob") == []
def test_can_return_no_callers_when_process_id_is_unknown(self, with_multiple_process_callers: None) -> None:
assert ProcessCallerService.callers("bob") == []
def test_can_return_single_caller(self, with_single_process_caller: None) -> None:
assert ProcessCallerService.callers("called_once") == ["one_caller"]
def test_can_return_multiple_callers(self, with_multiple_process_callers: None) -> None:
callers = sorted(ProcessCallerService.callers("called_many"))
assert callers == ["one_caller", "three_caller", "two_caller"]
def test_can_return_single_caller_when_there_are_other_process_ids(
self, with_single_process_caller: None, with_multiple_process_callers: None
) -> None:
assert ProcessCallerService.callers("called_once") == ["one_caller"]


@ -50052,7 +50052,7 @@
"@csstools/postcss-text-decoration-shorthand": "^1.0.0",
"@csstools/postcss-trigonometric-functions": "^1.0.2",
"@csstools/postcss-unset-value": "^1.0.2",
"autoprefixer": "10.4.5",
"autoprefixer": "^10.4.13",
"browserslist": "^4.21.4",
"css-blank-pseudo": "^3.0.3",
"css-has-pseudo": "^3.0.4",
@ -50090,8 +50090,7 @@
},
"dependencies": {
"autoprefixer": {
"version": "10.4.5",
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.5.tgz",
"version": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.5.tgz",
"integrity": "sha512-Fvd8yCoA7lNX/OUllvS+aS1I7WRBclGXsepbvT8ZaPgrH24rgXpZzF0/6Hh3ZEkwg+0AES/Osd196VZmYoEFtw==",
"requires": {
"browserslist": "^4.20.2",