New calling process table (#1480)

* added new table to handle called process relationships

* process caller api endpoint is working again w/ burnettk

* fixed more tests and mypy w/ burnettk

* unit tests are passing and commented out unrelated failing test w/ burnettk

* almost all tests are passing w/ burnettk

* uncommented flaky test w/ burnettk

* minor change while reviewing w/ burnettk

* some changes from coderabbit suggestions w/ burnettk

* commented out sampling change w/ burnettk

* name the foreign keys on the process caller table w/ burnettk

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
jasquat 2024-05-03 14:09:32 +00:00 committed by GitHub
parent b9e70d12a6
commit 86f97d5937
24 changed files with 389 additions and 226 deletions

View File

@@ -73,7 +73,10 @@ if [[ "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" == "true" ]]; then
 fi
 if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
-  SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
+  if [[ -z "${SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS:-}" ]]; then
+    export SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false
+  fi
+  poetry run python bin/save_all_bpmn.py
 fi
 if [[ -n "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY:-}" ]]; then

View File

@@ -76,3 +76,7 @@ fi
 export SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP
 export FLASK_APP=src/spiffworkflow_backend
+if [[ -z "${SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS:-}" ]]; then
+  export SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false
+fi

View File

@@ -24,7 +24,7 @@ else
 else
   if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
-    SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP=false SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
+    SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP=false poetry run python bin/save_all_bpmn.py
   fi
   # this line blocks

View File

@@ -7,7 +7,6 @@ from spiffworkflow_backend.services.data_setup_service import DataSetupService
 def main() -> None:
-    """Main."""
     app = create_app()
     with app.app_context():
         failing_process_models = DataSetupService.save_all_process_models()

View File

@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 
 function error_handler() {
-  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+  echo >&2 "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
   exit "$2"
 }
 trap 'error_handler ${LINENO} $?' ERR
@@ -16,7 +16,7 @@ user_file_with_one_email_per_line="${1:-}"
 keycloak_realm="${2:-spiffworkflow}"
 if [[ -z "${1:-}" ]]; then
-  >&2 echo "usage: $(basename "$0") [user_file_with_one_email_per_line]"
+  echo >&2 "usage: $(basename "$0") [user_file_with_one_email_per_line]"
   exit 1
 fi
@@ -42,7 +42,8 @@ else
 fi
 
 # https://www.appsdeveloperblog.com/keycloak-rest-api-create-a-new-user/
-result=$(curl --fail -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
+result=$(
+  curl --fail -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
     --header 'Content-Type: application/x-www-form-urlencoded' \
     --data-urlencode "username=${ADMIN_USERNAME}" \
     --data-urlencode "password=${ADMIN_PASSWORD}" \
@@ -82,11 +83,11 @@ while read -r input_line; do
   email_header=$(awk -F ',' '{print $1}' <<<"$input_line")
   pass_header=$(awk -F ',' '{print $2}' <<<"$input_line")
   if [[ "$email_header" != "email" ]]; then
-    >&2 echo "ERROR: the first column in the first row must be email."
+    echo >&2 "ERROR: the first column in the first row must be email."
     exit 1
   fi
   if [[ "$pass_header" != "pass" ]]; then
-    >&2 echo "ERROR: the first column in the first row must be pass."
+    echo >&2 "ERROR: the first column in the first row must be pass."
     exit 1
   fi
   custom_attribute_one=$(awk -F ',' '{print $3}' <<<"$input_line")
@@ -97,7 +98,7 @@ while read -r input_line; do
   username=$(awk -F '@' '{print $1}' <<<"$user_email")
   if [[ "$username" == "$ADMIN_USERNAME" || "$user_email" == "$ADMIN_USERNAME" ]]; then
-    >&2 echo "ERROR: The user used as the admin user matches a user in the current import list. This should not happen. Comment out that user from the list or use a different admin user: ${ADMIN_USERNAME}"
+    echo >&2 "ERROR: The user used as the admin user matches a user in the current import list. This should not happen. Comment out that user from the list or use a different admin user: ${ADMIN_USERNAME}"
     exit 1
   fi
@@ -113,7 +114,7 @@ while read -r input_line; do
     user_id=$(jq -r '.[0] | .id' <<<"$user_info")
     if [[ -z "$user_id" ]]; then
-      >&2 echo "ERROR: Could not find user_id for user: ${user_email}"
+      echo >&2 "ERROR: Could not find user_id for user: ${user_email}"
       exit 1
     fi
     curl --fail --location --silent --request DELETE "${KEYCLOAK_BASE_URL}/admin/realms/${keycloak_realm}/users/${user_id}" \
@@ -123,7 +124,7 @@ while read -r input_line; do
     http_code=$(add_user "$user_email" "$username" "$password" "$user_attribute_one")
   fi
   if [[ "$http_code" != "201" ]]; then
-    >&2 echo "ERROR: Failed to create user: ${user_email} with http_code: ${http_code}"
+    echo >&2 "ERROR: Failed to create user: ${user_email} with http_code: ${http_code}"
     exit 1
   fi
 fi

View File

@@ -0,0 +1,42 @@
+"""empty message
+
+Revision ID: d4b900e71852
+Revises: c6e246c3c04e
+Create Date: 2024-05-02 16:21:48.287934
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'd4b900e71852'
+down_revision = 'c6e246c3c04e'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('process_caller_relationship',
+    sa.Column('called_reference_cache_process_id', sa.Integer(), nullable=False),
+    sa.Column('calling_reference_cache_process_id', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['called_reference_cache_process_id'], ['reference_cache.id'], name='called_reference_cache_process_id_fk'),
+    sa.ForeignKeyConstraint(['calling_reference_cache_process_id'], ['reference_cache.id'], name='calling_reference_cache_process_id_fk'),
+    sa.PrimaryKeyConstraint('called_reference_cache_process_id', 'calling_reference_cache_process_id', name='process_caller_relationship_pk')
+    )
+    with op.batch_alter_table('process_caller_relationship', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_process_caller_relationship_called_reference_cache_process_id'), ['called_reference_cache_process_id'], unique=False)
+        batch_op.create_index(batch_op.f('ix_process_caller_relationship_calling_reference_cache_process_id'), ['calling_reference_cache_process_id'], unique=False)
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('process_caller_relationship', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_process_caller_relationship_calling_reference_cache_process_id'))
+        batch_op.drop_index(batch_op.f('ix_process_caller_relationship_called_reference_cache_process_id'))
+
+    op.drop_table('process_caller_relationship')
+    # ### end Alembic commands ###
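
To exercise just this revision in isolation, a minimal sketch using Alembic's Python entry point (an assumption: it presumes an alembic.ini configured for this backend; the project may normally drive migrations through its own scripts instead):

# Hypothetical invocation sketch; the revision ids come from the migration above.
from alembic.config import main as alembic_main

alembic_main(argv=["upgrade", "d4b900e71852"])    # creates process_caller_relationship
alembic_main(argv=["downgrade", "c6e246c3c04e"])  # drops it again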

View File

@@ -112,5 +112,6 @@ from spiffworkflow_backend.models.future_task import (
 from spiffworkflow_backend.models.feature_flag import (
     FeatureFlagModel,
 )  # noqa: F401
+from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel  # noqa: F401
 
 add_listeners()

View File

@@ -2,6 +2,7 @@ from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
 from spiffworkflow_backend.models.db import db
 
+# TODO: delete this file
 class ProcessCallerCacheModel(SpiffworkflowBaseDBModel):
     """A cache of calling process ids for all Processes defined in all files."""

View File

@@ -0,0 +1,56 @@
+from flask import current_app
+from sqlalchemy import ForeignKey
+from sqlalchemy import PrimaryKeyConstraint
+from sqlalchemy.dialects.mysql import insert as mysql_insert
+from sqlalchemy.dialects.postgresql import insert as postgres_insert
+
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
+from spiffworkflow_backend.models.db import db
+
+
+class CalledProcessNotFoundError(Exception):
+    pass
+
+
+class CallingProcessNotFoundError(Exception):
+    pass
+
+
+class ProcessCallerRelationshipModel(SpiffworkflowBaseDBModel):
+    """A cache of calling process ids for all Processes defined in all files."""
+
+    __tablename__ = "process_caller_relationship"
+    __table_args__ = (
+        PrimaryKeyConstraint(
+            "called_reference_cache_process_id",
+            "calling_reference_cache_process_id",
+            name="process_caller_relationship_pk",
+        ),
+    )
+
+    called_reference_cache_process_id = db.Column(
+        ForeignKey("reference_cache.id", name="called_reference_cache_process_id_fk"), nullable=False, index=True
+    )
+    calling_reference_cache_process_id = db.Column(
+        ForeignKey("reference_cache.id", name="calling_reference_cache_process_id_fk"), nullable=False, index=True
+    )
+
+    @classmethod
+    def insert_or_update(cls, called_reference_cache_process_id: int, calling_reference_cache_process_id: int) -> None:
+        caller_info = {
+            "called_reference_cache_process_id": called_reference_cache_process_id,
+            "calling_reference_cache_process_id": calling_reference_cache_process_id,
+        }
+        on_duplicate_key_stmt = None
+        if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "mysql":
+            insert_stmt = mysql_insert(ProcessCallerRelationshipModel).values(caller_info)
+            # We don't actually want to update anything but it doesn't really matter if we do since it should be the same value
+            on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
+                called_reference_cache_process_id=insert_stmt.inserted.called_reference_cache_process_id
+            )
+        else:
+            insert_stmt = postgres_insert(ProcessCallerRelationshipModel).values(caller_info)
+            on_duplicate_key_stmt = insert_stmt.on_conflict_do_nothing(
+                index_elements=["called_reference_cache_process_id", "calling_reference_cache_process_id"]
+            )
+        db.session.execute(on_duplicate_key_stmt)
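
A usage sketch (the ids here are hypothetical; in practice they come from reference_cache rows): the composite primary key plus the dialect-specific upsert makes the write idempotent, so recording the same caller/called pair twice still yields a single row.

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel

# record that reference_cache row 2 (a calling process) calls row 1 (a called process)
ProcessCallerRelationshipModel.insert_or_update(
    called_reference_cache_process_id=1, calling_reference_cache_process_id=2
)
# a repeat is a no-op on postgres and a same-value update on mysql; either way, one row
ProcessCallerRelationshipModel.insert_or_update(
    called_reference_cache_process_id=1, calling_reference_cache_process_id=2
)
db.session.commit()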

View File

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 from dataclasses import dataclass
 from typing import Any
@@ -13,6 +15,7 @@ from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
 from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
 from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel
 
 # SpecReferenceNotFoundError
@@ -83,6 +86,20 @@ class ReferenceCacheModel(SpiffworkflowBaseDBModel):
     generation = relationship(CacheGenerationModel)
 
+    process_callers = relationship(
+        ProcessCallerRelationshipModel,
+        foreign_keys="[ProcessCallerRelationshipModel.called_reference_cache_process_id]",
+        cascade="all, delete-orphan",
+        single_parent=True,
+    )
+    calling_processes = relationship(
+        ProcessCallerRelationshipModel,
+        foreign_keys="[ProcessCallerRelationshipModel.calling_reference_cache_process_id]",
+        cascade="all, delete-orphan",
+        single_parent=True,
+    )
+
     def relative_path(self) -> str:
         return os.path.join(self.relative_location, self.file_name).replace("/", os.sep)
@@ -104,7 +121,7 @@ class ReferenceCacheModel(SpiffworkflowBaseDBModel):
         relative_location: str,
         properties: dict | None = None,
         use_current_cache_generation: bool = False,
-    ) -> "ReferenceCacheModel":
+    ) -> ReferenceCacheModel:
         reference_cache = cls(
             identifier=identifier,
             display_name=display_name,
@@ -124,7 +141,7 @@ class ReferenceCacheModel(SpiffworkflowBaseDBModel):
         return reference_cache
 
     @classmethod
-    def from_spec_reference(cls, ref: Reference, use_current_cache_generation: bool = False) -> "ReferenceCacheModel":
+    def from_spec_reference(cls, ref: Reference, use_current_cache_generation: bool = False) -> ReferenceCacheModel:
         reference_cache = cls.from_params(
             identifier=ref.identifier,
             display_name=ref.display_name,

View File

@@ -46,11 +46,11 @@ from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.git_service import GitCommandError
 from spiffworkflow_backend.services.git_service import GitService
 from spiffworkflow_backend.services.jinja_service import JinjaService
-from spiffworkflow_backend.services.process_caller_service import ProcessCallerService
 from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
 from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
 from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.reference_cache_service import ReferenceCacheService
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
 from spiffworkflow_backend.services.task_service import TaskModelError
 from spiffworkflow_backend.services.task_service import TaskService
@@ -122,14 +122,9 @@ def process_list() -> Any:
     return ReferenceSchema(many=True).dump(permitted_references)
 
 
+# if we pass in bpmn_process_identifiers of [a], a is "called" and we want to find which processes are *callers* of a
 def process_caller_list(bpmn_process_identifiers: list[str]) -> Any:
-    callers = ProcessCallerService.callers(bpmn_process_identifiers)
-    references = (
-        ReferenceCacheModel.basic_query()
-        .filter_by(type="process")
-        .filter(ReferenceCacheModel.identifier.in_(callers))  # type: ignore
-        .all()
-    )
+    references = ReferenceCacheService.get_reference_cache_entries_calling_process(bpmn_process_identifiers)
     return ReferenceSchema(many=True).dump(references)
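
A request sketch against the reworked endpoint, mirroring the test later in this commit (the base URL and token are placeholders; only the route and the expected response shape come from the diff):

import requests

# ask which processes *call* Level2; expect one entry back, since Level1 calls Level2
response = requests.get(
    "http://localhost:7000/v1.0/processes/callers/Level2",  # placeholder host/port
    headers={"Authorization": "Bearer <token>"},  # placeholder auth
)
assert response.status_code == 200
assert len(response.json()) == 1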

View File

@@ -472,21 +472,13 @@ def process_model_create_with_natural_language(modified_process_group_id: str, b
         "required": [],
     }
 
-    SpecFileService.add_file(
-        process_model_info,
-        f"{process_model_identifier}.bpmn",
-        str.encode(bpmn_template_contents),
-    )
-    SpecFileService.add_file(
-        process_model_info,
-        f"{form_identifier}-schema.json",
-        str.encode(json.dumps(form_schema_json)),
-    )
-    SpecFileService.add_file(
-        process_model_info,
-        f"{form_identifier}-uischema.json",
-        str.encode(json.dumps(form_uischema_json)),
-    )
+    files_to_update = {
+        f"{process_model_identifier}.bpmn": str.encode(bpmn_template_contents),
+        f"{form_identifier}-schema.json": str.encode(json.dumps(form_schema_json)),
+        f"{form_identifier}-uischema.json": str.encode(json.dumps(form_uischema_json)),
+    }
+    for file_name, contents in files_to_update.items():
+        SpecFileService.update_file(process_model_info, file_name, contents)
 
     _commit_and_push_to_git(f"User: {g.user.username} created process model via natural language: {process_model_info.id}")
@@ -585,7 +577,7 @@ def _create_or_update_process_model_file(
     file = None
     try:
-        file = SpecFileService.update_file(process_model, request_file.filename, request_file_contents, user=g.user)
+        file, _ = SpecFileService.update_file(process_model, request_file.filename, request_file_contents, user=g.user)
     except ProcessModelFileInvalidError as exception:
         raise (
             ApiError(

View File

@@ -33,6 +33,7 @@ class DataSetupService:
         files = FileSystemService.walk_files_from_root_path(True, None)
         reference_objects: dict[str, ReferenceCacheModel] = {}
         all_data_store_specifications: dict[tuple[str, str, str], Any] = {}
+        references = []
 
         for file in files:
             if FileSystemService.is_process_model_json_file(file):
@@ -46,13 +47,13 @@ class DataSetupService:
                     try:
                         reference_cache = ReferenceCacheModel.from_spec_reference(ref)
                         ReferenceCacheService.add_unique_reference_cache_object(reference_objects, reference_cache)
-                        SpecFileService.update_caches_except_process(ref)
                         db.session.commit()
+                        references.append(ref)
                     except Exception as ex:
                         failing_process_models.append(
                             (
                                 f"{ref.relative_location}/{ref.file_name}",
-                                str(ex),
+                                repr(ex),
                             )
                         )
             except Exception as ex2:
@@ -103,6 +104,18 @@ class DataSetupService:
         ReferenceCacheService.add_new_generation(reference_objects)
         cls._sync_data_store_models_with_specifications(all_data_store_specifications)
 
+        for ref in references:
+            try:
+                SpecFileService.update_caches_except_process(ref)
+                db.session.commit()
+            except Exception as ex:
+                failing_process_models.append(
+                    (
+                        f"{ref.relative_location}/{ref.file_name}",
+                        repr(ex),
+                    )
+                )
+
         return failing_process_models
 
     @classmethod
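
A plausible reading of this reordering (an inference from the diff, not stated in the commit message): update_caches_except_process now ends up in ProcessCallerService.add_caller, which raises unless both the calling and called identifiers already exist in reference_cache, so the relationship caches can only be rebuilt after add_new_generation has persisted every process reference. In sketch form, with names taken from the diff above:

# Sketch of the ordering constraint introduced here.
ReferenceCacheService.add_new_generation(reference_objects)  # 1) persist every process reference
for ref in references:  # 2) only then wire up caller/called relationships
    SpecFileService.update_caches_except_process(ref)  # may raise if a referenced process is unknown
    db.session.commit()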

View File

@@ -34,18 +34,19 @@ def traces_sampler(sampling_context: Any) -> Any:
     if sampling_context["parent_sampled"] is not None:
         return sampling_context["parent_sampled"]
 
-    if "wsgi_environ" in sampling_context:
-        wsgi_environ = sampling_context["wsgi_environ"]
-        path_info = wsgi_environ.get("PATH_INFO")
-        request_method = wsgi_environ.get("REQUEST_METHOD")
-
-        # tasks_controller.task_submit
-        # this is the current pain point as of 31 jan 2023.
-        if path_info and (
-            (path_info.startswith("/v1.0/tasks/") and request_method == "PUT")
-            or (path_info.startswith("/v1.0/task-data/") and request_method == "GET")
-        ):
-            return 1
+    # sample some requests at a higher rate
+    # if "wsgi_environ" in sampling_context:
+    #     wsgi_environ = sampling_context["wsgi_environ"]
+    #     path_info = wsgi_environ.get("PATH_INFO")
+    #     request_method = wsgi_environ.get("REQUEST_METHOD")
+    #
+    #     # tasks_controller.task_submit
+    #     # this is the current pain point as of 31 jan 2023.
+    #     if path_info and (
+    #         (path_info.startswith("/v1.0/tasks/") and request_method == "PUT")
+    #         or (path_info.startswith("/v1.0/task-data/") and request_method == "GET")
+    #     ):
+    #         return 1
 
     # Default sample rate for all others (replaces traces_sample_rate)
     return 0.01

View File

@@ -1,35 +1,50 @@
 from sqlalchemy import or_
 
 from spiffworkflow_backend.models.db import db
-from spiffworkflow_backend.models.process_caller import ProcessCallerCacheModel
+from spiffworkflow_backend.models.process_caller_relationship import CalledProcessNotFoundError
+from spiffworkflow_backend.models.process_caller_relationship import CallingProcessNotFoundError
+from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel
+from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
 
 
+# TODO: delete this file
 class ProcessCallerService:
     @staticmethod
     def count() -> int:
-        return ProcessCallerCacheModel.query.count()  # type: ignore
+        """This is used in tests ONLY."""
+        return ProcessCallerRelationshipModel.query.count()  # type: ignore
 
     @staticmethod
     def clear_cache() -> None:
-        db.session.query(ProcessCallerCacheModel).delete()
+        db.session.query(ProcessCallerRelationshipModel).delete()
 
     @staticmethod
-    def clear_cache_for_process_ids(process_ids: list[str]) -> None:
-        db.session.query(ProcessCallerCacheModel).filter(
+    def clear_cache_for_process_ids(reference_cache_ids: list[int]) -> None:
+        ProcessCallerRelationshipModel.query.filter(
             or_(
-                ProcessCallerCacheModel.process_identifier.in_(process_ids),
-                ProcessCallerCacheModel.calling_process_identifier.in_(process_ids),
+                ProcessCallerRelationshipModel.called_reference_cache_process_id.in_(reference_cache_ids),
+                ProcessCallerRelationshipModel.calling_reference_cache_process_id.in_(reference_cache_ids),
             )
         ).delete()
 
     @staticmethod
-    def add_caller(process_id: str, called_process_ids: list[str]) -> None:
-        for called_process_id in called_process_ids:
-            db.session.add(ProcessCallerCacheModel(process_identifier=called_process_id, calling_process_identifier=process_id))
-
-    @staticmethod
-    def callers(process_ids: list[str]) -> list[str]:
-        records = (
-            db.session.query(ProcessCallerCacheModel).filter(ProcessCallerCacheModel.process_identifier.in_(process_ids)).all()
-        )
-        return sorted({r.calling_process_identifier for r in records})
+    def add_caller(calling_process_identifier: str, called_process_identifiers: list[str]) -> None:
+        reference_cache_records = (
+            ReferenceCacheModel.basic_query()
+            .filter(ReferenceCacheModel.identifier.in_(called_process_identifiers + [calling_process_identifier]))  # type: ignore
+            .all()
+        )
+        reference_cache_dict = {r.identifier: r.id for r in reference_cache_records}
+        if calling_process_identifier not in reference_cache_dict:
+            raise CallingProcessNotFoundError(
+                f"Could not find calling process id '{calling_process_identifier}' in reference_cache table."
+            )
+        for called_process_identifier in called_process_identifiers:
+            if called_process_identifier not in reference_cache_dict:
+                raise CalledProcessNotFoundError(
+                    f"Could not find called process id '{called_process_identifier}' in reference_cache table."
+                )
+            ProcessCallerRelationshipModel.insert_or_update(
+                called_reference_cache_process_id=reference_cache_dict[called_process_identifier],
+                calling_reference_cache_process_id=reference_cache_dict[calling_process_identifier],
+            )
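
A usage sketch of the stricter add_caller (the identifiers are illustrative; both sides must already be rows in reference_cache or the call raises):

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_caller_relationship import CalledProcessNotFoundError
from spiffworkflow_backend.models.process_caller_relationship import CallingProcessNotFoundError
from spiffworkflow_backend.services.process_caller_service import ProcessCallerService

try:
    # record that "Level1" calls both "Level2" and "Level2b"
    ProcessCallerService.add_caller("Level1", ["Level2", "Level2b"])
    db.session.commit()
except (CallingProcessNotFoundError, CalledProcessNotFoundError):
    # one of the identifiers has no reference_cache row yet
    db.session.rollback()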

View File

@@ -1,7 +1,9 @@
 from sqlalchemy import insert
+from sqlalchemy.orm import aliased
 
 from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
 from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel
 from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
 from spiffworkflow_backend.services.upsearch_service import UpsearchService
@@ -32,16 +34,12 @@ class ReferenceCacheService:
     @classmethod
     def upsearch(cls, location: str, identifier: str, type: str) -> str | None:
-        # really want to be able to join to this table on max(id)
-        cache_generation = CacheGenerationModel.newest_generation_for_table("reference_cache")
-        if cache_generation is None:
-            return None
         locations = UpsearchService.upsearch_locations(location)
         references = (
-            ReferenceCacheModel.query.filter_by(
+            ReferenceCacheModel.basic_query()
+            .filter_by(
                 identifier=identifier,
                 type=type,
-                generation=cache_generation,
             )
             .filter(ReferenceCacheModel.relative_location.in_(locations))  # type: ignore
             .order_by(ReferenceCacheModel.relative_location.desc())  # type: ignore
@@ -53,3 +51,20 @@ class ReferenceCacheService:
                 return reference.relative_location  # type: ignore
 
         return None
+
+    @classmethod
+    def get_reference_cache_entries_calling_process(cls, bpmn_process_identifiers: list[str]) -> list[ReferenceCacheModel]:
+        called_reference_alias = aliased(ReferenceCacheModel)
+        references: list[ReferenceCacheModel] = (
+            ReferenceCacheModel.basic_query()
+            .join(
+                ProcessCallerRelationshipModel,
+                ProcessCallerRelationshipModel.calling_reference_cache_process_id == ReferenceCacheModel.id,
+            )
+            .join(
+                called_reference_alias,
+                called_reference_alias.id == ProcessCallerRelationshipModel.called_reference_cache_process_id,
+            )
+            .filter(called_reference_alias.identifier.in_(bpmn_process_identifiers))
+        ).all()
+        return references
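
A usage sketch mirroring the assertions in the tests this commit touches: given a cache where Level1 calls Level2, asking for the callers of Level2 returns the Level1 entry.

from spiffworkflow_backend.services.reference_cache_service import ReferenceCacheService

callers = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
assert [caller.identifier for caller in callers] == ["Level1"]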

View File

@@ -138,11 +138,6 @@ class SpecFileService(FileSystemService):
         )
         return references
 
-    @staticmethod
-    def add_file(process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes) -> File:
-        # Same as update
-        return SpecFileService.update_file(process_model_info, file_name, binary_data)
-
     @classmethod
     def validate_bpmn_xml(cls, file_name: str, binary_data: bytes) -> None:
         file_type = FileSystemService.file_type(file_name)
@@ -156,8 +151,13 @@ class SpecFileService(FileSystemService):
     @classmethod
     def update_file(
-        cls, process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes, user: UserModel | None = None
-    ) -> File:
+        cls,
+        process_model_info: ProcessModelInfo,
+        file_name: str,
+        binary_data: bytes,
+        user: UserModel | None = None,
+        update_process_cache_only: bool = False,
+    ) -> tuple[File, list[Reference]]:
         SpecFileService.assert_valid_file_name(file_name)
         cls.validate_bpmn_xml(file_name, binary_data)
@@ -189,6 +189,9 @@ class SpecFileService(FileSystemService):
             )
             all_called_element_ids = all_called_element_ids | set(ref.called_element_ids)
-            SpecFileService.update_all_caches(ref)
+            if update_process_cache_only:
+                SpecFileService.update_process_cache(ref)
+            else:
+                SpecFileService.update_all_caches(ref)
 
         if user is not None:
@@ -217,7 +220,7 @@ class SpecFileService(FileSystemService):
         # make sure we save the file as the last thing we do to ensure validations have run
         full_file_path = SpecFileService.full_file_path(process_model_info, file_name)
         SpecFileService.write_file_data_to_system(full_file_path, binary_data)
-        return SpecFileService.to_file_object(file_name, full_file_path)
+        return (SpecFileService.to_file_object(file_name, full_file_path), references)
 
     @staticmethod
     def last_modified(process_model: ProcessModelInfo, file_name: str) -> datetime:
@@ -264,20 +267,13 @@ class SpecFileService(FileSystemService):
             .all()
         )
 
-        process_ids = []
+        reference_cache_ids = []
         for record in records:
-            process_ids.append(record.identifier)
+            reference_cache_ids.append(record.id)
             db.session.delete(record)
-        ProcessCallerService.clear_cache_for_process_ids(process_ids)
-
-    @staticmethod
-    def clear_caches() -> None:
-        db.session.query(ReferenceCacheModel).delete()
-        ProcessCallerService.clear_cache()
+        ProcessCallerService.clear_cache_for_process_ids(reference_cache_ids)
         # fixme: likely the other caches should be cleared as well, but we don't have a clean way to do so yet.
 
     @staticmethod
     def update_process_cache(ref: Reference) -> None:

View File

@@ -2,6 +2,7 @@ import glob
 import os
 
 from flask import current_app
+from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
@@ -63,6 +64,7 @@ class ExampleDataLoader:
         if len(files) == 0:
             raise Exception(f"Could not find any files with file_glob: {file_glob}")
 
+        all_references = []
         for file_path in files:
             if os.path.isdir(file_path):
                 continue  # Don't try to process sub directories
@@ -74,13 +76,19 @@ class ExampleDataLoader:
             try:
                 file = open(file_path, "rb")
                 data = file.read()
-                file_info = SpecFileService.add_file(process_model_info=spec, file_name=filename, binary_data=data)
+                _, new_references = SpecFileService.update_file(
+                    process_model_info=spec, file_name=filename, binary_data=data, update_process_cache_only=True
+                )
+                all_references += new_references
                 if is_primary:
-                    references = SpecFileService.get_references_for_file(file_info, spec)
-                    spec.primary_process_id = references[0].identifier
+                    # references = SpecFileService.get_references_for_file(file_info, spec)
+                    spec.primary_process_id = new_references[0].identifier
                     spec.primary_file_name = filename
                     ProcessModelService.save_process_model(spec)
             finally:
                 if file:
                     file.close()
+        for ref in all_references:
+            SpecFileService.update_caches_except_process(ref)
+        db.session.commit()
         return spec

View File

@@ -663,6 +663,7 @@ class TestProcessApi(BaseTest):
             "/v1.0/processes/callers/Level2",
             headers=self.logged_in_headers(with_super_admin_user),
         )
+        assert response.status_code == 200
         assert response.json is not None
         # We should get 1 back, Level1 calls Level2
        assert len(response.json) == 1

View File

@@ -23,14 +23,6 @@ class TestProcessModelsController(BaseTest):
         with_db_and_bpmn_file_cleanup: None,
         with_super_admin_user: UserModel,
     ) -> None:
-        process_model = self.create_group_and_model_with_bpmn(
-            client=client,
-            user=with_super_admin_user,
-            process_group_id="caller",
-            process_model_id="caller",
-            bpmn_file_location="call_activity_same_directory",
-            bpmn_file_name="call_activity_test.bpmn",
-        )
         self.create_group_and_model_with_bpmn(
             client=client,
             user=with_super_admin_user,
@@ -39,6 +31,14 @@ class TestProcessModelsController(BaseTest):
             bpmn_file_location="call_activity_same_directory",
             bpmn_file_name="callable_process.bpmn",
         )
+        process_model = self.create_group_and_model_with_bpmn(
+            client=client,
+            user=with_super_admin_user,
+            process_group_id="caller",
+            process_model_id="caller",
+            bpmn_file_location="call_activity_same_directory",
+            bpmn_file_name="call_activity_test.bpmn",
+        )
 
         user_one = self.create_user_with_permission(username="user_one", target_uri="/v1.0/process-groups/caller:*")
         self.add_permissions_to_user(

View File

@@ -2,16 +2,21 @@ from collections.abc import Generator
 
 import pytest
 from flask.app import Flask
+from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
 from spiffworkflow_backend.models.db import db
-from spiffworkflow_backend.models.process_caller import ProcessCallerCacheModel
+from spiffworkflow_backend.models.process_caller_relationship import CalledProcessNotFoundError
+from spiffworkflow_backend.models.process_caller_relationship import CallingProcessNotFoundError
+from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel
+from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
 from spiffworkflow_backend.services.process_caller_service import ProcessCallerService
+from spiffworkflow_backend.services.reference_cache_service import ReferenceCacheService
 
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 
 
 @pytest.fixture()
 def with_clean_cache(app: Flask) -> Generator[None, None, None]:
-    db.session.query(ProcessCallerCacheModel).delete()
+    ProcessCallerRelationshipModel.query.delete()
     db.session.commit()
     yield
@@ -21,25 +26,50 @@ def with_no_process_callers(with_clean_cache: None) -> Generator[None, None, Non
     yield
 
 
+def create_reference_cache(identifier: str) -> ReferenceCacheModel:
+    ref_cache = ReferenceCacheModel.from_params(
+        identifier=identifier,
+        type="process",
+        display_name=identifier,
+        file_name=identifier,
+        relative_location=identifier,
+        use_current_cache_generation=True,
+    )
+    db.session.add(ref_cache)
+    return ref_cache
+
+
 @pytest.fixture()
 def with_single_process_caller(with_clean_cache: None) -> Generator[None, None, None]:
-    db.session.add(ProcessCallerCacheModel(process_identifier="called_once", calling_process_identifier="one_caller"))
+    ReferenceCacheService.add_new_generation({})
+    cache_generation = CacheGenerationModel(cache_table="reference_cache")
+    db.session.add(cache_generation)
+    db.session.commit()
+    called_cache = create_reference_cache("called_once")
+    calling_cache = create_reference_cache("calling_cache")
+    db.session.add(called_cache)
+    db.session.add(calling_cache)
+    db.session.commit()
+    ProcessCallerService.add_caller(calling_cache.identifier, [called_cache.identifier])
     db.session.commit()
     yield
 
 
 @pytest.fixture()
 def with_multiple_process_callers(with_clean_cache: None) -> Generator[None, None, None]:
-    db.session.add(ProcessCallerCacheModel(process_identifier="called_many", calling_process_identifier="one_caller"))
-    db.session.add(ProcessCallerCacheModel(process_identifier="called_many", calling_process_identifier="two_caller"))
-    db.session.add(ProcessCallerCacheModel(process_identifier="called_many", calling_process_identifier="three_caller"))
-    db.session.commit()
+    ReferenceCacheService.add_new_generation({})
+    called_cache = create_reference_cache("called_many")
+    for ref_identifier in ["one_caller", "two_caller", "three_caller"]:
+        calling_cache = create_reference_cache(ref_identifier)
+        db.session.commit()
+        ProcessCallerService.add_caller(calling_cache.identifier, [called_cache.identifier])
+        db.session.commit()
     yield
 
 
 class TestProcessCallerService(BaseTest):
-    """Infer from class name."""
-
     def test_has_zero_count_when_empty(self, with_no_process_callers: None) -> None:
         assert ProcessCallerService.count() == 0
@@ -55,72 +85,45 @@ class TestProcessCallerService(BaseTest):
         assert ProcessCallerService.count() == 0
 
     def test_can_clear_the_cache_for_process_id(self, with_single_process_caller: None) -> None:
-        ProcessCallerService.clear_cache_for_process_ids(["called_once"])
+        assert ProcessCallerService.count() != 0
+        reference_cache = ReferenceCacheModel.basic_query().filter_by(identifier="called_once").first()
+        assert reference_cache is not None
+        ProcessCallerService.clear_cache_for_process_ids([reference_cache.id])
         assert ProcessCallerService.count() == 0
 
     def test_can_clear_the_cache_for_calling_process_id(self, with_multiple_process_callers: None) -> None:
-        ProcessCallerService.clear_cache_for_process_ids(["one_caller"])
+        reference_cache = ReferenceCacheModel.basic_query().filter_by(identifier="one_caller").first()
+        assert reference_cache is not None
+        assert ProcessCallerService.count() == 3
+        ProcessCallerService.clear_cache_for_process_ids([reference_cache.id])
         assert ProcessCallerService.count() == 2
 
     def test_can_clear_the_cache_for_callee_caller_process_id(
         self, with_single_process_caller: None, with_multiple_process_callers: None
     ) -> None:
-        ProcessCallerService.clear_cache_for_process_ids(["one_caller"])
-        assert ProcessCallerService.count() == 2
+        reference_cache = ReferenceCacheModel.basic_query().filter_by(identifier="one_caller").first()
+        assert reference_cache is not None
+        assert ProcessCallerService.count() == 4
+        ProcessCallerService.clear_cache_for_process_ids([reference_cache.id])
+        assert ProcessCallerService.count() == 3
 
     def test_can_clear_the_cache_for_process_id_and_leave_other_process_ids_alone(
         self,
         with_single_process_caller: None,
         with_multiple_process_callers: None,
     ) -> None:
-        ProcessCallerService.clear_cache_for_process_ids(["called_many"])
+        reference_cache = ReferenceCacheModel.basic_query().filter_by(identifier="called_many").first()
+        assert reference_cache is not None
+        ProcessCallerService.clear_cache_for_process_ids([reference_cache.id])
         assert ProcessCallerService.count() == 1
 
-    def test_can_clear_the_cache_for_process_id_when_it_doesnt_exist(
-        self,
-        with_multiple_process_callers: None,
-    ) -> None:
-        ProcessCallerService.clear_cache_for_process_ids(["garbage"])
-        assert ProcessCallerService.count() == 3
-
-    def test_no_records_added_if_calling_process_ids_is_empty(self, with_no_process_callers: None) -> None:
-        ProcessCallerService.add_caller("bob", [])
+    def test_raises_if_calling_reference_cache_does_not_exist(self, with_no_process_callers: None) -> None:
+        with pytest.raises(CallingProcessNotFoundError):
+            ProcessCallerService.add_caller("DNE", [])
         assert ProcessCallerService.count() == 0
 
-    def test_can_add_caller_for_new_process(self, with_no_process_callers: None) -> None:
-        ProcessCallerService.add_caller("bob", ["new_caller"])
-        assert ProcessCallerService.count() == 1
-
-    def test_can_many_callers_for_new_process(self, with_no_process_callers: None) -> None:
-        ProcessCallerService.add_caller("bob", ["new_caller", "another_new_caller"])
-        assert ProcessCallerService.count() == 2
-
-    def test_can_add_caller_for_existing_process(self, with_multiple_process_callers: None) -> None:
-        ProcessCallerService.add_caller("called_many", ["new_caller"])
-        assert ProcessCallerService.count() == 4
-
-    def test_can_add_many_callers_for_existing_process(self, with_multiple_process_callers: None) -> None:
-        ProcessCallerService.add_caller("called_many", ["new_caller", "another_new_caller"])
-        assert ProcessCallerService.count() == 5
-
-    def test_can_track_duplicate_callers(self, with_no_process_callers: None) -> None:
-        ProcessCallerService.add_caller("bob", ["new_caller", "new_caller"])
-        assert ProcessCallerService.count() == 2
-
-    def test_can_return_no_callers_when_no_records(self, with_no_process_callers: None) -> None:
-        assert ProcessCallerService.callers(["bob"]) == []
-
-    def test_can_return_no_callers_when_process_id_is_unknown(self, with_multiple_process_callers: None) -> None:
-        assert ProcessCallerService.callers(["bob"]) == []
-
-    def test_can_return_single_caller(self, with_single_process_caller: None) -> None:
-        assert ProcessCallerService.callers(["called_once"]) == ["one_caller"]
-
-    def test_can_return_mulitple_callers(self, with_multiple_process_callers: None) -> None:
-        callers = sorted(ProcessCallerService.callers(["called_many"]))
-        assert callers == ["one_caller", "three_caller", "two_caller"]
-
-    def test_can_return_single_caller_when_there_are_other_process_ids(
-        self, with_single_process_caller: None, with_multiple_process_callers: None
-    ) -> None:
-        assert ProcessCallerService.callers(["called_once"]) == ["one_caller"]
+    def test_raises_if_called_reference_cache_does_not_exist(self, with_single_process_caller: None) -> None:
+        db.session.commit()
+        with pytest.raises(CalledProcessNotFoundError):
+            ProcessCallerService.add_caller("calling_cache", ["DNE"])
+        assert ProcessCallerService.count() == 0

View File

@@ -4,6 +4,7 @@ import re
 from flask.app import Flask
 from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel
 from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
 from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
@@ -39,16 +40,10 @@ class TestProcessModel(BaseTest):
         app: Flask,
         with_db_and_bpmn_file_cleanup: None,
     ) -> None:
-        process_model = load_test_spec(
-            "test_group/call_activity_nested",
-            process_model_source_directory="call_activity_nested",
-            bpmn_file_name="call_activity_nested",
-        )
         bpmn_file_names = [
-            "call_activity_level_3",
             "call_activity_level_2b",
             "call_activity_level_2",
+            "call_activity_level_3",
         ]
         for bpmn_file_name in bpmn_file_names:
             load_test_spec(
@@ -56,6 +51,11 @@ class TestProcessModel(BaseTest):
                 process_model_source_directory="call_activity_nested",
                 bpmn_file_name=bpmn_file_name,
             )
+        process_model = load_test_spec(
+            "test_group/call_activity_nested",
+            process_model_source_directory="call_activity_nested",
+            bpmn_file_name="call_activity_nested",
+        )
         process_instance = self.create_process_instance_from_process_model(process_model)
         processor = ProcessInstanceProcessor(process_instance)
         processor.do_engine_steps(save=True, execution_strategy_name="greedy")
@@ -66,16 +66,10 @@ class TestProcessModel(BaseTest):
         app: Flask,
         with_db_and_bpmn_file_cleanup: None,
     ) -> None:
-        process_model = load_test_spec(
-            "test_group/call_activity_nested",
-            process_model_source_directory="call_activity_nested",
-            bpmn_file_name="call_activity_nested",
-        )
         bpmn_file_names = [
-            "call_activity_level_3",
             "call_activity_level_2b",
             "call_activity_level_2",
+            "call_activity_level_3",
         ]
         for bpmn_file_name in bpmn_file_names:
             load_test_spec(
@@ -83,10 +77,16 @@ class TestProcessModel(BaseTest):
                 process_model_source_directory="call_activity_nested",
                 bpmn_file_name=bpmn_file_name,
             )
+        process_model = load_test_spec(
+            "test_group/call_activity_nested",
+            process_model_source_directory="call_activity_nested",
+            bpmn_file_name="call_activity_nested",
+        )
         process_instance = self.create_process_instance_from_process_model(process_model)
 
         # delete all of the id lookup items to force to processor to find the correct
         # process model when running the process
+        db.session.query(ProcessCallerRelationshipModel).delete()
         db.session.query(ReferenceCacheModel).delete()
         db.session.commit()
         processor = ProcessInstanceProcessor(process_instance)

View File

@@ -9,6 +9,7 @@ from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
+from spiffworkflow_backend.services.reference_cache_service import ReferenceCacheService
 from spiffworkflow_backend.services.spec_file_service import ProcessModelFileInvalidError
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
@@ -33,10 +34,9 @@ class TestSpecFileService(BaseTest):
     ) -> None:
         load_test_spec(
             process_model_id=self.process_model_id,
-            bpmn_file_name=self.bpmn_file_name,
             process_model_source_directory="call_activity_nested",
         )
-        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
+        bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
         assert len(bpmn_process_id_lookups) == 1
         assert bpmn_process_id_lookups[0].identifier == "Level1"
         assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path
@@ -50,10 +50,9 @@ class TestSpecFileService(BaseTest):
         bpmn_process_identifier = "Level1"
         load_test_spec(
             process_model_id="call_activity_duplicate",
-            bpmn_file_name=self.bpmn_file_name,
-            process_model_source_directory="call_activity_duplicate",
+            process_model_source_directory="call_activity_nested",
         )
-        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
+        bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
         assert len(bpmn_process_id_lookups) == 1
         assert bpmn_process_id_lookups[0].identifier == bpmn_process_identifier
         with pytest.raises(ProcessModelFileInvalidError) as exception:
@@ -88,11 +87,10 @@ class TestSpecFileService(BaseTest):
         load_test_spec(
             process_model_id=self.process_model_id,
-            bpmn_file_name=self.bpmn_file_name,
             process_model_source_directory="call_activity_nested",
         )
-        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
+        bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
         assert len(bpmn_process_id_lookups) == 1
         assert bpmn_process_id_lookups[0].identifier == bpmn_process_identifier
         assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path
@@ -134,6 +132,7 @@ class TestSpecFileService(BaseTest):
     ) -> None:
         """When a BPMN processes identifier is changed in a file, the old id is removed from the cache."""
         old_identifier = "ye_old_identifier"
+        new_identifier = "Level1"
         process_id_lookup = ReferenceCacheModel.from_params(
             identifier=old_identifier,
             display_name="WHO CARES",
@@ -147,15 +146,22 @@ class TestSpecFileService(BaseTest):
         load_test_spec(
             process_model_id=self.process_model_id,
-            bpmn_file_name=self.bpmn_file_name,
             process_model_source_directory="call_activity_nested",
         )
-        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
-        assert len(bpmn_process_id_lookups) == 1
-        assert bpmn_process_id_lookups[0].identifier != old_identifier
-        assert bpmn_process_id_lookups[0].identifier == "Level1"
-        assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path
+        old_reference_count = ReferenceCacheModel.basic_query().filter_by(identifier=old_identifier).count()
+        assert old_reference_count == 0
+        current_references = (
+            ReferenceCacheModel.basic_query()
+            .filter_by(
+                relative_location=self.process_model_id,
+                file_name=self.bpmn_file_name,
+            )
+            .all()
+        )
+        assert len(current_references) == 1
+        assert current_references[0].identifier == new_identifier
+        assert current_references[0].relative_path() == self.call_activity_nested_relative_file_path
 
     def test_load_reference_information(
         self,
@@ -223,10 +229,9 @@ class TestSpecFileService(BaseTest):
         load_test_spec(
             process_model_id=self.process_model_id,
-            bpmn_file_name=self.bpmn_file_name,
             process_model_source_directory="call_activity_nested",
         )
-        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
+        bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
         assert len(bpmn_process_id_lookups) == 1
         assert bpmn_process_id_lookups[0].identifier == "Level1"
         assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path
@@ -238,10 +243,9 @@ class TestSpecFileService(BaseTest):
         # make sure it doesn't add a new entry to the cache
         load_test_spec(
             process_model_id=self.process_model_id,
-            bpmn_file_name=self.bpmn_file_name,
             process_model_source_directory="call_activity_nested",
         )
-        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
+        bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
         assert len(bpmn_process_id_lookups) == 1
         assert bpmn_process_id_lookups[0].identifier == "Level1"
         assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path
@@ -261,10 +265,9 @@ class TestSpecFileService(BaseTest):
         load_test_spec(
             process_model_id=self.process_model_id,
-            bpmn_file_name=self.bpmn_file_name,
             process_model_source_directory="call_activity_nested",
         )
-        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
+        bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
         assert len(bpmn_process_id_lookups) == 1
         assert bpmn_process_id_lookups[0].identifier == "Level1"
         assert bpmn_process_id_lookups[0].generation_id == current_cache_generation.id

View File

@@ -16,16 +16,10 @@ class TestTaskService(BaseTest):
         app: Flask,
         with_db_and_bpmn_file_cleanup: None,
     ) -> None:
-        process_model = load_test_spec(
-            "test_group/call_activity_nested",
-            process_model_source_directory="call_activity_nested",
-            bpmn_file_name="call_activity_nested",
-        )
         bpmn_file_names = [
-            "call_activity_level_3",
             "call_activity_level_2b",
             "call_activity_level_2",
+            "call_activity_level_3",
         ]
         for bpmn_file_name in bpmn_file_names:
             load_test_spec(
@@ -33,6 +27,11 @@ class TestTaskService(BaseTest):
                 process_model_source_directory="call_activity_nested",
                 bpmn_file_name=bpmn_file_name,
             )
+        process_model = load_test_spec(
+            "test_group/call_activity_nested",
+            process_model_source_directory="call_activity_nested",
+            bpmn_file_name="call_activity_nested",
+        )
         process_instance = self.create_process_instance_from_process_model(process_model)
         processor = ProcessInstanceProcessor(process_instance)
         processor.do_engine_steps(save=True, execution_strategy_name="greedy")
@@ -63,16 +62,10 @@ class TestTaskService(BaseTest):
         app: Flask,
         with_db_and_bpmn_file_cleanup: None,
     ) -> None:
-        process_model = load_test_spec(
-            "test_group/call_activity_nested",
-            process_model_source_directory="call_activity_nested",
-            bpmn_file_name="call_activity_nested",
-        )
         bpmn_file_names = [
-            "call_activity_level_3",
             "call_activity_level_2b",
             "call_activity_level_2",
+            "call_activity_level_3",
         ]
         for bpmn_file_name in bpmn_file_names:
             load_test_spec(
@@ -80,6 +73,11 @@ class TestTaskService(BaseTest):
                 process_model_source_directory="call_activity_nested",
                 bpmn_file_name=bpmn_file_name,
             )
+        process_model = load_test_spec(
+            "test_group/call_activity_nested",
+            process_model_source_directory="call_activity_nested",
+            bpmn_file_name="call_activity_nested",
+        )
         process_instance = self.create_process_instance_from_process_model(process_model)
         processor = ProcessInstanceProcessor(process_instance)
         processor.do_engine_steps(save=True, execution_strategy_name="greedy")
@@ -118,16 +116,10 @@ class TestTaskService(BaseTest):
         app: Flask,
         with_db_and_bpmn_file_cleanup: None,
     ) -> None:
-        process_model = load_test_spec(
-            "test_group/call_activity_nested",
-            process_model_source_directory="call_activity_nested",
-            bpmn_file_name="call_activity_nested",
-        )
         bpmn_file_names = [
-            "call_activity_level_3",
             "call_activity_level_2b",
             "call_activity_level_2",
+            "call_activity_level_3",
        ]
         for bpmn_file_name in bpmn_file_names:
             load_test_spec(
@@ -135,6 +127,11 @@ class TestTaskService(BaseTest):
                 process_model_source_directory="call_activity_nested",
                 bpmn_file_name=bpmn_file_name,
             )
+        process_model = load_test_spec(
+            "test_group/call_activity_nested",
+            process_model_source_directory="call_activity_nested",
+            bpmn_file_name="call_activity_nested",
+        )
         process_instance = self.create_process_instance_from_process_model(process_model)
         processor = ProcessInstanceProcessor(process_instance)
         processor.do_engine_steps(save=True, execution_strategy_name="greedy")