New calling process table (#1480)

* added new table to handle called process relationships

* process caller api endpoint is working again w/ burnettk

* fixed more tests and mypy w/ burnettk

* unit tests are passing and commented out unrelated failing test w/ burnettk

* almost all tests are passing w/ burnettk

* uncommented flaky test w/ burnettk

* minor change while reviewing w/ burnettk

* some changes from coderabbit suggestions w/ burnettk

* commented out sampling change w/ burnettk

* name the foreign keys on the process caller table w/ burnettk

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
This commit is contained in:
jasquat 2024-05-03 14:09:32 +00:00 committed by GitHub
parent b9e70d12a6
commit 86f97d5937
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
24 changed files with 389 additions and 226 deletions

View File

@ -73,7 +73,10 @@ if [[ "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" == "true" ]]; then
fi
if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
if [[ -z "${SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false
fi
poetry run python bin/save_all_bpmn.py
fi
if [[ -n "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY:-}" ]]; then

View File

@ -76,3 +76,7 @@ fi
export SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP
export FLASK_APP=src/spiffworkflow_backend
if [[ -z "${SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false
fi

View File

@ -24,7 +24,7 @@ else
else
if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP=false SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP=false poetry run python bin/save_all_bpmn.py
fi
# this line blocks

View File

@ -7,7 +7,6 @@ from spiffworkflow_backend.services.data_setup_service import DataSetupService
def main() -> None:
"""Main."""
app = create_app()
with app.app_context():
failing_process_models = DataSetupService.save_all_process_models()

View File

@ -1,7 +1,7 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
echo >&2 "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
@ -16,7 +16,7 @@ user_file_with_one_email_per_line="${1:-}"
keycloak_realm="${2:-spiffworkflow}"
if [[ -z "${1:-}" ]]; then
>&2 echo "usage: $(basename "$0") [user_file_with_one_email_per_line]"
echo >&2 "usage: $(basename "$0") [user_file_with_one_email_per_line]"
exit 1
fi
@ -36,20 +36,21 @@ SECURE=false
KEYCLOAK_URL=$KEYCLOAK_BASE_URL/realms/$REALM_NAME/protocol/openid-connect/token
if [[ $SECURE = 'y' ]]; then
INSECURE=
INSECURE=
else
INSECURE=--insecure
INSECURE=--insecure
fi
# https://www.appsdeveloperblog.com/keycloak-rest-api-create-a-new-user/
result=$(curl --fail -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
--header 'Content-Type: application/x-www-form-urlencoded' \
--data-urlencode "username=${ADMIN_USERNAME}" \
--data-urlencode "password=${ADMIN_PASSWORD}" \
--data-urlencode 'grant_type=password' \
--data-urlencode 'client_id=admin-cli'
result=$(
curl --fail -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
--header 'Content-Type: application/x-www-form-urlencoded' \
--data-urlencode "username=${ADMIN_USERNAME}" \
--data-urlencode "password=${ADMIN_PASSWORD}" \
--data-urlencode 'grant_type=password' \
--data-urlencode 'client_id=admin-cli'
)
backend_token=$(jq -r '.access_token' <<< "$result")
backend_token=$(jq -r '.access_token' <<<"$result")
function add_user() {
local user_email=$1
@ -77,16 +78,16 @@ first_line_processed="false"
custom_attribute_one=''
while read -r input_line; do
if ! grep -qE '^#' <<<"$input_line" ; then
if ! grep -qE '^#' <<<"$input_line"; then
if [[ "$first_line_processed" == "false" ]]; then
email_header=$(awk -F ',' '{print $1}' <<<"$input_line")
pass_header=$(awk -F ',' '{print $2}' <<<"$input_line")
if [[ "$email_header" != "email" ]]; then
>&2 echo "ERROR: the first column in the first row must be email."
echo >&2 "ERROR: the first column in the first row must be email."
exit 1
fi
if [[ "$pass_header" != "pass" ]]; then
>&2 echo "ERROR: the first column in the first row must be pass."
echo >&2 "ERROR: the first column in the first row must be pass."
exit 1
fi
custom_attribute_one=$(awk -F ',' '{print $3}' <<<"$input_line")
@ -97,7 +98,7 @@ while read -r input_line; do
username=$(awk -F '@' '{print $1}' <<<"$user_email")
if [[ "$username" == "$ADMIN_USERNAME" || "$user_email" == "$ADMIN_USERNAME" ]]; then
>&2 echo "ERROR: The user used as the admin user matches a user in the current import list. This should not happen. Comment out that user from the list or use a different admin user: ${ADMIN_USERNAME}"
echo >&2 "ERROR: The user used as the admin user matches a user in the current import list. This should not happen. Comment out that user from the list or use a different admin user: ${ADMIN_USERNAME}"
exit 1
fi
@ -113,7 +114,7 @@ while read -r input_line; do
user_id=$(jq -r '.[0] | .id' <<<"$user_info")
if [[ -z "$user_id" ]]; then
>&2 echo "ERROR: Could not find user_id for user: ${user_email}"
echo >&2 "ERROR: Could not find user_id for user: ${user_email}"
exit 1
fi
curl --fail --location --silent --request DELETE "${KEYCLOAK_BASE_URL}/admin/realms/${keycloak_realm}/users/${user_id}" \
@ -123,7 +124,7 @@ while read -r input_line; do
http_code=$(add_user "$user_email" "$username" "$password" "$user_attribute_one")
fi
if [[ "$http_code" != "201" ]]; then
>&2 echo "ERROR: Failed to create user: ${user_email} with http_code: ${http_code}"
echo >&2 "ERROR: Failed to create user: ${user_email} with http_code: ${http_code}"
exit 1
fi
fi

View File

@ -0,0 +1,42 @@
"""empty message
Revision ID: d4b900e71852
Revises: c6e246c3c04e
Create Date: 2024-05-02 16:21:48.287934
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd4b900e71852'
down_revision = 'c6e246c3c04e'
branch_labels = None
depends_on = None
def upgrade():
    """Create the process_caller_relationship join table.

    Each row records that one reference_cache process (the caller) calls
    another reference_cache process (the callee). The composite primary key
    prevents duplicate caller/callee pairs, and both foreign keys are named
    explicitly so later migrations can reference them.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('process_caller_relationship',
        sa.Column('called_reference_cache_process_id', sa.Integer(), nullable=False),
        sa.Column('calling_reference_cache_process_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['called_reference_cache_process_id'], ['reference_cache.id'], name='called_reference_cache_process_id_fk'),
        sa.ForeignKeyConstraint(['calling_reference_cache_process_id'], ['reference_cache.id'], name='calling_reference_cache_process_id_fk'),
        sa.PrimaryKeyConstraint('called_reference_cache_process_id', 'calling_reference_cache_process_id', name='process_caller_relationship_pk')
    )
    # batch_alter_table is used so the index creation also works on backends
    # (e.g. sqlite) that cannot ALTER TABLE in place.
    with op.batch_alter_table('process_caller_relationship', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_process_caller_relationship_called_reference_cache_process_id'), ['called_reference_cache_process_id'], unique=False)
        batch_op.create_index(batch_op.f('ix_process_caller_relationship_calling_reference_cache_process_id'), ['calling_reference_cache_process_id'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Drop the process_caller_relationship table and its indexes.

    Indexes are dropped first (inside a batch operation for sqlite
    compatibility), then the table itself — the reverse of upgrade().
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('process_caller_relationship', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_process_caller_relationship_calling_reference_cache_process_id'))
        batch_op.drop_index(batch_op.f('ix_process_caller_relationship_called_reference_cache_process_id'))
    op.drop_table('process_caller_relationship')
    # ### end Alembic commands ###

View File

@ -112,5 +112,6 @@ from spiffworkflow_backend.models.future_task import (
from spiffworkflow_backend.models.feature_flag import (
FeatureFlagModel,
) # noqa: F401
from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel # noqa: F401
add_listeners()

View File

@ -2,6 +2,7 @@ from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db
# TODO: delete this file
class ProcessCallerCacheModel(SpiffworkflowBaseDBModel):
"""A cache of calling process ids for all Processes defined in all files."""

View File

@ -0,0 +1,56 @@
from flask import current_app
from sqlalchemy import ForeignKey
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy.dialects.mysql import insert as mysql_insert
from sqlalchemy.dialects.postgresql import insert as postgres_insert
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db
class CalledProcessNotFoundError(Exception):
    """Raised when a called process identifier has no row in the reference_cache table."""

    pass
class CallingProcessNotFoundError(Exception):
    """Raised when a calling process identifier has no row in the reference_cache table."""

    pass
class ProcessCallerRelationshipModel(SpiffworkflowBaseDBModel):
    """A cache of calling process ids for all Processes defined in all files.

    Join table between two reference_cache rows: the "called" process and the
    "calling" process. The pair forms the composite primary key, so a given
    caller/callee relationship is stored at most once.
    """

    __tablename__ = "process_caller_relationship"
    __table_args__ = (
        PrimaryKeyConstraint(
            "called_reference_cache_process_id",
            "calling_reference_cache_process_id",
            name="process_caller_relationship_pk",
        ),
    )

    # id of the reference_cache row for the process being called
    called_reference_cache_process_id = db.Column(
        ForeignKey("reference_cache.id", name="called_reference_cache_process_id_fk"), nullable=False, index=True
    )
    # id of the reference_cache row for the process doing the calling
    calling_reference_cache_process_id = db.Column(
        ForeignKey("reference_cache.id", name="calling_reference_cache_process_id_fk"), nullable=False, index=True
    )

    @classmethod
    def insert_or_update(cls, called_reference_cache_process_id: int, calling_reference_cache_process_id: int) -> None:
        """Upsert a caller/callee pair, ignoring the insert if the pair already exists.

        Uses a dialect-specific INSERT construct chosen from the app config;
        the statement is executed but not committed — the caller owns the
        transaction.
        """
        caller_info = {
            "called_reference_cache_process_id": called_reference_cache_process_id,
            "calling_reference_cache_process_id": calling_reference_cache_process_id,
        }
        on_duplicate_key_stmt = None
        if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "mysql":
            insert_stmt = mysql_insert(ProcessCallerRelationshipModel).values(caller_info)
            # We don't actually want to update anything but it doesn't really matter if we do since it should be the same value
            on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
                called_reference_cache_process_id=insert_stmt.inserted.called_reference_cache_process_id
            )
        else:
            # NOTE(review): this branch uses the postgresql insert construct for
            # every non-mysql database type — confirm it behaves as intended if
            # sqlite (or another backend) is configured here.
            insert_stmt = postgres_insert(ProcessCallerRelationshipModel).values(caller_info)
            on_duplicate_key_stmt = insert_stmt.on_conflict_do_nothing(
                index_elements=["called_reference_cache_process_id", "calling_reference_cache_process_id"]
            )
        db.session.execute(on_duplicate_key_stmt)

View File

@ -1,3 +1,5 @@
from __future__ import annotations
import os
from dataclasses import dataclass
from typing import Any
@ -13,6 +15,7 @@ from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel
# SpecReferenceNotFoundError
@ -83,6 +86,20 @@ class ReferenceCacheModel(SpiffworkflowBaseDBModel):
generation = relationship(CacheGenerationModel)
process_callers = relationship(
ProcessCallerRelationshipModel,
foreign_keys="[ProcessCallerRelationshipModel.called_reference_cache_process_id]",
cascade="all, delete-orphan",
single_parent=True,
)
calling_processes = relationship(
ProcessCallerRelationshipModel,
foreign_keys="[ProcessCallerRelationshipModel.calling_reference_cache_process_id]",
cascade="all, delete-orphan",
single_parent=True,
)
def relative_path(self) -> str:
return os.path.join(self.relative_location, self.file_name).replace("/", os.sep)
@ -104,7 +121,7 @@ class ReferenceCacheModel(SpiffworkflowBaseDBModel):
relative_location: str,
properties: dict | None = None,
use_current_cache_generation: bool = False,
) -> "ReferenceCacheModel":
) -> ReferenceCacheModel:
reference_cache = cls(
identifier=identifier,
display_name=display_name,
@ -124,7 +141,7 @@ class ReferenceCacheModel(SpiffworkflowBaseDBModel):
return reference_cache
@classmethod
def from_spec_reference(cls, ref: Reference, use_current_cache_generation: bool = False) -> "ReferenceCacheModel":
def from_spec_reference(cls, ref: Reference, use_current_cache_generation: bool = False) -> ReferenceCacheModel:
reference_cache = cls.from_params(
identifier=ref.identifier,
display_name=ref.display_name,

View File

@ -46,11 +46,11 @@ from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.jinja_service import JinjaService
from spiffworkflow_backend.services.process_caller_service import ProcessCallerService
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.reference_cache_service import ReferenceCacheService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.task_service import TaskModelError
from spiffworkflow_backend.services.task_service import TaskService
@ -122,14 +122,9 @@ def process_list() -> Any:
return ReferenceSchema(many=True).dump(permitted_references)
# if we pass in bpmn_process_identifiers of [a], a is "called" and we want to find which processes are *callers* of a
def process_caller_list(bpmn_process_identifiers: list[str]) -> Any:
callers = ProcessCallerService.callers(bpmn_process_identifiers)
references = (
ReferenceCacheModel.basic_query()
.filter_by(type="process")
.filter(ReferenceCacheModel.identifier.in_(callers)) # type: ignore
.all()
)
references = ReferenceCacheService.get_reference_cache_entries_calling_process(bpmn_process_identifiers)
return ReferenceSchema(many=True).dump(references)

View File

@ -472,21 +472,13 @@ def process_model_create_with_natural_language(modified_process_group_id: str, b
"required": [],
}
SpecFileService.add_file(
process_model_info,
f"{process_model_identifier}.bpmn",
str.encode(bpmn_template_contents),
)
SpecFileService.add_file(
process_model_info,
f"{form_identifier}-schema.json",
str.encode(json.dumps(form_schema_json)),
)
SpecFileService.add_file(
process_model_info,
f"{form_identifier}-uischema.json",
str.encode(json.dumps(form_uischema_json)),
)
files_to_update = {
f"{process_model_identifier}.bpmn": str.encode(bpmn_template_contents),
f"{form_identifier}-schema.json": str.encode(json.dumps(form_schema_json)),
f"{form_identifier}-uischema.json": str.encode(json.dumps(form_uischema_json)),
}
for file_name, contents in files_to_update.items():
SpecFileService.update_file(process_model_info, file_name, contents)
_commit_and_push_to_git(f"User: {g.user.username} created process model via natural language: {process_model_info.id}")
@ -585,7 +577,7 @@ def _create_or_update_process_model_file(
file = None
try:
file = SpecFileService.update_file(process_model, request_file.filename, request_file_contents, user=g.user)
file, _ = SpecFileService.update_file(process_model, request_file.filename, request_file_contents, user=g.user)
except ProcessModelFileInvalidError as exception:
raise (
ApiError(

View File

@ -33,6 +33,7 @@ class DataSetupService:
files = FileSystemService.walk_files_from_root_path(True, None)
reference_objects: dict[str, ReferenceCacheModel] = {}
all_data_store_specifications: dict[tuple[str, str, str], Any] = {}
references = []
for file in files:
if FileSystemService.is_process_model_json_file(file):
@ -46,13 +47,13 @@ class DataSetupService:
try:
reference_cache = ReferenceCacheModel.from_spec_reference(ref)
ReferenceCacheService.add_unique_reference_cache_object(reference_objects, reference_cache)
SpecFileService.update_caches_except_process(ref)
db.session.commit()
references.append(ref)
except Exception as ex:
failing_process_models.append(
(
f"{ref.relative_location}/{ref.file_name}",
str(ex),
repr(ex),
)
)
except Exception as ex2:
@ -103,6 +104,18 @@ class DataSetupService:
ReferenceCacheService.add_new_generation(reference_objects)
cls._sync_data_store_models_with_specifications(all_data_store_specifications)
for ref in references:
try:
SpecFileService.update_caches_except_process(ref)
db.session.commit()
except Exception as ex:
failing_process_models.append(
(
f"{ref.relative_location}/{ref.file_name}",
repr(ex),
)
)
return failing_process_models
@classmethod

View File

@ -34,18 +34,19 @@ def traces_sampler(sampling_context: Any) -> Any:
if sampling_context["parent_sampled"] is not None:
return sampling_context["parent_sampled"]
if "wsgi_environ" in sampling_context:
wsgi_environ = sampling_context["wsgi_environ"]
path_info = wsgi_environ.get("PATH_INFO")
request_method = wsgi_environ.get("REQUEST_METHOD")
# tasks_controller.task_submit
# this is the current pain point as of 31 jan 2023.
if path_info and (
(path_info.startswith("/v1.0/tasks/") and request_method == "PUT")
or (path_info.startswith("/v1.0/task-data/") and request_method == "GET")
):
return 1
# sample some requests at a higher rate
# if "wsgi_environ" in sampling_context:
# wsgi_environ = sampling_context["wsgi_environ"]
# path_info = wsgi_environ.get("PATH_INFO")
# request_method = wsgi_environ.get("REQUEST_METHOD")
#
# # tasks_controller.task_submit
# # this is the current pain point as of 31 jan 2023.
# if path_info and (
# (path_info.startswith("/v1.0/tasks/") and request_method == "PUT")
# or (path_info.startswith("/v1.0/task-data/") and request_method == "GET")
# ):
# return 1
# Default sample rate for all others (replaces traces_sample_rate)
return 0.01

View File

@ -1,35 +1,50 @@
from sqlalchemy import or_
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_caller import ProcessCallerCacheModel
from spiffworkflow_backend.models.process_caller_relationship import CalledProcessNotFoundError
from spiffworkflow_backend.models.process_caller_relationship import CallingProcessNotFoundError
from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
# TODO: delete this file
class ProcessCallerService:
@staticmethod
def count() -> int:
return ProcessCallerCacheModel.query.count() # type: ignore
"""This is used in tests ONLY."""
return ProcessCallerRelationshipModel.query.count() # type: ignore
@staticmethod
def clear_cache() -> None:
db.session.query(ProcessCallerCacheModel).delete()
db.session.query(ProcessCallerRelationshipModel).delete()
@staticmethod
def clear_cache_for_process_ids(process_ids: list[str]) -> None:
db.session.query(ProcessCallerCacheModel).filter(
def clear_cache_for_process_ids(reference_cache_ids: list[int]) -> None:
ProcessCallerRelationshipModel.query.filter(
or_(
ProcessCallerCacheModel.process_identifier.in_(process_ids),
ProcessCallerCacheModel.calling_process_identifier.in_(process_ids),
ProcessCallerRelationshipModel.called_reference_cache_process_id.in_(reference_cache_ids),
ProcessCallerRelationshipModel.calling_reference_cache_process_id.in_(reference_cache_ids),
)
).delete()
@staticmethod
def add_caller(process_id: str, called_process_ids: list[str]) -> None:
for called_process_id in called_process_ids:
db.session.add(ProcessCallerCacheModel(process_identifier=called_process_id, calling_process_identifier=process_id))
@staticmethod
def callers(process_ids: list[str]) -> list[str]:
records = (
db.session.query(ProcessCallerCacheModel).filter(ProcessCallerCacheModel.process_identifier.in_(process_ids)).all()
def add_caller(calling_process_identifier: str, called_process_identifiers: list[str]) -> None:
reference_cache_records = (
ReferenceCacheModel.basic_query()
.filter(ReferenceCacheModel.identifier.in_(called_process_identifiers + [calling_process_identifier])) # type: ignore
.all()
)
return sorted({r.calling_process_identifier for r in records})
reference_cache_dict = {r.identifier: r.id for r in reference_cache_records}
if calling_process_identifier not in reference_cache_dict:
raise CallingProcessNotFoundError(
f"Could not find calling process id '{calling_process_identifier}' in reference_cache table."
)
for called_process_identifier in called_process_identifiers:
if called_process_identifier not in reference_cache_dict:
raise CalledProcessNotFoundError(
f"Could not find called process id '{called_process_identifier}' in reference_cache table."
)
ProcessCallerRelationshipModel.insert_or_update(
called_reference_cache_process_id=reference_cache_dict[called_process_identifier],
calling_reference_cache_process_id=reference_cache_dict[calling_process_identifier],
)

View File

@ -1,7 +1,9 @@
from sqlalchemy import insert
from sqlalchemy.orm import aliased
from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.services.upsearch_service import UpsearchService
@ -32,16 +34,12 @@ class ReferenceCacheService:
@classmethod
def upsearch(cls, location: str, identifier: str, type: str) -> str | None:
# really want to be able to join to this table on max(id)
cache_generation = CacheGenerationModel.newest_generation_for_table("reference_cache")
if cache_generation is None:
return None
locations = UpsearchService.upsearch_locations(location)
references = (
ReferenceCacheModel.query.filter_by(
ReferenceCacheModel.basic_query()
.filter_by(
identifier=identifier,
type=type,
generation=cache_generation,
)
.filter(ReferenceCacheModel.relative_location.in_(locations)) # type: ignore
.order_by(ReferenceCacheModel.relative_location.desc()) # type: ignore
@ -53,3 +51,20 @@ class ReferenceCacheService:
return reference.relative_location # type: ignore
return None
@classmethod
def get_reference_cache_entries_calling_process(cls, bpmn_process_identifiers: list[str]) -> list[ReferenceCacheModel]:
    """Return the reference_cache rows for processes that call any of the given identifiers.

    Joins reference_cache (as the caller) through process_caller_relationship
    to an aliased reference_cache (as the callee) and filters the callee side
    by identifier — i.e. "who calls these processes?".
    """
    # alias so the same table can appear twice in the query: once as the
    # caller (unaliased) and once as the called process (aliased).
    called_reference_alias = aliased(ReferenceCacheModel)
    references: list[ReferenceCacheModel] = (
        ReferenceCacheModel.basic_query()
        .join(
            ProcessCallerRelationshipModel,
            ProcessCallerRelationshipModel.calling_reference_cache_process_id == ReferenceCacheModel.id,
        )
        .join(
            called_reference_alias,
            called_reference_alias.id == ProcessCallerRelationshipModel.called_reference_cache_process_id,
        )
        .filter(called_reference_alias.identifier.in_(bpmn_process_identifiers))
    ).all()
    return references

View File

@ -138,11 +138,6 @@ class SpecFileService(FileSystemService):
)
return references
@staticmethod
def add_file(process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes) -> File:
# Same as update
return SpecFileService.update_file(process_model_info, file_name, binary_data)
@classmethod
def validate_bpmn_xml(cls, file_name: str, binary_data: bytes) -> None:
file_type = FileSystemService.file_type(file_name)
@ -156,8 +151,13 @@ class SpecFileService(FileSystemService):
@classmethod
def update_file(
cls, process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes, user: UserModel | None = None
) -> File:
cls,
process_model_info: ProcessModelInfo,
file_name: str,
binary_data: bytes,
user: UserModel | None = None,
update_process_cache_only: bool = False,
) -> tuple[File, list[Reference]]:
SpecFileService.assert_valid_file_name(file_name)
cls.validate_bpmn_xml(file_name, binary_data)
@ -189,7 +189,10 @@ class SpecFileService(FileSystemService):
)
all_called_element_ids = all_called_element_ids | set(ref.called_element_ids)
SpecFileService.update_all_caches(ref)
if update_process_cache_only:
SpecFileService.update_process_cache(ref)
else:
SpecFileService.update_all_caches(ref)
if user is not None:
called_element_refs = (
@ -217,7 +220,7 @@ class SpecFileService(FileSystemService):
# make sure we save the file as the last thing we do to ensure validations have run
full_file_path = SpecFileService.full_file_path(process_model_info, file_name)
SpecFileService.write_file_data_to_system(full_file_path, binary_data)
return SpecFileService.to_file_object(file_name, full_file_path)
return (SpecFileService.to_file_object(file_name, full_file_path), references)
@staticmethod
def last_modified(process_model: ProcessModelInfo, file_name: str) -> datetime:
@ -264,20 +267,13 @@ class SpecFileService(FileSystemService):
.all()
)
process_ids = []
reference_cache_ids = []
for record in records:
process_ids.append(record.identifier)
reference_cache_ids.append(record.id)
db.session.delete(record)
ProcessCallerService.clear_cache_for_process_ids(process_ids)
# fixme: likely the other caches should be cleared as well, but we don't have a clean way to do so yet.
@staticmethod
def clear_caches() -> None:
db.session.query(ReferenceCacheModel).delete()
ProcessCallerService.clear_cache()
# fixme: likely the other caches should be cleared as well, but we don't have a clean way to do so yet.
ProcessCallerService.clear_cache_for_process_ids(reference_cache_ids)
@staticmethod
def update_process_cache(ref: Reference) -> None:

View File

@ -2,6 +2,7 @@ import glob
import os
from flask import current_app
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
@ -63,6 +64,7 @@ class ExampleDataLoader:
if len(files) == 0:
raise Exception(f"Could not find any files with file_glob: {file_glob}")
all_references = []
for file_path in files:
if os.path.isdir(file_path):
continue # Don't try to process sub directories
@ -74,13 +76,19 @@ class ExampleDataLoader:
try:
file = open(file_path, "rb")
data = file.read()
file_info = SpecFileService.add_file(process_model_info=spec, file_name=filename, binary_data=data)
_, new_references = SpecFileService.update_file(
process_model_info=spec, file_name=filename, binary_data=data, update_process_cache_only=True
)
all_references += new_references
if is_primary:
references = SpecFileService.get_references_for_file(file_info, spec)
spec.primary_process_id = references[0].identifier
# references = SpecFileService.get_references_for_file(file_info, spec)
spec.primary_process_id = new_references[0].identifier
spec.primary_file_name = filename
ProcessModelService.save_process_model(spec)
finally:
if file:
file.close()
for ref in all_references:
SpecFileService.update_caches_except_process(ref)
db.session.commit()
return spec

View File

@ -663,6 +663,7 @@ class TestProcessApi(BaseTest):
"/v1.0/processes/callers/Level2",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
# We should get 1 back, Level1 calls Level2
assert len(response.json) == 1

View File

@ -23,14 +23,6 @@ class TestProcessModelsController(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
process_model = self.create_group_and_model_with_bpmn(
client=client,
user=with_super_admin_user,
process_group_id="caller",
process_model_id="caller",
bpmn_file_location="call_activity_same_directory",
bpmn_file_name="call_activity_test.bpmn",
)
self.create_group_and_model_with_bpmn(
client=client,
user=with_super_admin_user,
@ -39,6 +31,14 @@ class TestProcessModelsController(BaseTest):
bpmn_file_location="call_activity_same_directory",
bpmn_file_name="callable_process.bpmn",
)
process_model = self.create_group_and_model_with_bpmn(
client=client,
user=with_super_admin_user,
process_group_id="caller",
process_model_id="caller",
bpmn_file_location="call_activity_same_directory",
bpmn_file_name="call_activity_test.bpmn",
)
user_one = self.create_user_with_permission(username="user_one", target_uri="/v1.0/process-groups/caller:*")
self.add_permissions_to_user(

View File

@ -2,16 +2,21 @@ from collections.abc import Generator
import pytest
from flask.app import Flask
from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_caller import ProcessCallerCacheModel
from spiffworkflow_backend.models.process_caller_relationship import CalledProcessNotFoundError
from spiffworkflow_backend.models.process_caller_relationship import CallingProcessNotFoundError
from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.services.process_caller_service import ProcessCallerService
from spiffworkflow_backend.services.reference_cache_service import ReferenceCacheService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
@pytest.fixture()
def with_clean_cache(app: Flask) -> Generator[None, None, None]:
db.session.query(ProcessCallerCacheModel).delete()
ProcessCallerRelationshipModel.query.delete()
db.session.commit()
yield
@ -21,25 +26,50 @@ def with_no_process_callers(with_clean_cache: None) -> Generator[None, None, Non
yield
def create_reference_cache(identifier: str) -> ReferenceCacheModel:
    """Build a process-type ReferenceCacheModel for tests, reusing *identifier* for all fields.

    The object is added to the session but not committed — callers are
    expected to commit before relying on its id.
    """
    ref_cache = ReferenceCacheModel.from_params(
        identifier=identifier,
        type="process",
        display_name=identifier,
        file_name=identifier,
        relative_location=identifier,
        use_current_cache_generation=True,
    )
    db.session.add(ref_cache)
    return ref_cache
@pytest.fixture()
def with_single_process_caller(with_clean_cache: None) -> Generator[None, None, None]:
db.session.add(ProcessCallerCacheModel(process_identifier="called_once", calling_process_identifier="one_caller"))
ReferenceCacheService.add_new_generation({})
cache_generation = CacheGenerationModel(cache_table="reference_cache")
db.session.add(cache_generation)
db.session.commit()
called_cache = create_reference_cache("called_once")
calling_cache = create_reference_cache("calling_cache")
db.session.add(called_cache)
db.session.add(calling_cache)
db.session.commit()
ProcessCallerService.add_caller(calling_cache.identifier, [called_cache.identifier])
db.session.commit()
yield
@pytest.fixture()
def with_multiple_process_callers(with_clean_cache: None) -> Generator[None, None, None]:
db.session.add(ProcessCallerCacheModel(process_identifier="called_many", calling_process_identifier="one_caller"))
db.session.add(ProcessCallerCacheModel(process_identifier="called_many", calling_process_identifier="two_caller"))
db.session.add(ProcessCallerCacheModel(process_identifier="called_many", calling_process_identifier="three_caller"))
ReferenceCacheService.add_new_generation({})
called_cache = create_reference_cache("called_many")
for ref_identifier in ["one_caller", "two_caller", "three_caller"]:
calling_cache = create_reference_cache(ref_identifier)
db.session.commit()
ProcessCallerService.add_caller(calling_cache.identifier, [called_cache.identifier])
db.session.commit()
yield
class TestProcessCallerService(BaseTest):
"""Infer from class name."""
def test_has_zero_count_when_empty(self, with_no_process_callers: None) -> None:
assert ProcessCallerService.count() == 0
@ -55,72 +85,45 @@ class TestProcessCallerService(BaseTest):
assert ProcessCallerService.count() == 0
def test_can_clear_the_cache_for_process_id(self, with_single_process_caller: None) -> None:
ProcessCallerService.clear_cache_for_process_ids(["called_once"])
assert ProcessCallerService.count() != 0
reference_cache = ReferenceCacheModel.basic_query().filter_by(identifier="called_once").first()
assert reference_cache is not None
ProcessCallerService.clear_cache_for_process_ids([reference_cache.id])
assert ProcessCallerService.count() == 0
def test_can_clear_the_cache_for_calling_process_id(self, with_multiple_process_callers: None) -> None:
# NOTE(review): the first clear passes the identifier string "one_caller";
# given the id-keyed clear below, this line is presumably the removed side of
# the diff (rendering lost the +/- markers) — verify it should be here at all.
ProcessCallerService.clear_cache_for_process_ids(["one_caller"])
reference_cache = ReferenceCacheModel.basic_query().filter_by(identifier="one_caller").first()
assert reference_cache is not None
# All three fixture rows are still present before the id-based clear.
assert ProcessCallerService.count() == 3
# Removing one calling process drops exactly its one relationship.
ProcessCallerService.clear_cache_for_process_ids([reference_cache.id])
assert ProcessCallerService.count() == 2
def test_can_clear_the_cache_for_callee_caller_process_id(
self, with_single_process_caller: None, with_multiple_process_callers: None
) -> None:
# NOTE(review): as literal code this body is contradictory — it asserts
# count == 2 and then count == 4 with no inserts in between. The string-based
# clear and the "== 2" assert are almost certainly the removed side of the
# diff (the +/- markers were stripped by the rendering); only the id-based
# clear with "== 4" / "== 3" should survive. Needs manual reconciliation.
ProcessCallerService.clear_cache_for_process_ids(["one_caller"])
assert ProcessCallerService.count() == 2
reference_cache = ReferenceCacheModel.basic_query().filter_by(identifier="one_caller").first()
assert reference_cache is not None
# 1 row from the single-caller fixture + 3 from the multiple-caller fixture.
assert ProcessCallerService.count() == 4
ProcessCallerService.clear_cache_for_process_ids([reference_cache.id])
assert ProcessCallerService.count() == 3
def test_can_clear_the_cache_for_process_id_and_leave_other_process_ids_alone(
self,
with_single_process_caller: None,
with_multiple_process_callers: None,
) -> None:
# NOTE(review): the string-based clear below looks like the removed side of
# the diff (markers stripped); the id-based clear that follows is presumably
# the intended new behavior — confirm before relying on this test.
ProcessCallerService.clear_cache_for_process_ids(["called_many"])
reference_cache = ReferenceCacheModel.basic_query().filter_by(identifier="called_many").first()
assert reference_cache is not None
# Clearing "called_many" removes its 3 rows but leaves the 1 row for "called_once".
ProcessCallerService.clear_cache_for_process_ids([reference_cache.id])
assert ProcessCallerService.count() == 1
def test_can_clear_the_cache_for_process_id_when_it_doesnt_exist(
    self,
    with_multiple_process_callers: None,
) -> None:
    # Clearing an unknown process id must leave the three fixture rows intact.
    ProcessCallerService.clear_cache_for_process_ids(["garbage"])
    remaining = ProcessCallerService.count()
    assert remaining == 3
def test_raises_if_calling_reference_cache_does_not_exist(self, with_no_process_callers: None) -> None:
    # An unknown calling process identifier is rejected outright, and the
    # failed call must not write anything to the caller table.
    with pytest.raises(CallingProcessNotFoundError):
        ProcessCallerService.add_caller("DNE", [])
    row_count = ProcessCallerService.count()
    assert row_count == 0
def test_no_records_added_if_calling_process_ids_is_empty(self, with_no_process_callers: None) -> None:
    # Registering an empty caller list is a no-op: the table stays empty.
    ProcessCallerService.add_caller("bob", [])
    total = ProcessCallerService.count()
    assert total == 0
def test_can_add_caller_for_new_process(self, with_no_process_callers: None) -> None:
    # Registering a single caller for a new process creates exactly one row.
    ProcessCallerService.add_caller("bob", ["new_caller"])
    total = ProcessCallerService.count()
    assert total == 1
def test_can_many_callers_for_new_process(self, with_no_process_callers: None) -> None:
    # NOTE(review): name appears to be missing "add" ("test_can_add_many_...").
    # Two distinct callers yield two relationship rows.
    new_callers = ["new_caller", "another_new_caller"]
    ProcessCallerService.add_caller("bob", new_callers)
    assert ProcessCallerService.count() == len(new_callers)
def test_can_add_caller_for_existing_process(self, with_multiple_process_callers: None) -> None:
    # Three rows come from the fixture; one more caller makes four.
    ProcessCallerService.add_caller("called_many", ["new_caller"])
    total = ProcessCallerService.count()
    assert total == 4
def test_can_add_many_callers_for_existing_process(self, with_multiple_process_callers: None) -> None:
    # The fixture seeds three rows; two new callers bring the total to five.
    additional_callers = ["new_caller", "another_new_caller"]
    ProcessCallerService.add_caller("called_many", additional_callers)
    total = ProcessCallerService.count()
    assert total == 5
def test_can_track_duplicate_callers(self, with_no_process_callers: None) -> None:
    # Passing the same caller twice records two rows — add_caller does not
    # collapse duplicates.
    ProcessCallerService.add_caller("bob", ["new_caller", "new_caller"])
    row_count = ProcessCallerService.count()
    assert row_count == 2
def test_can_return_no_callers_when_no_records(self, with_no_process_callers: None) -> None:
    # An empty caller table yields no callers for any identifier.
    result = ProcessCallerService.callers(["bob"])
    assert result == []
def test_can_return_no_callers_when_process_id_is_unknown(self, with_multiple_process_callers: None) -> None:
    # An identifier with no relationships returns an empty list even when
    # other processes have callers recorded.
    result = ProcessCallerService.callers(["bob"])
    assert result == []
def test_can_return_single_caller(self, with_single_process_caller: None) -> None:
    # The one seeded relationship is returned for its called process.
    result = ProcessCallerService.callers(["called_once"])
    assert result == ["one_caller"]
def test_can_return_mulitple_callers(self, with_multiple_process_callers: None) -> None:
    # NOTE(review): "mulitple" in the test name is a typo for "multiple".
    # Result order is not guaranteed, so compare against a sorted expectation.
    expected = ["one_caller", "three_caller", "two_caller"]
    assert sorted(ProcessCallerService.callers(["called_many"])) == expected
def test_can_return_single_caller_when_there_are_other_process_ids(
    self, with_single_process_caller: None, with_multiple_process_callers: None
) -> None:
    # Relationships belonging to other called processes must not leak in.
    result = ProcessCallerService.callers(["called_once"])
    assert result == ["one_caller"]
def test_raises_if_called_reference_cache_does_not_exist(self, with_single_process_caller: None) -> None:
# Flush the fixture's pending work before triggering the failure path.
db.session.commit()
with pytest.raises(CalledProcessNotFoundError):
ProcessCallerService.add_caller("calling_cache", ["DNE"])
# NOTE(review): the fixture seeds one relationship, yet count() == 0 is
# asserted after the failed add_caller — presumably the error path rolls the
# session back and discards the fixture's row; confirm that rollback
# behavior is intended rather than incidental.
assert ProcessCallerService.count() == 0

View File

@ -4,6 +4,7 @@ import re
from flask.app import Flask
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_caller_relationship import ProcessCallerRelationshipModel
from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
@ -39,16 +40,10 @@ class TestProcessModel(BaseTest):
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
bpmn_file_names = [
"call_activity_level_3",
"call_activity_level_2b",
"call_activity_level_2",
"call_activity_level_3",
]
for bpmn_file_name in bpmn_file_names:
load_test_spec(
@ -56,6 +51,11 @@ class TestProcessModel(BaseTest):
process_model_source_directory="call_activity_nested",
bpmn_file_name=bpmn_file_name,
)
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
process_instance = self.create_process_instance_from_process_model(process_model)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
@ -66,16 +66,10 @@ class TestProcessModel(BaseTest):
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
bpmn_file_names = [
"call_activity_level_3",
"call_activity_level_2b",
"call_activity_level_2",
"call_activity_level_3",
]
for bpmn_file_name in bpmn_file_names:
load_test_spec(
@ -83,10 +77,16 @@ class TestProcessModel(BaseTest):
process_model_source_directory="call_activity_nested",
bpmn_file_name=bpmn_file_name,
)
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
process_instance = self.create_process_instance_from_process_model(process_model)
# delete all of the id lookup items to force to processor to find the correct
# process model when running the process
db.session.query(ProcessCallerRelationshipModel).delete()
db.session.query(ReferenceCacheModel).delete()
db.session.commit()
processor = ProcessInstanceProcessor(process_instance)

View File

@ -9,6 +9,7 @@ from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.reference_cache_service import ReferenceCacheService
from spiffworkflow_backend.services.spec_file_service import ProcessModelFileInvalidError
from spiffworkflow_backend.services.spec_file_service import SpecFileService
@ -33,10 +34,9 @@ class TestSpecFileService(BaseTest):
) -> None:
load_test_spec(
process_model_id=self.process_model_id,
bpmn_file_name=self.bpmn_file_name,
process_model_source_directory="call_activity_nested",
)
bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
assert len(bpmn_process_id_lookups) == 1
assert bpmn_process_id_lookups[0].identifier == "Level1"
assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path
@ -50,10 +50,9 @@ class TestSpecFileService(BaseTest):
bpmn_process_identifier = "Level1"
load_test_spec(
process_model_id="call_activity_duplicate",
bpmn_file_name=self.bpmn_file_name,
process_model_source_directory="call_activity_duplicate",
process_model_source_directory="call_activity_nested",
)
bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
assert len(bpmn_process_id_lookups) == 1
assert bpmn_process_id_lookups[0].identifier == bpmn_process_identifier
with pytest.raises(ProcessModelFileInvalidError) as exception:
@ -88,11 +87,10 @@ class TestSpecFileService(BaseTest):
load_test_spec(
process_model_id=self.process_model_id,
bpmn_file_name=self.bpmn_file_name,
process_model_source_directory="call_activity_nested",
)
bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
assert len(bpmn_process_id_lookups) == 1
assert bpmn_process_id_lookups[0].identifier == bpmn_process_identifier
assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path
@ -134,6 +132,7 @@ class TestSpecFileService(BaseTest):
) -> None:
"""When a BPMN processes identifier is changed in a file, the old id is removed from the cache."""
old_identifier = "ye_old_identifier"
new_identifier = "Level1"
process_id_lookup = ReferenceCacheModel.from_params(
identifier=old_identifier,
display_name="WHO CARES",
@ -147,15 +146,22 @@ class TestSpecFileService(BaseTest):
load_test_spec(
process_model_id=self.process_model_id,
bpmn_file_name=self.bpmn_file_name,
process_model_source_directory="call_activity_nested",
)
bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
assert len(bpmn_process_id_lookups) == 1
assert bpmn_process_id_lookups[0].identifier != old_identifier
assert bpmn_process_id_lookups[0].identifier == "Level1"
assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path
old_reference_count = ReferenceCacheModel.basic_query().filter_by(identifier=old_identifier).count()
assert old_reference_count == 0
current_references = (
ReferenceCacheModel.basic_query()
.filter_by(
relative_location=self.process_model_id,
file_name=self.bpmn_file_name,
)
.all()
)
assert len(current_references) == 1
assert current_references[0].identifier == new_identifier
assert current_references[0].relative_path() == self.call_activity_nested_relative_file_path
def test_load_reference_information(
self,
@ -223,10 +229,9 @@ class TestSpecFileService(BaseTest):
load_test_spec(
process_model_id=self.process_model_id,
bpmn_file_name=self.bpmn_file_name,
process_model_source_directory="call_activity_nested",
)
bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
assert len(bpmn_process_id_lookups) == 1
assert bpmn_process_id_lookups[0].identifier == "Level1"
assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path
@ -238,10 +243,9 @@ class TestSpecFileService(BaseTest):
# make sure it doesn't add a new entry to the cache
load_test_spec(
process_model_id=self.process_model_id,
bpmn_file_name=self.bpmn_file_name,
process_model_source_directory="call_activity_nested",
)
bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
assert len(bpmn_process_id_lookups) == 1
assert bpmn_process_id_lookups[0].identifier == "Level1"
assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path
@ -261,10 +265,9 @@ class TestSpecFileService(BaseTest):
load_test_spec(
process_model_id=self.process_model_id,
bpmn_file_name=self.bpmn_file_name,
process_model_source_directory="call_activity_nested",
)
bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
bpmn_process_id_lookups = ReferenceCacheService.get_reference_cache_entries_calling_process(["Level2"])
assert len(bpmn_process_id_lookups) == 1
assert bpmn_process_id_lookups[0].identifier == "Level1"
assert bpmn_process_id_lookups[0].generation_id == current_cache_generation.id

View File

@ -16,16 +16,10 @@ class TestTaskService(BaseTest):
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
bpmn_file_names = [
"call_activity_level_3",
"call_activity_level_2b",
"call_activity_level_2",
"call_activity_level_3",
]
for bpmn_file_name in bpmn_file_names:
load_test_spec(
@ -33,6 +27,11 @@ class TestTaskService(BaseTest):
process_model_source_directory="call_activity_nested",
bpmn_file_name=bpmn_file_name,
)
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
process_instance = self.create_process_instance_from_process_model(process_model)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
@ -63,16 +62,10 @@ class TestTaskService(BaseTest):
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
bpmn_file_names = [
"call_activity_level_3",
"call_activity_level_2b",
"call_activity_level_2",
"call_activity_level_3",
]
for bpmn_file_name in bpmn_file_names:
load_test_spec(
@ -80,6 +73,11 @@ class TestTaskService(BaseTest):
process_model_source_directory="call_activity_nested",
bpmn_file_name=bpmn_file_name,
)
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
process_instance = self.create_process_instance_from_process_model(process_model)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
@ -118,16 +116,10 @@ class TestTaskService(BaseTest):
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
bpmn_file_names = [
"call_activity_level_3",
"call_activity_level_2b",
"call_activity_level_2",
"call_activity_level_3",
]
for bpmn_file_name in bpmn_file_names:
load_test_spec(
@ -135,6 +127,11 @@ class TestTaskService(BaseTest):
process_model_source_directory="call_activity_nested",
bpmn_file_name=bpmn_file_name,
)
process_model = load_test_spec(
"test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
process_instance = self.create_process_instance_from_process_model(process_model)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")