Feature/new reference cache (#511)
* added basic model for new reference cache w/ burnettk
* switched out SpecReferenceCache for ReferenceCacheModel w/ burnettk jbirddog
* pyl w/ burnettk jbirddog
* save items to the db using the new cache with generation table w/ burnettk
* bulk save for performance
* tests are passing
* actually use the new generation table - we still need a test to ensure we are using it
* added test to ensure using new cache generation
* corrected reference interface on frontend w/ burnettk
* do not perform git pull in webhook if the revision is the same as the current w/ burnettk jbirddog

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
Co-authored-by: burnettk <burnettk@users.noreply.github.com>
Commit 8bf38aaa1c (parent b035191964)
@@ -0,0 +1,68 @@
"""empty message

Revision ID: 4d438975ff4d
Revises: 9d5b6c5c31a5
Create Date: 2023-09-21 16:25:44.574756

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '4d438975ff4d'
down_revision = '9d5b6c5c31a5'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('cache_generation',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('cache_table', sa.String(length=255), nullable=False),
    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('cache_generation', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_cache_generation_cache_table'), ['cache_table'], unique=False)

    op.create_table('reference_cache',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('generation_id', sa.Integer(), nullable=False),
    sa.Column('identifier', sa.String(length=255), nullable=False),
    sa.Column('display_name', sa.String(length=255), nullable=False),
    sa.Column('type', sa.String(length=255), nullable=False),
    sa.Column('file_name', sa.String(length=255), nullable=False),
    sa.Column('relative_location', sa.String(length=255), nullable=False),
    sa.Column('properties', sa.JSON(), nullable=True),
    sa.ForeignKeyConstraint(['generation_id'], ['cache_generation.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('generation_id', 'identifier', 'relative_location', 'type', name='reference_cache_uniq')
    )
    with op.batch_alter_table('reference_cache', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_reference_cache_display_name'), ['display_name'], unique=False)
        batch_op.create_index(batch_op.f('ix_reference_cache_generation_id'), ['generation_id'], unique=False)
        batch_op.create_index(batch_op.f('ix_reference_cache_identifier'), ['identifier'], unique=False)
        batch_op.create_index(batch_op.f('ix_reference_cache_relative_location'), ['relative_location'], unique=False)
        batch_op.create_index(batch_op.f('ix_reference_cache_type'), ['type'], unique=False)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('reference_cache', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_reference_cache_type'))
        batch_op.drop_index(batch_op.f('ix_reference_cache_relative_location'))
        batch_op.drop_index(batch_op.f('ix_reference_cache_identifier'))
        batch_op.drop_index(batch_op.f('ix_reference_cache_generation_id'))
        batch_op.drop_index(batch_op.f('ix_reference_cache_display_name'))

    op.drop_table('reference_cache')
    with op.batch_alter_table('cache_generation', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_cache_generation_cache_table'))

    op.drop_table('cache_generation')
    # ### end Alembic commands ###
@@ -26,6 +26,9 @@ from spiffworkflow_backend.models.human_task import HumanTaskModel  # noqa: F401
from spiffworkflow_backend.models.spec_reference import (
    SpecReferenceCache,
)  # noqa: F401
from spiffworkflow_backend.models.reference_cache import (
    ReferenceCacheModel,
)  # noqa: F401
from spiffworkflow_backend.models.process_caller import (
    ProcessCallerCacheModel,
)  # noqa: F401
@@ -0,0 +1,35 @@
from __future__ import annotations

from typing import Any

from sqlalchemy.orm import validates

from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db


class CacheGenerationTable(SpiffEnum):
    reference_cache = "reference_cache"


class CacheGenerationModel(SpiffworkflowBaseDBModel):
    __tablename__ = "cache_generation"

    id: int = db.Column(db.Integer, primary_key=True)
    cache_table: str = db.Column(db.String(255), index=True, nullable=False)

    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)

    @classmethod
    def newest_generation_for_table(cls, cache_table: str) -> CacheGenerationModel | None:
        order_by_clause = CacheGenerationModel.id.desc()  # type: ignore
        cache_generation: CacheGenerationModel | None = (
            CacheGenerationModel.query.filter_by(cache_table=cache_table).order_by(order_by_clause).first()
        )
        return cache_generation

    @validates("cache_table")
    def validate_cache_table(self, key: str, value: Any) -> Any:
        return self.validate_enum_field(key, value, CacheGenerationTable)
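A quick usage sketch of the generation lookup above (illustrative only, not part of the diff; assumes a Flask app context so the db session is available):

from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
from spiffworkflow_backend.models.db import db

# Returns None until at least one generation row exists for the table.
current = CacheGenerationModel.newest_generation_for_table("reference_cache")
if current is None:
    # Creating a row makes it the newest generation, since the lookup orders by id descending.
    current = CacheGenerationModel(cache_table="reference_cache")
    db.session.add(current)
    db.session.commit()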
@@ -6,7 +6,7 @@ from datetime import datetime
from typing import Any

from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.spec_reference import SpecReference
from spiffworkflow_backend.models.reference_cache import Reference


class FileType(SpiffEnum):

@@ -67,7 +67,7 @@ class File:
    type: str
    last_modified: datetime
    size: int
    references: list[SpecReference] | None = None
    references: list[Reference] | None = None
    file_contents: bytes | None = None
    process_model_id: str | None = None
    bpmn_process_ids: list[str] | None = None
@@ -0,0 +1,160 @@
import os
from dataclasses import dataclass
from typing import Any

from flask_marshmallow import Schema  # type: ignore
from marshmallow import INCLUDE
from sqlalchemy import ForeignKey
from sqlalchemy import UniqueConstraint
from sqlalchemy.orm import relationship
from sqlalchemy.orm import validates

from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db


# SpecReferenceNotFoundError
class ReferenceNotFoundError(Exception):
    pass


class ReferenceType(SpiffEnum):
    decision = "decision"
    process = "process"
    data_store = "data_store"


# SpecReference
@dataclass()
class Reference:
    """File Reference Information.

    Includes items such as the process id and name for a BPMN,
    or the Decision id and Decision name for a DMN file. There may be more than
    one reference that points to a particular file - if for instance, there are
    three executable processes in a collaboration within a BPMN Diagram.
    """

    identifier: str  # The id of the process or decision. "Process_1234"
    display_name: str  # The name of the process or decision. "Invoice Submission"
    relative_location: str
    type: str  # can be 'process' or 'decision'
    file_name: str  # The name of the file where this process or decision is defined.
    messages: dict  # Any messages defined in the same file where this process is defined.
    correlations: dict  # Any correlations defined in the same file with this process.
    start_messages: list  # The names of any messages that would start this process.
    called_element_ids: list  # The element ids of any called elements

    properties: dict

    def prop_is_true(self, prop_name: str) -> bool:
        return prop_name in self.properties and self.properties[prop_name] is True

    def set_prop(self, prop_name: str, value: Any) -> None:
        self.properties[prop_name] = value

    def relative_path(self) -> str:
        return os.path.join(self.relative_location, self.file_name).replace("/", os.sep)


# SpecReferenceCache
class ReferenceCacheModel(SpiffworkflowBaseDBModel):
    """A cache of information about all the Processes and Decisions defined in all files."""

    __tablename__ = "reference_cache"
    __table_args__ = (
        UniqueConstraint("generation_id", "identifier", "relative_location", "type", name="reference_cache_uniq"),
    )

    id: int = db.Column(db.Integer, primary_key=True)
    generation_id: int = db.Column(ForeignKey(CacheGenerationModel.id), nullable=False, index=True)  # type: ignore

    identifier: str = db.Column(db.String(255), index=True, nullable=False)
    display_name: str = db.Column(db.String(255), index=True, nullable=False)
    type: str = db.Column(db.String(255), index=True, nullable=False)
    file_name: str = db.Column(db.String(255), nullable=False)

    # relative to SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR
    relative_location: str = db.Column(db.String(255), index=True, nullable=False)

    properties: dict | None = db.Column(db.JSON)
    # has_lanes = db.Column(db.Boolean())
    # is_executable = db.Column(db.Boolean())
    # is_primary = db.Column(db.Boolean())

    generation = relationship(CacheGenerationModel)

    def relative_path(self) -> str:
        return os.path.join(self.relative_location, self.file_name).replace("/", os.sep)

    @classmethod
    def basic_query(cls) -> Any:
        cache_generation = CacheGenerationModel.newest_generation_for_table("reference_cache")
        basic_query = cls.query
        if cache_generation is not None:
            basic_query = basic_query.filter_by(generation_id=cache_generation.id)
        return basic_query

    @classmethod
    def from_params(
        cls,
        identifier: str,
        display_name: str,
        type: str,
        file_name: str,
        relative_location: str,
        properties: dict | None = None,
        use_current_cache_generation: bool = False,
    ) -> "ReferenceCacheModel":
        reference_cache = cls(
            identifier=identifier,
            display_name=display_name,
            relative_location=relative_location,
            type=type,
            file_name=file_name,
            properties=properties,
        )
        if use_current_cache_generation:
            cache_generation = CacheGenerationModel.newest_generation_for_table("reference_cache")
            if cache_generation is None:
                # NOTE: we may want to raise here instead since this should never happen in real environments
                # but it does happen in tests
                cache_generation = CacheGenerationModel(cache_table="reference_cache")
                db.session.add(cache_generation)
            reference_cache.generation = cache_generation
        return reference_cache

    @classmethod
    def from_spec_reference(cls, ref: Reference, use_current_cache_generation: bool = False) -> "ReferenceCacheModel":
        reference_cache = cls.from_params(
            identifier=ref.identifier,
            display_name=ref.display_name,
            relative_location=ref.relative_location,
            type=ref.type,
            file_name=ref.file_name,
            properties=ref.properties,
            use_current_cache_generation=use_current_cache_generation,
        )
        return reference_cache

    @validates("type")
    def validate_type(self, key: str, value: Any) -> Any:
        return self.validate_enum_field(key, value, ReferenceType)


# SpecReferenceSchema
class ReferenceSchema(Schema):  # type: ignore
    class Meta:
        model = Reference
        fields = [
            "identifier",
            "display_name",
            "process_group_id",
            "relative_location",
            "type",
            "file_name",
            "properties",
        ]
        unknown = INCLUDE
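For orientation, a short sketch of how the new model is meant to be used (illustrative values, not taken from this diff; assumes an app context): from_params builds a row, use_current_cache_generation pins it to the newest generation, and basic_query scopes reads to that generation.

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel

# Build a cache row for a hypothetical process and attach it to the current generation.
reference = ReferenceCacheModel.from_params(
    identifier="Process_Invoice",  # example identifier
    display_name="Invoice Submission",  # example display name
    type="process",
    file_name="invoice.bpmn",  # example file
    relative_location="billing/invoice",  # example process model location
    properties={"is_primary": True, "is_executable": True, "has_lanes": False},
    use_current_cache_generation=True,
)
db.session.add(reference)
db.session.commit()

# Reads go through basic_query() so only rows from the newest generation are returned.
process_refs = ReferenceCacheModel.basic_query().filter_by(type="process").all()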
@@ -16,8 +16,8 @@ from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_file_data import ProcessInstanceFileDataModel
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.models.reference_cache import ReferenceSchema
from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.git_service import GitService
@@ -63,8 +63,8 @@ def process_list() -> Any:
    This includes processes that are not the
    primary process - helpful for finding possible call activities.
    """
    references = SpecReferenceCache.query.filter_by(type="process").all()
    process_model_identifiers = [r.process_model_id for r in references]
    references = ReferenceCacheModel.basic_query().filter_by(type="process").all()
    process_model_identifiers = [r.relative_location for r in references]
    permitted_process_model_identifiers = ProcessModelService.process_model_identifiers_with_permission_for_user(
        user=g.user,
        permission_to_check="create",

@@ -73,17 +73,20 @@
    )
    permitted_references = []
    for spec_reference in references:
        if spec_reference.process_model_id in permitted_process_model_identifiers:
        if spec_reference.relative_location in permitted_process_model_identifiers:
            permitted_references.append(spec_reference)
    return SpecReferenceSchema(many=True).dump(permitted_references)
    return ReferenceSchema(many=True).dump(permitted_references)


def process_caller_list(bpmn_process_identifiers: list[str]) -> Any:
    callers = ProcessCallerService.callers(bpmn_process_identifiers)
    references = (
        SpecReferenceCache.query.filter_by(type="process").filter(SpecReferenceCache.identifier.in_(callers)).all()
        ReferenceCacheModel.basic_query()
        .filter_by(type="process")
        .filter(ReferenceCacheModel.identifier.in_(callers))  # type: ignore
        .all()
    )
    return SpecReferenceSchema(many=True).dump(references)
    return ReferenceSchema(many=True).dump(references)


def _process_data_fetcher(
@@ -28,8 +28,8 @@ from spiffworkflow_backend.models.process_instance_queue import ProcessInstanceQ
from spiffworkflow_backend.models.process_instance_report import ProcessInstanceReportModel
from spiffworkflow_backend.models.process_instance_report import Report
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.models.reference_cache import ReferenceNotFoundError
from spiffworkflow_backend.models.task import TaskModel
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from spiffworkflow_backend.routes.process_api_blueprint import _find_process_instance_by_id_or_raise
@@ -702,13 +702,13 @@ def _get_process_instance(
    process_model_with_diagram = None
    name_of_file_with_diagram = None
    if process_identifier:
        spec_reference = SpecReferenceCache.query.filter_by(identifier=process_identifier, type="process").first()
        spec_reference = (
            ReferenceCacheModel.basic_query().filter_by(identifier=process_identifier, type="process").first()
        )
        if spec_reference is None:
            raise SpecReferenceNotFoundError(
                f"Could not find given process identifier in the cache: {process_identifier}"
            )
            raise ReferenceNotFoundError(f"Could not find given process identifier in the cache: {process_identifier}")

        process_model_with_diagram = ProcessModelService.get_process_model(spec_reference.process_model_id)
        process_model_with_diagram = ProcessModelService.get_process_model(spec_reference.relative_location)
        name_of_file_with_diagram = spec_reference.file_name
        process_instance.process_model_with_diagram_identifier = process_model_with_diagram.id
    else:
@@ -1,7 +1,10 @@
from flask import current_app
from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from sqlalchemy import insert


class DataSetupService:

@@ -16,25 +19,29 @@ class DataSetupService:
        These all exist within processes located on the file system, so we can quickly reference them
        from the database.
        """
        # Clear out all of the cached data.
        SpecFileService.clear_caches()

        current_app.logger.debug("DataSetupService.save_all_process_models() start")
        failing_process_models = []
        process_models = ProcessModelService.get_process_models(recursive=True)
        SpecFileService.clear_caches()
        reference_objects = {}
        for process_model in process_models:
            current_app.logger.debug(f"Process Model: {process_model.display_name}")
            try:
                # FIXME: get_references_for_file_contents is erroring out for elements in the list
                refs = SpecFileService.get_references_for_process(process_model)

                for ref in refs:
                    try:
                        SpecFileService.update_caches(ref)
                        reference_cache = ReferenceCacheModel.from_spec_reference(ref)
                        reference_cache_unique = (
                            f"{reference_cache.identifier}{reference_cache.relative_location}{reference_cache.type}"
                        )
                        reference_objects[reference_cache_unique] = reference_cache
                        SpecFileService.update_caches_except_process(ref)
                        db.session.commit()
                    except Exception as ex:
                        failing_process_models.append(
                            (
                                f"{ref.process_model_id}/{ref.file_name}",
                                f"{ref.relative_location}/{ref.file_name}",
                                str(ex),
                            )
                        )

@@ -47,5 +54,18 @@ class DataSetupService:
        )

        current_app.logger.debug("DataSetupService.save_all_process_models() end")

        # get inserted autoincrement primary key value back in a database agnostic way without committing the db session
        ins = insert(CacheGenerationModel).values(cache_table="reference_cache")  # type: ignore
        res = db.session.execute(ins)
        cache_generation_id = res.inserted_primary_key[0]

        # add primary key value to each element in reference objects list and store in new list
        reference_object_list_with_cache_generation_id = []
        for reference_object in reference_objects.values():
            reference_object.generation_id = cache_generation_id
            reference_object_list_with_cache_generation_id.append(reference_object)

        db.session.bulk_save_objects(reference_object_list_with_cache_generation_id)
        db.session.commit()
        return failing_process_models
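The block above leans on SQLAlchemy returning the autoincrement key from a Core insert before any commit; pulled out on its own, the pattern looks roughly like this (a sketch mirroring the diff, with an illustrative helper name):

from sqlalchemy import insert

from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
from spiffworkflow_backend.models.db import db


def save_references_under_new_generation(reference_objects: dict) -> None:
    # Core insert: inserted_primary_key is available even though the session
    # has not been committed yet, and it works across mysql/postgres/sqlite.
    ins = insert(CacheGenerationModel).values(cache_table="reference_cache")
    cache_generation_id = db.session.execute(ins).inserted_primary_key[0]

    # Stamp every pending ReferenceCacheModel with the new generation id and
    # write them in one bulk operation, then commit everything together.
    rows = []
    for reference_object in reference_objects.values():
        reference_object.generation_id = cache_generation_id
        rows.append(reference_object)
    db.session.bulk_save_objects(rows)
    db.session.commit()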
@@ -183,7 +183,15 @@ class GitService:
            return False

        if "ref" not in webhook:
            raise InvalidGitWebhookBodyError(f"Could not find the 'ref' arg in the webhook boy: {webhook}")
            raise InvalidGitWebhookBodyError(f"Could not find the 'ref' arg in the webhook body: {webhook}")
        if "after" not in webhook:
            raise InvalidGitWebhookBodyError(f"Could not find the 'after' arg in the webhook body: {webhook}")

        git_revision_before_pull = cls.get_current_revision()
        git_revision_after = webhook["after"]
        if git_revision_before_pull == git_revision_after:
            current_app.logger.info("Skipping git pull because we already have the current git revision, git boy!")
            return True

        if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH"] is None:
            raise MissingGitConfigsError(
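The webhook short-circuit above only compares the checked-out revision with the webhook's "after" SHA; a standalone sketch of that idea using plain git (the helper below is an assumption for illustration and is not the GitService API):

import subprocess


def webhook_is_already_applied(webhook: dict, repo_path: str) -> bool:
    # True when the webhook's "after" SHA is already checked out locally,
    # in which case the git pull can be skipped.
    current_revision = subprocess.check_output(
        ["git", "rev-parse", "HEAD"], cwd=repo_path, text=True
    ).strip()
    return current_revision == webhook.get("after")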
@@ -65,8 +65,8 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.task import TaskModel
from spiffworkflow_backend.models.task import TaskNotFoundError
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
@@ -1244,7 +1244,9 @@ class ProcessInstanceProcessor:
                "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None"
            )

        spec_reference = SpecReferenceCache.query.filter_by(identifier=bpmn_process_identifier, type="process").first()
        spec_reference = (
            ReferenceCacheModel.basic_query().filter_by(identifier=bpmn_process_identifier, type="process").first()
        )
        bpmn_file_full_path = None
        if spec_reference is None:
            bpmn_file_full_path = ProcessInstanceProcessor.backfill_missing_spec_reference_records(

@@ -1253,7 +1255,7 @@
        else:
            bpmn_file_full_path = os.path.join(
                FileSystemService.root_path(),
                spec_reference.relative_path,
                spec_reference.relative_path(),
            )
        if bpmn_file_full_path is None:
            raise (
@@ -8,10 +8,10 @@ from spiffworkflow_backend.models.correlation_property_cache import CorrelationP
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.file import File
from spiffworkflow_backend.models.file import FileType
from spiffworkflow_backend.models.file import SpecReference
from spiffworkflow_backend.models.file import Reference
from spiffworkflow_backend.models.message_triggerable_process_model import MessageTriggerableProcessModel
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authentication_service import NotAuthorizedError
from spiffworkflow_backend.services.custom_parser import MyCustomParser
@@ -32,7 +32,7 @@ class SpecFileService(FileSystemService):
    """

    @staticmethod
    def reference_map(references: list[SpecReference]) -> dict[str, SpecReference]:
    def reference_map(references: list[Reference]) -> dict[str, Reference]:
        """Creates a dict with provided references organized by id."""
        ref_map = {}
        for ref in references:

@@ -42,7 +42,7 @@ class SpecFileService(FileSystemService):
    @staticmethod
    def get_references_for_process(
        process_model_info: ProcessModelInfo,
    ) -> list[SpecReference]:
    ) -> list[Reference]:
        files = FileSystemService.get_files(process_model_info)
        references = []
        for file in files:

@@ -50,7 +50,7 @@ class SpecFileService(FileSystemService):
        return references

    @classmethod
    def get_references_for_file(cls, file: File, process_model_info: ProcessModelInfo) -> list[SpecReference]:
    def get_references_for_file(cls, file: File, process_model_info: ProcessModelInfo) -> list[Reference]:
        full_file_path = SpecFileService.full_file_path(process_model_info, file.name)
        file_contents: bytes = b""
        with open(full_file_path) as f:

@@ -71,7 +71,7 @@ class SpecFileService(FileSystemService):
    @classmethod
    def get_references_for_file_contents(
        cls, process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
    ) -> list[SpecReference]:
    ) -> list[Reference]:
        """Uses spiffworkflow to parse BPMN and DMN files to determine how they can be externally referenced.

        Returns a list of Reference objects that contain the type of reference, the id, the name.

@@ -80,8 +80,8 @@ class SpecFileService(FileSystemService):
        name = {str} 'Level 3'
        type = {str} 'process' / 'decision'
        """
        references: list[SpecReference] = []
        file_path = os.path.join(process_model_info.id_for_file_path(), file_name)
        references: list[Reference] = []
        os.path.join(process_model_info.id_for_file_path(), file_name)
        file_type = FileSystemService.file_type(file_name)
        parser = MyCustomParser()
        parser_type = None
@@ -116,20 +116,17 @@ class SpecFileService(FileSystemService):
            called_element_ids = sub_parser.called_element_ids()

            references.append(
                SpecReference(
                Reference(
                    identifier=sub_parser.bpmn_id,
                    display_name=sub_parser.get_name(),
                    process_model_id=process_model_info.id,
                    relative_location=process_model_info.id,
                    type=parser_type,
                    file_name=file_name,
                    relative_path=file_path,
                    has_lanes=has_lanes,
                    is_executable=is_executable,
                    messages=messages,
                    is_primary=is_primary,
                    correlations=correlations,
                    start_messages=start_messages,
                    called_element_ids=called_element_ids,
                    properties={"is_primary": is_primary, "has_lanes": has_lanes, "is_executable": is_executable},
                )
            )
        return references
@@ -160,17 +157,19 @@ class SpecFileService(FileSystemService):
        cls.validate_bpmn_xml(file_name, binary_data)

        references = cls.get_references_for_file_contents(process_model_info, file_name, binary_data)
        primary_process_ref = next((ref for ref in references if ref.is_primary and ref.is_executable), None)
        primary_process_ref = next(
            (ref for ref in references if ref.prop_is_true("is_primary") and ref.prop_is_true("is_executable")), None
        )

        SpecFileService.clear_caches_for_file(file_name, process_model_info)
        all_called_element_ids: set[str] = set()
        for ref in references:
            # If no valid primary process is defined, default to the first process in the
            # updated file.
            if not primary_process_ref and ref.type == "process" and ref.is_executable:
                ref.is_primary = True
            if not primary_process_ref and ref.type == "process" and ref.prop_is_true("is_executable"):
                ref.set_prop("is_primary", True)

            if ref.is_primary:
            if ref.prop_is_true("is_primary"):
                update_hash = {}
                if not process_model_info.primary_file_name:
                    update_hash["primary_process_id"] = ref.identifier
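Since has_lanes/is_primary/is_executable now live in the properties dict, call sites go through the Reference helpers; a tiny sketch with placeholder values (not taken from this diff):

from spiffworkflow_backend.models.reference_cache import Reference

ref = Reference(
    identifier="Process_Example",  # placeholder values throughout
    display_name="Example",
    relative_location="example_group/example_model",
    type="process",
    file_name="example.bpmn",
    messages={},
    correlations={},
    start_messages=[],
    called_element_ids=[],
    properties={"is_executable": True},
)

# Mirrors the primary-process fallback above: promote the first executable process.
if ref.prop_is_true("is_executable") and not ref.prop_is_true("is_primary"):
    ref.set_prop("is_primary", True)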
@@ -185,14 +184,16 @@ class SpecFileService(FileSystemService):
                )

            all_called_element_ids = all_called_element_ids | set(ref.called_element_ids)
            SpecFileService.update_caches(ref)
            SpecFileService.update_all_caches(ref)

        if user is not None:
            called_element_refs = SpecReferenceCache.query.filter(
                SpecReferenceCache.identifier.in_(all_called_element_ids)
            ).all()
            called_element_refs = (
                ReferenceCacheModel.basic_query()
                .filter(ReferenceCacheModel.identifier.in_(all_called_element_ids))  # type: ignore
                .all()
            )
            if len(called_element_refs) > 0:
                process_model_identifiers: list[str] = [r.process_model_id for r in called_element_refs]
                process_model_identifiers: list[str] = [r.relative_location for r in called_element_refs]
                permitted_process_model_identifiers = (
                    ProcessModelService.process_model_identifiers_with_permission_for_user(
                        user=user,

@@ -246,8 +247,12 @@ class SpecFileService(FileSystemService):
    # fixme: Place all the caching stuff in a different service.

    @staticmethod
    def update_caches(ref: SpecReference) -> None:
    def update_all_caches(ref: Reference) -> None:
        SpecFileService.update_process_cache(ref)
        SpecFileService.update_caches_except_process(ref)

    @staticmethod
    def update_caches_except_process(ref: Reference) -> None:
        SpecFileService.update_process_caller_cache(ref)
        SpecFileService.update_message_trigger_cache(ref)
        SpecFileService.update_correlation_cache(ref)

@@ -256,9 +261,9 @@ class SpecFileService(FileSystemService):
    def clear_caches_for_file(file_name: str, process_model_info: ProcessModelInfo) -> None:
        """Clear all caches related to a file."""
        records = (
            db.session.query(SpecReferenceCache)
            .filter(SpecReferenceCache.file_name == file_name)
            .filter(SpecReferenceCache.process_model_id == process_model_info.id)
            db.session.query(ReferenceCacheModel)
            .filter(ReferenceCacheModel.file_name == file_name)
            .filter(ReferenceCacheModel.relative_location == process_model_info.id)
            .all()
        )
@@ -273,38 +278,41 @@ class SpecFileService(FileSystemService):

    @staticmethod
    def clear_caches() -> None:
        db.session.query(SpecReferenceCache).delete()
        db.session.query(ReferenceCacheModel).delete()
        ProcessCallerService.clear_cache()
        # fixme: likely the other caches should be cleared as well, but we don't have a clean way to do so yet.

    @staticmethod
    def update_process_cache(ref: SpecReference) -> None:
    def update_process_cache(ref: Reference) -> None:
        process_id_lookup = (
            SpecReferenceCache.query.filter_by(identifier=ref.identifier).filter_by(type=ref.type).first()
            ReferenceCacheModel.basic_query()
            .filter_by(identifier=ref.identifier, relative_location=ref.relative_location, type=ref.type)
            .first()
        )
        if process_id_lookup is None:
            process_id_lookup = SpecReferenceCache.from_spec_reference(ref)
            process_id_lookup = ReferenceCacheModel.from_spec_reference(ref, use_current_cache_generation=True)
            db.session.add(process_id_lookup)
        else:
            if ref.relative_path != process_id_lookup.relative_path:
                full_bpmn_file_path = SpecFileService.full_path_from_relative_path(process_id_lookup.relative_path)
            if ref.relative_path() != process_id_lookup.relative_path():
                full_bpmn_file_path = SpecFileService.full_path_from_relative_path(process_id_lookup.relative_path())
                # if the old relative bpmn file no longer exists, then assume things were moved around
                # on the file system. Otherwise, assume it is a duplicate process id and error.
                if os.path.isfile(full_bpmn_file_path):
                    raise ProcessModelFileInvalidError(
                        f"Process id ({ref.identifier}) has already been used for "
                        f"{process_id_lookup.relative_path}. It cannot be reused."
                        f"{process_id_lookup.relative_path()}. It cannot be reused."
                    )
                else:
                    process_id_lookup.relative_path = ref.relative_path
                    process_id_lookup.relative_location = ref.relative_location
                    process_id_lookup.file_name = ref.file_name
                    db.session.add(process_id_lookup)

    @staticmethod
    def update_process_caller_cache(ref: SpecReference) -> None:
    def update_process_caller_cache(ref: Reference) -> None:
        ProcessCallerService.add_caller(ref.identifier, ref.called_element_ids)

    @staticmethod
    def update_message_trigger_cache(ref: SpecReference) -> None:
    def update_message_trigger_cache(ref: Reference) -> None:
        """Assure we know which messages can trigger the start of a process."""
        for message_name in ref.start_messages:
            message_triggerable_process_model = MessageTriggerableProcessModel.query.filter_by(
@@ -313,24 +321,24 @@ class SpecFileService(FileSystemService):
            if message_triggerable_process_model is None:
                message_triggerable_process_model = MessageTriggerableProcessModel(
                    message_name=message_name,
                    process_model_identifier=ref.process_model_id,
                    process_model_identifier=ref.relative_location,
                )
                db.session.add(message_triggerable_process_model)
            else:
                if message_triggerable_process_model.process_model_identifier != ref.process_model_id:
                if message_triggerable_process_model.process_model_identifier != ref.relative_location:
                    raise ProcessModelFileInvalidError(
                        f"Message model is already used to start process model {ref.process_model_id}"
                        f"Message model is already used to start process model {ref.relative_location}"
                    )

    @staticmethod
    def update_correlation_cache(ref: SpecReference) -> None:
    def update_correlation_cache(ref: Reference) -> None:
        for name in ref.correlations.keys():
            correlation_property_retrieval_expressions = ref.correlations[name]["retrieval_expressions"]

            for cpre in correlation_property_retrieval_expressions:
                message_name = ref.messages.get(cpre["messageRef"], None)
                retrieval_expression = cpre["expression"]
                process_model_id = ref.process_model_id
                process_model_id = ref.relative_location

                existing = CorrelationPropertyCache.query.filter_by(
                    name=name,
@@ -20,8 +20,8 @@ from spiffworkflow_backend.models.json_data import JsonDataModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.models.reference_cache import ReferenceNotFoundError
from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
from spiffworkflow_backend.models.task import TaskNotFoundError
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
@@ -683,21 +683,23 @@ class TaskService:
        try:
            filename: str | None = cls.get_spec_reference_from_bpmn_process(bpmn_process).file_name
            return filename
        except SpecReferenceNotFoundError:
        except ReferenceNotFoundError:
            return None

    @classmethod
    def get_spec_reference_from_bpmn_process(cls, bpmn_process: BpmnProcessModel) -> SpecReferenceCache:
    def get_spec_reference_from_bpmn_process(cls, bpmn_process: BpmnProcessModel) -> ReferenceCacheModel:
        """Get the bpmn file for a given task model.

        This involves several queries so avoid calling in a tight loop.
        """
        bpmn_process_definition = bpmn_process.bpmn_process_definition
        spec_reference: SpecReferenceCache | None = SpecReferenceCache.query.filter_by(
            identifier=bpmn_process_definition.bpmn_identifier, type="process"
        ).first()
        spec_reference: ReferenceCacheModel | None = (
            ReferenceCacheModel.basic_query()
            .filter_by(identifier=bpmn_process_definition.bpmn_identifier, type="process")
            .first()
        )
        if spec_reference is None:
            raise SpecReferenceNotFoundError(
            raise ReferenceNotFoundError(
                f"Could not find given process identifier in the cache: {bpmn_process_definition.bpmn_identifier}"
            )
        return spec_reference
@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_f07329e" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0">
  <bpmn:process id="Level1" name="Level1" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_1g3dpd7</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_1g3dpd7" sourceRef="StartEvent_1" targetRef="Activity_12zat0d" />
    <bpmn:callActivity id="Activity_12zat0d" name="call level 2" calledElement="Level2">
      <bpmn:incoming>Flow_1g3dpd7</bpmn:incoming>
      <bpmn:outgoing>Flow_0qdgvah</bpmn:outgoing>
    </bpmn:callActivity>
    <bpmn:sequenceFlow id="Flow_0qdgvah" sourceRef="Activity_12zat0d" targetRef="Activity_0rkbhbz" />
    <bpmn:endEvent id="Event_18dla68">
      <bpmn:documentation># Main Workflow
Hello {{my_other_var}}

</bpmn:documentation>
      <bpmn:incoming>Flow_0upce00</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_0upce00" sourceRef="Activity_0rkbhbz" targetRef="Event_18dla68" />
    <bpmn:callActivity id="Activity_0rkbhbz" name="call level 2B" calledElement="Level2b">
      <bpmn:incoming>Flow_0qdgvah</bpmn:incoming>
      <bpmn:outgoing>Flow_0upce00</bpmn:outgoing>
    </bpmn:callActivity>
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Level1">
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="99" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_0mcej1g_di" bpmnElement="Activity_12zat0d">
        <dc:Bounds x="280" y="77" width="100" height="80" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_18dla68_di" bpmnElement="Event_18dla68">
        <dc:Bounds x="702" y="99" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_0jddvat_di" bpmnElement="Activity_0rkbhbz">
        <dc:Bounds x="420" y="77" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_1g3dpd7_di" bpmnElement="Flow_1g3dpd7">
        <di:waypoint x="215" y="117" />
        <di:waypoint x="280" y="117" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0qdgvah_di" bpmnElement="Flow_0qdgvah">
        <di:waypoint x="380" y="117" />
        <di:waypoint x="420" y="117" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0upce00_di" bpmnElement="Flow_0upce00">
        <di:waypoint x="520" y="117" />
        <di:waypoint x="702" y="117" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>
@@ -22,7 +22,7 @@ from spiffworkflow_backend.models.process_instance_report import ProcessInstance
from spiffworkflow_backend.models.process_instance_report import ReportMetadata
from spiffworkflow_backend.models.process_model import NotificationType
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.file_system_service import FileSystemService

@@ -505,7 +505,7 @@ class TestProcessApi(BaseTest):
            bpmn_file_name="simple_form",
        )
        # When adding a process model with one Process, no decisions, and some json files, only one process is recorded.
        assert len(SpecReferenceCache.query.all()) == 1
        assert len(ReferenceCacheModel.basic_query().all()) == 1

        self.create_group_and_model_with_bpmn(
            client=client,

@@ -515,7 +515,7 @@ class TestProcessApi(BaseTest):
            bpmn_file_location="call_activity_nested",
        )
        # When adding a process model with 4 processes and a decision, 5 new records will be in the Cache
        assert len(SpecReferenceCache.query.all()) == 6
        assert len(ReferenceCacheModel.basic_query().all()) == 6

        # get the results
        response = client.get(

@@ -529,10 +529,10 @@ class TestProcessApi(BaseTest):
        assert len(response.json) == 5
        simple_form = next(p for p in response.json if p["identifier"] == "Process_WithForm")
        assert simple_form["display_name"] == "Process With Form"
        assert simple_form["process_model_id"] == "test_group_one/simple_form"
        assert simple_form["has_lanes"] is False
        assert simple_form["is_executable"] is True
        assert simple_form["is_primary"] is True
        assert simple_form["relative_location"] == "test_group_one/simple_form"
        assert simple_form["properties"]["has_lanes"] is False
        assert simple_form["properties"]["is_executable"] is True
        assert simple_form["properties"]["is_primary"] is True

    def test_process_list_with_restricted_access(
        self,

@@ -547,7 +547,7 @@ class TestProcessApi(BaseTest):
            bpmn_file_name="simple_form",
        )
        # When adding a process model with one Process, no decisions, and some json files, only one process is recorded.
        assert len(SpecReferenceCache.query.all()) == 1
        assert len(ReferenceCacheModel.basic_query().all()) == 1

        self.create_group_and_model_with_bpmn(
            client=client,

@@ -557,7 +557,7 @@ class TestProcessApi(BaseTest):
            bpmn_file_location="call_activity_nested",
        )
        # When adding a process model with 4 processes and a decision, 5 new records will be in the Cache
        assert len(SpecReferenceCache.query.all()) == 6
        assert len(ReferenceCacheModel.basic_query().all()) == 6

        user_one = self.create_user_with_permission(
            username="user_one", target_uri="/v1.0/process-groups/test_group_one:*"

@@ -580,10 +580,10 @@ class TestProcessApi(BaseTest):
        assert len(response.json) == 1
        simple_form = next(p for p in response.json if p["identifier"] == "Process_WithForm")
        assert simple_form["display_name"] == "Process With Form"
        assert simple_form["process_model_id"] == "test_group_one/simple_form"
        assert simple_form["has_lanes"] is False
        assert simple_form["is_executable"] is True
        assert simple_form["is_primary"] is True
        assert simple_form["relative_location"] == "test_group_one/simple_form"
        assert simple_form["properties"]["has_lanes"] is False
        assert simple_form["properties"]["is_executable"] is True
        assert simple_form["properties"]["is_primary"] is True

    def test_process_callers(
        self,

@@ -599,7 +599,7 @@ class TestProcessApi(BaseTest):
            bpmn_file_name="simple_form",
        )
        # When adding a process model with one Process, no decisions, and some json files, only one process is recorded.
        assert len(SpecReferenceCache.query.all()) == 1
        assert len(ReferenceCacheModel.basic_query().all()) == 1
        # but no callers are recorded
        assert ProcessCallerService.count() == 0

@@ -611,7 +611,7 @@ class TestProcessApi(BaseTest):
            bpmn_file_location="call_activity_nested",
        )
        # When adding a process model with 4 processes and a decision, 5 new records will be in the Cache
        assert len(SpecReferenceCache.query.all()) == 6
        assert len(ReferenceCacheModel.basic_query().all()) == 6
        # and 4 callers recorded
        assert ProcessCallerService.count() == 4

@@ -1330,7 +1330,7 @@ class TestProcessApi(BaseTest):
            process_model_id=process_model_id,
            bpmn_file_location="call_activity_nested",
        )
        spec_reference = SpecReferenceCache.query.filter_by(identifier="Level2b").first()
        spec_reference = ReferenceCacheModel.basic_query().filter_by(identifier="Level2b").first()
        assert spec_reference
        modified_process_model_identifier = self.modify_process_identifier_for_path_param(process_model.id)
        headers = self.logged_in_headers(with_super_admin_user)

@@ -1356,7 +1356,7 @@ class TestProcessApi(BaseTest):
        with open(process_instance_file_path) as f_open:
            xml_file_contents = f_open.read()
        assert show_response.json["bpmn_xml_file_contents"] != xml_file_contents
        spec_reference_file_path = os.path.join(file_system_root, spec_reference.relative_path)
        spec_reference_file_path = os.path.join(file_system_root, spec_reference.relative_path())
        with open(spec_reference_file_path) as f_open:
            xml_file_contents = f_open.read()
        assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents
@@ -5,7 +5,7 @@ from flask.app import Flask
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor

from tests.spiffworkflow_backend.helpers.base_test import BaseTest

@@ -86,7 +86,7 @@ class TestProcessModel(BaseTest):

        # delete all of the id lookup items to force to processor to find the correct
        # process model when running the process
        db.session.query(SpecReferenceCache).delete()
        db.session.query(ReferenceCacheModel).delete()
        db.session.commit()
        processor = ProcessInstanceProcessor(process_instance)
        processor.do_engine_steps(save=True, execution_strategy_name="greedy")
@@ -5,8 +5,9 @@ import pytest
from flask import Flask
from flask.testing import FlaskClient
from lxml import etree  # type: ignore
from spiffworkflow_backend.models.cache_generation import CacheGenerationModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import ProcessModelFileInvalidError
from spiffworkflow_backend.services.spec_file_service import SpecFileService
@@ -22,7 +23,7 @@ class TestSpecFileService(BaseTest):
    # process_model_id = "call_activity_nested"
    bpmn_file_name = "call_activity_nested.bpmn"

    call_activity_nested_relative_file_path = os.path.join(process_group_id, process_model_id, bpmn_file_name)
    call_activity_nested_relative_file_path = os.path.join(process_model_id, bpmn_file_name)

    def test_can_store_process_ids_for_lookup(
        self,
@@ -35,12 +36,12 @@ class TestSpecFileService(BaseTest):
            bpmn_file_name=self.bpmn_file_name,
            process_model_source_directory="call_activity_nested",
        )
        bpmn_process_id_lookups = SpecReferenceCache.query.all()
        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
        assert len(bpmn_process_id_lookups) == 1
        assert bpmn_process_id_lookups[0].identifier == "Level1"
        assert bpmn_process_id_lookups[0].relative_path == self.call_activity_nested_relative_file_path
        assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path

    def test_fails_to_save_duplicate_process_id(
    def test_fails_to_save_duplicate_process_id_in_same_process_model(
        self,
        app: Flask,
        client: FlaskClient,
@@ -48,23 +49,22 @@ class TestSpecFileService(BaseTest):
    ) -> None:
        bpmn_process_identifier = "Level1"
        load_test_spec(
            process_model_id=self.process_model_id,
            process_model_id="call_activity_duplicate",
            bpmn_file_name=self.bpmn_file_name,
            process_model_source_directory="call_activity_nested",
            process_model_source_directory="call_activity_duplicate",
        )
        bpmn_process_id_lookups = SpecReferenceCache.query.all()
        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
        assert len(bpmn_process_id_lookups) == 1
        assert bpmn_process_id_lookups[0].identifier == bpmn_process_identifier
        assert bpmn_process_id_lookups[0].relative_path == self.call_activity_nested_relative_file_path
        with pytest.raises(ProcessModelFileInvalidError) as exception:
            load_test_spec(
                "call_activity_nested_duplicate",
                process_model_id="call_activity_duplicate",
                process_model_source_directory="call_activity_duplicate",
                bpmn_file_name="call_activity_nested_duplicate",
            )
            assert f"Process id ({bpmn_process_identifier}) has already been used" in str(exception.value)
        assert f"Process id ({bpmn_process_identifier}) has already been used" in str(exception.value)

        process_model = ProcessModelService.get_process_model("call_activity_nested_duplicate")
        process_model = ProcessModelService.get_process_model("call_activity_duplicate")
        full_file_path = SpecFileService.full_file_path(process_model, "call_activity_nested_duplicate.bpmn")
        assert not os.path.isfile(full_file_path)
@@ -75,10 +75,13 @@ class TestSpecFileService(BaseTest):
        with_db_and_bpmn_file_cleanup: None,
    ) -> None:
        bpmn_process_identifier = "Level1"
        process_id_lookup = SpecReferenceCache(
        process_id_lookup = ReferenceCacheModel.from_params(
            identifier=bpmn_process_identifier,
            relative_path=self.call_activity_nested_relative_file_path,
            display_name="WHO CARES",
            relative_location=self.process_model_id,
            file_name=self.bpmn_file_name,
            type="process",
            use_current_cache_generation=True,
        )
        db.session.add(process_id_lookup)
        db.session.commit()
@@ -89,10 +92,10 @@ class TestSpecFileService(BaseTest):
            process_model_source_directory="call_activity_nested",
        )

        bpmn_process_id_lookups = SpecReferenceCache.query.all()
        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
        assert len(bpmn_process_id_lookups) == 1
        assert bpmn_process_id_lookups[0].identifier == bpmn_process_identifier
        assert bpmn_process_id_lookups[0].relative_path == self.call_activity_nested_relative_file_path
        assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path

        # this is really a test of your configuration.
        # sqlite and postgres are case sensitive by default,
@@ -102,15 +105,23 @@ class TestSpecFileService(BaseTest):
        app: Flask,
        with_db_and_bpmn_file_cleanup: None,
    ) -> None:
        process_id_lookup = SpecReferenceCache(
        process_id_lookup = ReferenceCacheModel.from_params(
            identifier="HOT",
            display_name="WHO CARES",
            relative_location=self.process_model_id,
            file_name=self.bpmn_file_name,
            type="process",
            use_current_cache_generation=True,
        )
        db.session.add(process_id_lookup)
        db.session.commit()
        process_id_lookup = SpecReferenceCache(
        process_id_lookup = ReferenceCacheModel.from_params(
            identifier="hot",
            display_name="WHO CARES",
            relative_location=self.process_model_id,
            file_name=self.bpmn_file_name,
            type="process",
            use_current_cache_generation=True,
        )
        db.session.add(process_id_lookup)
        db.session.commit()
@@ -123,12 +134,13 @@ class TestSpecFileService(BaseTest):
    ) -> None:
        """When a BPMN processes identifier is changed in a file, the old id is removed from the cache."""
        old_identifier = "ye_old_identifier"
        process_id_lookup = SpecReferenceCache(
        process_id_lookup = ReferenceCacheModel.from_params(
            identifier=old_identifier,
            relative_path=self.call_activity_nested_relative_file_path,
            display_name="WHO CARES",
            relative_location=self.process_model_id,
            file_name=self.bpmn_file_name,
            process_model_id=self.process_model_id,
            type="process",
            use_current_cache_generation=True,
        )
        db.session.add(process_id_lookup)
        db.session.commit()
@@ -139,11 +151,11 @@ class TestSpecFileService(BaseTest):
            process_model_source_directory="call_activity_nested",
        )

        bpmn_process_id_lookups = SpecReferenceCache.query.all()
        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
        assert len(bpmn_process_id_lookups) == 1
        assert bpmn_process_id_lookups[0].identifier != old_identifier
        assert bpmn_process_id_lookups[0].identifier == "Level1"
        assert bpmn_process_id_lookups[0].relative_path == self.call_activity_nested_relative_file_path
        assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path

    def test_load_reference_information(
        self,
@@ -200,6 +212,63 @@ class TestSpecFileService(BaseTest):
        full_file_path = SpecFileService.full_file_path(process_model, "bad_xml.bpmn")
        assert not os.path.isfile(full_file_path)

    def test_uses_correct_cache_generation(
        self,
        app: Flask,
        client: FlaskClient,
        with_db_and_bpmn_file_cleanup: None,
    ) -> None:
        current_cache_generation = CacheGenerationModel.newest_generation_for_table("reference_cache")
        assert current_cache_generation is None

        load_test_spec(
            process_model_id=self.process_model_id,
            bpmn_file_name=self.bpmn_file_name,
            process_model_source_directory="call_activity_nested",
        )
        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
        assert len(bpmn_process_id_lookups) == 1
        assert bpmn_process_id_lookups[0].identifier == "Level1"
        assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path

        current_cache_generation = CacheGenerationModel.newest_generation_for_table("reference_cache")
        assert current_cache_generation is not None
        assert bpmn_process_id_lookups[0].generation_id == current_cache_generation.id

        # make sure it doesn't add a new entry to the cache
        load_test_spec(
            process_model_id=self.process_model_id,
            bpmn_file_name=self.bpmn_file_name,
            process_model_source_directory="call_activity_nested",
        )
        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
        assert len(bpmn_process_id_lookups) == 1
        assert bpmn_process_id_lookups[0].identifier == "Level1"
        assert bpmn_process_id_lookups[0].relative_path() == self.call_activity_nested_relative_file_path
        assert bpmn_process_id_lookups[0].generation_id == current_cache_generation.id

        cache_generations = CacheGenerationModel.query.all()
        assert len(cache_generations) == 1

        new_cache_generation = CacheGenerationModel(cache_table="reference_cache")
        db.session.add(new_cache_generation)
        db.session.commit()

        cache_generations = CacheGenerationModel.query.all()
        assert len(cache_generations) == 2
        current_cache_generation = CacheGenerationModel.newest_generation_for_table("reference_cache")
        assert current_cache_generation is not None

        load_test_spec(
            process_model_id=self.process_model_id,
            bpmn_file_name=self.bpmn_file_name,
            process_model_source_directory="call_activity_nested",
        )
        bpmn_process_id_lookups = ReferenceCacheModel.basic_query().all()
        assert len(bpmn_process_id_lookups) == 1
        assert bpmn_process_id_lookups[0].identifier == "Level1"
        assert bpmn_process_id_lookups[0].generation_id == current_cache_generation.id

    @pytest.mark.skipif(
        sys.platform == "win32",
        reason="tmp file path is not valid xml for windows and it doesn't matter",
@@ -139,9 +139,7 @@ export interface ProcessReference {
  process_model_id: string;
  type: string; // either "decision" or "process"
  file_name: string;
  has_lanes: boolean;
  is_executable: boolean;
  is_primary: boolean;
  properties: any;
}

export type ObjectWithStringKeysAndValues = { [key: string]: string };