Merge pull request #36 from sartography/feature/call_activity_selection

Feature/call activity selection
Kevin Burnett 2022-11-15 15:51:40 +00:00 committed by GitHub
commit bad6d17f3a
18 changed files with 343 additions and 194 deletions

.gitignore

@@ -1 +1,2 @@
pyrightconfig.json
.idea/


@@ -72,8 +72,8 @@ this in the app.js file.
Below is a table of all the events that are sent and accepted:

| Event Name | Description | Fired or Accepted | Parameters | Description |
|--------------------------------|-------------------------------------------------------------------|----------|----------------------|--------------------------------------------------------------------------|
| spiff.service\_tasks.requested | Request a list of available services for a service task. | Fired | \- | |
| spiff.service\_tasks.returned | Provides a list of services. | Received | serviceTaskOperators | ex: \[{id:'Chuck Facts', parameters\[{id:'category', type:'string'}\]}\] |
| spiff.script.edit | Request to edit a python script in some sort of fancy editor. | Fired | scriptType | one of: script, preScript, postScript |
| | | | value | The actual python script |
@@ -87,8 +87,9 @@ Below is a table of all the events that are sent and accepted:
| spiff.markdown.update | Update Markdown content for a particular element's 'instructions'. | Received | element | The element that needs updating |
| | | | value | The updated Markdown content |
| spiff.callactivity.edit | Request to edit a call activity by process id. | Fired | processId | The process the user wants to edit |
| spiff.callactivity.search | Request to search for a call activity. | Fired | processId | The currently selected process id |
| | | | eventBus | For sending back the selected process id. |
| spiff.callactivity.update | Update the process id of a call activity (based on search). | Received | processId | The process the user wants to edit |
| spiff.file.edit | Request to edit a file, by file name. | Fired | value | The file name the user wants to edit |
| spiff.dmn.edit | Request to edit a DMN by process id. | Fired | value | The DMN id the user wants to edit |
| spiff.json\_files.requested | Request a list of local json files. | Fired | optionType | The type of options required ('json' or 'dmn') |


@@ -181,6 +181,16 @@ bpmnModeler.on('spiff.dmn_files.requested', (event) => {
  });
});

+// As call activities might reference processes across the system,
+// it should be possible to search for a particular call activity.
+bpmnModeler.on('spiff.callactivity.search', (event) => {
+  console.log("Firing call activity update")
+  event.eventBus.fire('spiff.callactivity.update', {
+    value: 'searched_bpmn_id',
+  });
+});
+
// This handles the download and upload buttons - it isn't specific to
// the BPMN modeler or these extensions, just a quick way to allow you to
// create and save files, so keeping it outside the example.


@@ -98,20 +98,28 @@ function CalledElementTextField(props) {
}

function FindProcessButton(props) {
-  const { element } = props;
+  const { element, commandStack } = props;
  const eventBus = useService('eventBus');
  return HeaderButton({
    id: 'spiffworkflow-search-call-activity-button',
    class: 'spiffworkflow-properties-panel-button',
    onClick: () => {
      const processId = getCalledElementValue(element);
-      eventBus.fire('spiff.callactivity.search', {
-        element,
-        processId,
-      });
-      // Listen for a response, to update the script.
+
+      // First, set up the listener, then fire the event, just
+      // in case we are testing and things are happening super fast.
      eventBus.once('spiff.callactivity.update', (response) => {
-        element.businessObject.calledElement = response.value;
+        commandStack.execute('element.updateModdleProperties', {
+          element,
+          moddleElement: element.businessObject,
+          properties: {
+            calledElement: response.value,
+          },
+        });
      });
+      eventBus.fire('spiff.callactivity.search', {
+        processId,
+        eventBus,
+      });
    },
    children: 'Search',


@@ -1,8 +1,8 @@
"""empty message

-Revision ID: 88c2d7081664
+Revision ID: 7cc9bdcc309f
Revises:
-Create Date: 2022-11-14 15:16:42.833331
+Create Date: 2022-11-15 09:53:53.349712

"""
from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa

# revision identifiers, used by Alembic.
-revision = '88c2d7081664'
+revision = '7cc9bdcc309f'
down_revision = None
branch_labels = None
depends_on = None
@ -18,15 +18,6 @@ depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
op.create_table('bpmn_process_id_lookup',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=True),
sa.Column('display_name', sa.String(length=255), nullable=True),
sa.Column('bpmn_file_relative_path', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_bpmn_process_id_lookup_bpmn_process_identifier'), 'bpmn_process_id_lookup', ['bpmn_process_identifier'], unique=True)
op.create_index(op.f('ix_bpmn_process_id_lookup_display_name'), 'bpmn_process_id_lookup', ['display_name'], unique=True)
op.create_table('group',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
@ -47,6 +38,22 @@ def upgrade():
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('uri')
)
op.create_table('spec_reference_cache',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('identifier', sa.String(length=255), nullable=True),
sa.Column('display_name', sa.String(length=255), nullable=True),
sa.Column('process_model_id', sa.String(length=255), nullable=True),
sa.Column('type', sa.String(length=255), nullable=True),
sa.Column('file_name', sa.String(length=255), nullable=True),
sa.Column('relative_path', sa.String(length=255), nullable=True),
sa.Column('has_lanes', sa.Boolean(), nullable=True),
sa.Column('is_executable', sa.Boolean(), nullable=True),
sa.Column('is_primary', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_spec_reference_cache_display_name'), 'spec_reference_cache', ['display_name'], unique=False)
op.create_index(op.f('ix_spec_reference_cache_identifier'), 'spec_reference_cache', ['identifier'], unique=True)
op.create_index(op.f('ix_spec_reference_cache_type'), 'spec_reference_cache', ['type'], unique=False)
op.create_table('spiff_logging',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
@ -302,12 +309,13 @@ def downgrade():
op.drop_table('message_correlation_property')
op.drop_table('user')
op.drop_table('spiff_logging')
op.drop_index(op.f('ix_spec_reference_cache_type'), table_name='spec_reference_cache')
op.drop_index(op.f('ix_spec_reference_cache_identifier'), table_name='spec_reference_cache')
op.drop_index(op.f('ix_spec_reference_cache_display_name'), table_name='spec_reference_cache')
op.drop_table('spec_reference_cache')
op.drop_table('permission_target')
op.drop_index(op.f('ix_message_model_name'), table_name='message_model')
op.drop_index(op.f('ix_message_model_identifier'), table_name='message_model')
op.drop_table('message_model')
op.drop_table('group')
op.drop_index(op.f('ix_bpmn_process_id_lookup_display_name'), table_name='bpmn_process_id_lookup')
op.drop_index(op.f('ix_bpmn_process_id_lookup_bpmn_process_identifier'), table_name='bpmn_process_id_lookup')
op.drop_table('bpmn_process_id_lookup')
# ### end Alembic commands ###


@@ -371,6 +371,23 @@ paths:
          application/json:
            schema:
              $ref: "#/components/schemas/OkTrue"
+  # process_model_list
+  /processes:
+    get:
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_list
+      summary: Return a list of all processes (not just the primary process of each
+        process model), useful for finding processes for call activities.
+      tags:
+        - Process Models
+      responses:
+        "200":
+          description: Successfully return the requested processes
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/Process"

  /process-instances:
    parameters:
@@ -1559,7 +1576,26 @@ components:
          type: string
          x-nullable: true
          example: Some Value
+    Process:
+      properties:
+        identifier:
+          type: string
+        display_name:
+          type: string
+        process_group_id:
+          type: string
+        process_model_id:
+          type: string
+        type:
+          type: string
+        file_name:
+          type: string
+        has_lanes:
+          type: boolean
+        is_executable:
+          type: boolean
+        is_primary:
+          type: boolean
    ProcessModel:
      properties:
        id:


@@ -18,8 +18,8 @@ from spiffworkflow_backend.models.principal import PrincipalModel  # noqa: F401
from spiffworkflow_backend.models.active_task import ActiveTaskModel  # noqa: F401
-from spiffworkflow_backend.models.bpmn_process_id_lookup import (
-    BpmnProcessIdLookup,
-)  # noqa: F401
+from spiffworkflow_backend.models.spec_reference import (
+    SpecReferenceCache,
+)  # noqa: F401
from spiffworkflow_backend.models.message_correlation_property import (
    MessageCorrelationPropertyModel,


@ -1,14 +0,0 @@
"""Message_model."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
class BpmnProcessIdLookup(SpiffworkflowBaseDBModel):
"""BpmnProcessIdLookup."""
__tablename__ = "bpmn_process_id_lookup"
id = db.Column(db.Integer, primary_key=True)
bpmn_process_identifier = db.Column(db.String(255), unique=True, index=True)
display_name = db.Column(db.String(255), unique=True, index=True)
bpmn_file_relative_path = db.Column(db.String(255))


@@ -9,6 +9,7 @@ from marshmallow import INCLUDE
from marshmallow import Schema

from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
+from spiffworkflow_backend.models.spec_reference import SpecReference


class FileType(SpiffEnum):
@@ -62,27 +63,6 @@ CONTENT_TYPES = {
}
@dataclass()
class FileReference:
"""File Reference Information.
Includes items such as the process id and name for a BPMN,
or the Decision id and Decision name for a DMN file. There may be more than
one reference that points to a particular file.
"""
id: str
name: str
type: str # can be 'process', 'decision', or just 'file'
file_name: str
file_path: str
has_lanes: bool
executable: bool
messages: dict
correlations: dict
start_messages: list
@dataclass(order=True)
class File:
    """File."""
@@ -94,7 +74,7 @@ class File:
    type: str
    last_modified: datetime
    size: int
-    references: Optional[list[FileReference]] = None
+    references: Optional[list[SpecReference]] = None
    file_contents: Optional[bytes] = None
    process_model_id: Optional[str] = None
    process_group_id: Optional[str] = None
@@ -147,16 +127,5 @@ class FileSchema(Schema):
    ]
    unknown = INCLUDE
    references = marshmallow.fields.List(
-        marshmallow.fields.Nested("FileReferenceSchema")
+        marshmallow.fields.Nested("SpecReferenceSchema")
    )
class FileReferenceSchema(Schema):
"""FileSchema."""
class Meta:
"""Meta."""
model = FileReference
fields = ["id", "name", "type"]
unknown = INCLUDE


@ -0,0 +1,84 @@
"""Message_model."""
from dataclasses import dataclass
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from flask_marshmallow import Schema # type: ignore
from marshmallow import INCLUDE
@dataclass()
class SpecReference:
"""File Reference Information.
Includes items such as the process id and name for a BPMN,
or the Decision id and Decision name for a DMN file. There may be more than
one reference that points to a particular file - if for instance, there are
three executable processes in a collaboration within a BPMN Diagram.
"""
identifier: str # The id of the process or decision. "Process_1234"
display_name: str # The name of the process or decision. "Invoice Submission"
process_model_id: str
type: str # can be 'process' or 'decision'
file_name: str # The name of the file where this process or decision is defined.
relative_path: str # The path to the file.
has_lanes: bool # If this is a process, whether it has lanes or not.
is_executable: bool # Whether this process or decision is designated as executable.
is_primary: bool # Whether this is the primary process of a process model
messages: dict # Any messages defined in the same file where this process is defined.
correlations: dict # Any correlations defined in the same file with this process.
start_messages: list # The names of any messages that would start this process.
class SpecReferenceCache(SpiffworkflowBaseDBModel):
"""A cache of information about all the Processes and Decisions defined in all files."""
__tablename__ = "spec_reference_cache"
id = db.Column(db.Integer, primary_key=True)
identifier = db.Column(db.String(255), unique=True, index=True)
display_name = db.Column(db.String(255), index=True)
process_model_id = db.Column(db.String(255))
type = db.Column(db.String(255), index=True) # either 'process' or 'decision'
file_name = db.Column(db.String(255))
relative_path = db.Column(db.String(255))
has_lanes = db.Column(db.Boolean())
is_executable = db.Column(db.Boolean()) # either 'process' or 'decision'
is_primary = db.Column(db.Boolean())
@classmethod
def from_spec_reference(cls, ref: SpecReference) -> "SpecReferenceCache":
"""From_spec_reference."""
return cls(
identifier=ref.identifier,
display_name=ref.display_name,
process_model_id=ref.process_model_id,
type=ref.type,
file_name=ref.file_name,
has_lanes=ref.has_lanes,
is_executable=ref.is_executable,
is_primary=ref.is_primary,
relative_path=ref.relative_path,
)
class SpecReferenceSchema(Schema): # type: ignore
"""FileSchema."""
class Meta:
"""Meta."""
model = SpecReference
fields = [
"identifier",
"display_name",
"process_group_id",
"process_model_id",
"type",
"file_name",
"has_lanes",
"is_executable",
"is_primary",
]
unknown = INCLUDE
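
To make the relationships in this new module concrete, here is a minimal sketch (not part of the commit) of how a SpecReference produced by the file parser becomes a cache row and an API payload. All field values below are hypothetical.

```python
from spiffworkflow_backend.models.spec_reference import (
    SpecReference,
    SpecReferenceCache,
    SpecReferenceSchema,
)

# Hypothetical reference, shaped like what SpecFileService.get_references_for_file returns.
ref = SpecReference(
    identifier="Process_Example",
    display_name="Example Process",
    process_model_id="example_group/example_model",
    type="process",
    file_name="example.bpmn",
    relative_path="example_group/example_model/example.bpmn",
    has_lanes=False,
    is_executable=True,
    is_primary=True,
    messages={},
    correlations={},
    start_messages=[],
)

cache_row = SpecReferenceCache.from_spec_reference(ref)  # row destined for the spec_reference_cache table
payload = SpecReferenceSchema().dump(ref)                # dict shaped like one entry of the /processes response
```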


@@ -59,6 +59,8 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.secret_model import SecretModel
from spiffworkflow_backend.models.secret_model import SecretModelSchema
+from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
+from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.user import UserModel
@@ -334,10 +336,19 @@ def process_model_list(
            "pages": pages,
        },
    }
    return Response(json.dumps(response_json), status=200, mimetype="application/json")
+
+
+def process_list() -> Any:
+    """Returns a list of all known processes.
+
+    This includes processes that are not the
+    primary process - helpful for finding possible call activities.
+    """
+    references = SpecReferenceCache.query.filter_by(type="process")
+    return SpecReferenceSchema(many=True).dump(references)
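
As a usage note, a client outside the test suite could exercise the new endpoint roughly like this; the base URL and the bearer token are assumptions about a local deployment, not something defined in this change.

```python
import requests

BACKEND_URL = "http://localhost:7000/v1.0"  # assumed local backend address
TOKEN = "replace-with-a-real-access-token"  # assumed; obtain one through the normal auth flow

response = requests.get(
    f"{BACKEND_URL}/processes",
    headers={"Authorization": f"Bearer {TOKEN}"},
)
response.raise_for_status()

# Each entry follows the Process schema added to api.yml above.
for process in response.json():
    print(process["identifier"], process["display_name"], process["process_model_id"])
```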
def get_file(modified_process_model_id: str, file_name: str) -> Any:
    """Get_file."""
    process_model_identifier = modified_process_model_id.replace(":", "/")


@@ -67,7 +67,6 @@ from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
-from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
from spiffworkflow_backend.models.file import File
from spiffworkflow_backend.models.file import FileType
from spiffworkflow_backend.models.group import GroupModel
@@ -86,6 +85,7 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.script_attributes_context import (
    ScriptAttributesContext,
)
+from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserModelSchema
@@ -671,10 +671,10 @@ class ProcessInstanceProcessor:
        return parser

    @staticmethod
-    def backfill_missing_bpmn_process_id_lookup_records(
+    def backfill_missing_spec_reference_records(
        bpmn_process_identifier: str,
    ) -> Optional[str]:
-        """Backfill_missing_bpmn_process_id_lookup_records."""
+        """Backfill_missing_spec_reference_records."""
        process_models = ProcessModelService().get_process_models()
        for process_model in process_models:
            refs = SpecFileService.reference_map(
@@ -696,18 +696,20 @@ class ProcessInstanceProcessor:
                "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None"
            )

-        bpmn_process_id_lookup = BpmnProcessIdLookup.query.filter_by(
-            bpmn_process_identifier=bpmn_process_identifier
+        spec_reference = SpecReferenceCache.query.filter_by(
+            identifier=bpmn_process_identifier
        ).first()
        bpmn_file_full_path = None
-        if bpmn_process_id_lookup is None:
-            bpmn_file_full_path = ProcessInstanceProcessor.backfill_missing_bpmn_process_id_lookup_records(
-                bpmn_process_identifier
-            )
+        if spec_reference is None:
+            bpmn_file_full_path = (
+                ProcessInstanceProcessor.backfill_missing_spec_reference_records(
+                    bpmn_process_identifier
+                )
+            )
        else:
            bpmn_file_full_path = os.path.join(
                FileSystemService.root_path(),
-                bpmn_process_id_lookup.bpmn_file_relative_path,
+                spec_reference.relative_path,
            )
        if bpmn_file_full_path is None:
            raise (


@@ -8,10 +8,9 @@ from typing import Optional
from flask_bpmn.models.db import db
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore

-from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
from spiffworkflow_backend.models.file import File
-from spiffworkflow_backend.models.file import FileReference
from spiffworkflow_backend.models.file import FileType
+from spiffworkflow_backend.models.file import SpecReference
from spiffworkflow_backend.models.message_correlation_property import (
    MessageCorrelationPropertyModel,
)
@@ -20,6 +19,7 @@ from spiffworkflow_backend.models.message_triggerable_process_model import (
    MessageTriggerableProcessModel,
)
from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.services.custom_parser import MyCustomParser
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@@ -54,17 +54,17 @@ class SpecFileService(FileSystemService):
        return files

    @staticmethod
-    def reference_map(references: list[FileReference]) -> dict[str, FileReference]:
+    def reference_map(references: list[SpecReference]) -> dict[str, SpecReference]:
        """Creates a dict with provided references organized by id."""
        ref_map = {}
        for ref in references:
-            ref_map[ref.id] = ref
+            ref_map[ref.identifier] = ref
        return ref_map

    @staticmethod
    def get_references_for_process(
        process_model_info: ProcessModelInfo,
-    ) -> list[FileReference]:
+    ) -> list[SpecReference]:
        """Get_references_for_process."""
        files = SpecFileService.get_files(process_model_info)
        references = []
@@ -77,7 +77,7 @@ class SpecFileService(FileSystemService):
    @staticmethod
    def get_references_for_file(
        file: File, process_model_info: ProcessModelInfo
-    ) -> list[FileReference]:
+    ) -> list[SpecReference]:
        """Uses spiffworkflow to parse BPMN and DMN files to determine how they can be externally referenced.

        Returns a list of Reference objects that contain the type of reference, the id, the name.
@@ -86,16 +86,15 @@ class SpecFileService(FileSystemService):
        name = {str} 'Level 3'
        type = {str} 'process' / 'decision'
        """
-        references: list[FileReference] = []
+        references: list[SpecReference] = []
        full_file_path = SpecFileService.full_file_path(process_model_info, file.name)
-        relative_file_path = os.path.join(
-            process_model_info.id_for_file_path(), file.name
-        )
+        file_path = os.path.join(process_model_info.id, file.name)
        parser = MyCustomParser()
        parser_type = None
        sub_parser = None
        has_lanes = False
-        executable = True
+        is_executable = True
+        is_primary = False
        messages = {}
        correlations = {}
        start_messages = []
@@ -114,18 +113,24 @@
        for sub_parser in sub_parsers:
            if parser_type == "process":
                has_lanes = sub_parser.has_lanes()
-                executable = sub_parser.process_executable
+                is_executable = sub_parser.process_executable
                start_messages = sub_parser.start_messages()
+                is_primary = (
+                    sub_parser.get_id() == process_model_info.primary_process_id
+                )
+
            references.append(
-                FileReference(
-                    id=sub_parser.get_id(),
-                    name=sub_parser.get_name(),
+                SpecReference(
+                    identifier=sub_parser.get_id(),
+                    display_name=sub_parser.get_name(),
+                    process_model_id=process_model_info.id,
                    type=parser_type,
                    file_name=file.name,
-                    file_path=relative_file_path,
+                    relative_path=file_path,
                    has_lanes=has_lanes,
-                    executable=executable,
+                    is_executable=is_executable,
                    messages=messages,
+                    is_primary=is_primary,
                    correlations=correlations,
                    start_messages=start_messages,
                )
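
For orientation, here is a rough sketch (not part of the diff) of driving the reference extraction shown above from Python. How a ProcessModelInfo is loaded -- the get_process_model call and the model id -- is an assumption and may differ in your setup.

```python
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService

# Assumed lookup helper and hypothetical id; substitute however you obtain a ProcessModelInfo.
process_model = ProcessModelService().get_process_model("example_group/example_model")

for file in SpecFileService.get_files(process_model):
    for ref in SpecFileService.get_references_for_file(file, process_model):
        # One SpecReference per process or decision found in the file.
        print(ref.type, ref.identifier, ref.display_name, ref.is_primary)
```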
@@ -150,33 +155,27 @@ class SpecFileService(FileSystemService):
        SpecFileService.write_file_data_to_system(full_file_path, binary_data)
        file = SpecFileService.to_file_object(file_name, full_file_path)
-        if file.type == FileType.bpmn.value:
-            if (
-                process_model_info.primary_file_name is None
-                or file_name == process_model_info.primary_file_name
-            ):
-                # If no primary process exists, make this primary process.
-                references = SpecFileService.get_references_for_file(
-                    file, process_model_info
-                )
-                for ref in references:
-                    if ref.type == "process":
-                        ProcessModelService().update_spec(
-                            process_model_info,
-                            {
-                                "primary_process_id": ref.id,
-                                "primary_file_name": file_name,
-                                "is_review": ref.has_lanes,
-                            },
-                        )
-                        SpecFileService.update_process_cache(ref)
-                        SpecFileService.update_message_cache(ref)
-                        SpecFileService.update_message_trigger_cache(
-                            ref, process_model_info
-                        )
-                        SpecFileService.update_correlation_cache(ref)
-                        break
+        references = SpecFileService.get_references_for_file(file, process_model_info)
+        primary_process_ref = next((ref for ref in references if ref.is_primary), None)
+        for ref in references:
+            # If no valid primary process is defined, default to the first process in the
+            # updated file.
+            if not primary_process_ref and ref.type == "process":
+                ref.is_primary = True
+            if ref.is_primary:
+                ProcessModelService().update_spec(
+                    process_model_info,
+                    {
+                        "primary_process_id": ref.identifier,
+                        "primary_file_name": file_name,
+                        "is_review": ref.has_lanes,
+                    },
+                )
+            SpecFileService.update_process_cache(ref)
+            SpecFileService.update_message_cache(ref)
+            SpecFileService.update_message_trigger_cache(ref, process_model_info)
+            SpecFileService.update_correlation_cache(ref)
        return file

    @staticmethod
@@ -229,37 +228,33 @@ class SpecFileService(FileSystemService):
    # fixme: Place all the caching stuff in a different service.

    @staticmethod
-    def update_process_cache(ref: FileReference) -> None:
+    def update_process_cache(ref: SpecReference) -> None:
        """Update_process_cache."""
-        process_id_lookup = BpmnProcessIdLookup.query.filter_by(
-            bpmn_process_identifier=ref.id
+        process_id_lookup = SpecReferenceCache.query.filter_by(
+            identifier=ref.identifier
        ).first()
        if process_id_lookup is None:
-            process_id_lookup = BpmnProcessIdLookup(
-                bpmn_process_identifier=ref.id,
-                display_name=ref.name,
-                bpmn_file_relative_path=ref.file_path,
-            )
+            process_id_lookup = SpecReferenceCache.from_spec_reference(ref)
            db.session.add(process_id_lookup)
        else:
-            if ref.file_path != process_id_lookup.bpmn_file_relative_path:
+            if ref.relative_path != process_id_lookup.relative_path:
                full_bpmn_file_path = SpecFileService.full_path_from_relative_path(
-                    process_id_lookup.bpmn_file_relative_path
+                    process_id_lookup.relative_path
                )
                # if the old relative bpmn file no longer exists, then assume things were moved around
                # on the file system. Otherwise, assume it is a duplicate process id and error.
                if os.path.isfile(full_bpmn_file_path):
                    raise ValidationException(
-                        f"Process id ({ref.id}) has already been used for "
-                        f"{process_id_lookup.bpmn_file_relative_path}. It cannot be reused."
+                        f"Process id ({ref.identifier}) has already been used for "
+                        f"{process_id_lookup.relative_path}. It cannot be reused."
                    )
                else:
-                    process_id_lookup.bpmn_file_relative_path = ref.file_path
+                    process_id_lookup.relative_path = ref.relative_path
                    db.session.add(process_id_lookup)
        db.session.commit()
    @staticmethod
-    def update_message_cache(ref: FileReference) -> None:
+    def update_message_cache(ref: SpecReference) -> None:
        """Assure we have a record in the database of all possible message ids and names."""
        for message_model_identifier in ref.messages.keys():
            message_model = MessageModel.query.filter_by(
@@ -275,7 +270,7 @@ class SpecFileService(FileSystemService):
    @staticmethod
    def update_message_trigger_cache(
-        ref: FileReference, process_model_info: ProcessModelInfo
+        ref: SpecReference, process_model_info: ProcessModelInfo
    ) -> None:
        """Assure we know which messages can trigger the start of a process."""
        for message_model_identifier in ref.start_messages:
@@ -313,7 +308,7 @@ class SpecFileService(FileSystemService):
            )

    @staticmethod
-    def update_correlation_cache(ref: FileReference) -> None:
+    def update_correlation_cache(ref: SpecReference) -> None:
        """Update_correlation_cache."""
        for correlation_identifier in ref.correlations.keys():
            correlation_property_retrieval_expressions = ref.correlations[


@@ -86,7 +86,7 @@ class ExampleDataLoader:
                references = SpecFileService.get_references_for_file(
                    file_info, spec
                )
-                spec.primary_process_id = references[0].id
+                spec.primary_process_id = references[0].identifier
                spec.primary_file_name = filename
                ProcessModelService().save_process_model(spec)
            finally:


@@ -25,6 +25,7 @@ from spiffworkflow_backend.models.process_instance_report import (
)
from spiffworkflow_backend.models.process_model import NotificationType
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
+from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.file_system_service import FileSystemService
@@ -439,6 +440,49 @@ class TestProcessApi(BaseTest):
        assert response.json["pagination"]["total"] == 5
        assert response.json["pagination"]["pages"] == 2
def test_process_list(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""It should be possible to get a list of all processes known to the system."""
load_test_spec(
"test_group_one/simple_form",
process_model_source_directory="simple_form",
bpmn_file_name="simple_form",
)
# When adding a process model with one Process, no decisions, and some json files, only one process is recorded.
assert len(SpecReferenceCache.query.all()) == 1
self.create_group_and_model_with_bpmn(
client=client,
user=with_super_admin_user,
process_group_id="test_group_two",
process_model_id="call_activity_nested",
bpmn_file_location="call_activity_nested",
)
# When adding a process model with 4 processes and a decision, 5 new records will be in the Cache
assert len(SpecReferenceCache.query.all()) == 6
# get the results
response = client.get(
"/v1.0/processes",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
# We should get 5 back, as one of the items in the cache is a decision.
assert len(response.json) == 5
simple_form = next(
p for p in response.json if p["identifier"] == "Proccess_WithForm"
)
assert simple_form["display_name"] == "Process With Form"
assert simple_form["process_model_id"] == "test_group_one/simple_form"
assert simple_form["has_lanes"] is False
assert simple_form["is_executable"] is True
assert simple_form["is_primary"] is True
    def test_process_group_add(
        self,
        app: Flask,


@@ -5,8 +5,8 @@ from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

-from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
@@ -116,7 +116,7 @@ class TestProcessModel(BaseTest):
        # delete all of the id lookup items to force to processor to find the correct
        # process model when running the process
-        db.session.query(BpmnProcessIdLookup).delete()
+        db.session.query(SpecReferenceCache).delete()
        db.session.commit()
        processor = ProcessInstanceProcessor(process_instance)
        processor.do_engine_steps(save=True)


@@ -9,7 +9,7 @@ from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

-from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
+from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
@@ -42,11 +42,11 @@ class TestSpecFileService(BaseTest):
            bpmn_file_name=self.bpmn_file_name,
            bpmn_file_location="call_activity_nested",
        )
-        bpmn_process_id_lookups = BpmnProcessIdLookup.query.all()
+        bpmn_process_id_lookups = SpecReferenceCache.query.all()
        assert len(bpmn_process_id_lookups) == 1
-        assert bpmn_process_id_lookups[0].bpmn_process_identifier == "Level1"
+        assert bpmn_process_id_lookups[0].identifier == "Level1"
        assert (
-            bpmn_process_id_lookups[0].bpmn_file_relative_path
+            bpmn_process_id_lookups[0].relative_path
            == self.call_activity_nested_relative_file_path
        )
@ -67,14 +67,11 @@ class TestSpecFileService(BaseTest):
bpmn_file_name=self.bpmn_file_name, bpmn_file_name=self.bpmn_file_name,
bpmn_file_location=self.process_model_id, bpmn_file_location=self.process_model_id,
) )
bpmn_process_id_lookups = BpmnProcessIdLookup.query.all() bpmn_process_id_lookups = SpecReferenceCache.query.all()
assert len(bpmn_process_id_lookups) == 1 assert len(bpmn_process_id_lookups) == 1
assert bpmn_process_id_lookups[0].identifier == bpmn_process_identifier
assert ( assert (
bpmn_process_id_lookups[0].bpmn_process_identifier bpmn_process_id_lookups[0].relative_path
== bpmn_process_identifier
)
assert (
bpmn_process_id_lookups[0].bpmn_file_relative_path
== self.call_activity_nested_relative_file_path == self.call_activity_nested_relative_file_path
) )
with pytest.raises(ValidationException) as exception: with pytest.raises(ValidationException) as exception:
@ -97,9 +94,9 @@ class TestSpecFileService(BaseTest):
) -> None: ) -> None:
"""Test_updates_relative_file_path_when_appropriate.""" """Test_updates_relative_file_path_when_appropriate."""
bpmn_process_identifier = "Level1" bpmn_process_identifier = "Level1"
process_id_lookup = BpmnProcessIdLookup( process_id_lookup = SpecReferenceCache(
bpmn_process_identifier=bpmn_process_identifier, identifier=bpmn_process_identifier,
bpmn_file_relative_path=self.call_activity_nested_relative_file_path, relative_path=self.call_activity_nested_relative_file_path,
) )
db.session.add(process_id_lookup) db.session.add(process_id_lookup)
db.session.commit() db.session.commit()
@ -113,14 +110,11 @@ class TestSpecFileService(BaseTest):
bpmn_file_location=self.process_model_id, bpmn_file_location=self.process_model_id,
) )
bpmn_process_id_lookups = BpmnProcessIdLookup.query.all() bpmn_process_id_lookups = SpecReferenceCache.query.all()
assert len(bpmn_process_id_lookups) == 1 assert len(bpmn_process_id_lookups) == 1
assert bpmn_process_id_lookups[0].identifier == bpmn_process_identifier
assert ( assert (
bpmn_process_id_lookups[0].bpmn_process_identifier bpmn_process_id_lookups[0].relative_path
== bpmn_process_identifier
)
assert (
bpmn_process_id_lookups[0].bpmn_file_relative_path
== self.call_activity_nested_relative_file_path == self.call_activity_nested_relative_file_path
) )
@@ -164,13 +158,13 @@ class TestSpecFileService(BaseTest):
        file = next(filter(lambda f: f.name == "call_activity_level_3.bpmn", files))
        ca_3 = SpecFileService.get_references_for_file(file, process_model_info)
        assert len(ca_3) == 1
-        assert ca_3[0].name == "Level 3"
-        assert ca_3[0].id == "Level3"
+        assert ca_3[0].display_name == "Level 3"
+        assert ca_3[0].identifier == "Level3"
        assert ca_3[0].type == "process"
        file = next(filter(lambda f: f.name == "level2c.dmn", files))
        dmn1 = SpecFileService.get_references_for_file(file, process_model_info)
        assert len(dmn1) == 1
-        assert dmn1[0].name == "Decision 1"
-        assert dmn1[0].id == "Decision_0vrtcmk"
+        assert dmn1[0].display_name == "Decision 1"
+        assert dmn1[0].identifier == "Decision_0vrtcmk"
        assert dmn1[0].type == "decision"


@@ -7485,7 +7485,7 @@
    },
    "node_modules/bpmn-js-spiffworkflow": {
      "version": "0.0.8",
-      "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#09fa713bb0bb1b9d4f97684afc46bc3711e11770",
+      "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#c90359945c98034c76a65fcbe8709f8ddeaf949a",
      "license": "MIT",
      "dependencies": {
        "inherits": "^2.0.4",
@@ -35755,7 +35755,7 @@
      }
    },
    "bpmn-js-spiffworkflow": {
-      "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#09fa713bb0bb1b9d4f97684afc46bc3711e11770",
+      "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#c90359945c98034c76a65fcbe8709f8ddeaf949a",
      "from": "bpmn-js-spiffworkflow@sartography/bpmn-js-spiffworkflow#main",
      "requires": {
        "inherits": "^2.0.4",