Merge pull request #206 from sartography/feature/store_boundary_events_properly
Feature/store boundary events properly
commit 91c980d6ba

@@ -1875,7 +1875,7 @@ test = ["pytest"]
[[package]]
name = "SpiffWorkflow"
version = "1.2.1"
description = ""
description = "A workflow framework and BPMN/DMN Processor"
category = "main"
optional = false
python-versions = "*"

@@ -1890,7 +1890,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "e1add839ddf2512f27cd0afe681ff3e0460d6f7a"
resolved_reference = "96ad2a2b060deb445c39374f065690023351de19"

[[package]]
name = "sqlalchemy"

@@ -29,7 +29,7 @@ flask-restful = "*"
werkzeug = "*"
SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
# SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"}
# SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
# SpiffWorkflow = {develop = true, path = "../../SpiffWorkflow" }
sentry-sdk = "^1.10"
sphinx-autoapi = "^2.0"
flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}

@@ -137,7 +137,8 @@ def process_instance_run(
raise e
except Exception as e:
ErrorHandlingService().handle_error(processor, e)
# fixme: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes
# FIXME: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes.
# we need to recurse through all last tasks if the last task is a call activity or subprocess.
task = processor.bpmn_process_instance.last_task
raise ApiError.from_task(
error_code="unknown_exception",
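
The FIXME above calls for drilling down into subprocesses so the error points at the task that actually failed. A rough sketch of that recursion, assuming the SpiffWorkflow workflow object exposes last_task and a subprocesses mapping keyed by the call-activity task id (a hypothetical helper, not part of this PR):

def deepest_last_task(bpmn_process_instance):
    # Walk from the top-level workflow's last_task down into any subprocess
    # (call activity) that owns it, returning the innermost last task.
    task = bpmn_process_instance.last_task
    if task is not None:
        subworkflow = bpmn_process_instance.subprocesses.get(task.id)
        if subworkflow is not None and subworkflow.last_task is not None:
            return deepest_last_task(subworkflow)
    return task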

@@ -26,6 +26,11 @@ from spiffworkflow_backend.models.process_instance_event import ProcessInstanceE
from spiffworkflow_backend.models.task import TaskModel # noqa: F401


class StartAndEndTimes(TypedDict):
start_in_seconds: Optional[float]
end_in_seconds: Optional[float]


class JsonDataDict(TypedDict):
hash: str
data: dict
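
For context, a minimal illustration of how these two TypedDicts are meant to be filled in (the values here are made up):

import time

# Per-task timing record; end_in_seconds stays None until the task finishes.
times: StartAndEndTimes = {"start_in_seconds": time.time(), "end_in_seconds": None}

# A JSON payload plus the hash it is stored under.
json_data: JsonDataDict = {"hash": "a1b2c3", "data": {"approved": True}}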

@@ -108,30 +113,46 @@ class TaskService:
self,
spiff_task: SpiffTask,
task_failed: bool = False,
start_and_end_times: Optional[StartAndEndTimes] = None,
) -> TaskModel:
(
new_bpmn_process,
task_model,
new_task_models,
new_json_data_dicts,
) = self.__class__.find_or_create_task_model_from_spiff_task(
spiff_task,
self.process_instance,
self.serializer,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
new_bpmn_process = None
if str(spiff_task.id) in self.task_models:
task_model = self.task_models[str(spiff_task.id)]
else:
(
new_bpmn_process,
task_model,
new_task_models,
new_json_data_dicts,
) = self.__class__.find_or_create_task_model_from_spiff_task(
spiff_task,
self.process_instance,
self.serializer,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
)
self.task_models.update(new_task_models)
self.json_data_dicts.update(new_json_data_dicts)

# we are not sure why task_model.bpmn_process can be None while task_model.bpmn_process_id actually has a valid value
bpmn_process = (
new_bpmn_process
or task_model.bpmn_process
or BpmnProcessModel.query.filter_by(id=task_model.bpmn_process_id).first()
)
bpmn_process = new_bpmn_process or task_model.bpmn_process

bpmn_process_json_data = self.__class__.update_task_data_on_bpmn_process(
bpmn_process, spiff_task.workflow.data
)
self.task_models.update(new_task_models)
self.json_data_dicts.update(new_json_data_dicts)
json_data_dict_list = self.__class__.update_task_model(task_model, spiff_task, self.serializer)
self.task_models[task_model.guid] = task_model
if bpmn_process_json_data is not None:
json_data_dict_list.append(bpmn_process_json_data)
self.update_json_data_dicts_using_list(json_data_dict_list, self.json_data_dicts)

if start_and_end_times:
task_model.start_in_seconds = start_and_end_times["start_in_seconds"]
task_model.end_in_seconds = start_and_end_times["end_in_seconds"]

if task_model.state == "COMPLETED" or task_failed:
event_type = ProcessInstanceEventType.task_completed.value
if task_failed:
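
Condensed, the new logic above boils down to a cache-first lookup plus an optional timestamp stamp. A self-contained sketch of that pattern (a plain dict stands in for the real TaskModel; this is illustrative, not the verbatim method):

from typing import Dict, Optional

def get_or_create_task_model(cache: Dict[str, dict], guid: str, start_and_end_times: Optional[StartAndEndTimes] = None) -> dict:
    # Reuse the model already built for this spiff task during this run, if any.
    task_model = cache.get(guid)
    if task_model is None:
        task_model = {"guid": guid, "start_in_seconds": None, "end_in_seconds": None}
        cache[guid] = task_model
    # Stamp timings captured by the engine-step delegate, when provided.
    if start_and_end_times:
        task_model["start_in_seconds"] = start_and_end_times["start_in_seconds"]
        task_model["end_in_seconds"] = start_and_end_times["end_in_seconds"]
    return task_model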

@@ -432,10 +453,11 @@ class TaskService:
spiff_task_guid = str(spiff_task.id)
if spiff_task_parent_guid in task_models:
parent_task_model = task_models[spiff_task_parent_guid]
new_parent_properties_json = copy.copy(parent_task_model.properties_json)
new_parent_properties_json["children"].remove(spiff_task_guid)
parent_task_model.properties_json = new_parent_properties_json
task_models[spiff_task_parent_guid] = parent_task_model
if spiff_task_guid in parent_task_model.properties_json["children"]:
new_parent_properties_json = copy.copy(parent_task_model.properties_json)
new_parent_properties_json["children"].remove(spiff_task_guid)
parent_task_model.properties_json = new_parent_properties_json
task_models[spiff_task_parent_guid] = parent_task_model

@classmethod
def update_task_data_on_bpmn_process(

@@ -1,6 +1,8 @@
import time
from typing import Callable
from typing import Optional
from typing import Set
from uuid import UUID

from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore

@@ -15,12 +17,12 @@ from spiffworkflow_backend.models.message_instance_correlation import (
MessageInstanceCorrelationRuleModel,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.task import TaskModel
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel # noqa: F401
from spiffworkflow_backend.services.assertion_service import safe_assertion
from spiffworkflow_backend.services.process_instance_lock_service import (
ProcessInstanceLockService,
)
from spiffworkflow_backend.services.task_service import StartAndEndTimes
from spiffworkflow_backend.services.task_service import TaskService

@@ -58,10 +60,11 @@ class TaskModelSavingDelegate(EngineStepDelegate):
self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings
self.serializer = serializer

self.current_task_model: Optional[TaskModel] = None
self.current_task_start_in_seconds: Optional[float] = None

self.last_completed_spiff_task: Optional[SpiffTask] = None
self.spiff_tasks_to_process: Set[UUID] = set()
self.spiff_task_timestamps: dict[UUID, StartAndEndTimes] = {}

self.task_service = TaskService(
process_instance=self.process_instance,

@@ -71,18 +74,29 @@ class TaskModelSavingDelegate(EngineStepDelegate):

def will_complete_task(self, spiff_task: SpiffTask) -> None:
if self._should_update_task_model():
self.current_task_start_in_seconds = time.time()
self.spiff_task_timestamps[spiff_task.id] = {"start_in_seconds": time.time(), "end_in_seconds": None}
spiff_task.task_spec._predict(spiff_task, mask=TaskState.NOT_FINISHED_MASK)

self.current_task_start_in_seconds = time.time()

if self.secondary_engine_step_delegate:
self.secondary_engine_step_delegate.will_complete_task(spiff_task)

def did_complete_task(self, spiff_task: SpiffTask) -> None:
if self._should_update_task_model():
# NOTE: used with process-all-tasks and process-children-of-last-task
task_model = self.task_service.update_task_model_with_spiff_task(spiff_task)
if self.current_task_start_in_seconds is None:
raise Exception("Could not find cached current_task_start_in_seconds. This should never have happend")
task_model.start_in_seconds = self.current_task_start_in_seconds
task_model.end_in_seconds = time.time()

# # NOTE: used with process-spiff-tasks-list
# self.spiff_task_timestamps[spiff_task.id]['end_in_seconds'] = time.time()
# self.spiff_tasks_to_process.add(spiff_task.id)
# self._add_children(spiff_task)
# # self._add_parents(spiff_task)

self.last_completed_spiff_task = spiff_task
if self.secondary_engine_step_delegate:
self.secondary_engine_step_delegate.did_complete_task(spiff_task)
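
Taken together, will_complete_task and did_complete_task act as a timing bracket around each engine step. A stripped-down sketch of that idea, using only the delegate methods and the StartAndEndTimes type shown in this diff (the class name here is hypothetical):

import time
from typing import Dict
from uuid import UUID

from spiffworkflow_backend.services.task_service import StartAndEndTimes

class TaskTimingRecorder:
    """Record wall-clock start/end times keyed by spiff task id."""

    def __init__(self) -> None:
        self.timestamps: Dict[UUID, StartAndEndTimes] = {}

    def will_complete_task(self, spiff_task) -> None:
        # Called just before the engine runs the task.
        self.timestamps[spiff_task.id] = {"start_in_seconds": time.time(), "end_in_seconds": None}

    def did_complete_task(self, spiff_task) -> None:
        # Called right after the task finishes.
        self.timestamps[spiff_task.id]["end_in_seconds"] = time.time()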

@@ -101,11 +115,66 @@ class TaskModelSavingDelegate(EngineStepDelegate):
self.secondary_engine_step_delegate.save(bpmn_process_instance, commit=False)
db.session.commit()

def _add_children(self, spiff_task: SpiffTask) -> None:
for child_spiff_task in spiff_task.children:
self.spiff_tasks_to_process.add(child_spiff_task.id)
self._add_children(child_spiff_task)

def _add_parents(self, spiff_task: SpiffTask) -> None:
if spiff_task.parent and spiff_task.parent.task_spec.name != "Root":
self.spiff_tasks_to_process.add(spiff_task.parent.id)
self._add_parents(spiff_task.parent)

def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None:
if self._should_update_task_model():
if self.last_completed_spiff_task is not None:
self.task_service.process_spiff_task_parent_subprocess_tasks(self.last_completed_spiff_task)
self.task_service.process_spiff_task_children(self.last_completed_spiff_task)
# NOTE: process-all-tasks: All tests pass with this but it's less efficient and would be nice to replace
# excludes COMPLETED. the others were required to get PP1 to go to completion.
# process FUTURE tasks because Boundary events are not processed otherwise.
for waiting_spiff_task in bpmn_process_instance.get_tasks(
TaskState.WAITING
| TaskState.CANCELLED
| TaskState.READY
| TaskState.MAYBE
| TaskState.LIKELY
| TaskState.FUTURE
):
if waiting_spiff_task._has_state(TaskState.PREDICTED_MASK):
TaskService.remove_spiff_task_from_parent(waiting_spiff_task, self.task_service.task_models)
continue
self.task_service.update_task_model_with_spiff_task(waiting_spiff_task)
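
One detail worth noting about the sweep above: SpiffWorkflow's TaskState values are bit flags, so several states can be OR-ed into a single mask for one get_tasks() call, and FUTURE is included because boundary events attached to not-yet-reached activities would otherwise never be persisted. Illustrative only:

from SpiffWorkflow.task import TaskState  # type: ignore

# The combined mask used in the sweep above: everything except COMPLETED that
# the delegate still wants to persist, including FUTURE for boundary events.
not_completed_mask = (
    TaskState.WAITING
    | TaskState.CANCELLED
    | TaskState.READY
    | TaskState.MAYBE
    | TaskState.LIKELY
    | TaskState.FUTURE
)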

# # NOTE: process-spiff-tasks-list: this would be the ideal way to handle all tasks
# # but we're missing something with it yet
# #
# # adding from line here until we are ready to go with this
# from SpiffWorkflow.exceptions import TaskNotFoundException
# for spiff_task_uuid in self.spiff_tasks_to_process:
# try:
# waiting_spiff_task = bpmn_process_instance.get_task_from_id(spiff_task_uuid)
# except TaskNotFoundException:
# continue
#
# # include PREDICTED_MASK tasks in list so we can remove them from the parent
# if waiting_spiff_task._has_state(TaskState.PREDICTED_MASK):
# TaskService.remove_spiff_task_from_parent(waiting_spiff_task, self.task_service.task_models)
# for cpt in waiting_spiff_task.parent.children:
# if cpt.id == waiting_spiff_task.id:
# waiting_spiff_task.parent.children.remove(cpt)
# continue
# # if waiting_spiff_task.state == TaskState.FUTURE:
# # continue
# start_and_end_times = None
# if waiting_spiff_task.id in self.spiff_task_timestamps:
# start_and_end_times = self.spiff_task_timestamps[waiting_spiff_task.id]
# self.task_service.update_task_model_with_spiff_task(waiting_spiff_task, start_and_end_times=start_and_end_times)
#
# if self.last_completed_spiff_task is not None:
# self.task_service.process_spiff_task_parent_subprocess_tasks(self.last_completed_spiff_task)

# # NOTE: process-children-of-last-task: this does not work with escalation boundary events
# if self.last_completed_spiff_task is not None:
# self.task_service.process_spiff_task_children(self.last_completed_spiff_task)
# self.task_service.process_spiff_task_parent_subprocess_tasks(self.last_completed_spiff_task)

def _should_update_task_model(self) -> bool:
"""We need to figure out if we have previously save task info on this process intance.

@@ -257,6 +326,8 @@ class WorkflowExecutionService:
if bpmn_process is not None:
bpmn_process_correlations = self.bpmn_process_instance.correlations
bpmn_process.properties_json["correlations"] = bpmn_process_correlations
# update correlations correctly but always null out bpmn_messages since they get cleared out later
bpmn_process.properties_json["bpmn_messages"] = []
db.session.add(bpmn_process)

db.session.commit()

@@ -50,6 +50,9 @@
<bpmn:outgoing>Flow_089aeua</bpmn:outgoing>
<bpmn:script>set_in_test_process_to_call_script = 1</bpmn:script>
</bpmn:scriptTask>
<bpmn:boundaryEvent id="our_boundary_event" name="our_boundary_event" attachedToRef="test_process_to_call_script">
<bpmn:escalationEventDefinition id="EscalationEventDefinition_1bs7saf" />
</bpmn:boundaryEvent>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="test_process_to_call">

@@ -66,6 +69,12 @@
<dc:Bounds x="450" y="110" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0wfnf83_di" bpmnElement="our_boundary_event">
<dc:Bounds x="492" y="172" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="467" y="215" width="87" height="27" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1qsx5et_di" bpmnElement="Flow_1qsx5et">
<di:waypoint x="198" y="150" />
<di:waypoint x="270" y="150" />

@@ -547,6 +547,11 @@ class TestProcessInstanceProcessor(BaseTest):
all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks()
assert len(all_spiff_tasks) > 1
for spiff_task in all_spiff_tasks:
if spiff_task.task_spec.name == "our_boundary_event":
assert spiff_task.state == TaskState.CANCELLED
spiff_tasks_checked.append(spiff_task.task_spec.name)
continue

assert spiff_task.state == TaskState.COMPLETED
assert_spiff_task_is_in_process(spiff_task)

@@ -558,6 +563,7 @@ class TestProcessInstanceProcessor(BaseTest):
assert bpmn_process_definition is not None
assert bpmn_process_definition.bpmn_identifier == "test_process_to_call"
assert bpmn_process_definition.bpmn_name == "Test Process To Call"
spiff_tasks_checked.append(spiff_task.task_spec.name)

# Check that the direct parent of the called activity subprocess task is the
# name of the process that was called from the activity.

@@ -575,8 +581,14 @@ class TestProcessInstanceProcessor(BaseTest):
).first()
assert direct_parent_process is not None
assert direct_parent_process.bpmn_process_definition.bpmn_identifier == "test_process_to_call"
spiff_tasks_checked.append(spiff_task.task_spec.name)

for task_bpmn_identifier in expected_task_data.keys():
expected_task_identifiers = list(expected_task_data.keys()) + [
"our_boundary_event",
"test_process_to_call_subprocess_script",
"top_level_call_activity",
]
for task_bpmn_identifier in expected_task_identifiers:
message = (
f"Expected to have seen a task with a bpmn_identifier of {task_bpmn_identifier} but did not. "
f"Only saw {sorted(spiff_tasks_checked)}"

@@ -1,7 +1,5 @@
{
"project_id": "18606",
"SPIFFWORKFLOW_FRONTEND_USERNAME": "core-a1.contributor",
"SPIFFWORKFLOW_FRONTEND_PASSWORD": "core-a1.contributor",
"requestor_username": "core-a1.contributor",
"requestor_password": "core-a1.contributor",
"budgetowner_username": "fluffy.project-lead",

@@ -15,6 +13,5 @@
"infrasme_username": "infra-a1.sme",
"infrasme_password": "infra-a1.sme",
"legalsme_username": "legal-a1.sme",
"legalsme_password": "legal-a1.sme",
"SPIFFWORKFLOW_FRONTEND_AUTH_WITH_KEYCLOAK": true
"legalsme_password": "legal-a1.sme"
}

@@ -1,9 +1,13 @@
const approveWithUser = (
username,
processInstanceId,
expectAdditionalApprovalInfoPage = false
expectAdditionalApprovalInfoPage = false,
password = null
) => {
cy.login(username, username);
if (!password) {
password = username;
}
cy.login(username, password);
cy.visit('/admin/process-instances/find-by-id');
cy.get('#process-instance-id-input').type(processInstanceId);
cy.get('button')

@@ -33,22 +37,23 @@ const approveWithUser = (
describe('pp1', () => {
it('can run PP1', () => {
cy.login('core-a1.contributor', 'core-a1.contributor');
// cy.login('sasha', 'sasha');
cy.visit('/');
cy.contains('Start New +').click();
cy.contains('Raise New Demand Request');
cy.contains('New Demand Request - Procurement').click();
cy.runPrimaryBpmnFile(true);
cy.contains('Please select the type of request to start the process.');
// wait a second to ensure we can click the radio button
cy.wait(2000);
cy.get('input#root-procurement').click();
cy.wait(2000);
cy.get('button')
.contains(/^Submit$/)
.click();
cy.contains(
'Submit a new demand request for the procurement of needed items',
{ timeout: 60000 }
);
// cy.contains('Please select the type of request to start the process.');
// // wait a second to ensure we can click the radio button
// cy.wait(2000);
// cy.get('input#root-procurement').click();
// cy.wait(2000);
// cy.get('button')
// .contains(/^Submit$/)
// .click();
// cy.contains(
// 'Submit a new demand request for the procurement of needed items',
// { timeout: 60000 }
// );

cy.url().then((currentUrl) => {
// if url is "/tasks/8/d37c2f0f-016a-4066-b669-e0925b759560"

@@ -64,17 +69,17 @@ describe('pp1', () => {
cy.get('#root_payment_method').select('Bank Transfer');
cy.get('#root_project').select('18564');
cy.get('#root_category').select('soft_and_lic');
cy.get('button')
.contains(/^Submit$/)
.click();

cy.contains('Task: Enter NDR-P Items', { timeout: 60000 });
cy.get('#root_0_sub_category').select('op_src');
cy.get('#root_0_item').clear().type('spiffworkflow');
cy.get('#root_0_qty').clear().type('1');
cy.get('#root_0_currency_type').select('Fiat');
cy.get('#root_0_currency').select('AUD');
cy.get('#root_0_unit_price').type('100');
// cy.get('button')
// .contains(/^Submit$/)
// .click();
//
// cy.contains('Task: Enter NDR-P Items', { timeout: 60000 });
cy.get('#root_item_0_sub_category').select('op_src');
cy.get('#root_item_0_item_name').clear().type('spiffworkflow');
cy.get('#root_item_0_qty').clear().type('1');
cy.get('#root_item_0_currency_type').select('Fiat');
cy.get('#root_item_0_currency').select('AUD');
cy.get('#root_item_0_unit_price').type('100');
cy.get('button')
.contains(/^Submit$/)
.click();

@@ -94,7 +99,8 @@ describe('pp1', () => {
approveWithUser(
'infra.project-lead',
processInstanceId,
'Task: Reminder: Request Additional Budget'
'Task: Reminder: Request Additional Budget',
'infra.project-leadx'
);
approveWithUser('ppg.ba-a1.sme', processInstanceId);
approveWithUser('security-a1.sme', processInstanceId);

@@ -98,14 +98,13 @@ Cypress.Commands.add('createModel', (groupId, modelId, modelDisplayName) => {
cy.contains(`Process Model: ${modelDisplayName}`);
});

// Intended to be run from the process model show page
Cypress.Commands.add(
'runPrimaryBpmnFile',
(expectAutoRedirectToHumanTask = false) => {
// cy.getBySel('start-process-instance').click();
// click on button with text Start
//cy.get('button')
// cy.get('#process-model-tile-manage-procurement\\/procurement\\/requisition-order-management\\/new-demand-request-procurement > div > button')
cy.get('#process-model-tile-manage-procurement\\/procurement\\/requisition-order-management\\/raise-new-demand-request > div > button')
cy.get('button')
.contains(/^Start$/)
.click();
if (expectAutoRedirectToHumanTask) {