mirror of https://github.com/sartography/spiff-arena.git
synced 2025-01-12 02:24:15 +00:00

commit 92d77f3a51 (parent 2338bbec55)

    allow getting all process models, process instances should not save when they are initialized, and fixed some cypress tests w/ burnettk
@@ -67,9 +67,9 @@ def start_scheduler(
         seconds=10,
     )
     scheduler.add_job(
-        BackgroundProcessingService(app).run,
+        BackgroundProcessingService(app).process_waiting_process_instances,
         "interval",
-        seconds=30,
+        seconds=10,
     )
     scheduler.start()
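Note: the hunk above points the interval job at the renamed method and runs it every 10 seconds instead of every 30. A minimal, self-contained sketch of the same APScheduler interval pattern, with a hypothetical job function standing in for the real service call:

from apscheduler.schedulers.background import BackgroundScheduler


def poll_waiting_work() -> None:
    # hypothetical stand-in for BackgroundProcessingService(app).process_waiting_process_instances
    print("checking for waiting process instances")


scheduler = BackgroundScheduler()
# fire the callable every 10 seconds, matching the interval chosen in the diff
scheduler.add_job(poll_waiting_work, "interval", seconds=10)
scheduler.start()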
@@ -1,5 +1,4 @@
 """Acceptance_test_fixtures."""
-import json
 import time

 from flask import current_app
@@ -8,13 +7,15 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest

 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.services.process_instance_service import (
+    ProcessInstanceService,
+)


 def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:
     """Load_fixtures."""
     current_app.logger.debug("load_acceptance_test_fixtures() start")
-    test_process_group_id = ""
-    test_process_model_id = "acceptance-tests-group-one/acceptance-tests-model-1"
+    test_process_model_id = "misc/acceptance-tests-group-one/acceptance-tests-model-1"
     user = BaseTest.find_or_create_user()
     statuses = ProcessInstanceStatus.list()
     current_time = round(time.time())
@@ -28,16 +29,13 @@ def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:
     # suspended - 6 hours ago
     process_instances = []
     for i in range(len(statuses)):
-        process_instance = ProcessInstanceModel(
-            status=statuses[i],
-            process_initiator=user,
-            process_model_identifier=test_process_model_id,
-            process_group_identifier=test_process_group_id,
-            updated_at_in_seconds=round(time.time()),
-            start_in_seconds=current_time - (3600 * i),
-            end_in_seconds=current_time - (3600 * i - 20),
-            bpmn_json=json.dumps({"i": i}),
+        process_instance = ProcessInstanceService.create_process_instance(
+            test_process_model_id, user
         )
+        process_instance.status = statuses[i]
+        process_instance.start_in_seconds = current_time - (3600 * i)
+        process_instance.end_in_seconds = current_time - (3600 * i - 20)
         db.session.add(process_instance)
         process_instances.append(process_instance)
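Note: the fixture loader now creates instances through ProcessInstanceService.create_process_instance and then back-dates each one, one hour apart per status, instead of constructing ProcessInstanceModel rows directly. A small sketch of that back-dating arithmetic, using an illustrative dataclass rather than the real model:

import time
from dataclasses import dataclass


@dataclass
class FakeInstance:  # illustrative stand-in for ProcessInstanceModel
    status: str
    start_in_seconds: int
    end_in_seconds: int


statuses = ["not_started", "waiting", "complete"]  # illustrative status names
current_time = round(time.time())

instances = []
for i, status in enumerate(statuses):
    instances.append(
        FakeInstance(
            status=status,
            # each successive instance starts one hour earlier than the last
            start_in_seconds=current_time - (3600 * i),
            # and ends 20 seconds after it starts
            end_in_seconds=current_time - (3600 * i - 20),
        )
    )

print(instances)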
@@ -14,7 +14,7 @@ class BackgroundProcessingService:
         """__init__."""
         self.app = app

-    def run(self) -> None:
+    def process_waiting_process_instances(self) -> None:
         """Since this runs in a scheduler, we need to specify the app context as well."""
         with self.app.app_context():
             ProcessInstanceService.do_waiting()
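Note: because process_waiting_process_instances is invoked from a scheduler thread rather than a request, it has to push a Flask application context before touching anything app-bound. A minimal sketch of that pattern with a hypothetical job body:

from flask import Flask

app = Flask(__name__)


def scheduled_job() -> None:
    # scheduler threads have no request or app context, so push one explicitly
    with app.app_context():
        app.logger.info("processing waiting process instances")


scheduled_job()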
@@ -359,21 +359,8 @@ class ProcessInstanceProcessor:
                 subprocesses=subprocesses,
             )
             self.bpmn_process_instance.script_engine = self._script_engine

             self.add_user_info_to_process_instance(self.bpmn_process_instance)
-
-            if self.PROCESS_INSTANCE_ID_KEY not in self.bpmn_process_instance.data:
-                if not process_instance_model.id:
-                    db.session.add(process_instance_model)
-                    # If the model is new, and has no id, save it, write it into the process_instance model
-                    # and save it again. In this way, the workflow process is always aware of the
-                    # database model to which it is associated, and scripts running within the model
-                    # can then load data as needed.
-                self.bpmn_process_instance.data[
-                    ProcessInstanceProcessor.PROCESS_INSTANCE_ID_KEY
-                ] = process_instance_model.id
-                self.save()
-
         except MissingSpecError as ke:
             raise ApiError(
                 error_code="unexpected_process_instance_structure",
@@ -323,14 +323,9 @@ class ProcessInstanceService:
         """Serialize_flat_with_task_data."""
         results = {}
         try:
-            original_status = process_instance.status
             processor = ProcessInstanceProcessor(process_instance)
             process_instance.data = processor.get_current_data()
             results = process_instance.serialized_flat
-            # this process seems to mutate the status of the process_instance which
-            # can result in different results than expected from process_instance_list,
-            # so set the status back to the expected value
-            results["status"] = original_status
         except ApiError:
             results = process_instance.serialized
         return results
@@ -2,6 +2,7 @@
 import json
 import os
 import shutil
+from glob import glob
 from typing import Any
 from typing import List
 from typing import Optional
@@ -165,17 +166,20 @@ class ProcessModelService(FileSystemService):
         self, process_group_id: Optional[str] = None
     ) -> List[ProcessModelInfo]:
         """Get process models."""
-        process_groups = []
-        if process_group_id is None:
-            process_groups = self.get_process_groups()
-        else:
-            process_group = self.get_process_group(process_group_id)
-            if process_group is not None:
-                process_groups.append(process_group)
-
         process_models = []
-        for process_group in process_groups:
-            process_models.extend(process_group.process_models)
+        root_path = FileSystemService.root_path()
+        if process_group_id:
+            awesome_id = process_group_id.replace("/", os.sep)
+            root_path = os.path.join(root_path, awesome_id)
+        process_model_glob = os.path.join(root_path, "**", "process_model.json")
+        for file in glob(process_model_glob, recursive=True):
+            process_model_relative_path = os.path.relpath(
+                file, start=FileSystemService.root_path()
+            )
+            process_model = self.get_process_model_from_relative_path(
+                os.path.dirname(process_model_relative_path)
+            )
+            process_models.append(process_model)
         process_models.sort()
         return process_models
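Note: get_process_models now discovers models by recursively globbing for process_model.json files under the storage root (optionally scoped to one group) instead of walking the in-memory process group tree. A sketch of the same recursive-glob technique against a hypothetical directory:

import os
from glob import glob

root_path = "/tmp/process_models"  # hypothetical root; the service uses FileSystemService.root_path()
process_group_id = "misc/acceptance-tests-group-one"  # optional filter; may be None

search_path = root_path
if process_group_id:
    # narrow the search to one group's directory
    search_path = os.path.join(root_path, process_group_id.replace("/", os.sep))

# match every process_model.json at any depth below the search path
pattern = os.path.join(search_path, "**", "process_model.json")
for match in glob(pattern, recursive=True):
    # a model's identifier is its directory path relative to the root
    print(os.path.dirname(os.path.relpath(match, start=root_path)))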
@@ -355,6 +355,41 @@ class TestProcessApi(BaseTest):
         assert response.json["primary_process_id"] == "superduper"
         assert response.json["is_review"] is False

+    def test_process_model_list_all(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        """Test_process_model_list_all."""
+        group_id = "test_group/test_sub_group"
+        self.create_process_group(client, with_super_admin_user, group_id)
+
+        # add 5 models to the group
+        for i in range(5):
+            process_model_identifier = f"{group_id}/test_model_{i}"
+            model_display_name = f"Test Model {i}"
+            model_description = f"Test Model {i} Description"
+            self.create_process_model_with_api(
+                client,
+                process_model_id=process_model_identifier,
+                process_model_display_name=model_display_name,
+                process_model_description=model_description,
+                user=with_super_admin_user,
+            )
+
+        # get all models
+        response = client.get(
+            "/v1.0/process-models?per_page=1000",
+            headers=self.logged_in_headers(with_super_admin_user),
+        )
+        assert response.json is not None
+        assert len(response.json["results"]) == 5
+        assert response.json["pagination"]["count"] == 5
+        assert response.json["pagination"]["total"] == 5
+        assert response.json["pagination"]["pages"] == 1
+
     def test_process_model_list(
         self,
         app: Flask,
@@ -53,9 +53,9 @@ const updateBpmnPythonScriptWithMonaco = (
   cy.get('.monaco-editor textarea:first')
     .click()
     .focused() // change subject to currently focused element
-    // .type('{ctrl}a') // had been doing it this way, but it turns out to be flaky relative to clear()
     .clear()
-    .type(pythonScript, { delay: 30 });
+    // long delay to ensure cypress isn't competing with monaco auto complete stuff
+    .type(pythonScript, { delay: 120 });

   cy.contains('Close').click();
   // wait for a little bit for the xml to get set before saving
@@ -93,6 +93,8 @@ Cypress.Commands.add(
   'navigateToProcessModel',
   (groupDisplayName, modelDisplayName, modelIdentifier) => {
     cy.navigateToAdmin();
+    cy.contains('Misc').click();
+    cy.contains(`Process Group: Misc`);
     cy.contains(groupDisplayName).click();
     cy.contains(`Process Group: ${groupDisplayName}`);
     // https://stackoverflow.com/q/51254946/6090676
@@ -111,6 +111,7 @@ export default function ProcessGroupShow() {
         ]}
       />
       <h1>Process Group: {processGroup.display_name}</h1>
+      <p className="process-description">{processGroup.description}</p>
       <ul>
         <Stack orientation="horizontal" gap={3}>
           <Can I="POST" a={targetUris.processGroupListPath} ability={ability}>
@@ -7,6 +7,7 @@ import {
   TrashCan,
   Favorite,
   Edit,
+  ArrowRight,
   // @ts-ignore
 } from '@carbon/icons-react';
 import {
@@ -514,6 +515,35 @@ export default function ProcessModelShow() {
     );
   };

+  const processInstanceListTableButton = () => {
+    if (processModel) {
+      return (
+        <Grid fullWidth>
+          <Column
+            sm={{ span: 1, offset: 3 }}
+            md={{ span: 1, offset: 7 }}
+            lg={{ span: 1, offset: 15 }}
+          >
+            <Button
+              data-qa="process-instance-list-link"
+              kind="ghost"
+              renderIcon={ArrowRight}
+              iconDescription="Go to Filterable List"
+              hasIconOnly
+              size="lg"
+              onClick={() =>
+                navigate(
+                  `/admin/process-instances?process_model_identifier=${processModel.id}`
+                )
+              }
+            />
+          </Column>
+        </Grid>
+      );
+    }
+    return null;
+  };
+
   if (processModel) {
     return (
       <>
@@ -571,6 +601,7 @@ export default function ProcessModelShow() {
         {processInstanceRunResultTag()}
         <br />
         <Can I="GET" a={targetUris.processInstanceListPath} ability={ability}>
+          {processInstanceListTableButton()}
           <ProcessInstanceListTable
             filtersEnabled={false}
             processModelFullIdentifier={processModel.id}