Merge pull request #33 from sartography/feature/task_page
Feature/task page
commit eeaa38fbd0
@@ -901,7 +901,7 @@ paths:
                 items:
                   $ref: "#/components/schemas/Task"
 
-  /tasks/for-processes-started-by-others:
+  /tasks/for-me:
     parameters:
       - name: page
         in: query
@@ -918,7 +918,36 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_processes_started_by_others
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_me
       summary: returns the list of tasks for given user's open process instances
       responses:
         "200":
+          description: list of tasks
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/Task"
+
+  /tasks/for-my-groups:
+    parameters:
+      - name: page
+        in: query
+        required: false
+        description: The page number to return. Defaults to page 1.
+        schema:
+          type: integer
+      - name: per_page
+        in: query
+        required: false
+        description: The page number to return. Defaults to page 1.
+        schema:
+          type: integer
+    get:
+      tags:
+        - Process Instances
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_my_groups
+      summary: returns the list of tasks for given user's open process instances
+      responses:
+        "200":
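
Both new endpoints take the same page/per_page query parameters, so a client only varies the path. A minimal client sketch in Python — the base URL, port, and bearer token below are illustrative assumptions, not part of this diff:

# Hypothetical client for the task-list endpoints above.
import requests

BASE_URL = "http://localhost:7000/v1.0"  # assumed backend location
HEADERS = {"Authorization": "Bearer some-access-token"}  # assumed auth scheme

def get_tasks_page(path: str, page: int = 1, per_page: int = 100) -> dict:
    """Fetch one page from a task-list endpoint and return the parsed JSON."""
    response = requests.get(
        f"{BASE_URL}{path}",
        params={"page": page, "per_page": per_page},
        headers=HEADERS,
    )
    response.raise_for_status()
    return response.json()

# Tasks waiting for me in processes started by others, first page of 5:
tasks_for_me = get_tasks_page("/tasks/for-me", page=1, per_page=5)
# Tasks assigned to lanes (groups) I belong to:
tasks_for_my_groups = get_tasks_page("/tasks/for-my-groups")

The frontend components later in this PR read a results/pagination envelope from these endpoints, which is why the helper returns the parsed JSON unmodified rather than assuming a bare array.
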
@@ -82,6 +82,7 @@ class FileReference:
     correlations: dict
+    start_messages: list
 
 
 @dataclass(order=True)
 class File:
     """File."""
@@ -64,9 +64,7 @@ from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsMode
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.user import verify_token
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
 from spiffworkflow_backend.services.custom_parser import MyCustomParser
 from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
 from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.git_service import GitService
 from spiffworkflow_backend.services.message_service import MessageService
 from spiffworkflow_backend.services.process_instance_processor import (
@@ -1027,7 +1025,17 @@ def task_list_for_my_open_processes(
     return get_tasks(page=page, per_page=per_page)
 
 
-def task_list_for_processes_started_by_others(
+def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
+    """Task_list_for_processes_started_by_others."""
+    return get_tasks(
+        processes_started_by_user=False,
+        has_lane_assignment_id=False,
+        page=page,
+        per_page=per_page,
+    )
+
+
+def task_list_for_my_groups(
     page: int = 1, per_page: int = 100
 ) -> flask.wrappers.Response:
     """Task_list_for_processes_started_by_others."""
@@ -1035,14 +1043,21 @@ def task_list_for_processes_started_by_others(
 
 
 def get_tasks(
-    processes_started_by_user: bool = True, page: int = 1, per_page: int = 100
+    processes_started_by_user: bool = True,
+    has_lane_assignment_id: bool = True,
+    page: int = 1,
+    per_page: int = 100,
 ) -> flask.wrappers.Response:
     """Get_tasks."""
     user_id = g.user.id
+
+    # use distinct to ensure we only get one row per active task otherwise
+    # we can get back multiple for the same active task row which throws off
+    # pagination later on
+    # https://stackoverflow.com/q/34582014/6090676
     active_tasks_query = (
-        ActiveTaskModel.query.outerjoin(
-            GroupModel, GroupModel.id == ActiveTaskModel.lane_assignment_id
-        )
+        ActiveTaskModel.query.distinct()
+        .outerjoin(GroupModel, GroupModel.id == ActiveTaskModel.lane_assignment_id)
         .join(ProcessInstanceModel)
         .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
     )
@@ -1050,11 +1065,29 @@ def get_tasks(
     if processes_started_by_user:
         active_tasks_query = active_tasks_query.filter(
             ProcessInstanceModel.process_initiator_id == user_id
-        ).outerjoin(ActiveTaskUserModel, and_(ActiveTaskUserModel.user_id == user_id))
+        ).outerjoin(
+            ActiveTaskUserModel,
+            and_(
+                ActiveTaskUserModel.user_id == user_id,
+                ActiveTaskModel.id == ActiveTaskUserModel.active_task_id,
+            ),
+        )
     else:
         active_tasks_query = active_tasks_query.filter(
             ProcessInstanceModel.process_initiator_id != user_id
-        ).join(ActiveTaskUserModel, and_(ActiveTaskUserModel.user_id == user_id))
+        ).join(
+            ActiveTaskUserModel,
+            and_(
+                ActiveTaskUserModel.user_id == user_id,
+                ActiveTaskModel.id == ActiveTaskUserModel.active_task_id,
+            ),
+        )
+        if has_lane_assignment_id:
+            active_tasks_query = active_tasks_query.filter(
+                ActiveTaskModel.lane_assignment_id.is_not(None)  # type: ignore
+            )
+        else:
+            active_tasks_query = active_tasks_query.filter(ActiveTaskModel.lane_assignment_id.is_(None))  # type: ignore
 
     active_tasks = active_tasks_query.add_columns(
         ProcessInstanceModel.process_model_identifier,
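
The `.distinct()` added above is load-bearing: joining active tasks to their candidate users yields one row per (task, user) pair, which inflates `count()` and shifts LIMIT/OFFSET page boundaries — the failure the in-code comment and linked Stack Overflow question describe. A self-contained sketch of that failure mode, with toy models invented for illustration (not the app's real models):

# Toy illustration of why .distinct() is needed before paginating a join.
from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Task(Base):
    __tablename__ = "task"
    id = Column(Integer, primary_key=True)

class TaskUser(Base):
    __tablename__ = "task_user"
    id = Column(Integer, primary_key=True)
    task_id = Column(Integer, ForeignKey("task.id"))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(Task(id=1))
session.add_all([TaskUser(task_id=1), TaskUser(task_id=1)])  # two assignees
session.commit()

joined = session.query(Task).join(TaskUser)
print(joined.count())             # 2 -- one row per assignee, skews pagination
print(joined.distinct().count())  # 1 -- one row per task, as intended

With distinct in place, `count()` and the later paginate/offset math operate on task rows rather than assignment rows.
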
@@ -1,5 +1,6 @@
-from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
-from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser
+"""Custom_parser."""
+from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
+from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore
 
 
 class MyCustomParser(BpmnDmnParser):  # type: ignore
@@ -38,7 +38,6 @@ from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
 from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
 from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
-from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore
 from SpiffWorkflow.spiff.serializer.task_spec_converters import BoundaryEventConverter  # type: ignore
 from SpiffWorkflow.spiff.serializer.task_spec_converters import (
     CallActivityTaskConverter,
@@ -95,9 +94,6 @@ from spiffworkflow_backend.services.custom_parser import MyCustomParser
 from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate
-from spiffworkflow_backend.services.spec_file_service import (
-    ProcessModelFileNotFoundError,
-)
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
 from spiffworkflow_backend.services.user_service import UserService
 
@@ -674,18 +670,19 @@ class ProcessInstanceProcessor:
         return parser
 
     @staticmethod
-    def backfill_missing_bpmn_process_id_lookup_records(bpmn_process_identifier: str) -> Optional[str]:
+    def backfill_missing_bpmn_process_id_lookup_records(
+        bpmn_process_identifier: str,
+    ) -> Optional[str]:
         """Backfill_missing_bpmn_process_id_lookup_records."""
         process_models = ProcessModelService().get_process_models()
         for process_model in process_models:
-            refs = SpecFileService.reference_map(SpecFileService.get_references_for_process(process_model))
+            refs = SpecFileService.reference_map(
+                SpecFileService.get_references_for_process(process_model)
+            )
             bpmn_process_identifiers = refs.keys()
             if bpmn_process_identifier in bpmn_process_identifiers:
                 SpecFileService.update_process_cache(refs[bpmn_process_identifier])
-                return FileSystemService.full_path_to_process_model_file(
-                    process_model
-                )
+                return FileSystemService.full_path_to_process_model_file(process_model)
         return None
 
     @staticmethod
@@ -2,17 +2,10 @@
 import os
 import shutil
 from datetime import datetime
-from typing import Any, Type
-from typing import List
 from typing import Optional
 
-from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
-from SpiffWorkflow.bpmn.parser.ProcessParser import ProcessParser
 from flask_bpmn.api.api_error import ApiError
 from flask_bpmn.models.db import db
 from lxml import etree  # type: ignore
-from lxml.etree import _Element  # type: ignore
-from lxml.etree import Element as EtreeElement
 from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore
 
 from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
@@ -62,29 +55,29 @@ class SpecFileService(FileSystemService):
 
     @staticmethod
     def reference_map(references: list[FileReference]) -> dict[str, FileReference]:
-        """ Creates a dict with provided references organized by id. """
+        """Creates a dict with provided references organized by id."""
         ref_map = {}
         for ref in references:
             ref_map[ref.id] = ref
         return ref_map
 
-    @staticmethod
-    def get_references(process_models: List[ProcessModelInfo]) -> list[FileReference]:
-        """Returns all references -- process_ids, and decision ids, across all process models provided"""
-        references = []
-        for process_model in process_models:
-            references.extend(SpecFileService.get_references_for_process(process_model))
-
     @staticmethod
-    def get_references_for_process(process_model_info: ProcessModelInfo) -> list[FileReference]:
+    def get_references_for_process(
+        process_model_info: ProcessModelInfo,
+    ) -> list[FileReference]:
+        """Get_references_for_process."""
         files = SpecFileService.get_files(process_model_info)
         references = []
         for file in files:
-            references.extend(SpecFileService.get_references_for_file(file, process_model_info))
+            references.extend(
+                SpecFileService.get_references_for_file(file, process_model_info)
+            )
         return references
 
     @staticmethod
-    def get_references_for_file(file: File, process_model_info: ProcessModelInfo) -> list[FileReference]:
+    def get_references_for_file(
+        file: File, process_model_info: ProcessModelInfo
+    ) -> list[FileReference]:
         """Uses spiffworkflow to parse BPMN and DMN files to determine how they can be externally referenced.
 
         Returns a list of Reference objects that contain the type of reference, the id, the name.
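
`reference_map` is a small index-by-id helper; the processor's backfill code earlier in this diff uses it to test membership by BPMN process identifier. A standalone sketch with a trimmed stand-in for FileReference (the real dataclass carries more fields, as the hunk below shows):

from dataclasses import dataclass

@dataclass
class FileReference:  # trimmed stand-in; the real model has more fields
    id: str
    name: str
    type: str

def reference_map(references: list[FileReference]) -> dict[str, FileReference]:
    """Organize references by id, mirroring SpecFileService.reference_map."""
    return {ref.id: ref for ref in references}

refs = reference_map(
    [
        FileReference(id="Process_one", name="One", type="process"),
        FileReference(id="Decision_two", name="Two", type="decision"),
    ]
)
assert "Process_one" in refs.keys()  # membership test, as in the backfill code
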
@@ -117,16 +110,24 @@ class SpecFileService(FileSystemService):
         else:
             return references
         for sub_parser in sub_parsers:
-            if parser_type == 'process':
+            if parser_type == "process":
                 has_lanes = sub_parser.has_lanes()
                 executable = sub_parser.process_executable
                 start_messages = sub_parser.start_messages()
-            references.append(FileReference(
-                id=sub_parser.get_id(), name=sub_parser.get_name(), type=parser_type,
-                file_name=file.name, file_path=file_path, has_lanes=has_lanes,
-                executable=executable, messages=messages,
-                correlations=correlations, start_messages=start_messages
-            ))
+            references.append(
+                FileReference(
+                    id=sub_parser.get_id(),
+                    name=sub_parser.get_name(),
+                    type=parser_type,
+                    file_name=file.name,
+                    file_path=file_path,
+                    has_lanes=has_lanes,
+                    executable=executable,
+                    messages=messages,
+                    correlations=correlations,
+                    start_messages=start_messages,
+                )
+            )
         return references
 
     @staticmethod
@@ -138,7 +139,8 @@ class SpecFileService(FileSystemService):
         return SpecFileService.update_file(process_model_info, file_name, binary_data)
 
     @staticmethod
-    def update_file(process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
+    def update_file(
+        process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
     ) -> File:
         """Update_file."""
         SpecFileService.assert_valid_file_name(file_name)
@@ -150,26 +152,29 @@ class SpecFileService(FileSystemService):
         file = SpecFileService.to_file_object(file_name, file_path)
 
         if file.type == FileType.bpmn.value:
             set_primary_file = False
             if (
                 process_model_info.primary_file_name is None
                 or file_name == process_model_info.primary_file_name
             ):
                 # If no primary process exists, make this primary process.
                 set_primary_file = True
-            references = SpecFileService.get_references_for_file(file, process_model_info)
+            references = SpecFileService.get_references_for_file(
+                file, process_model_info
+            )
             for ref in references:
                 if ref.type == "process":
                     ProcessModelService().update_spec(
-                        process_model_info, {
+                        process_model_info,
+                        {
                             "primary_process_id": ref.id,
                             "primary_file_name": file_name,
                             "is_review": ref.has_lanes,
-                        }
+                        },
                     )
                     SpecFileService.update_process_cache(ref)
                     SpecFileService.update_message_cache(ref)
-                    SpecFileService.update_message_trigger_cache(ref, process_model_info)
+                    SpecFileService.update_message_trigger_cache(
+                        ref, process_model_info
+                    )
                     SpecFileService.update_correlation_cache(ref)
                     break
 
@@ -226,13 +231,14 @@ class SpecFileService(FileSystemService):
         if os.path.exists(dir_path):
             shutil.rmtree(dir_path)
 
-
     # fixme: Place all the caching stuff in a different service.
 
-
     @staticmethod
     def update_process_cache(ref: FileReference) -> None:
-        process_id_lookup = BpmnProcessIdLookup.query.filter_by(bpmn_process_identifier=ref.id).first()
+        """Update_process_cache."""
+        process_id_lookup = BpmnProcessIdLookup.query.filter_by(
+            bpmn_process_identifier=ref.id
+        ).first()
         if process_id_lookup is None:
             process_id_lookup = BpmnProcessIdLookup(
                 bpmn_process_identifier=ref.id,
@@ -253,74 +259,77 @@ class SpecFileService(FileSystemService):
                     f"{process_id_lookup.bpmn_file_relative_path}. It cannot be reused."
                 )
             else:
-                process_id_lookup.bpmn_file_relative_path = (
-                    ref.file_path
-                )
+                process_id_lookup.bpmn_file_relative_path = ref.file_path
         db.session.add(process_id_lookup)
         db.session.commit()
 
     @staticmethod
     def update_message_cache(ref: FileReference) -> None:
         """Assure we have a record in the database of all possible message ids and names."""
         for message_model_identifier in ref.messages.keys():
-            message_model = MessageModel.query.filter_by(identifier=message_model_identifier).first()
+            message_model = MessageModel.query.filter_by(
+                identifier=message_model_identifier
+            ).first()
             if message_model is None:
                 message_model = MessageModel(
-                    identifier=message_model_identifier, name=ref.messages[message_model_identifier]
+                    identifier=message_model_identifier,
+                    name=ref.messages[message_model_identifier],
                 )
                 db.session.add(message_model)
                 db.session.commit()
 
     @staticmethod
-    def update_message_trigger_cache(ref: FileReference, process_model_info: ProcessModelInfo) -> None:
-        """assure we know which messages can trigger the start of a process."""
+    def update_message_trigger_cache(
+        ref: FileReference, process_model_info: ProcessModelInfo
+    ) -> None:
+        """Assure we know which messages can trigger the start of a process."""
         for message_model_identifier in ref.start_messages:
-            message_model = MessageModel.query.filter_by(
-                identifier=message_model_identifier
-            ).first()
-            if message_model is None:
-                raise ValidationException(
-                    f"Could not find message model with identifier '{message_model_identifier}'"
-                    f"Required by a Start Event in : {ref.file_name}"
-                )
-            message_triggerable_process_model = (
-                MessageTriggerableProcessModel.query.filter_by(
-                    message_model_id=message_model.id,
-                ).first()
-            )
+            message_model = MessageModel.query.filter_by(
+                identifier=message_model_identifier
+            ).first()
+            if message_model is None:
+                raise ValidationException(
+                    f"Could not find message model with identifier '{message_model_identifier}'"
+                    f"Required by a Start Event in : {ref.file_name}"
+                )
+            message_triggerable_process_model = (
+                MessageTriggerableProcessModel.query.filter_by(
+                    message_model_id=message_model.id,
+                ).first()
+            )
 
-            if message_triggerable_process_model is None:
-                message_triggerable_process_model = (
-                    MessageTriggerableProcessModel(
-                        message_model_id=message_model.id,
-                        process_model_identifier=process_model_info.id,
-                        process_group_identifier="process_group_identifier"
-                    )
-                )
+            if message_triggerable_process_model is None:
+                message_triggerable_process_model = MessageTriggerableProcessModel(
+                    message_model_id=message_model.id,
+                    process_model_identifier=process_model_info.id,
+                    process_group_identifier="process_group_identifier",
+                )
-                db.session.add(message_triggerable_process_model)
-                db.session.commit()
-            else:
-                if (
-                    message_triggerable_process_model.process_model_identifier
-                    != process_model_info.id
-                    # or message_triggerable_process_model.process_group_identifier
-                    # != process_model_info.process_group_id
-                ):
-                    raise ValidationException(
-                        f"Message model is already used to start process model {process_model_info.id}"
-                    )
+                db.session.add(message_triggerable_process_model)
+                db.session.commit()
+            else:
+                if (
+                    message_triggerable_process_model.process_model_identifier
+                    != process_model_info.id
+                    # or message_triggerable_process_model.process_group_identifier
+                    # != process_model_info.process_group_id
+                ):
+                    raise ValidationException(
+                        f"Message model is already used to start process model {process_model_info.id}"
+                    )
 
     @staticmethod
     def update_correlation_cache(ref: FileReference) -> None:
         """Update_correlation_cache."""
         for correlation_identifier in ref.correlations.keys():
-            correlation_property_retrieval_expressions = \
-                ref.correlations[correlation_identifier]['retrieval_expressions']
+            correlation_property_retrieval_expressions = ref.correlations[
+                correlation_identifier
+            ]["retrieval_expressions"]
 
             for cpre in correlation_property_retrieval_expressions:
                 message_model_identifier = cpre["messageRef"]
-                message_model = MessageModel.query.filter_by(identifier=message_model_identifier).first()
+                message_model = MessageModel.query.filter_by(
+                    identifier=message_model_identifier
+                ).first()
                 if message_model is None:
                     raise ValidationException(
                         f"Could not find message model with identifier '{message_model_identifier}'"
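
update_correlation_cache consumes a nested mapping whose shape can be read off the subscripts above. An illustrative walk over assumed data — the identifiers here are invented; only the retrieval_expressions and messageRef keys are attested by the diff:

# Shape of ref.correlations as consumed by update_correlation_cache.
correlations = {
    "invoice_number": {  # a correlation_identifier
        "retrieval_expressions": [
            {"messageRef": "request_approval"},  # one entry per referencing message
            {"messageRef": "approval_result"},
        ],
    },
}

for correlation_identifier in correlations.keys():
    retrieval_expressions = correlations[correlation_identifier]["retrieval_expressions"]
    for cpre in retrieval_expressions:
        message_model_identifier = cpre["messageRef"]
        print(correlation_identifier, "->", message_model_identifier)
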
@@ -25,11 +25,9 @@ class ExampleDataLoader:
         """Assumes that process_model_source_directory exists in static/bpmn and contains bpmn_file_name.
 
         further assumes that bpmn_file_name is the primary file for the process model.
-
         if bpmn_file_name is None we load all files in process_model_source_directory,
         otherwise, we only load bpmn_file_name
         """
-
         if process_model_source_directory is None:
             raise Exception("You must include `process_model_source_directory`.")
 
@@ -85,7 +83,9 @@ class ExampleDataLoader:
                 process_model_info=spec, file_name=filename, binary_data=data
             )
             if is_primary:
-                references = SpecFileService.get_references_for_file(file_info, spec)
+                references = SpecFileService.get_references_for_file(
+                    file_info, spec
+                )
                 spec.primary_process_id = references[0].id
                 spec.primary_file_name = filename
                 ProcessModelService().save_process_model(spec)
@@ -2325,4 +2325,3 @@ class TestProcessApi(BaseTest):
         )
 
-        print("test_script_unit_test_run")
 
@@ -4,9 +4,8 @@ import os
 import pytest
 from flask import Flask
 from flask.testing import FlaskClient
-from flask_bpmn.api.api_error import ApiError
 from flask_bpmn.models.db import db
-from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
+from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
 
@@ -14,9 +13,7 @@ from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLoo
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
-from tests.spiffworkflow_backend.helpers.base_test import BaseTest
-from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
-from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
+
 
 class TestSpecFileService(BaseTest):
     """TestSpecFileService."""
@@ -86,8 +83,9 @@ class TestSpecFileService(BaseTest):
                 process_model_source_directory="call_activity_duplicate",
                 bpmn_file_name="call_activity_nested_duplicate",
             )
-        assert f"Process id ({bpmn_process_identifier}) has already been used" in str(
-            exception.value
+        assert (
+            f"Process id ({bpmn_process_identifier}) has already been used"
+            in str(exception.value)
         )
 
     def test_updates_relative_file_path_when_appropriate(
@@ -124,7 +124,7 @@ export default function MyOpenProcesses() {
         perPageOptions={[2, PER_PAGE_FOR_TASKS_ON_HOME_PAGE, 25]}
         pagination={pagination}
         tableToDisplay={buildTable()}
-        path="/tasks/for-my-open-processes"
+        path="/tasks/grouped"
       />
     </>
   );
@@ -13,7 +13,7 @@ import { PaginationObject } from '../interfaces';
 
 const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5;
 
-export default function MyTasksForProcessesStartedByOthers() {
+export default function TasksWaitingForMe() {
   const [searchParams] = useSearchParams();
   const [tasks, setTasks] = useState([]);
   const [pagination, setPagination] = useState<PaginationObject | null>(null);
@@ -28,7 +28,7 @@ export default function MyTasksForProcessesStartedByOthers() {
       setPagination(result.pagination);
     };
     HttpService.makeCallToBackend({
-      path: `/tasks/for-processes-started-by-others?per_page=${perPage}&page=${page}`,
+      path: `/tasks/for-me?per_page=${perPage}&page=${page}`,
       successCallback: setTasksFromResult,
     });
   }, [searchParams]);
@@ -126,7 +126,7 @@ export default function MyTasksForProcessesStartedByOthers() {
         perPageOptions={[2, PER_PAGE_FOR_TASKS_ON_HOME_PAGE, 25]}
         pagination={pagination}
         tableToDisplay={buildTable()}
-        path="/tasks/for-my-open-processes"
+        path="/tasks/grouped"
       />
     </>
   );
@@ -0,0 +1,139 @@
+import { useEffect, useState } from 'react';
+// @ts-ignore
+import { Button, Table } from '@carbon/react';
+import { Link, useSearchParams } from 'react-router-dom';
+import PaginationForTable from './PaginationForTable';
+import {
+  convertSecondsToFormattedDateTime,
+  getPageInfoFromSearchParams,
+  modifyProcessModelPath,
+} from '../helpers';
+import HttpService from '../services/HttpService';
+import { PaginationObject } from '../interfaces';
+
+const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5;
+
+export default function TasksForWaitingForMyGroups() {
+  const [searchParams] = useSearchParams();
+  const [tasks, setTasks] = useState([]);
+  const [pagination, setPagination] = useState<PaginationObject | null>(null);
+
+  useEffect(() => {
+    const { page, perPage } = getPageInfoFromSearchParams(
+      searchParams,
+      PER_PAGE_FOR_TASKS_ON_HOME_PAGE
+    );
+    const setTasksFromResult = (result: any) => {
+      setTasks(result.results);
+      setPagination(result.pagination);
+    };
+    HttpService.makeCallToBackend({
+      path: `/tasks/for-my-groups?per_page=${perPage}&page=${page}`,
+      successCallback: setTasksFromResult,
+    });
+  }, [searchParams]);
+
+  const buildTable = () => {
+    const rows = tasks.map((row) => {
+      const rowToUse = row as any;
+      const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`;
+      const modifiedProcessModelIdentifier = modifyProcessModelPath(
+        rowToUse.process_model_identifier
+      );
+      return (
+        <tr key={rowToUse.id}>
+          <td>
+            <Link
+              data-qa="process-model-show-link"
+              to={`/admin/process-models/${modifiedProcessModelIdentifier}`}
+            >
+              {rowToUse.process_model_display_name}
+            </Link>
+          </td>
+          <td>
+            <Link
+              data-qa="process-instance-show-link"
+              to={`/admin/process-models/${modifiedProcessModelIdentifier}/process-instances/${rowToUse.process_instance_id}`}
+            >
+              View {rowToUse.process_instance_id}
+            </Link>
+          </td>
+          <td
+            title={`task id: ${rowToUse.name}, spiffworkflow task guid: ${rowToUse.id}`}
+          >
+            {rowToUse.task_title}
+          </td>
+          <td>{rowToUse.username}</td>
+          <td>{rowToUse.process_instance_status}</td>
+          <td>{rowToUse.group_identifier || '-'}</td>
+          <td>
+            {convertSecondsToFormattedDateTime(
+              rowToUse.created_at_in_seconds
+            ) || '-'}
+          </td>
+          <td>
+            {convertSecondsToFormattedDateTime(
+              rowToUse.updated_at_in_seconds
+            ) || '-'}
+          </td>
+          <td>
+            <Button
+              variant="primary"
+              href={taskUrl}
+              hidden={rowToUse.process_instance_status === 'suspended'}
+              disabled={!rowToUse.current_user_is_potential_owner}
+            >
+              Go
+            </Button>
+          </td>
+        </tr>
+      );
+    });
+    return (
+      <Table striped bordered>
+        <thead>
+          <tr>
+            <th>Process Model</th>
+            <th>Process Instance</th>
+            <th>Task Name</th>
+            <th>Process Started By</th>
+            <th>Process Instance Status</th>
+            <th>Assigned Group</th>
+            <th>Process Started</th>
+            <th>Process Updated</th>
+            <th>Actions</th>
+          </tr>
+        </thead>
+        <tbody>{rows}</tbody>
+      </Table>
+    );
+  };
+
+  const tasksComponent = () => {
+    if (pagination && pagination.total < 1) {
+      return null;
+    }
+    const { page, perPage } = getPageInfoFromSearchParams(
+      searchParams,
+      PER_PAGE_FOR_TASKS_ON_HOME_PAGE
+    );
+    return (
+      <>
+        <h1>Tasks waiting for my groups</h1>
+        <PaginationForTable
+          page={page}
+          perPage={perPage}
+          perPageOptions={[2, PER_PAGE_FOR_TASKS_ON_HOME_PAGE, 25]}
+          pagination={pagination}
+          tableToDisplay={buildTable()}
+          path="/tasks/grouped"
+        />
+      </>
+    );
+  };
+
+  if (pagination) {
+    return tasksComponent();
+  }
+  return null;
+}
@@ -1,12 +1,15 @@
-import MyTasksForProcessesStartedByOthers from '../components/MyTasksForProcessesStartedByOthers';
 import TasksForMyOpenProcesses from '../components/TasksForMyOpenProcesses';
+import TasksWaitingForMe from '../components/TasksWaitingForMe';
+import TasksForWaitingForMyGroups from '../components/TasksWaitingForMyGroups';
 
 export default function GroupedTasks() {
   return (
     <>
       <TasksForMyOpenProcesses />
       <br />
-      <MyTasksForProcessesStartedByOthers />
+      <TasksWaitingForMe />
+      <br />
+      <TasksForWaitingForMyGroups />
     </>
   );
 }