Merge pull request #33 from sartography/feature/task_page

Feature/task page
jasquat 2022-11-14 12:24:45 -05:00 committed by GitHub
commit 5d288361ff
13 changed files with 327 additions and 118 deletions


@@ -901,7 +901,7 @@ paths:
                items:
                  $ref: "#/components/schemas/Task"
-  /tasks/for-processes-started-by-others:
+  /tasks/for-me:
     parameters:
       - name: page
         in: query
@@ -918,7 +918,36 @@ paths:
     get:
       tags:
         - Process Instances
-      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_processes_started_by_others
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_me
+      summary: returns the list of tasks for given user's open process instances
+      responses:
+        "200":
+          description: list of tasks
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/Task"
+  /tasks/for-my-groups:
+    parameters:
+      - name: page
+        in: query
+        required: false
+        description: The page number to return. Defaults to page 1.
+        schema:
+          type: integer
+      - name: per_page
+        in: query
+        required: false
+        description: The page number to return. Defaults to page 1.
+        schema:
+          type: integer
+    get:
+      tags:
+        - Process Instances
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_my_groups
       summary: returns the list of tasks for given user's open process instances
       responses:
         "200":


@@ -82,6 +82,7 @@ class FileReference:
     correlations: dict
     start_messages: list

+
 @dataclass(order=True)
 class File:
     """File."""


@@ -64,9 +64,7 @@ from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.user import verify_token
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
-from spiffworkflow_backend.services.custom_parser import MyCustomParser
 from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
-from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.git_service import GitService
 from spiffworkflow_backend.services.message_service import MessageService
 from spiffworkflow_backend.services.process_instance_processor import (
@@ -1027,7 +1025,17 @@ def task_list_for_my_open_processes(
     return get_tasks(page=page, per_page=per_page)


-def task_list_for_processes_started_by_others(
+def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
+    """Task_list_for_processes_started_by_others."""
+    return get_tasks(
+        processes_started_by_user=False,
+        has_lane_assignment_id=False,
+        page=page,
+        per_page=per_page,
+    )
+
+
+def task_list_for_my_groups(
     page: int = 1, per_page: int = 100
 ) -> flask.wrappers.Response:
     """Task_list_for_processes_started_by_others."""
@@ -1035,14 +1043,21 @@ def task_list_for_processes_started_by_others(

 def get_tasks(
-    processes_started_by_user: bool = True, page: int = 1, per_page: int = 100
+    processes_started_by_user: bool = True,
+    has_lane_assignment_id: bool = True,
+    page: int = 1,
+    per_page: int = 100,
 ) -> flask.wrappers.Response:
     """Get_tasks."""
     user_id = g.user.id
+
+    # use distinct to ensure we only get one row per active task otherwise
+    # we can get back multiple for the same active task row which throws off
+    # pagination later on
+    # https://stackoverflow.com/q/34582014/6090676
     active_tasks_query = (
-        ActiveTaskModel.query.outerjoin(
-            GroupModel, GroupModel.id == ActiveTaskModel.lane_assignment_id
-        )
+        ActiveTaskModel.query.distinct()
+        .outerjoin(GroupModel, GroupModel.id == ActiveTaskModel.lane_assignment_id)
         .join(ProcessInstanceModel)
         .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
     )
@@ -1050,11 +1065,29 @@ def get_tasks(
     if processes_started_by_user:
         active_tasks_query = active_tasks_query.filter(
             ProcessInstanceModel.process_initiator_id == user_id
-        ).outerjoin(ActiveTaskUserModel, and_(ActiveTaskUserModel.user_id == user_id))
+        ).outerjoin(
+            ActiveTaskUserModel,
+            and_(
+                ActiveTaskUserModel.user_id == user_id,
+                ActiveTaskModel.id == ActiveTaskUserModel.active_task_id,
+            ),
+        )
     else:
         active_tasks_query = active_tasks_query.filter(
             ProcessInstanceModel.process_initiator_id != user_id
-        ).join(ActiveTaskUserModel, and_(ActiveTaskUserModel.user_id == user_id))
+        ).join(
+            ActiveTaskUserModel,
+            and_(
+                ActiveTaskUserModel.user_id == user_id,
+                ActiveTaskModel.id == ActiveTaskUserModel.active_task_id,
+            ),
+        )
+        if has_lane_assignment_id:
+            active_tasks_query = active_tasks_query.filter(
+                ActiveTaskModel.lane_assignment_id.is_not(None)  # type: ignore
+            )
+        else:
+            active_tasks_query = active_tasks_query.filter(ActiveTaskModel.lane_assignment_id.is_(None))  # type: ignore

     active_tasks = active_tasks_query.add_columns(
         ProcessInstanceModel.process_model_identifier,
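
The .distinct() call added above is the behavioral core of this hunk. A self-contained toy (plain SQLAlchemy on an in-memory SQLite database; the two models are illustrative stand-ins for ActiveTaskModel and ActiveTaskUserModel, not the real spiffworkflow schema) reproduces the duplicate-row problem the new comment describes:

from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class ActiveTask(Base):
    __tablename__ = "active_task"
    id = Column(Integer, primary_key=True)

class ActiveTaskUser(Base):
    __tablename__ = "active_task_user"
    id = Column(Integer, primary_key=True)
    active_task_id = Column(Integer, ForeignKey("active_task.id"))
    user_id = Column(Integer)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(ActiveTask(id=1))
    # two assignment rows pointing at the same task (e.g. via two group
    # memberships) yield two joined rows for one underlying task
    session.add_all(
        [
            ActiveTaskUser(active_task_id=1, user_id=42),
            ActiveTaskUser(active_task_id=1, user_id=42),
        ]
    )
    session.commit()

    query = session.query(ActiveTask).join(
        ActiveTaskUser, ActiveTask.id == ActiveTaskUser.active_task_id
    )
    print(query.count())             # 2 -- inflated by the join
    print(query.distinct().count())  # 1 -- what pagination should count

Without distinct(), limit/offset pagination pages over the inflated row set, so totals and page boundaries drift; collapsing duplicates first keeps the count equal to the number of tasks.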


@@ -1,5 +1,6 @@
-from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
-from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser
+"""Custom_parser."""
+from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
+from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore


 class MyCustomParser(BpmnDmnParser):  # type: ignore


@@ -38,7 +38,6 @@ from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
 from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
 from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
-from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore
 from SpiffWorkflow.spiff.serializer.task_spec_converters import BoundaryEventConverter  # type: ignore
 from SpiffWorkflow.spiff.serializer.task_spec_converters import (
     CallActivityTaskConverter,
@@ -95,9 +94,6 @@ from spiffworkflow_backend.services.custom_parser import MyCustomParser
 from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate
-from spiffworkflow_backend.services.spec_file_service import (
-    ProcessModelFileNotFoundError,
-)
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
 from spiffworkflow_backend.services.user_service import UserService
@@ -674,18 +670,19 @@ class ProcessInstanceProcessor:
         return parser

     @staticmethod
-    def backfill_missing_bpmn_process_id_lookup_records(bpmn_process_identifier: str) -> Optional[str]:
+    def backfill_missing_bpmn_process_id_lookup_records(
+        bpmn_process_identifier: str,
+    ) -> Optional[str]:
         """Backfill_missing_bpmn_process_id_lookup_records."""
         process_models = ProcessModelService().get_process_models()
         for process_model in process_models:
-            refs = SpecFileService.reference_map(SpecFileService.get_references_for_process(process_model))
+            refs = SpecFileService.reference_map(
+                SpecFileService.get_references_for_process(process_model)
+            )
             bpmn_process_identifiers = refs.keys()
             if bpmn_process_identifier in bpmn_process_identifiers:
                 SpecFileService.update_process_cache(refs[bpmn_process_identifier])
-                return FileSystemService.full_path_to_process_model_file(
-                    process_model
-                )
+                return FileSystemService.full_path_to_process_model_file(process_model)
         return None

     @staticmethod


@@ -2,17 +2,10 @@
 import os
 import shutil
 from datetime import datetime
-from typing import Any, Type
 from typing import List
 from typing import Optional

-from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
-from SpiffWorkflow.bpmn.parser.ProcessParser import ProcessParser
-from flask_bpmn.api.api_error import ApiError
 from flask_bpmn.models.db import db
-from lxml import etree  # type: ignore
-from lxml.etree import _Element  # type: ignore
-from lxml.etree import Element as EtreeElement
 from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore

 from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
@@ -69,22 +62,22 @@ class SpecFileService(FileSystemService):
         return ref_map

     @staticmethod
-    def get_references(process_models: List[ProcessModelInfo]) -> list[FileReference]:
-        """Returns all references -- process_ids, and decision ids, across all process models provided"""
-        references = []
-        for process_model in process_models:
-            references.extend(SpecFileService.get_references_for_process(process_model))
-
-    @staticmethod
-    def get_references_for_process(process_model_info: ProcessModelInfo) -> list[FileReference]:
+    def get_references_for_process(
+        process_model_info: ProcessModelInfo,
+    ) -> list[FileReference]:
+        """Get_references_for_process."""
         files = SpecFileService.get_files(process_model_info)
         references = []
         for file in files:
-            references.extend(SpecFileService.get_references_for_file(file, process_model_info))
+            references.extend(
+                SpecFileService.get_references_for_file(file, process_model_info)
+            )
         return references

     @staticmethod
-    def get_references_for_file(file: File, process_model_info: ProcessModelInfo) -> list[FileReference]:
+    def get_references_for_file(
+        file: File, process_model_info: ProcessModelInfo
+    ) -> list[FileReference]:
         """Uses spiffworkflow to parse BPMN and DMN files to determine how they can be externally referenced.

         Returns a list of Reference objects that contain the type of reference, the id, the name.
@@ -117,16 +110,24 @@ class SpecFileService(FileSystemService):
         else:
             return references
         for sub_parser in sub_parsers:
-            if parser_type == 'process':
+            if parser_type == "process":
                 has_lanes = sub_parser.has_lanes()
                 executable = sub_parser.process_executable
                 start_messages = sub_parser.start_messages()
-            references.append(FileReference(
-                id=sub_parser.get_id(), name=sub_parser.get_name(), type=parser_type,
-                file_name=file.name, file_path=file_path, has_lanes=has_lanes,
-                executable=executable, messages=messages,
-                correlations=correlations, start_messages=start_messages
-            ))
+            references.append(
+                FileReference(
+                    id=sub_parser.get_id(),
+                    name=sub_parser.get_name(),
+                    type=parser_type,
+                    file_name=file.name,
+                    file_path=file_path,
+                    has_lanes=has_lanes,
+                    executable=executable,
+                    messages=messages,
+                    correlations=correlations,
+                    start_messages=start_messages,
+                )
+            )
         return references

     @staticmethod
@@ -138,7 +139,8 @@ class SpecFileService(FileSystemService):
         return SpecFileService.update_file(process_model_info, file_name, binary_data)

     @staticmethod
-    def update_file(process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
+    def update_file(
+        process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
     ) -> File:
         """Update_file."""
         SpecFileService.assert_valid_file_name(file_name)
@@ -150,26 +152,29 @@ class SpecFileService(FileSystemService):
         file = SpecFileService.to_file_object(file_name, file_path)

         if file.type == FileType.bpmn.value:
-            set_primary_file = False
             if (
                 process_model_info.primary_file_name is None
                 or file_name == process_model_info.primary_file_name
             ):
                 # If no primary process exists, make this primary process.
-                set_primary_file = True
-            references = SpecFileService.get_references_for_file(file, process_model_info)
+                references = SpecFileService.get_references_for_file(
+                    file, process_model_info
+                )
                 for ref in references:
                     if ref.type == "process":
                         ProcessModelService().update_spec(
-                            process_model_info, {
+                            process_model_info,
+                            {
                                 "primary_process_id": ref.id,
                                 "primary_file_name": file_name,
                                 "is_review": ref.has_lanes,
-                            }
+                            },
                         )
                         SpecFileService.update_process_cache(ref)
                         SpecFileService.update_message_cache(ref)
-                        SpecFileService.update_message_trigger_cache(ref, process_model_info)
+                        SpecFileService.update_message_trigger_cache(
+                            ref, process_model_info
+                        )
                         SpecFileService.update_correlation_cache(ref)
                         break
@@ -226,13 +231,14 @@ class SpecFileService(FileSystemService):
         if os.path.exists(dir_path):
             shutil.rmtree(dir_path)

     # fixme: Place all the caching stuff in a different service.

     @staticmethod
     def update_process_cache(ref: FileReference) -> None:
-        process_id_lookup = BpmnProcessIdLookup.query.filter_by(bpmn_process_identifier=ref.id).first()
+        """Update_process_cache."""
+        process_id_lookup = BpmnProcessIdLookup.query.filter_by(
+            bpmn_process_identifier=ref.id
+        ).first()
         if process_id_lookup is None:
             process_id_lookup = BpmnProcessIdLookup(
                 bpmn_process_identifier=ref.id,
@@ -253,29 +259,30 @@ class SpecFileService(FileSystemService):
                     f"{process_id_lookup.bpmn_file_relative_path}. It cannot be reused."
                 )
             else:
-                process_id_lookup.bpmn_file_relative_path = (
-                    ref.file_path
-                )
+                process_id_lookup.bpmn_file_relative_path = ref.file_path
                 db.session.add(process_id_lookup)
                 db.session.commit()

     @staticmethod
     def update_message_cache(ref: FileReference) -> None:
         """Assure we have a record in the database of all possible message ids and names."""
         for message_model_identifier in ref.messages.keys():
-            message_model = MessageModel.query.filter_by(identifier=message_model_identifier).first()
+            message_model = MessageModel.query.filter_by(
+                identifier=message_model_identifier
+            ).first()
             if message_model is None:
                 message_model = MessageModel(
-                    identifier=message_model_identifier, name=ref.messages[message_model_identifier]
+                    identifier=message_model_identifier,
+                    name=ref.messages[message_model_identifier],
                 )
                 db.session.add(message_model)
                 db.session.commit()

     @staticmethod
-    def update_message_trigger_cache(ref: FileReference, process_model_info: ProcessModelInfo) -> None:
-        """assure we know which messages can trigger the start of a process."""
+    def update_message_trigger_cache(
+        ref: FileReference, process_model_info: ProcessModelInfo
+    ) -> None:
+        """Assure we know which messages can trigger the start of a process."""
         for message_model_identifier in ref.start_messages:
             message_model = MessageModel.query.filter_by(
                 identifier=message_model_identifier
@@ -292,12 +299,10 @@ class SpecFileService(FileSystemService):
             )
             if message_triggerable_process_model is None:
-                message_triggerable_process_model = (
-                    MessageTriggerableProcessModel(
-                        message_model_id=message_model.id,
-                        process_model_identifier=process_model_info.id,
-                        process_group_identifier="process_group_identifier"
-                    )
-                )
+                message_triggerable_process_model = MessageTriggerableProcessModel(
+                    message_model_id=message_model.id,
+                    process_model_identifier=process_model_info.id,
+                    process_group_identifier="process_group_identifier",
+                )
                 db.session.add(message_triggerable_process_model)
                 db.session.commit()
@@ -314,13 +319,17 @@ class SpecFileService(FileSystemService):
     @staticmethod
     def update_correlation_cache(ref: FileReference) -> None:
+        """Update_correlation_cache."""
         for correlation_identifier in ref.correlations.keys():
-            correlation_property_retrieval_expressions = \
-                ref.correlations[correlation_identifier]['retrieval_expressions']
+            correlation_property_retrieval_expressions = ref.correlations[
+                correlation_identifier
+            ]["retrieval_expressions"]

             for cpre in correlation_property_retrieval_expressions:
                 message_model_identifier = cpre["messageRef"]
-                message_model = MessageModel.query.filter_by(identifier=message_model_identifier).first()
+                message_model = MessageModel.query.filter_by(
+                    identifier=message_model_identifier
+                ).first()
                 if message_model is None:
                     raise ValidationException(
                         f"Could not find message model with identifier '{message_model_identifier}'"


@@ -25,11 +25,9 @@ class ExampleDataLoader:
         """Assumes that process_model_source_directory exists in static/bpmn and contains bpmn_file_name.
-
         further assumes that bpmn_file_name is the primary file for the process model.
-
         if bpmn_file_name is None we load all files in process_model_source_directory,
         otherwise, we only load bpmn_file_name
         """
         if process_model_source_directory is None:
             raise Exception("You must include `process_model_source_directory`.")
@@ -85,7 +83,9 @@ class ExampleDataLoader:
                 process_model_info=spec, file_name=filename, binary_data=data
             )
             if is_primary:
-                references = SpecFileService.get_references_for_file(file_info, spec)
+                references = SpecFileService.get_references_for_file(
+                    file_info, spec
+                )
                 spec.primary_process_id = references[0].id
                 spec.primary_file_name = filename
                 ProcessModelService().save_process_model(spec)


@@ -2325,4 +2325,3 @@ class TestProcessApi(BaseTest):
         )

         print("test_script_unit_test_run")
-


@@ -4,9 +4,8 @@ import os

 import pytest
 from flask import Flask
 from flask.testing import FlaskClient
-from flask_bpmn.api.api_error import ApiError
 from flask_bpmn.models.db import db
-from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
+from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@@ -14,9 +13,7 @@ from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
-from tests.spiffworkflow_backend.helpers.base_test import BaseTest
-from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
-from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException


 class TestSpecFileService(BaseTest):
     """TestSpecFileService."""
@@ -86,8 +83,9 @@ class TestSpecFileService(BaseTest):
             process_model_source_directory="call_activity_duplicate",
             bpmn_file_name="call_activity_nested_duplicate",
         )
-        assert f"Process id ({bpmn_process_identifier}) has already been used" in str(
-            exception.value
+        assert (
+            f"Process id ({bpmn_process_identifier}) has already been used"
+            in str(exception.value)
         )

     def test_updates_relative_file_path_when_appropriate(


@@ -124,7 +124,7 @@ export default function MyOpenProcesses() {
         perPageOptions={[2, PER_PAGE_FOR_TASKS_ON_HOME_PAGE, 25]}
         pagination={pagination}
         tableToDisplay={buildTable()}
-        path="/tasks/for-my-open-processes"
+        path="/tasks/grouped"
       />
     </>
   );


@@ -13,7 +13,7 @@ import { PaginationObject } from '../interfaces';

 const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5;

-export default function MyTasksForProcessesStartedByOthers() {
+export default function TasksWaitingForMe() {
   const [searchParams] = useSearchParams();
   const [tasks, setTasks] = useState([]);
   const [pagination, setPagination] = useState<PaginationObject | null>(null);
@@ -28,7 +28,7 @@ export default function MyTasksForProcessesStartedByOthers() {
       setPagination(result.pagination);
     };
     HttpService.makeCallToBackend({
-      path: `/tasks/for-processes-started-by-others?per_page=${perPage}&page=${page}`,
+      path: `/tasks/for-me?per_page=${perPage}&page=${page}`,
       successCallback: setTasksFromResult,
     });
   }, [searchParams]);
@@ -126,7 +126,7 @@ export default function MyTasksForProcessesStartedByOthers() {
         perPageOptions={[2, PER_PAGE_FOR_TASKS_ON_HOME_PAGE, 25]}
         pagination={pagination}
         tableToDisplay={buildTable()}
-        path="/tasks/for-my-open-processes"
+        path="/tasks/grouped"
       />
     </>
   );


@@ -0,0 +1,139 @@
+import { useEffect, useState } from 'react';
+// @ts-ignore
+import { Button, Table } from '@carbon/react';
+import { Link, useSearchParams } from 'react-router-dom';
+import PaginationForTable from './PaginationForTable';
+import {
+  convertSecondsToFormattedDateTime,
+  getPageInfoFromSearchParams,
+  modifyProcessModelPath,
+} from '../helpers';
+import HttpService from '../services/HttpService';
+import { PaginationObject } from '../interfaces';
+
+const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5;
+
+export default function TasksForWaitingForMyGroups() {
+  const [searchParams] = useSearchParams();
+  const [tasks, setTasks] = useState([]);
+  const [pagination, setPagination] = useState<PaginationObject | null>(null);
+
+  useEffect(() => {
+    const { page, perPage } = getPageInfoFromSearchParams(
+      searchParams,
+      PER_PAGE_FOR_TASKS_ON_HOME_PAGE
+    );
+    const setTasksFromResult = (result: any) => {
+      setTasks(result.results);
+      setPagination(result.pagination);
+    };
+    HttpService.makeCallToBackend({
+      path: `/tasks/for-my-groups?per_page=${perPage}&page=${page}`,
+      successCallback: setTasksFromResult,
+    });
+  }, [searchParams]);
+
+  const buildTable = () => {
+    const rows = tasks.map((row) => {
+      const rowToUse = row as any;
+      const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`;
+      const modifiedProcessModelIdentifier = modifyProcessModelPath(
+        rowToUse.process_model_identifier
+      );
+      return (
+        <tr key={rowToUse.id}>
+          <td>
+            <Link
+              data-qa="process-model-show-link"
+              to={`/admin/process-models/${modifiedProcessModelIdentifier}`}
+            >
+              {rowToUse.process_model_display_name}
+            </Link>
+          </td>
+          <td>
+            <Link
+              data-qa="process-instance-show-link"
+              to={`/admin/process-models/${modifiedProcessModelIdentifier}/process-instances/${rowToUse.process_instance_id}`}
+            >
+              View {rowToUse.process_instance_id}
+            </Link>
+          </td>
+          <td
+            title={`task id: ${rowToUse.name}, spiffworkflow task guid: ${rowToUse.id}`}
+          >
+            {rowToUse.task_title}
+          </td>
+          <td>{rowToUse.username}</td>
+          <td>{rowToUse.process_instance_status}</td>
+          <td>{rowToUse.group_identifier || '-'}</td>
+          <td>
+            {convertSecondsToFormattedDateTime(
+              rowToUse.created_at_in_seconds
+            ) || '-'}
+          </td>
+          <td>
+            {convertSecondsToFormattedDateTime(
+              rowToUse.updated_at_in_seconds
+            ) || '-'}
+          </td>
+          <td>
+            <Button
+              variant="primary"
+              href={taskUrl}
+              hidden={rowToUse.process_instance_status === 'suspended'}
+              disabled={!rowToUse.current_user_is_potential_owner}
+            >
+              Go
+            </Button>
+          </td>
+        </tr>
+      );
+    });
+    return (
+      <Table striped bordered>
+        <thead>
+          <tr>
+            <th>Process Model</th>
+            <th>Process Instance</th>
+            <th>Task Name</th>
+            <th>Process Started By</th>
+            <th>Process Instance Status</th>
+            <th>Assigned Group</th>
+            <th>Process Started</th>
+            <th>Process Updated</th>
+            <th>Actions</th>
+          </tr>
+        </thead>
+        <tbody>{rows}</tbody>
+      </Table>
+    );
+  };
+
+  const tasksComponent = () => {
+    if (pagination && pagination.total < 1) {
+      return null;
+    }
+    const { page, perPage } = getPageInfoFromSearchParams(
+      searchParams,
+      PER_PAGE_FOR_TASKS_ON_HOME_PAGE
+    );
+    return (
+      <>
+        <h1>Tasks waiting for my groups</h1>
+        <PaginationForTable
+          page={page}
+          perPage={perPage}
+          perPageOptions={[2, PER_PAGE_FOR_TASKS_ON_HOME_PAGE, 25]}
+          pagination={pagination}
+          tableToDisplay={buildTable()}
+          path="/tasks/grouped"
+        />
+      </>
+    );
+  };
+
+  if (pagination) {
+    return tasksComponent();
+  }
+  return null;
+}


@@ -1,12 +1,15 @@
-import MyTasksForProcessesStartedByOthers from '../components/MyTasksForProcessesStartedByOthers';
 import TasksForMyOpenProcesses from '../components/TasksForMyOpenProcesses';
+import TasksWaitingForMe from '../components/TasksWaitingForMe';
+import TasksForWaitingForMyGroups from '../components/TasksWaitingForMyGroups';

 export default function GroupedTasks() {
   return (
     <>
       <TasksForMyOpenProcesses />
       <br />
-      <MyTasksForProcessesStartedByOthers />
+      <TasksWaitingForMe />
+      <br />
+      <TasksForWaitingForMyGroups />
     </>
   );
 }