diff --git a/bin/pre b/bin/pre
new file mode 100755
index 000000000..fa89ecc79
--- /dev/null
+++ b/bin/pre
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+function error_handler() {
+  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+  exit "$2"
+}
+trap 'error_handler ${LINENO} $?' ERR
+set -o errtrace -o errexit -o nounset -o pipefail
+
+script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
+"${script_dir}/run_pyl" pre
diff --git a/bin/run_pyl b/bin/run_pyl
index d3abd7fc6..64446662a 100755
--- a/bin/run_pyl
+++ b/bin/run_pyl
@@ -16,6 +16,17 @@ react_projects=(
 spiffworkflow-frontend
 )
+subcommand="${1:-}"
+
+if [[ "$subcommand" == "pre" ]]; then
+  if [[ -n "$(git status --porcelain SpiffWorkflow)" ]]; then
+    echo "SpiffWorkflow has uncommitted changes. Running its test suite."
+    pushd SpiffWorkflow
+    make tests-par # run tests in parallel
+    popd
+  fi
+fi
+
 function get_python_dirs() {
   (git ls-tree -r HEAD --name-only | grep -E '\.py$' | awk -F '/' '{print $1}' | sort | uniq | grep -v '\.' | grep -Ev '^(bin|migrations)$') || echo ''
 }
@@ -50,23 +61,34 @@ function run_pre_commmit() {
 }
 for react_project in "${react_projects[@]}" ; do
-  pushd "$react_project"
-  npm run lint:fix
-  popd
+  # if pre, only do stuff when there are changes
+  if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$react_project")" ]]; then
+    pushd "$react_project"
+    npm run lint:fix
+    popd
+  fi
 done
 for python_project in "${python_projects[@]}" ; do
-  pushd "$python_project"
-  run_fix_docstrings || run_fix_docstrings
-  run_autoflake || run_autoflake
-  popd
+  if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$python_project")" ]]; then
+    pushd "$python_project"
+    run_fix_docstrings || run_fix_docstrings
+    run_autoflake || run_autoflake
+    popd
+  fi
 done
-run_pre_commmit || run_pre_commmit
-for python_project in "${python_projects[@]}"; do
-  pushd "$python_project"
-  poetry install
-  poetry run mypy $(get_python_dirs)
-  poetry run coverage run --parallel -m pytest
-  popd
+if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "spiffworkflow-backend")" ]]; then
+  # run_pre_commmit only applies to spiffworkflow-backend at the moment
+  run_pre_commmit || run_pre_commmit
+fi
+
+for python_project in "${python_projects[@]}"; do
+  if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$python_project")" ]]; then
+    pushd "$python_project"
+    poetry install
+    poetry run mypy $(get_python_dirs)
+    poetry run coverage run --parallel -m pytest
+    popd
+  fi
 done
diff --git a/spiffworkflow-backend/bin/git_commit_bpmn_models_repo b/spiffworkflow-backend/bin/git_commit_bpmn_models_repo
index 62fc0cab0..0ba512021 100755
--- a/spiffworkflow-backend/bin/git_commit_bpmn_models_repo
+++ b/spiffworkflow-backend/bin/git_commit_bpmn_models_repo
@@ -14,21 +14,39 @@ git_commit_message="$2"
 git_branch="$3"
 git_commit_username="$4"
 git_commit_email="$5"
+git_commit_password="$6"
-if [[ -z "${5:-}" ]]; then
+if [[ -z "${6:-}" ]]; then
   >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message] [git_branch] [git_commit_username] [git_commit_email]"
   exit 1
 fi
-cd "$bpmn_models_absolute_dir"
-git add .
+function failed_to_get_lock() {
+  >&2 echo "ERROR: Failed to get lock."
+ exit 1 +} -# https://unix.stackexchange.com/a/155077/456630 -if [ -z "$(git status --porcelain)" ]; then - echo "No changes to commit" -else - git config --local user.name "$git_commit_username" - git config --local user.email "$git_commit_email" - git commit -m "$git_commit_message" - git push --set-upstream origin "$git_branch" -fi +function run() { + cd "$bpmn_models_absolute_dir" + git add . + + # https://unix.stackexchange.com/a/155077/456630 + if [ -z "$(git status --porcelain)" ]; then + echo "No changes to commit" + else + PAT="${git_commit_username}:${git_commit_password}" + AUTH=$(echo -n "$PAT" | openssl base64 | tr -d '\n') + + git config --local user.name "$git_commit_username" + git config --local user.email "$git_commit_email" + git config --local http.extraHeader "Authorization: Basic $AUTH" + git commit -m "$git_commit_message" + git push --set-upstream origin "$git_branch" + git config --unset --local http.extraHeader + fi +} + +exec {lock_fd}>/var/lock/mylockfile || failed_to_get_lock +flock --timeout 60 "$lock_fd" || failed_to_get_lock +run +flock -u "$lock_fd" diff --git a/spiffworkflow-backend/conftest.py b/spiffworkflow-backend/conftest.py index c3af94332..b24a7ed1b 100644 --- a/spiffworkflow-backend/conftest.py +++ b/spiffworkflow-backend/conftest.py @@ -9,7 +9,7 @@ from flask_bpmn.models.db import db from flask_bpmn.models.db import SpiffworkflowBaseDBModel from tests.spiffworkflow_backend.helpers.base_test import BaseTest -from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel +from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.process_instance_processor import ( @@ -47,7 +47,7 @@ def app() -> Flask: @pytest.fixture() def with_db_and_bpmn_file_cleanup() -> None: """Process_group_resource.""" - db.session.query(ActiveTaskUserModel).delete() + db.session.query(HumanTaskUserModel).delete() for model in SpiffworkflowBaseDBModel._all_subclasses(): db.session.query(model).delete() diff --git a/spiffworkflow-backend/migrations/versions/10e376f55dc2_.py b/spiffworkflow-backend/migrations/versions/67197b02b0c1_.py similarity index 93% rename from spiffworkflow-backend/migrations/versions/10e376f55dc2_.py rename to spiffworkflow-backend/migrations/versions/67197b02b0c1_.py index bde9ba2ae..2eb3d107c 100644 --- a/spiffworkflow-backend/migrations/versions/10e376f55dc2_.py +++ b/spiffworkflow-backend/migrations/versions/67197b02b0c1_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: 10e376f55dc2 +Revision ID: 67197b02b0c1 Revises: -Create Date: 2022-12-15 14:11:10.965454 +Create Date: 2022-12-20 15:05:31.545567 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. 
-revision = '10e376f55dc2' +revision = '67197b02b0c1' down_revision = None branch_labels = None depends_on = None @@ -76,6 +76,8 @@ def upgrade(): sa.Column('service_id', sa.String(length=255), nullable=False), sa.Column('display_name', sa.String(length=255), nullable=True), sa.Column('email', sa.String(length=255), nullable=True), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('service', 'service_id', name='service_key'), sa.UniqueConstraint('username') @@ -181,11 +183,12 @@ def upgrade(): sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('username', 'group_id', name='user_group_assignment_staged_unique') ) - op.create_table('active_task', + op.create_table('human_task', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=False), - sa.Column('actual_owner_id', sa.Integer(), nullable=True), sa.Column('lane_assignment_id', sa.Integer(), nullable=True), + sa.Column('completed_by_user_id', sa.Integer(), nullable=True), + sa.Column('actual_owner_id', sa.Integer(), nullable=True), sa.Column('form_file_name', sa.String(length=50), nullable=True), sa.Column('ui_form_file_name', sa.String(length=50), nullable=True), sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), @@ -196,12 +199,15 @@ def upgrade(): sa.Column('task_type', sa.String(length=50), nullable=True), sa.Column('task_status', sa.String(length=50), nullable=True), sa.Column('process_model_display_name', sa.String(length=255), nullable=True), + sa.Column('completed', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint(['actual_owner_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ), sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ), sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('task_id', 'process_instance_id', name='active_task_unique') + sa.UniqueConstraint('task_id', 'process_instance_id', name='human_task_unique') ) + op.create_index(op.f('ix_human_task_completed'), 'human_task', ['completed'], unique=False) op.create_table('message_correlation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=False), @@ -262,23 +268,20 @@ def upgrade(): sa.Column('spiff_step', sa.Integer(), nullable=False), sa.Column('task_json', sa.JSON(), nullable=False), sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False), - sa.Column('completed_by_user_id', sa.Integer(), nullable=True), - sa.Column('lane_assignment_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ), sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), sa.PrimaryKeyConstraint('id') ) - op.create_table('active_task_user', + op.create_table('human_task_user', sa.Column('id', sa.Integer(), nullable=False), - sa.Column('active_task_id', sa.Integer(), nullable=False), + sa.Column('human_task_id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['active_task_id'], ['active_task.id'], ), + sa.ForeignKeyConstraint(['human_task_id'], ['human_task.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('active_task_id', 'user_id', name='active_task_user_unique') + sa.UniqueConstraint('human_task_id', 
'user_id', name='human_task_user_unique') ) - op.create_index(op.f('ix_active_task_user_active_task_id'), 'active_task_user', ['active_task_id'], unique=False) - op.create_index(op.f('ix_active_task_user_user_id'), 'active_task_user', ['user_id'], unique=False) + op.create_index(op.f('ix_human_task_user_human_task_id'), 'human_task_user', ['human_task_id'], unique=False) + op.create_index(op.f('ix_human_task_user_user_id'), 'human_task_user', ['user_id'], unique=False) op.create_table('message_correlation_message_instance', sa.Column('id', sa.Integer(), nullable=False), sa.Column('message_instance_id', sa.Integer(), nullable=False), @@ -298,9 +301,9 @@ def downgrade(): op.drop_index(op.f('ix_message_correlation_message_instance_message_instance_id'), table_name='message_correlation_message_instance') op.drop_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), table_name='message_correlation_message_instance') op.drop_table('message_correlation_message_instance') - op.drop_index(op.f('ix_active_task_user_user_id'), table_name='active_task_user') - op.drop_index(op.f('ix_active_task_user_active_task_id'), table_name='active_task_user') - op.drop_table('active_task_user') + op.drop_index(op.f('ix_human_task_user_user_id'), table_name='human_task_user') + op.drop_index(op.f('ix_human_task_user_human_task_id'), table_name='human_task_user') + op.drop_table('human_task_user') op.drop_table('spiff_step_details') op.drop_index(op.f('ix_process_instance_metadata_key'), table_name='process_instance_metadata') op.drop_table('process_instance_metadata') @@ -311,7 +314,8 @@ def downgrade(): op.drop_index(op.f('ix_message_correlation_name'), table_name='message_correlation') op.drop_index(op.f('ix_message_correlation_message_correlation_property_id'), table_name='message_correlation') op.drop_table('message_correlation') - op.drop_table('active_task') + op.drop_index(op.f('ix_human_task_completed'), table_name='human_task') + op.drop_table('human_task') op.drop_table('user_group_assignment_waiting') op.drop_table('user_group_assignment') op.drop_table('secret') diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index be9796aae..0d3a4afef 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -509,6 +509,119 @@ paths: schema: $ref: "#/components/schemas/OkTrue" + /process-instances/for-me: + parameters: + - name: process_model_identifier + in: query + required: false + description: The unique id of an existing process model. + schema: + type: string + - name: page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: per_page + in: query + required: false + description: The page number to return. Defaults to page 1. 
+ schema: + type: integer + - name: start_from + in: query + required: false + description: For filtering - beginning of start window - in seconds since epoch + schema: + type: integer + - name: start_to + in: query + required: false + description: For filtering - end of start window - in seconds since epoch + schema: + type: integer + - name: end_from + in: query + required: false + description: For filtering - beginning of end window - in seconds since epoch + schema: + type: integer + - name: end_to + in: query + required: false + description: For filtering - end of end window - in seconds since epoch + schema: + type: integer + - name: process_status + in: query + required: false + description: For filtering - not_started, user_input_required, waiting, complete, error, or suspended + schema: + type: string + - name: initiated_by_me + in: query + required: false + description: For filtering - show instances initiated by me + schema: + type: boolean + - name: with_tasks_completed_by_me + in: query + required: false + description: For filtering - show instances with tasks completed by me + schema: + type: boolean + - name: with_tasks_completed_by_my_group + in: query + required: false + description: For filtering - show instances with tasks completed by my group + schema: + type: boolean + - name: with_relation_to_me + in: query + required: false + description: For filtering - show instances that have something to do with me + schema: + type: boolean + - name: user_filter + in: query + required: false + description: For filtering - indicates the user has manually entered a query + schema: + type: boolean + - name: report_identifier + in: query + required: false + description: Specifies the identifier of a report to use, if any + schema: + type: string + - name: report_id + in: query + required: false + description: Specifies the identifier of a report to use, if any + schema: + type: integer + - name: group_identifier + in: query + required: false + description: The identifier of the group to get the process instances for + schema: + type: string + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list_for_me + summary: Returns a list of process instances that are associated with me. + tags: + - Process Instances + responses: + "200": + description: Workflow. + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Workflow" + /process-instances: parameters: - name: process_model_identifier @@ -577,6 +690,12 @@ paths: description: For filtering - show instances with tasks completed by my group schema: type: boolean + - name: with_relation_to_me + in: query + required: false + description: For filtering - show instances that have something to do with me + schema: + type: boolean - name: user_filter in: query required: false @@ -603,7 +722,7 @@ paths: type: string get: operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list - summary: Returns a list of process instances for a given process model + summary: Returns a list of process instances. 
tags: - Process Instances responses: @@ -679,6 +798,53 @@ paths: schema: $ref: "#/components/schemas/Workflow" + /process-instances/for-me/{modified_process_model_identifier}/{process_instance_id}/task-info: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The unique id of an existing process model + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + - name: process_identifier + in: query + required: false + description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. + schema: + type: string + - name: all_tasks + in: query + required: false + description: If true, this wil return all tasks associated with the process instance and not just user tasks. + schema: + type: boolean + - name: spiff_step + in: query + required: false + description: If set will return the tasks as they were during a specific step of execution. + schema: + type: integer + get: + tags: + - Process Instances + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list_without_task_data_for_me + summary: returns the list of all user tasks associated with process instance without the task data + responses: + "200": + description: list of tasks + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Task" + /process-instances/{modified_process_model_identifier}/{process_instance_id}/task-info: parameters: - name: modified_process_model_identifier @@ -726,6 +892,39 @@ paths: items: $ref: "#/components/schemas/Task" + /process-instances/for-me/{modified_process_model_identifier}/{process_instance_id}: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The unique id of an existing process model + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + - name: process_identifier + in: query + required: false + description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. 
+ schema: + type: string + get: + tags: + - Process Instances + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_show_for_me + summary: Show information about a process instance that is associated with me + responses: + "200": + description: One Process Instance + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + /process-instances/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: modified_process_model_identifier @@ -798,7 +997,7 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /process-instances/{modified_process_model_identifier}/{process_instance_id}/terminate: + /process-instance-terminate/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: process_instance_id in: path @@ -819,7 +1018,7 @@ paths: schema: $ref: "#/components/schemas/OkTrue" - /process-instances/{modified_process_model_identifier}/{process_instance_id}/suspend: + /process-instance-suspend/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: process_instance_id in: path @@ -840,7 +1039,7 @@ paths: schema: $ref: "#/components/schemas/OkTrue" - /process-instances/{modified_process_model_identifier}/{process_instance_id}/resume: + /process-instance-resume/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: process_instance_id in: path diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml index b4b0c9b94..6ab24699c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml @@ -112,15 +112,15 @@ permissions: users: [] allowed_permissions: [read] uri: /v1.0/process-models/* - read-all-process-instance: + read-all-process-instances-for-me: groups: [everybody] users: [] allowed_permissions: [read] - uri: /v1.0/process-instances/* + uri: /v1.0/process-instances/for-me/* read-process-instance-reports: groups: [everybody] users: [] - allowed_permissions: [read] + allowed_permissions: [create, read, update, delete] uri: /v1.0/process-instances/reports/* processes-read: groups: [everybody] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml index 1300f36b5..20635ea2e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml @@ -98,15 +98,15 @@ permissions: users: [] allowed_permissions: [read] uri: /v1.0/process-models/* - read-all-process-instance: + read-all-process-instances-for-me: groups: [everybody] users: [] allowed_permissions: [read] - uri: /v1.0/process-instances/* - read-process-instance-reports: + uri: /v1.0/process-instances/for-me/* + manage-process-instance-reports: groups: [everybody] users: [] - allowed_permissions: [read] + allowed_permissions: [create, read, update, delete] uri: /v1.0/process-instances/reports/* processes-read: groups: [everybody] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml index 19ce385db..fc118b900 100644 --- 
a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml @@ -93,15 +93,15 @@ permissions: users: [] allowed_permissions: [read] uri: /v1.0/process-models/* - read-all-process-instance: + read-all-process-instances-for-me: groups: [everybody] users: [] allowed_permissions: [read] - uri: /v1.0/process-instances/* + uri: /v1.0/process-instances/for-me/* read-process-instance-reports: groups: [everybody] users: [] - allowed_permissions: [read] + allowed_permissions: [create, read, update, delete] uri: /v1.0/process-instances/reports/* processes-read: groups: [everybody] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py index 71adb57c6..bc79a8e39 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py @@ -17,7 +17,7 @@ from spiffworkflow_backend.models.user_group_assignment import ( from spiffworkflow_backend.models.principal import PrincipalModel # noqa: F401 -from spiffworkflow_backend.models.active_task import ActiveTaskModel # noqa: F401 +from spiffworkflow_backend.models.human_task import HumanTaskModel # noqa: F401 from spiffworkflow_backend.models.spec_reference import ( SpecReferenceCache, ) # noqa: F401 diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/active_task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py similarity index 73% rename from spiffworkflow-backend/src/spiffworkflow_backend/models/active_task.py rename to spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py index ea9e10552..940a51fc0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/active_task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py @@ -1,4 +1,4 @@ -"""Active_task.""" +"""Human_task.""" from __future__ import annotations from dataclasses import dataclass @@ -8,7 +8,6 @@ from flask_bpmn.models.db import db from flask_bpmn.models.db import SpiffworkflowBaseDBModel from sqlalchemy import ForeignKey from sqlalchemy.orm import relationship -from sqlalchemy.orm import RelationshipProperty from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel @@ -17,29 +16,30 @@ from spiffworkflow_backend.models.user import UserModel if TYPE_CHECKING: - from spiffworkflow_backend.models.active_task_user import ( # noqa: F401 - ActiveTaskUserModel, + from spiffworkflow_backend.models.human_task_user import ( # noqa: F401 + HumanTaskUserModel, ) @dataclass -class ActiveTaskModel(SpiffworkflowBaseDBModel): - """ActiveTaskModel.""" +class HumanTaskModel(SpiffworkflowBaseDBModel): + """HumanTaskModel.""" - __tablename__ = "active_task" + __tablename__ = "human_task" __table_args__ = ( - db.UniqueConstraint( - "task_id", "process_instance_id", name="active_task_unique" - ), + db.UniqueConstraint("task_id", "process_instance_id", name="human_task_unique"), ) - actual_owner: RelationshipProperty[UserModel] = relationship(UserModel) id: int = db.Column(db.Integer, primary_key=True) process_instance_id: int = db.Column( ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore ) - actual_owner_id: int = db.Column(ForeignKey(UserModel.id)) lane_assignment_id: int | None = 
db.Column(ForeignKey(GroupModel.id)) + completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True) + + actual_owner_id: int = db.Column(ForeignKey(UserModel.id)) + # actual_owner: RelationshipProperty[UserModel] = relationship(UserModel) + form_file_name: str | None = db.Column(db.String(50)) ui_form_file_name: str | None = db.Column(db.String(50)) @@ -52,17 +52,18 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel): task_type: str = db.Column(db.String(50)) task_status: str = db.Column(db.String(50)) process_model_display_name: str = db.Column(db.String(255)) + completed: bool = db.Column(db.Boolean, default=False, nullable=False, index=True) - active_task_users = relationship("ActiveTaskUserModel", cascade="delete") + human_task_users = relationship("HumanTaskUserModel", cascade="delete") potential_owners = relationship( # type: ignore "UserModel", viewonly=True, - secondary="active_task_user", - overlaps="active_task_user,users", + secondary="human_task_user", + overlaps="human_task_user,users", ) @classmethod - def to_task(cls, task: ActiveTaskModel) -> Task: + def to_task(cls, task: HumanTaskModel) -> Task: """To_task.""" new_task = Task( task.task_id, @@ -79,7 +80,7 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel): if hasattr(task, "process_model_identifier"): new_task.process_model_identifier = task.process_model_identifier - # active tasks only have status when getting the list on the home page + # human tasks only have status when getting the list on the home page # and it comes from the process_instance. it should not be confused with task_status. if hasattr(task, "status"): new_task.process_instance_status = task.status diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/active_task_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py similarity index 55% rename from spiffworkflow-backend/src/spiffworkflow_backend/models/active_task_user.py rename to spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py index f194c38e4..7d98880fc 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/active_task_user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py @@ -1,4 +1,4 @@ -"""Active_task_user.""" +"""Human_task_user.""" from __future__ import annotations from dataclasses import dataclass @@ -7,26 +7,26 @@ from flask_bpmn.models.db import db from flask_bpmn.models.db import SpiffworkflowBaseDBModel from sqlalchemy import ForeignKey -from spiffworkflow_backend.models.active_task import ActiveTaskModel +from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.user import UserModel @dataclass -class ActiveTaskUserModel(SpiffworkflowBaseDBModel): - """ActiveTaskUserModel.""" +class HumanTaskUserModel(SpiffworkflowBaseDBModel): + """HumanTaskUserModel.""" - __tablename__ = "active_task_user" + __tablename__ = "human_task_user" __table_args__ = ( db.UniqueConstraint( - "active_task_id", + "human_task_id", "user_id", - name="active_task_user_unique", + name="human_task_user_unique", ), ) id = db.Column(db.Integer, primary_key=True) - active_task_id = db.Column( - ForeignKey(ActiveTaskModel.id), nullable=False, index=True # type: ignore + human_task_id = db.Column( + ForeignKey(HumanTaskModel.id), nullable=False, index=True # type: ignore ) user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index c89f457b0..f41897fd7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -26,34 +26,12 @@ class ProcessInstanceNotFoundError(Exception): """ProcessInstanceNotFoundError.""" -class NavigationItemSchema(Schema): - """NavigationItemSchema.""" +class ProcessInstanceTaskDataCannotBeUpdatedError(Exception): + """ProcessInstanceTaskDataCannotBeUpdatedError.""" - class Meta: - """Meta.""" - fields = [ - "spec_id", - "name", - "spec_type", - "task_id", - "description", - "backtracks", - "indent", - "lane", - "state", - "children", - ] - unknown = INCLUDE - - state = marshmallow.fields.String(required=False, allow_none=True) - description = marshmallow.fields.String(required=False, allow_none=True) - backtracks = marshmallow.fields.String(required=False, allow_none=True) - lane = marshmallow.fields.String(required=False, allow_none=True) - task_id = marshmallow.fields.String(required=False, allow_none=True) - children = marshmallow.fields.List( - marshmallow.fields.Nested(lambda: NavigationItemSchema()) - ) +class ProcessInstanceCannotBeDeletedError(Exception): + """ProcessInstanceCannotBeDeletedError.""" class ProcessInstanceStatus(SpiffEnum): @@ -82,7 +60,11 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) process_initiator = relationship("UserModel") - active_tasks = relationship("ActiveTaskModel", cascade="delete") # type: ignore + human_tasks = relationship( + "HumanTaskModel", + cascade="delete", + primaryjoin="and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id, HumanTaskModel.completed == False)", + ) # type: ignore message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore message_correlations = relationship("MessageCorrelationModel", cascade="delete") # type: ignore @@ -131,6 +113,19 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): """Validate_status.""" return self.validate_enum_field(key, value, ProcessInstanceStatus) + def can_submit_task(self) -> bool: + """Can_submit_task.""" + return not self.has_terminal_status() and self.status != "suspended" + + def has_terminal_status(self) -> bool: + """Has_terminal_status.""" + return self.status in self.terminal_statuses() + + @classmethod + def terminal_statuses(cls) -> list[str]: + """Terminal_statuses.""" + return ["complete", "error", "terminated"] + class ProcessInstanceModelSchema(Schema): """ProcessInstanceModelSchema.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py index 9afb5d078..11c3aeada 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py @@ -1,13 +1,11 @@ """Spiff_step_details.""" from dataclasses import dataclass -from typing import Optional from flask_bpmn.models.db import db from flask_bpmn.models.db import SpiffworkflowBaseDBModel from sqlalchemy import ForeignKey from sqlalchemy.orm import deferred -from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel @@ -20,10 +18,13 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel): process_instance_id: int = db.Column( 
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore ) + # human_task_id: int = db.Column( + # ForeignKey(HumanTaskModel.id) # type: ignore + # ) spiff_step: int = db.Column(db.Integer, nullable=False) task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False) - completed_by_user_id: int = db.Column(db.Integer, nullable=True) - lane_assignment_id: Optional[int] = db.Column( - ForeignKey(GroupModel.id), nullable=True - ) + # completed_by_user_id: int = db.Column(db.Integer, nullable=True) + # lane_assignment_id: Optional[int] = db.Column( + # ForeignKey(GroupModel.id), nullable=True + # ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py index 6c1cc1356..ab520ea79 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py @@ -34,6 +34,8 @@ class UserModel(SpiffworkflowBaseDBModel): service_id = db.Column(db.String(255), nullable=False, unique=False) display_name = db.Column(db.String(255)) email = db.Column(db.String(255)) + updated_at_in_seconds: int = db.Column(db.Integer) + created_at_in_seconds: int = db.Column(db.Integer) user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete") # type: ignore groups = relationship( # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 31e692169..bfece39b8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -2,7 +2,6 @@ import json import os import random -import re import string import uuid from typing import Any @@ -32,17 +31,15 @@ from SpiffWorkflow.task import TaskState from sqlalchemy import and_ from sqlalchemy import asc from sqlalchemy import desc -from sqlalchemy import func -from sqlalchemy.orm import aliased -from sqlalchemy.orm import selectinload +from sqlalchemy import or_ from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, ) -from spiffworkflow_backend.models.active_task import ActiveTaskModel -from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel from spiffworkflow_backend.models.file import FileSchema from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel +from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel from spiffworkflow_backend.models.message_instance import MessageInstanceModel from spiffworkflow_backend.models.message_model import MessageModel @@ -53,9 +50,15 @@ from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.process_group import ProcessGroup from spiffworkflow_backend.models.process_group import ProcessGroupSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema +from spiffworkflow_backend.models.process_instance import ( + ProcessInstanceCannotBeDeletedError, +) from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from 
spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance import ( + ProcessInstanceTaskDataCannotBeUpdatedError, +) from spiffworkflow_backend.models.process_instance_metadata import ( ProcessInstanceMetadataModel, ) @@ -72,7 +75,6 @@ from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.user import UserModel -from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel from spiffworkflow_backend.routes.user import verify_token from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService @@ -170,7 +172,7 @@ def process_group_add(body: dict) -> flask.wrappers.Response: """Add_process_group.""" process_group = ProcessGroup(**body) ProcessModelService.add_process_group(process_group) - commit_and_push_to_git( + _commit_and_push_to_git( f"User: {g.user.username} added process group {process_group.id}" ) return make_response(jsonify(process_group), 201) @@ -180,7 +182,7 @@ def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Respo """Process_group_delete.""" process_group_id = un_modify_modified_process_model_id(modified_process_group_id) ProcessModelService().process_group_delete(process_group_id) - commit_and_push_to_git( + _commit_and_push_to_git( f"User: {g.user.username} deleted process group {process_group_id}" ) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -200,7 +202,7 @@ def process_group_update( process_group_id = un_modify_modified_process_model_id(modified_process_group_id) process_group = ProcessGroup(id=process_group_id, **body_filtered) ProcessModelService.update_process_group(process_group) - commit_and_push_to_git( + _commit_and_push_to_git( f"User: {g.user.username} updated process group {process_group_id}" ) return make_response(jsonify(process_group), 200) @@ -267,7 +269,7 @@ def process_group_move( new_process_group = ProcessModelService().process_group_move( original_process_group_id, new_location ) - commit_and_push_to_git( + _commit_and_push_to_git( f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}" ) return make_response(jsonify(new_process_group), 200) @@ -318,7 +320,7 @@ def process_model_create( ) ProcessModelService.add_process_model(process_model_info) - commit_and_push_to_git( + _commit_and_push_to_git( f"User: {g.user.username} created process model {process_model_info.id}" ) return Response( @@ -334,7 +336,7 @@ def process_model_delete( """Process_model_delete.""" process_model_identifier = modified_process_model_identifier.replace(":", "/") ProcessModelService().process_model_delete(process_model_identifier) - commit_and_push_to_git( + _commit_and_push_to_git( f"User: {g.user.username} deleted process model {process_model_identifier}" ) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -360,7 +362,7 @@ def process_model_update( process_model = get_process_model(process_model_identifier) ProcessModelService.update_process_model(process_model, body_filtered) - commit_and_push_to_git( + _commit_and_push_to_git( f"User: {g.user.username} updated process model {process_model_identifier}" ) return 
ProcessModelInfoSchema().dump(process_model) @@ -394,7 +396,7 @@ def process_model_move( new_process_model = ProcessModelService().process_model_move( original_process_model_id, new_location ) - commit_and_push_to_git( + _commit_and_push_to_git( f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}" ) return make_response(jsonify(new_process_model), 200) @@ -493,7 +495,7 @@ def process_model_file_update( ) SpecFileService.update_file(process_model, file_name, request_file_contents) - commit_and_push_to_git( + _commit_and_push_to_git( f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}" ) @@ -517,7 +519,7 @@ def process_model_file_delete( ) ) from exception - commit_and_push_to_git( + _commit_and_push_to_git( f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}" ) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -541,7 +543,7 @@ def add_file(modified_process_model_identifier: str) -> flask.wrappers.Response: file_contents = SpecFileService.get_data(process_model, file.name) file.file_contents = file_contents file.process_model_id = process_model.id - commit_and_push_to_git( + _commit_and_push_to_git( f"User: {g.user.username} added process model file {process_model_identifier}/{file.name}" ) return Response( @@ -577,6 +579,13 @@ def process_instance_run( process_instance = ProcessInstanceService().get_process_instance( process_instance_id ) + if process_instance.status != "not_started": + raise ApiError( + error_code="process_instance_not_runnable", + message=f"Process Instance ({process_instance.id}) is currently running or has already run.", + status_code=400, + ) + processor = ProcessInstanceProcessor(process_instance) if do_engine_steps: @@ -834,6 +843,38 @@ def message_start( ) +def process_instance_list_for_me( + process_model_identifier: Optional[str] = None, + page: int = 1, + per_page: int = 100, + start_from: Optional[int] = None, + start_to: Optional[int] = None, + end_from: Optional[int] = None, + end_to: Optional[int] = None, + process_status: Optional[str] = None, + user_filter: Optional[bool] = False, + report_identifier: Optional[str] = None, + report_id: Optional[int] = None, + user_group_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """Process_instance_list_for_me.""" + return process_instance_list( + process_model_identifier=process_model_identifier, + page=page, + per_page=per_page, + start_from=start_from, + start_to=start_to, + end_from=end_from, + end_to=end_to, + process_status=process_status, + user_filter=user_filter, + report_identifier=report_identifier, + report_id=report_id, + user_group_identifier=user_group_identifier, + with_relation_to_me=True, + ) + + def process_instance_list( process_model_identifier: Optional[str] = None, page: int = 1, @@ -843,13 +884,11 @@ def process_instance_list( end_from: Optional[int] = None, end_to: Optional[int] = None, process_status: Optional[str] = None, - initiated_by_me: Optional[bool] = None, - with_tasks_completed_by_me: Optional[bool] = None, - with_tasks_completed_by_my_group: Optional[bool] = None, + with_relation_to_me: Optional[bool] = None, user_filter: Optional[bool] = False, report_identifier: Optional[str] = None, report_id: Optional[int] = None, - group_identifier: Optional[str] = None, + user_group_identifier: Optional[str] = None, ) -> flask.wrappers.Response: """Process_instance_list.""" process_instance_report = 
ProcessInstanceReportService.report_with_identifier( @@ -858,236 +897,38 @@ def process_instance_list( if user_filter: report_filter = ProcessInstanceReportFilter( - process_model_identifier, - start_from, - start_to, - end_from, - end_to, - process_status.split(",") if process_status else None, - initiated_by_me, - with_tasks_completed_by_me, - with_tasks_completed_by_my_group, + process_model_identifier=process_model_identifier, + user_group_identifier=user_group_identifier, + start_from=start_from, + start_to=start_to, + end_from=end_from, + end_to=end_to, + with_relation_to_me=with_relation_to_me, + process_status=process_status.split(",") if process_status else None, ) else: report_filter = ( ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report, - process_model_identifier, - start_from, - start_to, - end_from, - end_to, - process_status, - initiated_by_me, - with_tasks_completed_by_me, - with_tasks_completed_by_my_group, + process_instance_report=process_instance_report, + process_model_identifier=process_model_identifier, + user_group_identifier=user_group_identifier, + start_from=start_from, + start_to=start_to, + end_from=end_from, + end_to=end_to, + process_status=process_status, + with_relation_to_me=with_relation_to_me, ) ) - process_instance_query = ProcessInstanceModel.query - # Always join that hot user table for good performance at serialization time. - process_instance_query = process_instance_query.options( - selectinload(ProcessInstanceModel.process_initiator) + response_json = ProcessInstanceReportService.run_process_instance_report( + report_filter=report_filter, + process_instance_report=process_instance_report, + page=page, + per_page=per_page, + user=g.user, ) - if report_filter.process_model_identifier is not None: - process_model = get_process_model( - f"{report_filter.process_model_identifier}", - ) - - process_instance_query = process_instance_query.filter_by( - process_model_identifier=process_model.id - ) - - # this can never happen. obviously the class has the columns it defines. this is just to appease mypy. 
- if ( - ProcessInstanceModel.start_in_seconds is None - or ProcessInstanceModel.end_in_seconds is None - ): - raise ( - ApiError( - error_code="unexpected_condition", - message="Something went very wrong", - status_code=500, - ) - ) - - if report_filter.start_from is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.start_in_seconds >= report_filter.start_from - ) - if report_filter.start_to is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.start_in_seconds <= report_filter.start_to - ) - if report_filter.end_from is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.end_in_seconds >= report_filter.end_from - ) - if report_filter.end_to is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.end_in_seconds <= report_filter.end_to - ) - if report_filter.process_status is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore - ) - - if report_filter.initiated_by_me is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - process_instance_query = process_instance_query.filter_by( - process_initiator=g.user - ) - - # TODO: not sure if this is exactly what is wanted - if report_filter.with_tasks_completed_by_me is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - # process_instance_query = process_instance_query.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) - # process_instance_query = process_instance_query.add_columns(UserModel.username) - # search for process_instance.UserModel.username in this file for more details about why adding columns is annoying. 
- - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.process_initiator_id != g.user.id - ) - process_instance_query = process_instance_query.join( - SpiffStepDetailsModel, - ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, - ) - process_instance_query = process_instance_query.join( - SpiffLoggingModel, - ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.message.contains("COMPLETED") # type: ignore - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step - ) - process_instance_query = process_instance_query.filter( - SpiffStepDetailsModel.completed_by_user_id == g.user.id - ) - - if report_filter.with_tasks_completed_by_my_group is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - process_instance_query = process_instance_query.join( - SpiffStepDetailsModel, - ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, - ) - process_instance_query = process_instance_query.join( - SpiffLoggingModel, - ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.message.contains("COMPLETED") # type: ignore - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step - ) - if group_identifier: - process_instance_query = process_instance_query.join( - GroupModel, - GroupModel.identifier == group_identifier, - ) - else: - process_instance_query = process_instance_query.join( - GroupModel, - GroupModel.id == SpiffStepDetailsModel.lane_assignment_id, - ) - process_instance_query = process_instance_query.join( - UserGroupAssignmentModel, - UserGroupAssignmentModel.group_id == GroupModel.id, - ) - process_instance_query = process_instance_query.filter( - UserGroupAssignmentModel.user_id == g.user.id - ) - - instance_metadata_aliases = {} - stock_columns = ProcessInstanceReportService.get_column_names_for_model( - ProcessInstanceModel - ) - for column in process_instance_report.report_metadata["columns"]: - if column["accessor"] in stock_columns: - continue - instance_metadata_alias = aliased(ProcessInstanceMetadataModel) - instance_metadata_aliases[column["accessor"]] = instance_metadata_alias - - filter_for_column = None - if "filter_by" in process_instance_report.report_metadata: - filter_for_column = next( - ( - f - for f in process_instance_report.report_metadata["filter_by"] - if f["field_name"] == column["accessor"] - ), - None, - ) - isouter = True - conditions = [ - ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, - instance_metadata_alias.key == column["accessor"], - ] - if filter_for_column: - isouter = False - conditions.append( - instance_metadata_alias.value == filter_for_column["field_value"] - ) - process_instance_query = process_instance_query.join( - instance_metadata_alias, and_(*conditions), isouter=isouter - ).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"])) - - order_by_query_array = [] - order_by_array = process_instance_report.report_metadata["order_by"] - if len(order_by_array) < 1: - order_by_array = ProcessInstanceReportModel.default_order_by() - for order_by_option in order_by_array: - attribute = re.sub("^-", "", order_by_option) - if 
attribute in stock_columns: - if order_by_option.startswith("-"): - order_by_query_array.append( - getattr(ProcessInstanceModel, attribute).desc() - ) - else: - order_by_query_array.append( - getattr(ProcessInstanceModel, attribute).asc() - ) - elif attribute in instance_metadata_aliases: - if order_by_option.startswith("-"): - order_by_query_array.append( - func.max(instance_metadata_aliases[attribute].value).desc() - ) - else: - order_by_query_array.append( - func.max(instance_metadata_aliases[attribute].value).asc() - ) - - process_instances = ( - process_instance_query.group_by(ProcessInstanceModel.id) - .add_columns(ProcessInstanceModel.id) - .order_by(*order_by_query_array) - .paginate(page=page, per_page=per_page, error_out=False) - ) - - results = ProcessInstanceReportService.add_metadata_columns_to_process_instance( - process_instances.items, process_instance_report.report_metadata["columns"] - ) - - response_json = { - "report": process_instance_report, - "results": results, - "filters": report_filter.to_dict(), - "pagination": { - "count": len(results), - "total": process_instances.total, - "pages": process_instances.pages, - }, - } - return make_response(jsonify(response_json), 200) @@ -1107,14 +948,41 @@ def process_instance_report_column_list() -> flask.wrappers.Response: return make_response(jsonify(table_columns + columns_for_metadata_strings), 200) +def process_instance_show_for_me( + modified_process_model_identifier: str, + process_instance_id: int, + process_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """Process_instance_show_for_me.""" + process_instance = _find_process_instance_for_me_or_raise(process_instance_id) + return _get_process_instance( + process_instance=process_instance, + modified_process_model_identifier=modified_process_model_identifier, + process_identifier=process_identifier, + ) + + def process_instance_show( modified_process_model_identifier: str, process_instance_id: int, process_identifier: Optional[str] = None, ) -> flask.wrappers.Response: """Create_process_instance.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") process_instance = find_process_instance_by_id_or_raise(process_instance_id) + return _get_process_instance( + process_instance=process_instance, + modified_process_model_identifier=modified_process_model_identifier, + process_identifier=process_identifier, + ) + + +def _get_process_instance( + modified_process_model_identifier: str, + process_instance: ProcessInstanceModel, + process_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """_get_process_instance.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") current_version_control_revision = GitService.get_current_revision() process_model_with_diagram = None @@ -1162,6 +1030,12 @@ def process_instance_delete( """Create_process_instance.""" process_instance = find_process_instance_by_id_or_raise(process_instance_id) + if not process_instance.has_terminal_status(): + raise ProcessInstanceCannotBeDeletedError( + f"Process instance ({process_instance.id}) cannot be deleted since it does not have a terminal status. " + f"Current status is {process_instance.status}." 
+ ) + # (Pdb) db.session.delete # > db.session.query(SpiffLoggingModel).filter_by( @@ -1316,35 +1190,36 @@ def process_instance_report_show( def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: """Task_list_my_tasks.""" principal = find_principal_or_raise() - active_tasks = ( - ActiveTaskModel.query.order_by(desc(ActiveTaskModel.id)) # type: ignore + human_tasks = ( + HumanTaskModel.query.order_by(desc(HumanTaskModel.id)) # type: ignore .join(ProcessInstanceModel) - .join(ActiveTaskUserModel) + .join(HumanTaskUserModel) .filter_by(user_id=principal.user_id) + .filter(HumanTaskModel.completed == False) # noqa: E712 # just need this add_columns to add the process_model_identifier. Then add everything back that was removed. .add_columns( ProcessInstanceModel.process_model_identifier, ProcessInstanceModel.process_model_display_name, ProcessInstanceModel.status, - ActiveTaskModel.task_name, - ActiveTaskModel.task_title, - ActiveTaskModel.task_type, - ActiveTaskModel.task_status, - ActiveTaskModel.task_id, - ActiveTaskModel.id, - ActiveTaskModel.process_model_display_name, - ActiveTaskModel.process_instance_id, + HumanTaskModel.task_name, + HumanTaskModel.task_title, + HumanTaskModel.task_type, + HumanTaskModel.task_status, + HumanTaskModel.task_id, + HumanTaskModel.id, + HumanTaskModel.process_model_display_name, + HumanTaskModel.process_instance_id, ) .paginate(page=page, per_page=per_page, error_out=False) ) - tasks = [ActiveTaskModel.to_task(active_task) for active_task in active_tasks.items] + tasks = [HumanTaskModel.to_task(human_task) for human_task in human_tasks.items] response_json = { "results": tasks, "pagination": { - "count": len(active_tasks.items), - "total": active_tasks.total, - "pages": active_tasks.pages, + "count": len(human_tasks.items), + "total": human_tasks.total, + "pages": human_tasks.pages, }, } @@ -1369,11 +1244,11 @@ def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Respo def task_list_for_my_groups( - group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 + user_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 ) -> flask.wrappers.Response: """Task_list_for_my_groups.""" return get_tasks( - group_identifier=group_identifier, + user_group_identifier=user_group_identifier, processes_started_by_user=False, page=page, per_page=per_page, @@ -1393,79 +1268,102 @@ def get_tasks( has_lane_assignment_id: bool = True, page: int = 1, per_page: int = 100, - group_identifier: Optional[str] = None, + user_group_identifier: Optional[str] = None, ) -> flask.wrappers.Response: """Get_tasks.""" user_id = g.user.id - # use distinct to ensure we only get one row per active task otherwise - # we can get back multiple for the same active task row which throws off + # use distinct to ensure we only get one row per human task otherwise + # we can get back multiple for the same human task row which throws off # pagination later on # https://stackoverflow.com/q/34582014/6090676 - active_tasks_query = ( - ActiveTaskModel.query.distinct() - .outerjoin(GroupModel, GroupModel.id == ActiveTaskModel.lane_assignment_id) + human_tasks_query = ( + HumanTaskModel.query.distinct() + .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id) .join(ProcessInstanceModel) .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) + .filter(HumanTaskModel.completed == False) # noqa: E712 ) if processes_started_by_user: - active_tasks_query = active_tasks_query.filter( + 
human_tasks_query = human_tasks_query.filter( ProcessInstanceModel.process_initiator_id == user_id ).outerjoin( - ActiveTaskUserModel, + HumanTaskUserModel, and_( - ActiveTaskUserModel.user_id == user_id, - ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, + HumanTaskUserModel.user_id == user_id, + HumanTaskModel.id == HumanTaskUserModel.human_task_id, ), ) else: - active_tasks_query = active_tasks_query.filter( + human_tasks_query = human_tasks_query.filter( ProcessInstanceModel.process_initiator_id != user_id ).join( - ActiveTaskUserModel, + HumanTaskUserModel, and_( - ActiveTaskUserModel.user_id == user_id, - ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, + HumanTaskUserModel.user_id == user_id, + HumanTaskModel.id == HumanTaskUserModel.human_task_id, ), ) if has_lane_assignment_id: - if group_identifier: - active_tasks_query = active_tasks_query.filter( - GroupModel.identifier == group_identifier + if user_group_identifier: + human_tasks_query = human_tasks_query.filter( + GroupModel.identifier == user_group_identifier ) else: - active_tasks_query = active_tasks_query.filter( - ActiveTaskModel.lane_assignment_id.is_not(None) # type: ignore + human_tasks_query = human_tasks_query.filter( + HumanTaskModel.lane_assignment_id.is_not(None) # type: ignore ) else: - active_tasks_query = active_tasks_query.filter(ActiveTaskModel.lane_assignment_id.is_(None)) # type: ignore + human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None)) # type: ignore - active_tasks = active_tasks_query.add_columns( - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.status.label("process_instance_status"), # type: ignore - ProcessInstanceModel.updated_at_in_seconds, - ProcessInstanceModel.created_at_in_seconds, - UserModel.username, - GroupModel.identifier.label("group_identifier"), - ActiveTaskModel.task_name, - ActiveTaskModel.task_title, - ActiveTaskModel.process_model_display_name, - ActiveTaskModel.process_instance_id, - ActiveTaskUserModel.user_id.label("current_user_is_potential_owner"), - ).paginate(page=page, per_page=per_page, error_out=False) + human_tasks = ( + human_tasks_query.add_columns( + ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.status.label("process_instance_status"), # type: ignore + ProcessInstanceModel.updated_at_in_seconds, + ProcessInstanceModel.created_at_in_seconds, + UserModel.username, + GroupModel.identifier.label("user_group_identifier"), + HumanTaskModel.task_name, + HumanTaskModel.task_title, + HumanTaskModel.process_model_display_name, + HumanTaskModel.process_instance_id, + HumanTaskUserModel.user_id.label("current_user_is_potential_owner"), + ) + .order_by(desc(HumanTaskModel.id)) # type: ignore + .paginate(page=page, per_page=per_page, error_out=False) + ) response_json = { - "results": active_tasks.items, + "results": human_tasks.items, "pagination": { - "count": len(active_tasks.items), - "total": active_tasks.total, - "pages": active_tasks.pages, + "count": len(human_tasks.items), + "total": human_tasks.total, + "pages": human_tasks.pages, }, } return make_response(jsonify(response_json), 200) +def process_instance_task_list_without_task_data_for_me( + modified_process_model_identifier: str, + process_instance_id: int, + all_tasks: bool = False, + spiff_step: int = 0, +) -> flask.wrappers.Response: + """Process_instance_task_list_without_task_data_for_me.""" + process_instance = _find_process_instance_for_me_or_raise(process_instance_id) + print(f"process_instance: 
{process_instance}") + return process_instance_task_list( + modified_process_model_identifier, + process_instance, + all_tasks, + spiff_step, + get_task_data=False, + ) + + def process_instance_task_list_without_task_data( modified_process_model_identifier: str, process_instance_id: int, @@ -1473,9 +1371,10 @@ def process_instance_task_list_without_task_data( spiff_step: int = 0, ) -> flask.wrappers.Response: """Process_instance_task_list_without_task_data.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) return process_instance_task_list( modified_process_model_identifier, - process_instance_id, + process_instance, all_tasks, spiff_step, get_task_data=False, @@ -1489,9 +1388,10 @@ def process_instance_task_list_with_task_data( spiff_step: int = 0, ) -> flask.wrappers.Response: """Process_instance_task_list_with_task_data.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) return process_instance_task_list( modified_process_model_identifier, - process_instance_id, + process_instance, all_tasks, spiff_step, get_task_data=True, @@ -1500,19 +1400,17 @@ def process_instance_task_list_with_task_data( def process_instance_task_list( _modified_process_model_identifier: str, - process_instance_id: int, + process_instance: ProcessInstanceModel, all_tasks: bool = False, spiff_step: int = 0, get_task_data: bool = False, ) -> flask.wrappers.Response: """Process_instance_task_list.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - if spiff_step > 0: step_detail = ( db.session.query(SpiffStepDetailsModel) .filter( - SpiffStepDetailsModel.process_instance_id == process_instance.id, + SpiffStepDetailsModel.process_instance.id == process_instance.id, SpiffStepDetailsModel.spiff_step == spiff_step, ) .first() @@ -1651,6 +1549,13 @@ def task_submit( """Task_submit_user_data.""" principal = find_principal_or_raise() process_instance = find_process_instance_by_id_or_raise(process_instance_id) + if not process_instance.can_submit_task(): + raise ApiError( + error_code="process_instance_not_runnable", + message=f"Process Instance ({process_instance.id}) has status " + f"{process_instance.status} which does not allow tasks to be submitted.", + status_code=400, + ) processor = ProcessInstanceProcessor(process_instance) spiff_task = get_spiff_task_from_process_instance( @@ -1672,14 +1577,14 @@ def task_submit( if terminate_loop and spiff_task.is_looping(): spiff_task.terminate_loop() - active_task = ActiveTaskModel.query.filter_by( - process_instance_id=process_instance_id, task_id=task_id + human_task = HumanTaskModel.query.filter_by( + process_instance_id=process_instance_id, task_id=task_id, completed=False ).first() - if active_task is None: + if human_task is None: raise ( ApiError( - error_code="no_active_task", - message="Cannot find an active task with task id '{task_id}' for process instance {process_instance_id}.", + error_code="no_human_task", + message="Cannot find an human task with task id '{task_id}' for process instance {process_instance_id}.", status_code=500, ) ) @@ -1689,7 +1594,7 @@ def task_submit( spiff_task=spiff_task, data=body, user=g.user, - active_task=active_task, + human_task=human_task, ) # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same @@ -1702,16 +1607,18 @@ def task_submit( # last_index = next_task.task_info()["mi_index"] # next_task = processor.next_task() - next_active_task_assigned_to_me = ( - 
ActiveTaskModel.query.filter_by(process_instance_id=process_instance_id) - .order_by(asc(ActiveTaskModel.id)) # type: ignore - .join(ActiveTaskUserModel) + next_human_task_assigned_to_me = ( + HumanTaskModel.query.filter_by( + process_instance_id=process_instance_id, completed=False + ) + .order_by(asc(HumanTaskModel.id)) # type: ignore + .join(HumanTaskUserModel) .filter_by(user_id=principal.user_id) .first() ) - if next_active_task_assigned_to_me: + if next_human_task_assigned_to_me: return make_response( - jsonify(ActiveTaskModel.to_task(next_active_task_assigned_to_me)), 200 + jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200 ) return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") @@ -2023,6 +1930,57 @@ def delete_secret(key: str) -> Response: return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") +def update_task_data( + process_instance_id: str, + modified_process_model_identifier: str, + task_id: str, + body: Dict, +) -> Response: + """Update task data.""" + process_instance = ProcessInstanceModel.query.filter( + ProcessInstanceModel.id == int(process_instance_id) + ).first() + if process_instance: + if process_instance.status != "suspended": + raise ProcessInstanceTaskDataCannotBeUpdatedError( + f"The process instance needs to be suspended to update the task-data. It is currently: {process_instance.status}" + ) + + process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json) + if "new_task_data" in body: + new_task_data_str: str = body["new_task_data"] + new_task_data_dict = json.loads(new_task_data_str) + if task_id in process_instance_bpmn_json_dict["tasks"]: + process_instance_bpmn_json_dict["tasks"][task_id][ + "data" + ] = new_task_data_dict + process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict) + db.session.add(process_instance) + try: + db.session.commit() + except Exception as e: + db.session.rollback() + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update the Instance. 
Original error is {e}", + ) from e + else: + raise ApiError( + error_code="update_task_data_error", + message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", + ) + else: + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.", + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) + + def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: """Get_required_parameter_or_raise.""" return_value = None @@ -2099,56 +2057,45 @@ def _update_form_schema_with_task_data_as_needed( _update_form_schema_with_task_data_as_needed(o, task_data) -def update_task_data( - process_instance_id: str, - modified_process_model_identifier: str, - task_id: str, - body: Dict, -) -> Response: - """Update task data.""" - process_instance = ProcessInstanceModel.query.filter( - ProcessInstanceModel.id == int(process_instance_id) - ).first() - if process_instance: - process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json) - if "new_task_data" in body: - new_task_data_str: str = body["new_task_data"] - new_task_data_dict = json.loads(new_task_data_str) - if task_id in process_instance_bpmn_json_dict["tasks"]: - process_instance_bpmn_json_dict["tasks"][task_id][ - "data" - ] = new_task_data_dict - process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict) - db.session.add(process_instance) - try: - db.session.commit() - except Exception as e: - db.session.rollback() - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update the Instance. Original error is {e}", - ) from e - else: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", - ) - else: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.", - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) - - -def commit_and_push_to_git(message: str) -> None: +def _commit_and_push_to_git(message: str) -> None: """Commit_and_push_to_git.""" if current_app.config["GIT_COMMIT_ON_SAVE"]: git_output = GitService.commit(message=message) current_app.logger.info(f"git output: {git_output}") else: current_app.logger.info("Git commit on save is disabled") + + +def _find_process_instance_for_me_or_raise( + process_instance_id: int, +) -> ProcessInstanceModel: + """_find_process_instance_for_me_or_raise.""" + process_instance: ProcessInstanceModel = ( + ProcessInstanceModel.query.filter_by(id=process_instance_id) + .outerjoin(HumanTaskModel) + .outerjoin( + HumanTaskUserModel, + and_( + HumanTaskModel.id == HumanTaskUserModel.human_task_id, + HumanTaskUserModel.user_id == g.user.id, + ), + ) + .filter( + or_( + HumanTaskUserModel.id.is_not(None), + ProcessInstanceModel.process_initiator_id == g.user.id, + ) + ) + .first() + ) + + if process_instance is None: + raise ( + ApiError( + error_code="process_instance_cannot_be_found", + message=f"Process instance with id {process_instance_id} cannot be found that is associated with you.", + status_code=400, + ) + ) + + return process_instance diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index 429a65e1b..8c3b98ed1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -19,8 +19,8 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore from sqlalchemy import or_ from sqlalchemy import text -from spiffworkflow_backend.models.active_task import ActiveTaskModel from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel from spiffworkflow_backend.models.permission_target import PermissionTargetModel from spiffworkflow_backend.models.principal import MissingPrincipalError @@ -37,8 +37,8 @@ class PermissionsFileNotSetError(Exception): """PermissionsFileNotSetError.""" -class ActiveTaskNotFoundError(Exception): - """ActiveTaskNotFoundError.""" +class HumanTaskNotFoundError(Exception): + """HumanTaskNotFoundError.""" class UserDoesNotHaveAccessToTaskError(Exception): @@ -432,17 +432,17 @@ class AuthorizationService: user: UserModel, ) -> bool: """Assert_user_can_complete_spiff_task.""" - active_task = ActiveTaskModel.query.filter_by( + human_task = HumanTaskModel.query.filter_by( task_name=spiff_task.task_spec.name, process_instance_id=process_instance_id, ).first() - if active_task is None: - raise ActiveTaskNotFoundError( - f"Could find an active task with task name '{spiff_task.task_spec.name}'" + if human_task is None: + raise HumanTaskNotFoundError( + f"Could not find a human task with task name '{spiff_task.task_spec.name}'" + f" for process instance '{process_instance_id}'" ) - if user not in active_task.potential_owners: + if user not in human_task.potential_owners: raise UserDoesNotHaveAccessToTaskError( f"User {user.username} does not have access to update task'{spiff_task.task_spec.name}'" f" for process instance '{process_instance_id}'" @@ -503,7 +503,7 @@ class AuthorizationService: cls.import_permissions_from_yaml_file() if is_new_user: - UserService.add_user_to_active_tasks_if_appropriate(user_model) + UserService.add_user_to_human_tasks_if_appropriate(user_model) # this cannot be None so ignore mypy return user_model # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py index 8ef952c3c..495603cf0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py @@ -100,6 +100,7 @@ class GitService: branch_name_to_use, git_username, git_email, + current_app.config["GIT_USER_PASSWORD"], ] return cls.run_shell_command_to_get_stdout(shell_command) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 6d110a7dc..bd588a373 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -65,11 +65,11 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore -from spiffworkflow_backend.models.active_task 
import ActiveTaskModel -from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel from spiffworkflow_backend.models.file import File from spiffworkflow_backend.models.file import FileType from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel +from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel from spiffworkflow_backend.models.message_correlation_message_instance import ( MessageCorrelationMessageInstanceModel, @@ -559,7 +559,7 @@ class ProcessInstanceProcessor: "spiff_step": self.process_instance_model.spiff_step or 1, "task_json": task_json, "timestamp": round(time.time()), - "completed_by_user_id": self.current_user().id, + # "completed_by_user_id": self.current_user().id, } def spiff_step_details(self) -> SpiffStepDetailsModel: @@ -570,14 +570,13 @@ class ProcessInstanceProcessor: spiff_step=details_mapping["spiff_step"], task_json=details_mapping["task_json"], timestamp=details_mapping["timestamp"], - completed_by_user_id=details_mapping["completed_by_user_id"], + # completed_by_user_id=details_mapping["completed_by_user_id"], ) return details_model - def save_spiff_step_details(self, active_task: ActiveTaskModel) -> None: + def save_spiff_step_details(self) -> None: """SaveSpiffStepDetails.""" details_model = self.spiff_step_details() - details_model.lane_assignment_id = active_task.lane_assignment_id db.session.add(details_model) db.session.commit() @@ -638,7 +637,7 @@ class ProcessInstanceProcessor: db.session.add(self.process_instance_model) db.session.commit() - active_tasks = ActiveTaskModel.query.filter_by( + human_tasks = HumanTaskModel.query.filter_by( process_instance_id=self.process_instance_model.id ).all() ready_or_waiting_tasks = self.get_all_ready_or_waiting_tasks() @@ -669,14 +668,14 @@ class ProcessInstanceProcessor: if "formUiSchemaFilename" in properties: ui_form_file_name = properties["formUiSchemaFilename"] - active_task = None - for at in active_tasks: + human_task = None + for at in human_tasks: if at.task_id == str(ready_or_waiting_task.id): - active_task = at - active_tasks.remove(at) + human_task = at + human_tasks.remove(at) - if active_task is None: - active_task = ActiveTaskModel( + if human_task is None: + human_task = HumanTaskModel( process_instance_id=self.process_instance_model.id, process_model_display_name=process_model_display_name, form_file_name=form_file_name, @@ -688,21 +687,22 @@ class ProcessInstanceProcessor: task_status=ready_or_waiting_task.get_state_name(), lane_assignment_id=potential_owner_hash["lane_assignment_id"], ) - db.session.add(active_task) + db.session.add(human_task) db.session.commit() for potential_owner_id in potential_owner_hash[ "potential_owner_ids" ]: - active_task_user = ActiveTaskUserModel( - user_id=potential_owner_id, active_task_id=active_task.id + human_task_user = HumanTaskUserModel( + user_id=potential_owner_id, human_task_id=human_task.id ) - db.session.add(active_task_user) + db.session.add(human_task_user) db.session.commit() - if len(active_tasks) > 0: - for at in active_tasks: - db.session.delete(at) + if len(human_tasks) > 0: + for at in human_tasks: + at.completed = True + db.session.add(at) db.session.commit() @staticmethod @@ -1180,11 +1180,16 @@ class ProcessInstanceProcessor: ) return user_tasks # type: ignore - def complete_task(self, task: SpiffTask, active_task: ActiveTaskModel) -> None: + def 
complete_task( + self, task: SpiffTask, human_task: HumanTaskModel, user: UserModel + ) -> None: """Complete_task.""" self.increment_spiff_step() self.bpmn_process_instance.complete_task_from_id(task.id) - self.save_spiff_step_details(active_task) + human_task.completed_by_user_id = user.id + db.session.add(human_task) + db.session.commit() + self.save_spiff_step_details() def get_data(self) -> dict[str, Any]: """Get_data.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index 84d5d6752..773533ae2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -1,14 +1,30 @@ """Process_instance_report_service.""" +import re from dataclasses import dataclass from typing import Optional import sqlalchemy +from flask_bpmn.api.api_error import ApiError from flask_bpmn.models.db import db +from sqlalchemy import and_ +from sqlalchemy import func +from sqlalchemy import or_ +from sqlalchemy.orm import aliased +from sqlalchemy.orm import selectinload +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel +from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel +from spiffworkflow_backend.services.process_model_service import ProcessModelService @dataclass @@ -16,14 +32,17 @@ class ProcessInstanceReportFilter: """ProcessInstanceReportFilter.""" process_model_identifier: Optional[str] = None + user_group_identifier: Optional[str] = None start_from: Optional[int] = None start_to: Optional[int] = None end_from: Optional[int] = None end_to: Optional[int] = None process_status: Optional[list[str]] = None initiated_by_me: Optional[bool] = None + has_terminal_status: Optional[bool] = None with_tasks_completed_by_me: Optional[bool] = None - with_tasks_completed_by_my_group: Optional[bool] = None + with_tasks_assigned_to_my_group: Optional[bool] = None + with_relation_to_me: Optional[bool] = None def to_dict(self) -> dict[str, str]: """To_dict.""" @@ -31,6 +50,8 @@ class ProcessInstanceReportFilter: if self.process_model_identifier is not None: d["process_model_identifier"] = self.process_model_identifier + if self.user_group_identifier is not None: + d["user_group_identifier"] = self.user_group_identifier if self.start_from is not None: d["start_from"] = str(self.start_from) if self.start_to is not None: @@ -43,14 +64,18 @@ class ProcessInstanceReportFilter: d["process_status"] = ",".join(self.process_status) if self.initiated_by_me is not None: d["initiated_by_me"] = str(self.initiated_by_me).lower() + if self.has_terminal_status is not None: + d["has_terminal_status"] = str(self.has_terminal_status).lower() if self.with_tasks_completed_by_me is not None: d["with_tasks_completed_by_me"] = str( self.with_tasks_completed_by_me ).lower() - if self.with_tasks_completed_by_my_group is not None: - 
d["with_tasks_completed_by_my_group"] = str( - self.with_tasks_completed_by_my_group + if self.with_tasks_assigned_to_my_group is not None: + d["with_tasks_assigned_to_my_group"] = str( + self.with_tasks_assigned_to_my_group ).lower() + if self.with_relation_to_me is not None: + d["with_relation_to_me"] = str(self.with_relation_to_me).lower() return d @@ -89,7 +114,7 @@ class ProcessInstanceReportService: "filter_by": [], "order_by": ["-start_in_seconds", "-id"], }, - "system_report_instances_initiated_by_me": { + "system_report_completed_instances_initiated_by_me": { "columns": [ {"Header": "id", "accessor": "id"}, { @@ -100,28 +125,32 @@ class ProcessInstanceReportService: {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, {"Header": "status", "accessor": "status"}, ], - "filter_by": [{"field_name": "initiated_by_me", "field_value": True}], - "order_by": ["-start_in_seconds", "-id"], - }, - "system_report_instances_with_tasks_completed_by_me": { - "columns": cls.builtin_column_options(), "filter_by": [ - {"field_name": "with_tasks_completed_by_me", "field_value": True} + {"field_name": "initiated_by_me", "field_value": True}, + {"field_name": "has_terminal_status", "field_value": True}, ], "order_by": ["-start_in_seconds", "-id"], }, - "system_report_instances_with_tasks_completed_by_my_groups": { + "system_report_completed_instances_with_tasks_completed_by_me": { + "columns": cls.builtin_column_options(), + "filter_by": [ + {"field_name": "with_tasks_completed_by_me", "field_value": True}, + {"field_name": "has_terminal_status", "field_value": True}, + ], + "order_by": ["-start_in_seconds", "-id"], + }, + "system_report_completed_instances_with_tasks_completed_by_my_groups": { "columns": cls.builtin_column_options(), "filter_by": [ { - "field_name": "with_tasks_completed_by_my_group", + "field_name": "with_tasks_assigned_to_my_group", "field_value": True, - } + }, + {"field_name": "has_terminal_status", "field_value": True}, ], "order_by": ["-start_in_seconds", "-id"], }, } - process_instance_report = ProcessInstanceReportModel( identifier=report_identifier, created_by_id=user.id, @@ -164,27 +193,31 @@ class ProcessInstanceReportService: return filters[key].split(",") if key in filters else None process_model_identifier = filters.get("process_model_identifier") + user_group_identifier = filters.get("user_group_identifier") start_from = int_value("start_from") start_to = int_value("start_to") end_from = int_value("end_from") end_to = int_value("end_to") process_status = list_value("process_status") initiated_by_me = bool_value("initiated_by_me") + has_terminal_status = bool_value("has_terminal_status") with_tasks_completed_by_me = bool_value("with_tasks_completed_by_me") - with_tasks_completed_by_my_group = bool_value( - "with_tasks_completed_by_my_group" - ) + with_tasks_assigned_to_my_group = bool_value("with_tasks_assigned_to_my_group") + with_relation_to_me = bool_value("with_relation_to_me") report_filter = ProcessInstanceReportFilter( process_model_identifier, + user_group_identifier, start_from, start_to, end_from, end_to, process_status, initiated_by_me, + has_terminal_status, with_tasks_completed_by_me, - with_tasks_completed_by_my_group, + with_tasks_assigned_to_my_group, + with_relation_to_me, ) return report_filter @@ -194,20 +227,25 @@ class ProcessInstanceReportService: cls, process_instance_report: ProcessInstanceReportModel, process_model_identifier: Optional[str] = None, + user_group_identifier: Optional[str] = None, start_from: Optional[int] = None, start_to: 
Optional[int] = None, end_from: Optional[int] = None, end_to: Optional[int] = None, process_status: Optional[str] = None, initiated_by_me: Optional[bool] = None, + has_terminal_status: Optional[bool] = None, with_tasks_completed_by_me: Optional[bool] = None, - with_tasks_completed_by_my_group: Optional[bool] = None, + with_tasks_assigned_to_my_group: Optional[bool] = None, + with_relation_to_me: Optional[bool] = None, ) -> ProcessInstanceReportFilter: """Filter_from_metadata_with_overrides.""" report_filter = cls.filter_from_metadata(process_instance_report) if process_model_identifier is not None: report_filter.process_model_identifier = process_model_identifier + if user_group_identifier is not None: + report_filter.user_group_identifier = user_group_identifier if start_from is not None: report_filter.start_from = start_from if start_to is not None: @@ -220,12 +258,16 @@ class ProcessInstanceReportService: report_filter.process_status = process_status.split(",") if initiated_by_me is not None: report_filter.initiated_by_me = initiated_by_me + if has_terminal_status is not None: + report_filter.has_terminal_status = has_terminal_status if with_tasks_completed_by_me is not None: report_filter.with_tasks_completed_by_me = with_tasks_completed_by_me - if with_tasks_completed_by_my_group is not None: - report_filter.with_tasks_completed_by_my_group = ( - with_tasks_completed_by_my_group + if with_tasks_assigned_to_my_group is not None: + report_filter.with_tasks_assigned_to_my_group = ( + with_tasks_assigned_to_my_group ) + if with_relation_to_me is not None: + report_filter.with_relation_to_me = with_relation_to_me return report_filter @@ -268,3 +310,203 @@ class ProcessInstanceReportService: {"Header": "Username", "accessor": "username", "filterable": False}, {"Header": "Status", "accessor": "status", "filterable": False}, ] + + @classmethod + def run_process_instance_report( + cls, + report_filter: ProcessInstanceReportFilter, + process_instance_report: ProcessInstanceReportModel, + user: UserModel, + page: int = 1, + per_page: int = 100, + ) -> dict: + """Run_process_instance_report.""" + process_instance_query = ProcessInstanceModel.query + # Always join that hot user table for good performance at serialization time. + process_instance_query = process_instance_query.options( + selectinload(ProcessInstanceModel.process_initiator) + ) + + if report_filter.process_model_identifier is not None: + process_model = ProcessModelService.get_process_model( + f"{report_filter.process_model_identifier}", + ) + + process_instance_query = process_instance_query.filter_by( + process_model_identifier=process_model.id + ) + + # this can never happen. obviously the class has the columns it defines. this is just to appease mypy. 
+ if ( + ProcessInstanceModel.start_in_seconds is None + or ProcessInstanceModel.end_in_seconds is None + ): + raise ( + ApiError( + error_code="unexpected_condition", + message="Something went very wrong", + status_code=500, + ) + ) + + if report_filter.start_from is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.start_in_seconds >= report_filter.start_from + ) + if report_filter.start_to is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.start_in_seconds <= report_filter.start_to + ) + if report_filter.end_from is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.end_in_seconds >= report_filter.end_from + ) + if report_filter.end_to is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.end_in_seconds <= report_filter.end_to + ) + if report_filter.process_status is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore + ) + + if report_filter.initiated_by_me is True: + process_instance_query = process_instance_query.filter_by( + process_initiator=user + ) + + if report_filter.has_terminal_status is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(ProcessInstanceModel.terminal_statuses()) # type: ignore + ) + + if report_filter.with_relation_to_me is True: + process_instance_query = process_instance_query.outerjoin( + HumanTaskModel + ).outerjoin( + HumanTaskUserModel, + and_( + HumanTaskModel.id == HumanTaskUserModel.human_task_id, + HumanTaskUserModel.user_id == user.id, + ), + ) + process_instance_query = process_instance_query.filter( + or_( + HumanTaskUserModel.id.is_not(None), + ProcessInstanceModel.process_initiator_id == user.id, + ) + ) + + if report_filter.with_tasks_completed_by_me is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.process_initiator_id != user.id + ) + process_instance_query = process_instance_query.join( + HumanTaskModel, + and_( + HumanTaskModel.process_instance_id == ProcessInstanceModel.id, + HumanTaskModel.completed_by_user_id == user.id, + ), + ) + + if report_filter.with_tasks_assigned_to_my_group is True: + if report_filter.user_group_identifier: + process_instance_query = process_instance_query.join( + GroupModel, + GroupModel.identifier == report_filter.user_group_identifier, + ) + else: + process_instance_query = process_instance_query.join(HumanTaskModel) + process_instance_query = process_instance_query.join( + GroupModel, + GroupModel.id == HumanTaskModel.lane_assignment_id, + ) + process_instance_query = process_instance_query.join( + UserGroupAssignmentModel, + UserGroupAssignmentModel.group_id == GroupModel.id, + ) + process_instance_query = process_instance_query.filter( + UserGroupAssignmentModel.user_id == user.id + ) + + instance_metadata_aliases = {} + stock_columns = ProcessInstanceReportService.get_column_names_for_model( + ProcessInstanceModel + ) + for column in process_instance_report.report_metadata["columns"]: + if column["accessor"] in stock_columns: + continue + instance_metadata_alias = aliased(ProcessInstanceMetadataModel) + instance_metadata_aliases[column["accessor"]] = instance_metadata_alias + + filter_for_column = None + if "filter_by" in process_instance_report.report_metadata: + filter_for_column = next( + ( + f + for f in process_instance_report.report_metadata["filter_by"] + if 
f["field_name"] == column["accessor"] + ), + None, + ) + isouter = True + conditions = [ + ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, + instance_metadata_alias.key == column["accessor"], + ] + if filter_for_column: + isouter = False + conditions.append( + instance_metadata_alias.value == filter_for_column["field_value"] + ) + process_instance_query = process_instance_query.join( + instance_metadata_alias, and_(*conditions), isouter=isouter + ).add_columns( + func.max(instance_metadata_alias.value).label(column["accessor"]) + ) + + order_by_query_array = [] + order_by_array = process_instance_report.report_metadata["order_by"] + if len(order_by_array) < 1: + order_by_array = ProcessInstanceReportModel.default_order_by() + for order_by_option in order_by_array: + attribute = re.sub("^-", "", order_by_option) + if attribute in stock_columns: + if order_by_option.startswith("-"): + order_by_query_array.append( + getattr(ProcessInstanceModel, attribute).desc() + ) + else: + order_by_query_array.append( + getattr(ProcessInstanceModel, attribute).asc() + ) + elif attribute in instance_metadata_aliases: + if order_by_option.startswith("-"): + order_by_query_array.append( + func.max(instance_metadata_aliases[attribute].value).desc() + ) + else: + order_by_query_array.append( + func.max(instance_metadata_aliases[attribute].value).asc() + ) + # return process_instance_query + process_instances = ( + process_instance_query.group_by(ProcessInstanceModel.id) + .add_columns(ProcessInstanceModel.id) + .order_by(*order_by_query_array) + .paginate(page=page, per_page=per_page, error_out=False) + ) + results = ProcessInstanceReportService.add_metadata_columns_to_process_instance( + process_instances.items, process_instance_report.report_metadata["columns"] + ) + response_json = { + "report": process_instance_report, + "results": results, + "filters": report_filter.to_dict(), + "pagination": { + "count": len(results), + "total": process_instances.total, + "pages": process_instances.pages, + }, + } + return response_json diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 5b2781a20..e933eda91 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -8,7 +8,7 @@ from flask_bpmn.api.api_error import ApiError from flask_bpmn.models.db import db from SpiffWorkflow.task import Task as SpiffTask # type: ignore -from spiffworkflow_backend.models.active_task import ActiveTaskModel +from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.process_instance import ProcessInstanceApi from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus @@ -196,7 +196,7 @@ class ProcessInstanceService: spiff_task: SpiffTask, data: dict[str, Any], user: UserModel, - active_task: ActiveTaskModel, + human_task: HumanTaskModel, ) -> None: """All the things that need to happen when we complete a form. @@ -210,7 +210,7 @@ class ProcessInstanceService: dot_dct = ProcessInstanceService.create_dot_dict(data) spiff_task.update_data(dot_dct) # ProcessInstanceService.post_process_form(spiff_task) # some properties may update the data store. 
- processor.complete_task(spiff_task, active_task) + processor.complete_task(spiff_task, human_task, user=user) processor.do_engine_steps(save=True) @staticmethod diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py index 8003c9e80..a3d83e6cc 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py @@ -7,9 +7,9 @@ from flask import g from flask_bpmn.api.api_error import ApiError from flask_bpmn.models.db import db -from spiffworkflow_backend.models.active_task import ActiveTaskModel -from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel +from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel @@ -173,15 +173,15 @@ class UserService: return None @classmethod - def add_user_to_active_tasks_if_appropriate(cls, user: UserModel) -> None: - """Add_user_to_active_tasks_if_appropriate.""" + def add_user_to_human_tasks_if_appropriate(cls, user: UserModel) -> None: + """Add_user_to_human_tasks_if_appropriate.""" group_ids = [g.id for g in user.groups] - active_tasks = ActiveTaskModel.query.filter( - ActiveTaskModel.lane_assignment_id.in_(group_ids) # type: ignore + human_tasks = HumanTaskModel.query.filter( + HumanTaskModel.lane_assignment_id.in_(group_ids) # type: ignore ).all() - for active_task in active_tasks: - active_task_user = ActiveTaskUserModel( - user_id=user.id, active_task_id=active_task.id + for human_task in human_tasks: + human_task_user = HumanTaskUserModel( + user_id=user.id, human_task_id=human_task.id ) - db.session.add(active_task_user) + db.session.add(human_task_user) db.session.commit() diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py index 1084cc6d6..4310fba51 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py @@ -243,7 +243,7 @@ class BaseTest: return file @staticmethod - def create_process_instance_from_process_model_id( + def create_process_instance_from_process_model_id_with_api( client: FlaskClient, test_process_model_id: str, headers: Dict[str, str], diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index f9dd44522..d27bbdc7c 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -45,7 +45,7 @@ class TestLoggingService(BaseTest): user=with_super_admin_user, ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert response.json is not None diff --git 
a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py index 3983f9be8..90b5af88d 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py @@ -38,7 +38,7 @@ class TestNestedGroups(BaseTest): bpmn_file_name=bpmn_file_name, bpmn_file_location=bpmn_file_location, ) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, self.logged_in_headers(with_super_admin_user), @@ -99,7 +99,7 @@ class TestNestedGroups(BaseTest): bpmn_file_name=bpmn_file_name, bpmn_file_location=bpmn_file_location, ) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, self.logged_in_headers(with_super_admin_user), diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 3bc21456e..adc21c29f 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -15,8 +15,8 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, ) -from spiffworkflow_backend.models.active_task import ActiveTaskModel from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.process_group import ProcessGroup from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus @@ -284,7 +284,7 @@ class TestProcessApi(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) # create an instance from a model - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) @@ -1072,7 +1072,7 @@ class TestProcessApi(BaseTest): """Test_process_instance_create.""" test_process_model_id = "runs_without_input/sample" headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, test_process_model_id, headers ) assert response.json is not None @@ -1102,7 +1102,7 @@ class TestProcessApi(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert response.json is not None @@ -1144,7 +1144,7 @@ class TestProcessApi(BaseTest): self.modify_process_identifier_for_path_param(process_model_identifier) ) headers = self.logged_in_headers(with_super_admin_user) - create_response = self.create_process_instance_from_process_model_id( + create_response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert 
create_response.json is not None @@ -1191,7 +1191,7 @@ class TestProcessApi(BaseTest): self.modify_process_identifier_for_path_param(process_model_identifier) ) headers = self.logged_in_headers(with_super_admin_user) - create_response = self.create_process_instance_from_process_model_id( + create_response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert create_response.json is not None @@ -1299,7 +1299,7 @@ class TestProcessApi(BaseTest): "andThis": "another_item_non_key", } } - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, self.logged_in_headers(with_super_admin_user), @@ -1359,7 +1359,7 @@ class TestProcessApi(BaseTest): bpmn_file_location=bpmn_file_location, ) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, self.logged_in_headers(with_super_admin_user), @@ -1375,7 +1375,7 @@ class TestProcessApi(BaseTest): assert response.json is not None response = client.post( - f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/terminate", + f"/v1.0/process-instance-terminate/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 @@ -1396,20 +1396,18 @@ class TestProcessApi(BaseTest): ) -> None: """Test_process_instance_delete.""" process_group_id = "my_process_group" - process_model_id = "user_task" - bpmn_file_name = "user_task.bpmn" - bpmn_file_location = "user_task" + process_model_id = "sample" + bpmn_file_location = "sample" process_model_identifier = self.create_group_and_model_with_bpmn( client, with_super_admin_user, process_group_id=process_group_id, process_model_id=process_model_id, - bpmn_file_name=bpmn_file_name, bpmn_file_location=bpmn_file_location, ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert response.json is not None @@ -1420,11 +1418,13 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.json is not None + assert response.status_code == 200 delete_response = client.delete( f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", headers=self.logged_in_headers(with_super_admin_user), ) + assert delete_response.json["ok"] is True assert delete_response.status_code == 200 def test_task_show( @@ -1448,7 +1448,7 @@ class TestProcessApi(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert response.json is not None @@ -1462,15 +1462,15 @@ class TestProcessApi(BaseTest): assert response.json is not None assert response.json["next_task"] is not None - active_tasks = ( - db.session.query(ActiveTaskModel) - .filter(ActiveTaskModel.process_instance_id == process_instance_id) + human_tasks = ( + db.session.query(HumanTaskModel) + 
.filter(HumanTaskModel.process_instance_id == process_instance_id) .all() ) - assert len(active_tasks) == 1 - active_task = active_tasks[0] + assert len(human_tasks) == 1 + human_task = human_tasks[0] response = client.get( - f"/v1.0/tasks/{process_instance_id}/{active_task.task_id}", + f"/v1.0/tasks/{process_instance_id}/{human_task.task_id}", headers=self.logged_in_headers(with_super_admin_user), ) assert response.json is not None @@ -1499,7 +1499,7 @@ class TestProcessApi(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) - self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) @@ -1546,19 +1546,19 @@ class TestProcessApi(BaseTest): bpmn_file_location=bpmn_file_location, ) headers = self.logged_in_headers(with_super_admin_user) - self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) - self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) - self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) - self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) - self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) @@ -1872,7 +1872,7 @@ class TestProcessApi(BaseTest): ) -> Any: """Setup_testing_instance.""" headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_id, headers ) process_instance = response.json @@ -2195,7 +2195,7 @@ class TestProcessApi(BaseTest): # process_group_id="finance", # ) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, # process_model.process_group_id, process_model_identifier, @@ -2404,7 +2404,7 @@ class TestProcessApi(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert response.json is not None @@ -2421,7 +2421,7 @@ class TestProcessApi(BaseTest): assert process_instance.status == "user_input_required" client.post( - f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/suspend", + f"/v1.0/process-instance-suspend/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", headers=self.logged_in_headers(with_super_admin_user), ) process_instance = ProcessInstanceService().get_process_instance( @@ -2429,15 +2429,25 @@ class TestProcessApi(BaseTest): ) assert process_instance.status == "suspended" - # TODO: Why can I run a suspended process instance? 
response = client.post( f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + assert process_instance.status == "suspended" + assert response.status_code == 400 - # task = response.json['next_task'] - - print("test_process_instance_suspend") + response = client.post( + f"/v1.0/process-instance-resume/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 200 + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + assert process_instance.status == "waiting" def test_script_unit_test_run( self, diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py index f1834ab3a..8f3864fe8 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py @@ -68,9 +68,9 @@ class TestGetLocaltime(BaseTest): processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - active_task = process_instance.active_tasks[0] + human_task = process_instance.human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) ProcessInstanceService.complete_form_task( @@ -78,12 +78,12 @@ class TestGetLocaltime(BaseTest): spiff_task, {"timezone": "US/Pacific"}, initiator_user, - active_task, + human_task, ) - active_task = process_instance.active_tasks[0] + human_task = process_instance.human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) assert spiff_task diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py index f0fe1fff0..f85692d64 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py @@ -90,14 +90,14 @@ class TestAuthorizationService(BaseTest): users["testuser2"], "read", "/v1.0/process-groups/" ) - def test_user_can_be_added_to_active_task_on_first_login( + def test_user_can_be_added_to_human_task_on_first_login( self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - """Test_user_can_be_added_to_active_task_on_first_login.""" + """Test_user_can_be_added_to_human_task_on_first_login.""" initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None # to ensure there is a user that can be assigned to the task @@ -121,21 +121,21 @@ class TestAuthorizationService(BaseTest): ) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - active_task = process_instance.active_tasks[0] + human_task = process_instance.human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( - 
active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) - active_task = process_instance.active_tasks[0] + human_task = process_instance.human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) finance_user = AuthorizationService.create_user_from_sign_in( {"username": "testuser2", "sub": "testuser2", "iss": "https://test.stuff", "email": "testuser2"} ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user, active_task + processor, spiff_task, {}, finance_user, human_task ) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py index 80b052544..59a0fee8d 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py @@ -37,7 +37,7 @@ class TestDotNotation(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) process_instance_id = response.json["id"] @@ -47,7 +47,7 @@ class TestDotNotation(BaseTest): processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - active_task = process_instance.active_tasks[0] + human_task = process_instance.human_tasks[0] user_task = processor.get_ready_user_tasks()[0] form_data = { @@ -58,7 +58,7 @@ class TestDotNotation(BaseTest): "invoice.dueDate": "09/30/2022", } ProcessInstanceService.complete_form_task( - processor, user_task, form_data, with_super_admin_user, active_task + processor, user_task, form_data, with_super_admin_user, human_task ) expected = { diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 3e0107957..1a96ca882 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -49,14 +49,14 @@ class TestProcessInstanceProcessor(BaseTest): == "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants." 
) - def test_sets_permission_correctly_on_active_task( + def test_sets_permission_correctly_on_human_task( self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - """Test_sets_permission_correctly_on_active_task.""" + """Test_sets_permission_correctly_on_human_task.""" self.create_process_group( client, with_super_admin_user, "test_group", "test_group" ) @@ -80,63 +80,63 @@ class TestProcessInstanceProcessor(BaseTest): processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 1 - assert active_task.potential_owners[0] == initiator_user + assert len(process_instance.human_tasks) == 1 + human_task = process_instance.human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == initiator_user spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user, active_task + processor, spiff_task, {}, finance_user, human_task ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id == finance_group.id - assert len(active_task.potential_owners) == 1 - assert active_task.potential_owners[0] == finance_user + assert len(process_instance.human_tasks) == 1 + human_task = process_instance.human_tasks[0] + assert human_task.lane_assignment_id == finance_group.id + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == finance_user spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user, active_task + processor, spiff_task, {}, finance_user, human_task ) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 1 - assert active_task.potential_owners[0] == initiator_user + assert len(process_instance.human_tasks) == 1 + human_task = process_instance.human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == initiator_user spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) assert process_instance.status 
== ProcessInstanceStatus.complete.value - def test_sets_permission_correctly_on_active_task_when_using_dict( + def test_sets_permission_correctly_on_human_task_when_using_dict( self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - """Test_sets_permission_correctly_on_active_task_when_using_dict.""" + """Test_sets_permission_correctly_on_human_task_when_using_dict.""" self.create_process_group( client, with_super_admin_user, "test_group", "test_group" ) @@ -163,94 +163,97 @@ class TestProcessInstanceProcessor(BaseTest): processor.do_engine_steps(save=True) processor.save() - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 1 - assert active_task.potential_owners[0] == initiator_user + assert len(process_instance.human_tasks) == 1 + human_task = process_instance.human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == initiator_user spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user_three, active_task + processor, spiff_task, {}, finance_user_three, human_task ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) + assert human_task.completed_by_user_id == initiator_user.id - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 2 - assert active_task.potential_owners == [finance_user_three, finance_user_four] + assert len(process_instance.human_tasks) == 1 + human_task = process_instance.human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) == 2 + assert human_task.potential_owners == [finance_user_three, finance_user_four] spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) g.user = finance_user_three ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user_three, active_task + processor, spiff_task, {}, finance_user_three, human_task ) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 1 - assert active_task.potential_owners[0] == finance_user_four + assert human_task.completed_by_user_id == finance_user_three.id + assert len(process_instance.human_tasks) == 1 + human_task = process_instance.human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == finance_user_four spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, 
processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user_four, active_task + processor, spiff_task, {}, finance_user_four, human_task ) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 1 - assert active_task.potential_owners[0] == initiator_user + assert human_task.completed_by_user_id == finance_user_four.id + assert len(process_instance.human_tasks) == 1 + human_task = process_instance.human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == initiator_user spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] + assert len(process_instance.human_tasks) == 1 + human_task = process_instance.human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, testadmin1, active_task + processor, spiff_task, {}, testadmin1, human_task ) assert process_instance.status == ProcessInstanceStatus.complete.value - def test_does_not_recreate_active_tasks_on_multiple_saves( + def test_does_not_recreate_human_tasks_on_multiple_saves( self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - """Test_sets_permission_correctly_on_active_task_when_using_dict.""" + """Test_does_not_recreate_human_tasks_on_multiple_saves.""" self.create_process_group( client, with_super_admin_user, "test_group", "test_group" ) @@ -273,11 +276,11 @@ class TestProcessInstanceProcessor(BaseTest): ) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - assert len(process_instance.active_tasks) == 1 - initial_active_task_id = process_instance.active_tasks[0].id + assert len(process_instance.human_tasks) == 1 + initial_human_task_id = process_instance.human_tasks[0].id - # save again to ensure we go attempt to process the active tasks again + # save again to ensure we go attempt to process the human tasks again processor.save() - assert len(process_instance.active_tasks) == 1 - assert initial_active_task_id == process_instance.active_tasks[0].id + assert len(process_instance.human_tasks) == 1 + assert initial_human_task_id == process_instance.human_tasks[0].id diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py 
b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py index 98412faa3..75ad3f28e 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py @@ -3,8 +3,12 @@ from typing import Optional from flask import Flask from flask.testing import FlaskClient +from flask_bpmn.models.db import db from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -15,6 +19,7 @@ from spiffworkflow_backend.services.process_instance_report_service import ( from spiffworkflow_backend.services.process_instance_report_service import ( ProcessInstanceReportService, ) +from spiffworkflow_backend.services.user_service import UserService class TestProcessInstanceReportFilter(BaseTest): @@ -122,13 +127,13 @@ class TestProcessInstanceReportService(BaseTest): report_metadata=report_metadata, ) return ProcessInstanceReportService.filter_from_metadata_with_overrides( - report, - process_model_identifier, - start_from, - start_to, - end_from, - end_to, - process_status, + process_instance_report=report, + process_model_identifier=process_model_identifier, + start_from=start_from, + start_to=start_to, + end_from=end_from, + end_to=end_to, + process_status=process_status, ) def _filter_by_dict_from_metadata(self, report_metadata: dict) -> dict[str, str]: @@ -743,3 +748,383 @@ class TestProcessInstanceReportService(BaseTest): assert report_filter.end_from is None assert report_filter.end_to is None assert report_filter.process_status == ["sue"] + + def test_can_filter_by_completed_instances_initiated_by_me( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_can_filter_by_completed_instances_initiated_by_me.""" + process_model_id = "runs_without_input/sample" + bpmn_file_location = "sample" + process_model = load_test_spec( + process_model_id, + process_model_source_directory=bpmn_file_location, + ) + user_one = self.find_or_create_user(username="user_one") + user_two = self.find_or_create_user(username="user_two") + + # Several processes to ensure they do not return in the result + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_one + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + + process_instance_report = ProcessInstanceReportService.report_with_identifier( + user=user_one, + report_identifier="system_report_completed_instances_initiated_by_me", + ) + report_filter = ( + ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model.id, + ) + ) + response_json = 
ProcessInstanceReportService.run_process_instance_report( + report_filter=report_filter, + process_instance_report=process_instance_report, + user=user_one, + ) + + assert len(response_json["results"]) == 2 + assert response_json["results"][0]["process_initiator_id"] == user_one.id + assert response_json["results"][1]["process_initiator_id"] == user_one.id + assert response_json["results"][0]["status"] == "complete" + assert response_json["results"][1]["status"] == "complete" + + def test_can_filter_by_completed_instances_with_tasks_completed_by_me( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_can_filter_by_completed_instances_with_tasks_completed_by_me.""" + process_model_id = "runs_without_input/sample" + bpmn_file_location = "sample" + process_model = load_test_spec( + process_model_id, + process_model_source_directory=bpmn_file_location, + ) + user_one = self.find_or_create_user(username="user_one") + user_two = self.find_or_create_user(username="user_two") + + # Several processes to ensure they do not return in the result + process_instance_created_by_user_one_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + process_instance_created_by_user_one_three = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_one + ) + ) + process_instance_created_by_user_two_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_two + ) + + human_task_for_user_one_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + completed_by_user_id=user_one.id, + ) + human_task_for_user_one_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + completed_by_user_id=user_one.id, + ) + human_task_for_user_one_three = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_three.id, + completed_by_user_id=user_one.id, + ) + human_task_for_user_two_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + completed_by_user_id=user_two.id, + ) + human_task_for_user_two_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + completed_by_user_id=user_two.id, + ) + human_task_for_user_two_three = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_three.id, + completed_by_user_id=user_two.id, + ) + db.session.add(human_task_for_user_one_one) + db.session.add(human_task_for_user_one_two) + db.session.add(human_task_for_user_one_three) + db.session.add(human_task_for_user_two_one) + db.session.add(human_task_for_user_two_two) + db.session.add(human_task_for_user_two_three) + db.session.commit() + + process_instance_report = ProcessInstanceReportService.report_with_identifier( + user=user_one, + report_identifier="system_report_completed_instances_with_tasks_completed_by_me", + ) + report_filter = ( + ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + 
process_model_identifier=process_model.id, + ) + ) + response_json = ProcessInstanceReportService.run_process_instance_report( + report_filter=report_filter, + process_instance_report=process_instance_report, + user=user_one, + ) + + assert len(response_json["results"]) == 1 + assert response_json["results"][0]["process_initiator_id"] == user_two.id + assert ( + response_json["results"][0]["id"] + == process_instance_created_by_user_two_one.id + ) + assert response_json["results"][0]["status"] == "complete" + + def test_can_filter_by_completed_instances_with_tasks_completed_by_my_groups( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_can_filter_by_completed_instances_with_tasks_completed_by_my_groups.""" + process_model_id = "runs_without_input/sample" + bpmn_file_location = "sample" + process_model = load_test_spec( + process_model_id, + process_model_source_directory=bpmn_file_location, + ) + user_group_one = GroupModel(identifier="group_one") + user_group_two = GroupModel(identifier="group_two") + db.session.add(user_group_one) + db.session.add(user_group_two) + db.session.commit() + + user_one = self.find_or_create_user(username="user_one") + user_two = self.find_or_create_user(username="user_two") + user_three = self.find_or_create_user(username="user_three") + UserService.add_user_to_group(user_one, user_group_one) + UserService.add_user_to_group(user_two, user_group_one) + UserService.add_user_to_group(user_three, user_group_two) + + # Several processes to ensure they do not return in the result + process_instance_created_by_user_one_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + process_instance_created_by_user_one_three = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_one + ) + ) + process_instance_created_by_user_two_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_two + ) + + human_task_for_user_group_one_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + lane_assignment_id=user_group_one.id, + ) + human_task_for_user_group_one_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_three.id, + lane_assignment_id=user_group_one.id, + ) + human_task_for_user_group_one_three = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + lane_assignment_id=user_group_one.id, + ) + human_task_for_user_group_two_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + lane_assignment_id=user_group_two.id, + ) + human_task_for_user_group_two_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + lane_assignment_id=user_group_two.id, + ) + db.session.add(human_task_for_user_group_one_one) + db.session.add(human_task_for_user_group_one_two) + db.session.add(human_task_for_user_group_one_three) + db.session.add(human_task_for_user_group_two_one) + db.session.add(human_task_for_user_group_two_two) + 
db.session.commit() + + process_instance_report = ProcessInstanceReportService.report_with_identifier( + user=user_one, + report_identifier="system_report_completed_instances_with_tasks_completed_by_my_groups", + ) + report_filter = ( + ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model.id, + ) + ) + response_json = ProcessInstanceReportService.run_process_instance_report( + report_filter=report_filter, + process_instance_report=process_instance_report, + user=user_one, + ) + + assert len(response_json["results"]) == 2 + assert response_json["results"][0]["process_initiator_id"] == user_two.id + assert ( + response_json["results"][0]["id"] + == process_instance_created_by_user_two_one.id + ) + assert response_json["results"][0]["status"] == "complete" + assert response_json["results"][1]["process_initiator_id"] == user_one.id + assert ( + response_json["results"][1]["id"] + == process_instance_created_by_user_one_one.id + ) + assert response_json["results"][1]["status"] == "complete" + + def test_can_filter_by_with_relation_to_me( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_can_filter_by_with_relation_to_me.""" + process_model_id = "runs_without_input/sample" + bpmn_file_location = "sample" + process_model = load_test_spec( + process_model_id, + process_model_source_directory=bpmn_file_location, + ) + user_group_one = GroupModel(identifier="group_one") + user_group_two = GroupModel(identifier="group_two") + db.session.add(user_group_one) + db.session.add(user_group_two) + db.session.commit() + + user_one = self.find_or_create_user(username="user_one") + user_two = self.find_or_create_user(username="user_two") + user_three = self.find_or_create_user(username="user_three") + UserService.add_user_to_group(user_one, user_group_one) + UserService.add_user_to_group(user_two, user_group_one) + UserService.add_user_to_group(user_three, user_group_two) + + # Several processes to ensure they do not return in the result + process_instance_created_by_user_one_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + ) + process_instance_created_by_user_one_two = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + ) + process_instance_created_by_user_one_three = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_one + ) + ) + process_instance_created_by_user_two_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_two + ) + + human_task_for_user_group_one_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + lane_assignment_id=user_group_one.id, + ) + human_task_for_user_group_one_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_three.id, + lane_assignment_id=user_group_one.id, + ) + human_task_for_user_group_one_three = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + lane_assignment_id=user_group_one.id, + ) + 
human_task_for_user_group_two_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + lane_assignment_id=user_group_two.id, + ) + human_task_for_user_group_two_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + lane_assignment_id=user_group_two.id, + ) + db.session.add(human_task_for_user_group_one_one) + db.session.add(human_task_for_user_group_one_two) + db.session.add(human_task_for_user_group_one_three) + db.session.add(human_task_for_user_group_two_one) + db.session.add(human_task_for_user_group_two_two) + db.session.commit() + + UserService.add_user_to_human_tasks_if_appropriate(user_one) + + process_instance_report = ProcessInstanceReportService.report_with_identifier( + user=user_one + ) + report_filter = ( + ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model.id, + with_relation_to_me=True, + ) + ) + response_json = ProcessInstanceReportService.run_process_instance_report( + report_filter=report_filter, + process_instance_report=process_instance_report, + user=user_one, + ) + + assert len(response_json["results"]) == 4 + process_instance_ids_in_results = [r["id"] for r in response_json["results"]] + assert ( + process_instance_created_by_user_one_one.id + in process_instance_ids_in_results + ) + assert ( + process_instance_created_by_user_one_two.id + in process_instance_ids_in_results + ) + assert ( + process_instance_created_by_user_one_three.id + in process_instance_ids_in_results + ) + assert ( + process_instance_created_by_user_two_one.id + in process_instance_ids_in_results + ) diff --git a/spiffworkflow-frontend/src/classes/ProcessInstanceClass.tsx b/spiffworkflow-frontend/src/classes/ProcessInstanceClass.tsx new file mode 100644 index 000000000..d44569cd4 --- /dev/null +++ b/spiffworkflow-frontend/src/classes/ProcessInstanceClass.tsx @@ -0,0 +1,5 @@ +export default class ProcessInstanceClass { + static terminalStatuses() { + return ['complete', 'error', 'terminated']; + } +} diff --git a/spiffworkflow-frontend/src/components/MyCompletedInstances.tsx b/spiffworkflow-frontend/src/components/MyCompletedInstances.tsx index 2d0fe26a7..47042e910 100644 --- a/spiffworkflow-frontend/src/components/MyCompletedInstances.tsx +++ b/spiffworkflow-frontend/src/components/MyCompletedInstances.tsx @@ -8,7 +8,7 @@ export default function MyCompletedInstances() { filtersEnabled={false} paginationQueryParamPrefix={paginationQueryParamPrefix} perPageOptions={[2, 5, 25]} - reportIdentifier="system_report_instances_initiated_by_me" + reportIdentifier="system_report_completed_instances_initiated_by_me" showReports={false} /> ); diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 06f7793c4..d961627ab 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -80,6 +80,7 @@ type OwnProps = { paginationClassName?: string; autoReload?: boolean; additionalParams?: string; + variant?: string; }; interface dateParameters { @@ -97,7 +98,12 @@ export default function ProcessInstanceListTable({ textToShowIfEmpty, paginationClassName, autoReload = false, + variant = 'for-me', }: OwnProps) { + let apiPath = '/process-instances/for-me'; + if (variant === 'all') { + apiPath = '/process-instances'; + } const params = useParams(); const 
[searchParams] = useSearchParams(); const navigate = useNavigate(); @@ -126,6 +132,11 @@ export default function ProcessInstanceListTable({ const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const processInstancePathPrefix = + variant === 'all' + ? '/admin/process-instances' + : '/admin/process-instances/for-me'; + const [processStatusAllOptions, setProcessStatusAllOptions] = useState( [] ); @@ -260,7 +271,7 @@ export default function ProcessInstanceListTable({ } HttpService.makeCallToBackend({ - path: `/process-instances?${queryParamString}`, + path: `${apiPath}?${queryParamString}`, successCallback: setProcessInstancesFromResult, }); } @@ -327,6 +338,7 @@ export default function ProcessInstanceListTable({ perPageOptions, reportIdentifier, additionalParams, + apiPath, ]); // This sets the filter data using the saved reports returned from the initial instance_list query. @@ -516,7 +528,7 @@ export default function ProcessInstanceListTable({ setErrorMessage(null); setProcessInstanceReportJustSaved(null); - navigate(`/admin/process-instances?${queryParamString}`); + navigate(`${processInstancePathPrefix}?${queryParamString}`); }; const dateComponent = ( @@ -615,7 +627,7 @@ export default function ProcessInstanceListTable({ setErrorMessage(null); setProcessInstanceReportJustSaved(mode || null); - navigate(`/admin/process-instances${queryParamString}`); + navigate(`${processInstancePathPrefix}${queryParamString}`); }; const reportColumns = () => { @@ -1081,7 +1093,7 @@ export default function ProcessInstanceListTable({ return ( {id} diff --git a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx index 4ba04352b..574eb4e9e 100644 --- a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx +++ b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx @@ -9,13 +9,17 @@ export const useUriListForPermissions = () => { messageInstanceListPath: '/v1.0/messages', processGroupListPath: '/v1.0/process-groups', processGroupShowPath: `/v1.0/process-groups/${params.process_group_id}`, - processInstanceCreatePath: `/v1.0/process-instances/${params.process_model_id}`, processInstanceActionPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}`, + processInstanceCreatePath: `/v1.0/process-instances/${params.process_model_id}`, processInstanceListPath: '/v1.0/process-instances', processInstanceLogListPath: `/v1.0/logs/${params.process_model_id}/${params.process_instance_id}`, processInstanceReportListPath: '/v1.0/process-instances/reports', - processInstanceTaskListPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}/task-info`, + processInstanceResumePath: `/v1.0/process-instance-resume/${params.process_model_id}/${params.process_instance_id}`, + processInstanceSuspendPath: `/v1.0/process-instance-suspend/${params.process_model_id}/${params.process_instance_id}`, processInstanceTaskListDataPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, + processInstanceTaskListPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}/task-info`, + processInstanceTaskListForMePath: `/v1.0/process-instances/for-me/${params.process_model_id}/${params.process_instance_id}/task-info`, + processInstanceTerminatePath: `/v1.0/process-instance-terminate/${params.process_model_id}/${params.process_instance_id}`, processModelCreatePath: `/v1.0/process-models/${params.process_group_id}`, processModelFileCreatePath: 
`/v1.0/process-models/${params.process_model_id}/files`, processModelFileShowPath: `/v1.0/process-models/${params.process_model_id}/files/${params.file_name}`, diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index b0ab6208a..7805249be 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -52,6 +52,10 @@ export interface ProcessInstance { id: number; process_model_identifier: string; process_model_display_name: string; + status: string; + start_in_seconds: number | null; + end_in_seconds: number | null; + bpmn_xml_file_contents?: string; spiff_step?: number; } diff --git a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx index da6cae356..d24c2b6e2 100644 --- a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx +++ b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx @@ -62,21 +62,25 @@ export default function AdminRoutes() { path="process-models/:process_model_id/files/:file_name" element={} /> - } - /> } /> + } + /> + } + /> } + element={} /> } + element={} /> } /> - } /> + } + /> + } + /> + } + /> } /> } /> { const processModelFullIdentifier = getProcessModelFullIdentifierFromSearchParams(searchParams); @@ -33,13 +51,44 @@ export default function ProcessInstanceList() { }; const processInstanceTitleElement = () => { - return

<h1>Process Instances</h1>; + if (variant === 'all') { + return <h1>All Process Instances</h1>; + } + return <h1>My Process Instances</h1>; }; + + let selectedTabIndex = 0; + if (variant === 'all') { + selectedTabIndex = 1; + } return ( <> + + + { + navigate('/admin/process-instances/for-me'); + }} + > + For Me + + + { + navigate('/admin/process-instances/all'); + }} + > + All + + + + +
{processInstanceBreadcrumbElement()} {processInstanceTitleElement()} - + ); } diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 88e5e3bb7..e4e1ffa23 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -45,8 +45,13 @@ import { ProcessInstanceTask, } from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; +import ProcessInstanceClass from '../classes/ProcessInstanceClass'; -export default function ProcessInstanceShow() { +type OwnProps = { + variant: string; +}; + +export default function ProcessInstanceShow({ variant }: OwnProps) { const navigate = useNavigate(); const params = useParams(); const [searchParams] = useSearchParams(); @@ -67,16 +72,21 @@ export default function ProcessInstanceShow() { const modifiedProcessModelId = params.process_model_id; const { targetUris } = useUriListForPermissions(); + const taskListPath = + variant === 'all' + ? targetUris.processInstanceTaskListPath + : targetUris.processInstanceTaskListForMePath; + const permissionRequestData: PermissionsToCheck = { [targetUris.messageInstanceListPath]: ['GET'], - [targetUris.processInstanceTaskListPath]: ['GET'], + [taskListPath]: ['GET'], [targetUris.processInstanceTaskListDataPath]: ['GET', 'PUT'], [targetUris.processInstanceActionPath]: ['DELETE'], [targetUris.processInstanceLogListPath]: ['GET'], [targetUris.processModelShowPath]: ['PUT'], - [`${targetUris.processInstanceActionPath}/suspend`]: ['POST'], - [`${targetUris.processInstanceActionPath}/terminate`]: ['POST'], - [`${targetUris.processInstanceActionPath}/resume`]: ['POST'], + [`${targetUris.processInstanceResumePath}`]: ['POST'], + [`${targetUris.processInstanceSuspendPath}`]: ['POST'], + [`${targetUris.processInstanceTerminatePath}`]: ['POST'], }; const { ability, permissionsLoaded } = usePermissionFetcher( permissionRequestData @@ -98,8 +108,12 @@ export default function ProcessInstanceShow() { if (processIdentifier) { queryParams = `?process_identifier=${processIdentifier}`; } + let apiPath = '/process-instances/for-me'; + if (variant === 'all') { + apiPath = '/process-instances'; + } HttpService.makeCallToBackend({ - path: `/process-instances/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`, + path: `${apiPath}/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`, successCallback: setProcessInstance, }); let taskParams = '?all_tasks=true'; @@ -109,8 +123,8 @@ export default function ProcessInstanceShow() { let taskPath = ''; if (ability.can('GET', targetUris.processInstanceTaskListDataPath)) { taskPath = `${targetUris.processInstanceTaskListDataPath}${taskParams}`; - } else if (ability.can('GET', targetUris.processInstanceTaskListPath)) { - taskPath = `${targetUris.processInstanceTaskListPath}${taskParams}`; + } else if (ability.can('GET', taskListPath)) { + taskPath = `${taskListPath}${taskParams}`; } if (taskPath) { HttpService.makeCallToBackend({ @@ -129,6 +143,8 @@ export default function ProcessInstanceShow() { ability, targetUris, searchParams, + taskListPath, + variant, ]); const deleteProcessInstance = () => { @@ -146,7 +162,7 @@ export default function ProcessInstanceShow() { const terminateProcessInstance = () => { HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceActionPath}/terminate`, + path: `${targetUris.processInstanceTerminatePath}`, successCallback: refreshPage, httpMethod: 
'POST', }); @@ -154,7 +170,7 @@ export default function ProcessInstanceShow() { const suspendProcessInstance = () => { HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceActionPath}/suspend`, + path: `${targetUris.processInstanceSuspendPath}`, successCallback: refreshPage, httpMethod: 'POST', }); @@ -162,7 +178,7 @@ export default function ProcessInstanceShow() { const resumeProcessInstance = () => { HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceActionPath}/resume`, + path: `${targetUris.processInstanceResumePath}`, successCallback: refreshPage, httpMethod: 'POST', }); @@ -183,29 +199,23 @@ export default function ProcessInstanceShow() { return taskIds; }; - const currentSpiffStep = (processInstanceToUse: any) => { - if (typeof params.spiff_step === 'undefined') { - return processInstanceToUse.spiff_step; + const currentSpiffStep = () => { + if (processInstance && typeof params.spiff_step === 'undefined') { + return processInstance.spiff_step || 0; } return Number(params.spiff_step); }; - const showingFirstSpiffStep = (processInstanceToUse: any) => { - return currentSpiffStep(processInstanceToUse) === 1; + const showingFirstSpiffStep = () => { + return currentSpiffStep() === 1; }; - const showingLastSpiffStep = (processInstanceToUse: any) => { - return ( - currentSpiffStep(processInstanceToUse) === processInstanceToUse.spiff_step - ); + const showingLastSpiffStep = () => { + return processInstance && currentSpiffStep() === processInstance.spiff_step; }; - const spiffStepLink = ( - processInstanceToUse: any, - label: any, - distance: number - ) => { + const spiffStepLink = (label: any, distance: number) => { const processIdentifier = searchParams.get('process_identifier'); let queryParams = ''; if (processIdentifier) { @@ -217,32 +227,35 @@ export default function ProcessInstanceShow() { data-qa="process-instance-step-link" to={`/admin/process-instances/${params.process_model_id}/${ params.process_instance_id - }/${currentSpiffStep(processInstanceToUse) + distance}${queryParams}`} + }/${currentSpiffStep() + distance}${queryParams}`} > {label} ); }; - const previousStepLink = (processInstanceToUse: any) => { - if (showingFirstSpiffStep(processInstanceToUse)) { + const previousStepLink = () => { + if (showingFirstSpiffStep()) { return null; } - return spiffStepLink(processInstanceToUse, , -1); + return spiffStepLink(, -1); }; - const nextStepLink = (processInstanceToUse: any) => { - if (showingLastSpiffStep(processInstanceToUse)) { + const nextStepLink = () => { + if (showingLastSpiffStep()) { return null; } - return spiffStepLink(processInstanceToUse, , 1); + return spiffStepLink(, 1); }; - const getInfoTag = (processInstanceToUse: any) => { + const getInfoTag = () => { + if (!processInstance) { + return null; + } const currentEndDate = convertSecondsToFormattedDateTime( - processInstanceToUse.end_in_seconds + processInstance.end_in_seconds || 0 ); let currentEndDateTag; if (currentEndDate) { @@ -253,7 +266,7 @@ export default function ProcessInstanceShow() { {convertSecondsToFormattedDateTime( - processInstanceToUse.end_in_seconds + processInstance.end_in_seconds || 0 ) || 'N/A'} @@ -261,13 +274,13 @@ export default function ProcessInstanceShow() { } let statusIcon = ; - if (processInstanceToUse.status === 'suspended') { + if (processInstance.status === 'suspended') { statusIcon = ; - } else if (processInstanceToUse.status === 'complete') { + } else if (processInstance.status === 'complete') { statusIcon = ; - } else if (processInstanceToUse.status 
=== 'terminated') { + } else if (processInstance.status === 'terminated') { statusIcon = ; - } else if (processInstanceToUse.status === 'error') { + } else if (processInstance.status === 'error') { statusIcon = ; } @@ -279,7 +292,7 @@ export default function ProcessInstanceShow() { {convertSecondsToFormattedDateTime( - processInstanceToUse.start_in_seconds + processInstance.start_in_seconds || 0 )} @@ -290,7 +303,7 @@ export default function ProcessInstanceShow() { - {processInstanceToUse.status} {statusIcon} + {processInstance.status} {statusIcon} @@ -333,11 +346,10 @@ export default function ProcessInstanceShow() { ); }; - const terminateButton = (processInstanceToUse: any) => { + const terminateButton = () => { if ( - ['complete', 'terminated', 'error'].indexOf( - processInstanceToUse.status - ) === -1 + processInstance && + !ProcessInstanceClass.terminalStatuses().includes(processInstance.status) ) { return ( @@ -354,11 +366,12 @@ export default function ProcessInstanceShow() { return
; }; - const suspendButton = (processInstanceToUse: any) => { + const suspendButton = () => { if ( - ['complete', 'terminated', 'error', 'suspended'].indexOf( - processInstanceToUse.status - ) === -1 + processInstance && + !ProcessInstanceClass.terminalStatuses() + .concat(['suspended']) + .includes(processInstance.status) ) { return (