Merge pull request #236 from sartography/feature/home_page_filter_links

Feature/home page filter links
commit 2084befdb0 by jasquat, 2023-05-01 15:30:58 -04:00 (committed by GitHub)
35 changed files with 1375 additions and 2715 deletions

View File

@ -1,108 +0,0 @@
"""Import tickets, for use in script task."""
def main():
"""Use main to avoid global namespace."""
import csv
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
process_model_identifier_ticket = "ticket"
db.session.query(ProcessInstanceModel).filter(
ProcessInstanceModel.process_model_identifier == process_model_identifier_ticket
).delete()
db.session.commit()
"""Print process instance count."""
process_instances = ProcessInstanceModel.query.filter_by(
process_model_identifier=process_model_identifier_ticket
).all()
process_instance_count = len(process_instances)
print(f"process_instance_count: {process_instance_count}")
columns_to_data_key_mappings = {
"Month": "month",
"MS": "milestone",
"Done?": "done",
"#": "notion_id",
"ID": "req_id",
"Dev Days": "dev_days",
"Feature": "feature",
"Feature description": "feature_description",
"Priority": "priority",
}
columns_to_header_index_mappings = {}
user = UserModel.query.first()
with open("tests/files/tickets.csv") as infile:
reader = csv.reader(infile, delimiter=",")
# first row is garbage
next(reader)
header = next(reader)
for column_name in columns_to_data_key_mappings:
columns_to_header_index_mappings[column_name] = header.index(column_name)
id_index = header.index("ID")
priority_index = header.index("Priority")
month_index = header.index("Month")
print(f"header: {header}")
for row in reader:
ticket_identifier = row[id_index]
priority = row[priority_index]
month = row[month_index]
print(f"ticket_identifier: {ticket_identifier}")
print(f"priority: {priority}")
# if there is no month, who cares about it.
if month:
process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
process_model_identifier=process_model_identifier_ticket,
user=user,
process_group_identifier="sartography-admin",
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps()
# processor.save()
for (
column_name,
desired_data_key,
) in columns_to_data_key_mappings.items():
appropriate_index = columns_to_header_index_mappings[column_name]
print(f"appropriate_index: {appropriate_index}")
processor.bpmn_process_instance.data[desired_data_key] = row[appropriate_index]
# you at least need a month, or else this row in the csv is considered garbage
month_value = processor.bpmn_process_instance.data["month"]
if month_value == "" or month_value is None:
db.session.delete(process_instance)
db.session.commit()
continue
processor.save()
process_instance_data = processor.get_data()
print(f"process_instance_data: {process_instance_data}")
ProcessInstanceReportModel.add_fixtures()
print("added report fixtures")
main()
# to avoid serialization issues
del main

View File

@ -23,7 +23,7 @@ from spiffworkflow_backend.services.process_model_service import ProcessModelSer
# We need to call this before importing spiffworkflow_backend
# otherwise typeguard cannot work. hence the noqa: E402
if os.environ.get("RUN_TYPEGUARD") == "true":
from typeguard.importhook import install_import_hook
from typeguard import install_import_hook
install_import_hook(packages="spiffworkflow_backend")
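
Typeguard 3.x exposes install_import_hook at the package root rather than in typeguard.importhook, which is what the one-line change above accounts for. A hedged compatibility sketch for code that needs to run against either major version:

try:
    from typeguard import install_import_hook  # typeguard >= 3
except ImportError:
    from typeguard.importhook import install_import_hook  # typeguard 2.x location

install_import_hook(packages="spiffworkflow_backend")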

View File

@ -0,0 +1,34 @@
"""empty message
Revision ID: 68adb1d504e1
Revises: 664bb2f00694
Create Date: 2023-04-27 12:24:01.771698
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '68adb1d504e1'
down_revision = '664bb2f00694'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('process_instance_report', schema=None) as batch_op:
batch_op.add_column(sa.Column('json_data_hash', sa.String(length=255), nullable=False))
batch_op.create_index(batch_op.f('ix_process_instance_report_json_data_hash'), ['json_data_hash'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('process_instance_report', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_process_instance_report_json_data_hash'))
batch_op.drop_column('json_data_hash')
# ### end Alembic commands ###

View File

@ -3502,19 +3502,23 @@ files = [
[[package]]
name = "typeguard"
version = "2.13.3"
version = "3.0.2"
description = "Run-time type checker for Python"
category = "dev"
optional = false
python-versions = ">=3.5.3"
python-versions = ">=3.7.4"
files = [
{file = "typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"},
{file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"},
{file = "typeguard-3.0.2-py3-none-any.whl", hash = "sha256:bbe993854385284ab42fd5bd3bee6f6556577ce8b50696d6cb956d704f286c8e"},
{file = "typeguard-3.0.2.tar.gz", hash = "sha256:fee5297fdb28f8e9efcb8142b5ee219e02375509cd77ea9d270b5af826358d5a"},
]
[package.dependencies]
importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""}
typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.11\""}
[package.extras]
doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
test = ["mypy", "pytest", "typing-extensions"]
doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
test = ["mypy (>=0.991)", "pytest (>=7)"]
[[package]]
name = "types-click"
@ -3934,4 +3938,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.12"
content-hash = "994c36ab39238500b4fd05bc1ccdd2d729dd5f66749ab77b1028371147bdf753"
content-hash = "53f3340f73de770b4fbebff3fcd396cdf1bc2c082b929ade350f31a9df6c3860"

View File

@ -93,7 +93,7 @@ pytest = "^7.1.2"
coverage = {extras = ["toml"], version = "^6.1"}
safety = "^2.3.1"
mypy = ">=0.961"
typeguard = "^2"
typeguard = "^3"
xdoctest = {extras = ["colors"], version = "^1.0.1"}
sphinx = "^5.0.2"
sphinx-autobuild = ">=2021.3.14"

View File

@ -616,107 +616,18 @@ paths:
description: The page number to return. Defaults to page 1.
schema:
type: integer
- name: start_from
in: query
required: false
description: For filtering - beginning of start window - in seconds since epoch
schema:
type: integer
- name: start_to
in: query
required: false
description: For filtering - end of start window - in seconds since epoch
schema:
type: integer
- name: end_from
in: query
required: false
description: For filtering - beginning of end window - in seconds since epoch
schema:
type: integer
- name: end_to
in: query
required: false
description: For filtering - end of end window - in seconds since epoch
schema:
type: integer
- name: process_status
in: query
required: false
description: For filtering - not_started, user_input_required, waiting, complete, error, or suspended
schema:
type: string
- name: initiated_by_me
in: query
required: false
description: For filtering - show instances initiated by me
schema:
type: boolean
- name: with_tasks_completed_by_me
in: query
required: false
description: For filtering - show instances with tasks completed by me
schema:
type: boolean
- name: with_tasks_completed_by_my_group
in: query
required: false
description: For filtering - show instances with tasks completed by my group
schema:
type: boolean
- name: with_relation_to_me
in: query
required: false
description: For filtering - show instances that have something to do with me
schema:
type: boolean
- name: user_filter
in: query
required: false
description: For filtering - indicates the user has manually entered a query
schema:
type: boolean
- name: report_identifier
in: query
required: false
description: Specifies the identifier of a report to use, if any
schema:
type: string
- name: report_id
in: query
required: false
description: Specifies the identifier of a report to use, if any
schema:
type: integer
- name: user_group_identifier
in: query
required: false
description: The identifier of the group to get the process instances for
schema:
type: string
- name: process_initiator_username
in: query
required: false
description: The username of the process initiator
schema:
type: string
- name: report_columns
in: query
required: false
description: Base64 encoded json of report columns.
schema:
type: string
- name: report_filter_by
in: query
required: false
description: Base64 encoded json of report filter by.
schema:
type: string
get:
post:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list_for_me
summary: Returns a list of process instances that are associated with me.
tags:
- Process Instances
requestBody:
description: Report dictionary to use.
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/ProcessInstanceReport"
responses:
"200":
description: Workflow.
@ -747,107 +658,18 @@ paths:
description: The page number to return. Defaults to page 1.
schema:
type: integer
- name: start_from
in: query
required: false
description: For filtering - beginning of start window - in seconds since epoch
schema:
type: integer
- name: start_to
in: query
required: false
description: For filtering - end of start window - in seconds since epoch
schema:
type: integer
- name: end_from
in: query
required: false
description: For filtering - beginning of end window - in seconds since epoch
schema:
type: integer
- name: end_to
in: query
required: false
description: For filtering - end of end window - in seconds since epoch
schema:
type: integer
- name: process_status
in: query
required: false
description: For filtering - not_started, user_input_required, waiting, complete, error, or suspended
schema:
type: string
- name: initiated_by_me
in: query
required: false
description: For filtering - show instances initiated by me
schema:
type: boolean
- name: with_tasks_completed_by_me
in: query
required: false
description: For filtering - show instances with tasks completed by me
schema:
type: boolean
- name: with_tasks_completed_by_my_group
in: query
required: false
description: For filtering - show instances with tasks completed by my group
schema:
type: boolean
- name: with_relation_to_me
in: query
required: false
description: For filtering - show instances that have something to do with me
schema:
type: boolean
- name: user_filter
in: query
required: false
description: For filtering - indicates the user has manually entered a query
schema:
type: boolean
- name: report_identifier
in: query
required: false
description: Specifies the identifier of a report to use, if any
schema:
type: string
- name: report_id
in: query
required: false
description: Specifies the identifier of a report to use, if any
schema:
type: integer
- name: user_group_identifier
in: query
required: false
description: The identifier of the group to get the process instances for
schema:
type: string
- name: process_initiator_username
in: query
required: false
description: The username of the process initiator
schema:
type: string
- name: report_columns
in: query
required: false
description: Base64 encoded json of report columns.
schema:
type: string
- name: report_filter_by
in: query
required: false
description: Base64 encoded json of report filter by.
schema:
type: string
get:
post:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list
summary: Returns a list of process instances.
tags:
- Process Instances
requestBody:
description: Report dictionary to use.
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/ProcessInstanceReport"
responses:
"200":
description: Workflow.
@ -1268,15 +1090,16 @@ paths:
summary: Returns all process instance reports for process model
tags:
- Process Instances
- Process Instances Reports
responses:
"200":
description: Workflow.
description: Process Instance Report
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Workflow"
$ref: "#/components/schemas/ProcessInstanceReport"
post:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_create
summary: Returns all process instance reports for process model
@ -1313,6 +1136,41 @@ paths:
items:
$ref: "#/components/schemas/Workflow"
/process-instances/report-metadata:
parameters:
- name: report_hash
in: query
required: false
description: The hash of a query that has been searched before.
schema:
type: string
- name: report_id
in: query
required: false
description: The unique id of an existing report.
schema:
type: integer
- name: report_identifier
in: query
required: false
description: Specifies the identifier of a report to use, if any
schema:
type: string
get:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_show
summary: Returns the metadata associated with a given report key. This favors report_hash over report_id and report_identifier.
tags:
- Process Instances
responses:
"200":
description: Workflow.
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Workflow"
/process-instances/reports/{report_id}:
parameters:
- name: report_id
@ -1333,20 +1191,6 @@ paths:
description: The page number to return. Defaults to page 1.
schema:
type: integer
get:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_show
summary: Returns a report of process instances for a given process model
tags:
- Process Instances
responses:
"200":
description: Workflow.
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Workflow"
put:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_update
summary: Updates a process instance report
@ -3063,3 +2907,53 @@ components:
description: The timestamp returned in the log
type: number
example: 123456789.12345
ProcessInstanceReport:
properties:
id:
type: number
nullable: true
identifier:
type: string
nullable: true
name:
type: string
nullable: true
report_metadata:
nullable: false
$ref: "#/components/schemas/ReportMetadata"
ReportMetadata:
properties:
columns:
type: array
nullable: false
items:
$ref: "#/components/schemas/ReportMetadataColumn"
filter_by:
type: array
nullable: false
items:
$ref: "#/components/schemas/FilterValue"
order_by:
type: array
nullable: false
items:
type: string
ReportMetadataColumn:
properties:
Header:
type: string
nullable: false
accessor:
type: string
nullable: false
filterable:
type: boolean
nullable: true
FilterValue:
properties:
field_name:
type: string
nullable: false
field_value:
type: string
nullable: false
operator:
type: string
nullable: true
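
With the list endpoints switched from GET query parameters to POST earlier in this file, the request body is a ProcessInstanceReport whose report_metadata follows the schemas above. A hedged example payload, written as a Python dict; the column and filter values are illustrative:

# Illustrative body for the POST process-instance list endpoints defined earlier in this file.
request_body = {
    "report_metadata": {
        "columns": [{"Header": "id", "accessor": "id", "filterable": False}],
        "filter_by": [
            {"field_name": "process_status", "field_value": "complete", "operator": "equals"}
        ],
        "order_by": ["-start_in_seconds", "-id"],
    }
}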

View File

@ -1,5 +1,13 @@
from __future__ import annotations
import json
from hashlib import sha256
from typing import TypedDict
from flask import current_app
from sqlalchemy.dialects.mysql import insert as mysql_insert
from sqlalchemy.dialects.postgresql import insert as postgres_insert
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
@ -8,6 +16,11 @@ class JsonDataModelNotFoundError(Exception):
pass
class JsonDataDict(TypedDict):
hash: str
data: dict
# delta algorithm <- just to save it for when we want to try to implement it:
# a = {"hey": { "hey2": 2, "hey3": 3, "hey6": 7 }, "hey30": 3, "hey40": 4}
# b = {"hey": { "hey2": 4, "hey5": 3 }, "hey20": 2, "hey30": 3}
@ -42,3 +55,29 @@ class JsonDataModel(SpiffworkflowBaseDBModel):
@classmethod
def find_data_dict_by_hash(cls, hash: str) -> dict:
return cls.find_object_by_hash(hash).data
@classmethod
def insert_or_update_json_data_records(
cls, json_data_hash_to_json_data_dict_mapping: dict[str, JsonDataDict]
) -> None:
list_of_dicts = [*json_data_hash_to_json_data_dict_mapping.values()]
if len(list_of_dicts) > 0:
on_duplicate_key_stmt = None
if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "mysql":
insert_stmt = mysql_insert(JsonDataModel).values(list_of_dicts)
on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(data=insert_stmt.inserted.data)
else:
insert_stmt = postgres_insert(JsonDataModel).values(list_of_dicts)
on_duplicate_key_stmt = insert_stmt.on_conflict_do_nothing(index_elements=["hash"])
db.session.execute(on_duplicate_key_stmt)
@classmethod
def insert_or_update_json_data_dict(cls, json_data_dict: JsonDataDict) -> None:
cls.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict})
@classmethod
def create_and_insert_json_data_from_dict(cls, data: dict) -> str:
json_data_hash = sha256(json.dumps(data, sort_keys=True).encode("utf8")).hexdigest()
cls.insert_or_update_json_data_dict({"hash": json_data_hash, "data": data})
db.session.commit()
return json_data_hash
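
As a point of reference, the json_data_hash produced above is a plain SHA-256 of the canonical (sorted-keys) JSON serialization of the dict, so it can be reproduced without the Flask app or database. A minimal standalone sketch (the dict literal is illustrative):

import json
from hashlib import sha256

report_metadata = {"columns": [], "filter_by": [], "order_by": ["-start_in_seconds", "-id"]}

# Same derivation as create_and_insert_json_data_from_dict above: identical report
# metadata always maps to the same hash, so duplicate reports share one row.
json_data_hash = sha256(json.dumps(report_metadata, sort_keys=True).encode("utf8")).hexdigest()
print(json_data_hash)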

View File

@ -165,6 +165,11 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
def terminal_statuses(cls) -> list[str]:
return ["complete", "error", "terminated"]
@classmethod
def non_terminal_statuses(cls) -> list[str]:
terminal_status_values = cls.terminal_statuses()
return [s for s in ProcessInstanceStatus.list() if s not in terminal_status_values]
@classmethod
def active_statuses(cls) -> list[str]:
return ["user_input_required", "waiting"]

View File

@ -1,28 +1,48 @@
"""Process_instance."""
from __future__ import annotations
import sys
import typing
from dataclasses import dataclass
from typing import Any
from typing import cast
from typing import Optional
from typing import TypedDict
if sys.version_info < (3, 11):
from typing_extensions import TypedDict, NotRequired
else:
from typing import TypedDict, NotRequired
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
ProcessEntityNotFoundError,
)
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
ReportMetadata = dict[str, Any]
class FilterValue(TypedDict):
field_name: str
field_value: str | int | bool
operator: NotRequired[str]
class ReportMetadataColumn(TypedDict):
Header: str
accessor: str
filterable: NotRequired[bool]
class ReportMetadata(TypedDict):
columns: list[ReportMetadataColumn]
filter_by: list[FilterValue]
order_by: list[str]
class Report(TypedDict):
id: int
identifier: str
name: str
report_metadata: ReportMetadata
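
A hedged example of values that satisfy the TypedDicts defined just above; type checking is static only, and the literals are illustrative:

example_metadata: ReportMetadata = {
    "columns": [{"Header": "id", "accessor": "id", "filterable": False}],
    "filter_by": [{"field_name": "initiated_by_me", "field_value": True, "operator": "equals"}],
    "order_by": ["-start_in_seconds", "-id"],
}
example_report: Report = {"id": 0, "identifier": "custom", "name": "custom", "report_metadata": example_metadata}
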
class ProcessInstanceReportAlreadyExistsError(Exception):
@ -68,60 +88,24 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
id: int = db.Column(db.Integer, primary_key=True)
identifier: str = db.Column(db.String(50), nullable=False, index=True)
report_metadata: dict = db.Column(db.JSON)
report_metadata: ReportMetadata = db.Column(db.JSON)
created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore
created_by = relationship("UserModel")
created_at_in_seconds = db.Column(db.Integer)
updated_at_in_seconds = db.Column(db.Integer)
json_data_hash: str = db.Column(db.String(255), nullable=False, index=True)
def get_report_metadata(self) -> ReportMetadata:
rdata_dict = JsonDataModel.find_data_dict_by_hash(self.json_data_hash)
rdata = typing.cast(ReportMetadata, rdata_dict)
return rdata
@classmethod
def default_order_by(cls) -> list[str]:
"""Default_order_by."""
return ["-start_in_seconds", "-id"]
@classmethod
def add_fixtures(cls) -> None:
"""Add_fixtures."""
try:
# process_model = ProcessModelService.get_process_model(
# process_model_id="sartography-admin/ticket"
# )
user = UserModel.query.first()
columns = [
{"Header": "id", "accessor": "id"},
{"Header": "month", "accessor": "month"},
{"Header": "milestone", "accessor": "milestone"},
{"Header": "req_id", "accessor": "req_id"},
{"Header": "feature", "accessor": "feature"},
{"Header": "dev_days", "accessor": "dev_days"},
{"Header": "priority", "accessor": "priority"},
]
json = {"order": "month asc", "columns": columns}
cls.create_report(
identifier="standard",
user=user,
report_metadata=json,
)
cls.create_report(
identifier="for-month",
user=user,
report_metadata=cls.ticket_for_month_report(),
)
cls.create_report(
identifier="for-month-3",
user=user,
report_metadata=cls.ticket_for_month_3_report(),
)
cls.create_report(
identifier="hot-report",
user=user,
report_metadata=cls.process_model_with_form_report_fixture(),
)
except ProcessEntityNotFoundError:
print("Did not find process models so not adding report fixtures for them")
@classmethod
def create_report(
cls,
@ -129,7 +113,6 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
user: UserModel,
report_metadata: ReportMetadata,
) -> ProcessInstanceReportModel:
"""Make_fixture_report."""
process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=identifier,
created_by_id=user.id,
@ -140,170 +123,23 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
f"Process instance report with identifier already exists: {identifier}"
)
report_metadata_dict = typing.cast(typing.Dict[str, Any], report_metadata)
json_data_hash = JsonDataModel.create_and_insert_json_data_from_dict(report_metadata_dict)
process_instance_report = cls(
identifier=identifier,
created_by_id=user.id,
report_metadata=report_metadata,
json_data_hash=json_data_hash,
)
db.session.add(process_instance_report)
db.session.commit()
return process_instance_report # type: ignore
@classmethod
def ticket_for_month_report(cls) -> dict:
"""Ticket_for_month_report."""
return {
"columns": [
{"Header": "id", "accessor": "id"},
{"Header": "month", "accessor": "month"},
{"Header": "milestone", "accessor": "milestone"},
{"Header": "req_id", "accessor": "req_id"},
{"Header": "feature", "accessor": "feature"},
{"Header": "priority", "accessor": "priority"},
],
"order": "month asc",
"filter_by": [
{
"field_name": "month",
"operator": "equals",
"field_value": "{{month}}",
}
],
}
@classmethod
def ticket_for_month_3_report(cls) -> dict:
"""Ticket_for_month_report."""
return {
"columns": [
{"Header": "id", "accessor": "id"},
{"Header": "month", "accessor": "month"},
{"Header": "milestone", "accessor": "milestone"},
{"Header": "req_id", "accessor": "req_id"},
{"Header": "feature", "accessor": "feature"},
{"Header": "dev_days", "accessor": "dev_days"},
{"Header": "priority", "accessor": "priority"},
],
"order": "month asc",
"filter_by": [{"field_name": "month", "operator": "equals", "field_value": "3"}],
}
@classmethod
def process_model_with_form_report_fixture(cls) -> dict:
"""Process_model_with_form_report_fixture."""
return {
"columns": [
{"Header": "id", "accessor": "id"},
{
"Header": "system_generated_number",
"accessor": "system_generated_number",
},
{
"Header": "user_generated_number",
"accessor": "user_generated_number",
},
{"Header": "product", "accessor": "product"},
],
"order": "-id",
}
@classmethod
def create_with_attributes(
cls,
identifier: str,
report_metadata: dict,
user: UserModel,
) -> ProcessInstanceReportModel:
"""Create_with_attributes."""
process_instance_report = cls(
identifier=identifier,
created_by_id=user.id,
report_metadata=report_metadata,
)
db.session.add(process_instance_report)
db.session.commit()
return process_instance_report
def with_substitutions(self, field_value: Any, substitution_variables: dict) -> Any:
"""With_substitutions."""
if substitution_variables is not None:
for key, value in substitution_variables.items():
if isinstance(value, str) or isinstance(value, int):
field_value = str(field_value).replace("{{" + key + "}}", str(value))
return field_value
# modeled after https://github.com/suyash248/sqlalchemy-json-querybuilder
# just supports "equals" operator for now.
# perhaps we will use the database instead of filtering in memory in the future and then we might use this lib directly.
def passes_filter(self, process_instance_dict: dict, substitution_variables: dict) -> bool:
"""Passes_filter."""
if "filter_by" in self.report_metadata:
for filter_by in self.report_metadata["filter_by"]:
field_name = filter_by["field_name"]
operator = filter_by["operator"]
field_value = self.with_substitutions(filter_by["field_value"], substitution_variables)
if operator == "equals":
if str(process_instance_dict.get(field_name)) != str(field_value):
return False
return True
def order_things(self, process_instance_dicts: list) -> list:
"""Order_things."""
order_by = self.report_metadata["order_by"]
def order_by_function_for_lambda(
process_instance_dict: dict,
) -> list[Reversor | str | None]:
"""Order_by_function_for_lambda."""
comparison_values: list[Reversor | str | None] = []
for order_by_item in order_by:
if order_by_item.startswith("-"):
# remove leading - from order_by_item
order_by_item = order_by_item[1:]
sort_value = process_instance_dict.get(order_by_item)
comparison_values.append(Reversor(sort_value))
else:
sort_value = cast(Optional[str], process_instance_dict.get(order_by_item))
comparison_values.append(sort_value)
return comparison_values
return sorted(process_instance_dicts, key=order_by_function_for_lambda)
def generate_report(
self,
process_instances: list[ProcessInstanceModel],
substitution_variables: dict | None,
) -> ProcessInstanceReportResult:
"""Generate_report."""
if substitution_variables is None:
substitution_variables = {}
def to_serialized(process_instance: ProcessInstanceModel) -> dict:
"""To_serialized."""
processor = ProcessInstanceProcessor(process_instance)
process_instance.data = processor.get_current_data()
return process_instance.serialized_flat
process_instance_dicts = map(to_serialized, process_instances)
results = []
for process_instance_dict in process_instance_dicts:
if self.passes_filter(process_instance_dict, substitution_variables):
results.append(process_instance_dict)
if "order_by" in self.report_metadata:
results = self.order_things(results)
if "columns" in self.report_metadata:
column_keys_to_keep = [c["accessor"] for c in self.report_metadata["columns"]]
pruned_results = []
for result in results:
dict_you_want = {
your_key: result[your_key] for your_key in column_keys_to_keep if result.get(your_key)
}
pruned_results.append(dict_you_want)
results = pruned_results
return ProcessInstanceReportResult(report_metadata=self.report_metadata, results=results)

View File

@ -1,16 +1,15 @@
"""APIs for dealing with process groups, process models, and process instances."""
import base64
import json
from typing import Any
from typing import Dict
from typing import Optional
from typing import Union
import flask.wrappers
from flask import current_app
from flask import g
from flask import jsonify
from flask import make_response
from flask import request
from flask.wrappers import Response
from sqlalchemy import and_
from sqlalchemy import or_
@ -24,6 +23,7 @@ from spiffworkflow_backend.models.bpmn_process_definition import (
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
from spiffworkflow_backend.models.process_instance import (
ProcessInstanceCannotBeDeletedError,
@ -36,9 +36,8 @@ from spiffworkflow_backend.models.process_instance_metadata import (
from spiffworkflow_backend.models.process_instance_queue import (
ProcessInstanceQueueModel,
)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.process_instance_report import ProcessInstanceReportModel
from spiffworkflow_backend.models.process_instance_report import Report
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
@ -68,9 +67,6 @@ from spiffworkflow_backend.services.process_instance_queue_service import (
from spiffworkflow_backend.services.process_instance_queue_service import (
ProcessInstanceQueueService,
)
from spiffworkflow_backend.services.process_instance_report_service import (
ProcessInstanceReportFilter,
)
from spiffworkflow_backend.services.process_instance_report_service import (
ProcessInstanceReportService,
)
@ -81,6 +77,10 @@ from spiffworkflow_backend.services.process_model_service import ProcessModelSer
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.task_service import TaskService
# from spiffworkflow_backend.services.process_instance_report_service import (
# ProcessInstanceReportFilter,
# )
def process_instance_create(
modified_process_model_identifier: str,
@ -226,112 +226,75 @@ def process_instance_resume(
def process_instance_list_for_me(
body: Dict[str, Any],
process_model_identifier: Optional[str] = None,
page: int = 1,
per_page: int = 100,
start_from: Optional[int] = None,
start_to: Optional[int] = None,
end_from: Optional[int] = None,
end_to: Optional[int] = None,
process_status: Optional[str] = None,
user_filter: Optional[bool] = False,
report_identifier: Optional[str] = None,
report_id: Optional[int] = None,
user_group_identifier: Optional[str] = None,
process_initiator_username: Optional[str] = None,
report_columns: Optional[str] = None,
report_filter_by: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_list_for_me."""
ProcessInstanceReportService.add_or_update_filter(
body["report_metadata"]["filter_by"], {"field_name": "with_relation_to_me", "field_value": True}
)
return process_instance_list(
process_model_identifier=process_model_identifier,
page=page,
per_page=per_page,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
process_status=process_status,
user_filter=user_filter,
report_identifier=report_identifier,
report_id=report_id,
user_group_identifier=user_group_identifier,
with_relation_to_me=True,
report_columns=report_columns,
report_filter_by=report_filter_by,
process_initiator_username=process_initiator_username,
body=body,
)
def process_instance_list(
body: Dict[str, Any],
process_model_identifier: Optional[str] = None,
page: int = 1,
per_page: int = 100,
start_from: Optional[int] = None,
start_to: Optional[int] = None,
end_from: Optional[int] = None,
end_to: Optional[int] = None,
process_status: Optional[str] = None,
with_relation_to_me: Optional[bool] = None,
user_filter: Optional[bool] = False,
report_identifier: Optional[str] = None,
report_id: Optional[int] = None,
user_group_identifier: Optional[str] = None,
process_initiator_username: Optional[str] = None,
report_columns: Optional[str] = None,
report_filter_by: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_list."""
process_instance_report = ProcessInstanceReportService.report_with_identifier(g.user, report_id, report_identifier)
report_column_list = None
if report_columns:
report_column_list = json.loads(base64.b64decode(report_columns))
report_filter_by_list = None
if report_filter_by:
report_filter_by_list = json.loads(base64.b64decode(report_filter_by))
if user_filter:
report_filter = ProcessInstanceReportFilter(
process_model_identifier=process_model_identifier,
user_group_identifier=user_group_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
with_relation_to_me=with_relation_to_me,
process_status=process_status.split(",") if process_status else None,
process_initiator_username=process_initiator_username,
report_column_list=report_column_list,
report_filter_by_list=report_filter_by_list,
)
else:
report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model_identifier,
user_group_identifier=user_group_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
process_status=process_status,
with_relation_to_me=with_relation_to_me,
process_initiator_username=process_initiator_username,
report_column_list=report_column_list,
report_filter_by_list=report_filter_by_list,
)
response_json = ProcessInstanceReportService.run_process_instance_report(
report_filter=report_filter,
process_instance_report=process_instance_report,
report_metadata=body["report_metadata"],
page=page,
per_page=per_page,
user=g.user,
)
json_data_hash = JsonDataModel.create_and_insert_json_data_from_dict(body["report_metadata"])
response_json["report_hash"] = json_data_hash
db.session.commit()
return make_response(jsonify(response_json), 200)
def process_instance_report_show(
report_hash: Optional[str] = None,
report_id: Optional[int] = None,
report_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
if report_hash is None and report_id is None and report_identifier is None:
raise ApiError(
error_code="report_key_missing",
message=(
"A report key is needed to lookup a report. Either choose a report_hash, report_id, or"
" report_identifier."
),
)
response_result: Optional[Union[Report, ProcessInstanceReportModel]] = None
if report_hash is not None:
json_data = JsonDataModel.query.filter_by(hash=report_hash).first()
if json_data is None:
raise ApiError(
error_code="report_metadata_not_found",
message=f"Could not find report metadata for {report_hash}.",
)
response_result = {
"id": 0,
"identifier": "custom",
"name": "custom",
"report_metadata": json_data.data,
}
else:
response_result = ProcessInstanceReportService.report_with_identifier(g.user, report_id, report_identifier)
return make_response(jsonify(response_result), 200)
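
A hedged sketch of a client calling the new report-metadata endpoint defined in the API spec above; the base URL, auth header, and hash value are placeholders, not part of this diff:

import requests

# Placeholder values; use your deployment's backend URL (including any API version prefix) and token.
base_url = "https://backend.example.com/v1.0"
headers = {"Authorization": "Bearer <access_token>"}

# report_hash is favored over report_id and report_identifier, and the list endpoints
# return it in their responses as "report_hash".
response = requests.get(
    f"{base_url}/process-instances/report-metadata",
    params={"report_hash": "<report_hash from a previous list response>"},
    headers=headers,
)
print(response.json()["report_metadata"])
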
def process_instance_report_column_list(
process_model_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
@ -404,7 +367,6 @@ def process_instance_delete(
def process_instance_report_list(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
"""Process_instance_report_list."""
process_instance_reports = ProcessInstanceReportModel.query.filter_by(
created_by_id=g.user.id,
).all()
@ -413,7 +375,6 @@ def process_instance_report_list(page: int = 1, per_page: int = 100) -> flask.wr
def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response:
"""Process_instance_report_create."""
process_instance_report = ProcessInstanceReportModel.create_report(
identifier=body["identifier"],
user=g.user,
@ -466,40 +427,6 @@ def process_instance_report_delete(
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_instance_report_show(
report_id: int,
page: int = 1,
per_page: int = 100,
) -> flask.wrappers.Response:
"""Process_instance_report_show."""
process_instances = ProcessInstanceModel.query.order_by(
ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
).paginate(page=page, per_page=per_page, error_out=False)
process_instance_report = ProcessInstanceReportModel.query.filter_by(
id=report_id,
created_by_id=g.user.id,
).first()
if process_instance_report is None:
raise ApiError(
error_code="unknown_process_instance_report",
message="Unknown process instance report",
status_code=404,
)
substitution_variables = request.args.to_dict()
result_dict = process_instance_report.generate_report(process_instances.items, substitution_variables)
# update this if we go back to a database query instead of filtering in memory
result_dict["pagination"] = {
"count": len(result_dict["results"]),
"total": len(result_dict["results"]),
"pages": 1,
}
return Response(json.dumps(result_dict), status=200, mimetype="application/json")
def process_instance_task_list_without_task_data_for_me(
modified_process_model_identifier: str,
process_instance_id: int,
@ -765,7 +692,7 @@ def _find_process_instance_for_me_or_raise(
process_instance_id: int,
) -> ProcessInstanceModel:
"""_find_process_instance_for_me_or_raise."""
process_instance: ProcessInstanceModel = (
process_instance: Optional[ProcessInstanceModel] = (
ProcessInstanceModel.query.filter_by(id=process_instance_id)
.outerjoin(HumanTaskModel)
.outerjoin(

View File

@ -438,7 +438,7 @@ def process_model_create_with_natural_language(
def _get_file_from_request() -> FileStorage:
"""Get_file_from_request."""
request_file: FileStorage = connexion.request.files.get("file")
request_file: Optional[FileStorage] = connexion.request.files.get("file")
if not request_file:
raise ApiError(
error_code="no_file_given",

View File

@ -36,6 +36,7 @@ from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
@ -220,7 +221,7 @@ def task_data_update(
task_model, new_task_data_dict, "json_data_hash"
)
if json_data_dict is not None:
TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict})
JsonDataModel.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict})
ProcessInstanceTmpService.add_event_to_process_instance(
process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid
)
@ -537,7 +538,7 @@ def _task_submit_shared(
task_model, spiff_task.data, "json_data_hash"
)
if json_data_dict is not None:
TaskService.insert_or_update_json_data_dict(json_data_dict)
JsonDataModel.insert_or_update_json_data_dict(json_data_dict)
db.session.add(task_model)
db.session.commit()
else:

View File

@ -1737,7 +1737,7 @@ class ProcessInstanceProcessor:
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
)
task_service.update_task_model(task_model, spiff_task)
TaskService.insert_or_update_json_data_records(task_service.json_data_dicts)
JsonDataModel.insert_or_update_json_data_records(task_service.json_data_dicts)
ProcessInstanceTmpService.add_event_to_process_instance(
self.process_instance_model,

View File

@ -1,7 +1,8 @@
"""Process_instance_report_service."""
import copy
import re
from dataclasses import dataclass
from typing import Any
from typing import Generator
from typing import Optional
from typing import Type
@ -23,9 +24,10 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.process_instance_report import FilterValue
from spiffworkflow_backend.models.process_instance_report import ProcessInstanceReportModel
from spiffworkflow_backend.models.process_instance_report import ReportMetadata
from spiffworkflow_backend.models.process_instance_report import ReportMetadataColumn
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@ -35,199 +37,157 @@ class ProcessInstanceReportNotFoundError(Exception):
"""ProcessInstanceReportNotFoundError."""
@dataclass
class ProcessInstanceReportFilter:
"""ProcessInstanceReportFilter."""
process_model_identifier: Optional[str] = None
user_group_identifier: Optional[str] = None
start_from: Optional[int] = None
start_to: Optional[int] = None
end_from: Optional[int] = None
end_to: Optional[int] = None
process_status: Optional[list[str]] = None
initiated_by_me: Optional[bool] = None
has_terminal_status: Optional[bool] = None
has_active_status: Optional[bool] = None
with_tasks_completed_by_me: Optional[bool] = None
with_tasks_i_can_complete: Optional[bool] = None
with_tasks_assigned_to_my_group: Optional[bool] = None
with_relation_to_me: Optional[bool] = None
process_initiator_username: Optional[str] = None
report_column_list: Optional[list] = None
report_filter_by_list: Optional[list] = None
oldest_open_human_task_fields: Optional[list] = None
def to_dict(self) -> dict[str, str]:
"""To_dict."""
d = {}
if self.process_model_identifier is not None:
d["process_model_identifier"] = self.process_model_identifier
if self.user_group_identifier is not None:
d["user_group_identifier"] = self.user_group_identifier
if self.start_from is not None:
d["start_from"] = str(self.start_from)
if self.start_to is not None:
d["start_to"] = str(self.start_to)
if self.end_from is not None:
d["end_from"] = str(self.end_from)
if self.end_to is not None:
d["end_to"] = str(self.end_to)
if self.process_status is not None:
d["process_status"] = ",".join(self.process_status)
if self.initiated_by_me is not None:
d["initiated_by_me"] = str(self.initiated_by_me).lower()
if self.has_terminal_status is not None:
d["has_terminal_status"] = str(self.has_terminal_status).lower()
if self.has_active_status is not None:
d["has_active_status"] = str(self.has_active_status).lower()
if self.with_tasks_completed_by_me is not None:
d["with_tasks_completed_by_me"] = str(self.with_tasks_completed_by_me).lower()
if self.with_tasks_i_can_complete is not None:
d["with_tasks_i_can_complete"] = str(self.with_tasks_i_can_complete).lower()
if self.with_tasks_assigned_to_my_group is not None:
d["with_tasks_assigned_to_my_group"] = str(self.with_tasks_assigned_to_my_group).lower()
if self.with_relation_to_me is not None:
d["with_relation_to_me"] = str(self.with_relation_to_me).lower()
if self.process_initiator_username is not None:
d["process_initiator_username"] = str(self.process_initiator_username)
if self.report_column_list is not None:
d["report_column_list"] = str(self.report_column_list)
if self.report_filter_by_list is not None:
d["report_filter_by_list"] = str(self.report_filter_by_list)
if self.oldest_open_human_task_fields is not None:
d["oldest_open_human_task_fields"] = str(self.oldest_open_human_task_fields)
return d
class ProcessInstanceReportService:
"""ProcessInstanceReportService."""
@classmethod
def system_metadata_map(cls, metadata_key: str) -> Optional[dict[str, Any]]:
"""System_metadata_map."""
def system_metadata_map(cls, metadata_key: str) -> Optional[ReportMetadata]:
# TODO replace with system reports that are loaded on launch (or similar)
temp_system_metadata_map = {
"default": {
"columns": cls.builtin_column_options(),
"filter_by": [],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_completed_instances_initiated_by_me": {
"columns": [
{"Header": "id", "accessor": "id"},
{
"Header": "process_model_display_name",
"accessor": "process_model_display_name",
},
{"Header": "start_in_seconds", "accessor": "start_in_seconds"},
{"Header": "end_in_seconds", "accessor": "end_in_seconds"},
{"Header": "status", "accessor": "status"},
],
"filter_by": [
{"field_name": "initiated_by_me", "field_value": "true"},
{"field_name": "has_terminal_status", "field_value": "true"},
],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_completed_instances_with_tasks_completed_by_me": {
"columns": cls.builtin_column_options(),
"filter_by": [
{"field_name": "with_tasks_completed_by_me", "field_value": "true"},
{"field_name": "has_terminal_status", "field_value": "true"},
],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_completed_instances_with_tasks_completed_by_my_groups": {
"columns": cls.builtin_column_options(),
"filter_by": [
{
"field_name": "with_tasks_assigned_to_my_group",
"field_value": "true",
},
{"field_name": "has_terminal_status", "field_value": "true"},
],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_in_progress_instances_initiated_by_me": {
"columns": [
{"Header": "id", "accessor": "id"},
{
"Header": "process_model_display_name",
"accessor": "process_model_display_name",
},
{"Header": "Task", "accessor": "task_title"},
{"Header": "Waiting For", "accessor": "waiting_for"},
{"Header": "Started", "accessor": "start_in_seconds"},
{"Header": "Last Updated", "accessor": "task_updated_at_in_seconds"},
{"Header": "status", "accessor": "status"},
],
"filter_by": [
{"field_name": "initiated_by_me", "field_value": "true"},
{"field_name": "has_terminal_status", "field_value": "false"},
{
"field_name": "oldest_open_human_task_fields",
"field_value": (
"task_id,task_title,task_name,potential_owner_usernames,assigned_user_group_identifier"
),
},
],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_in_progress_instances_with_tasks_for_me": {
"columns": [
{"Header": "id", "accessor": "id"},
{
"Header": "process_model_display_name",
"accessor": "process_model_display_name",
},
{"Header": "Task", "accessor": "task_title"},
{"Header": "Started By", "accessor": "process_initiator_username"},
{"Header": "Started", "accessor": "start_in_seconds"},
{"Header": "Last Updated", "accessor": "task_updated_at_in_seconds"},
],
"filter_by": [
{"field_name": "with_tasks_i_can_complete", "field_value": "true"},
{"field_name": "has_active_status", "field_value": "true"},
{
"field_name": "oldest_open_human_task_fields",
"field_value": "task_id,task_title,task_name",
},
],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_in_progress_instances_with_tasks_for_my_group": {
"columns": [
{"Header": "id", "accessor": "id"},
{
"Header": "process_model_display_name",
"accessor": "process_model_display_name",
},
{"Header": "Task", "accessor": "task_title"},
{"Header": "Started By", "accessor": "process_initiator_username"},
{"Header": "Started", "accessor": "start_in_seconds"},
{"Header": "Last Updated", "accessor": "task_updated_at_in_seconds"},
],
"filter_by": [
{
"field_name": "with_tasks_assigned_to_my_group",
"field_value": "true",
},
{"field_name": "has_active_status", "field_value": "true"},
{
"field_name": "oldest_open_human_task_fields",
"field_value": "task_id,task_title,task_name",
},
],
"order_by": ["-start_in_seconds", "-id"],
},
terminal_status_values = ",".join(ProcessInstanceModel.terminal_statuses())
non_terminal_status_values = ",".join(ProcessInstanceModel.non_terminal_statuses())
active_status_values = ",".join(ProcessInstanceModel.active_statuses())
default: ReportMetadata = {
"columns": cls.builtin_column_options(),
"filter_by": [],
"order_by": ["-start_in_seconds", "-id"],
}
system_report_completed_instances_initiated_by_me: ReportMetadata = {
"columns": [
{"Header": "id", "accessor": "id", "filterable": False},
{
"Header": "process_model_display_name",
"accessor": "process_model_display_name",
"filterable": False,
},
{"Header": "start_in_seconds", "accessor": "start_in_seconds", "filterable": False},
{"Header": "end_in_seconds", "accessor": "end_in_seconds", "filterable": False},
{"Header": "status", "accessor": "status", "filterable": False},
],
"filter_by": [
{"field_name": "initiated_by_me", "field_value": True, "operator": "equals"},
{"field_name": "process_status", "field_value": terminal_status_values, "operator": "equals"},
],
"order_by": ["-start_in_seconds", "-id"],
}
system_report_completed_instances_with_tasks_completed_by_me: ReportMetadata = {
"columns": cls.builtin_column_options(),
"filter_by": [
{"field_name": "with_tasks_completed_by_me", "field_value": True, "operator": "equals"},
{"field_name": "process_status", "field_value": terminal_status_values, "operator": "equals"},
],
"order_by": ["-start_in_seconds", "-id"],
}
system_report_completed_instances: ReportMetadata = {
"columns": cls.builtin_column_options(),
"filter_by": [
{"field_name": "process_status", "field_value": terminal_status_values, "operator": "equals"},
],
"order_by": ["-start_in_seconds", "-id"],
}
system_report_in_progress_instances_initiated_by_me: ReportMetadata = {
"columns": [
{"Header": "id", "accessor": "id", "filterable": False},
{
"Header": "process_model_display_name",
"accessor": "process_model_display_name",
"filterable": False,
},
{"Header": "Task", "accessor": "task_title", "filterable": False},
{"Header": "Waiting For", "accessor": "waiting_for", "filterable": False},
{"Header": "Started", "accessor": "start_in_seconds", "filterable": False},
{"Header": "Last Updated", "accessor": "task_updated_at_in_seconds", "filterable": False},
{"Header": "status", "accessor": "status", "filterable": False},
],
"filter_by": [
{"field_name": "initiated_by_me", "field_value": True, "operator": "equals"},
{"field_name": "process_status", "field_value": non_terminal_status_values, "operator": "equals"},
{
"field_name": "with_oldest_open_task",
"field_value": True,
"operator": "equals",
},
],
"order_by": ["-start_in_seconds", "-id"],
}
system_report_in_progress_instances_with_tasks_for_me: ReportMetadata = {
"columns": [
{"Header": "id", "accessor": "id", "filterable": False},
{
"Header": "process_model_display_name",
"accessor": "process_model_display_name",
"filterable": False,
},
{"Header": "Task", "accessor": "task_title", "filterable": False},
{"Header": "Started By", "accessor": "process_initiator_username", "filterable": False},
{"Header": "Started", "accessor": "start_in_seconds", "filterable": False},
{"Header": "Last Updated", "accessor": "task_updated_at_in_seconds", "filterable": False},
],
"filter_by": [
{"field_name": "with_tasks_i_can_complete", "field_value": True, "operator": "equals"},
{"field_name": "process_status", "field_value": active_status_values, "operator": "equals"},
{
"field_name": "with_oldest_open_task",
"field_value": True,
"operator": "equals",
},
],
"order_by": ["-start_in_seconds", "-id"],
}
system_report_in_progress_instances_with_tasks: ReportMetadata = {
"columns": [
{"Header": "id", "accessor": "id", "filterable": False},
{
"Header": "process_model_display_name",
"accessor": "process_model_display_name",
"filterable": False,
},
{"Header": "Task", "accessor": "task_title", "filterable": False},
{"Header": "Started By", "accessor": "process_initiator_username", "filterable": False},
{"Header": "Started", "accessor": "start_in_seconds", "filterable": False},
{"Header": "Last Updated", "accessor": "task_updated_at_in_seconds", "filterable": False},
],
"filter_by": [
{"field_name": "process_status", "field_value": active_status_values, "operator": "equals"},
{
"field_name": "with_oldest_open_task",
"field_value": True,
"operator": "equals",
},
],
"order_by": ["-start_in_seconds", "-id"],
}
temp_system_metadata_map = {
"default": default,
"system_report_completed_instances_initiated_by_me": system_report_completed_instances_initiated_by_me,
"system_report_completed_instances_with_tasks_completed_by_me": (
system_report_completed_instances_with_tasks_completed_by_me
),
"system_report_completed_instances": system_report_completed_instances,
"system_report_in_progress_instances_initiated_by_me": system_report_in_progress_instances_initiated_by_me,
"system_report_in_progress_instances_with_tasks_for_me": (
system_report_in_progress_instances_with_tasks_for_me
),
"system_report_in_progress_instances_with_tasks": system_report_in_progress_instances_with_tasks,
}
if metadata_key not in temp_system_metadata_map:
return None
return temp_system_metadata_map[metadata_key]
return_value: ReportMetadata = temp_system_metadata_map[metadata_key]
return return_value
@classmethod
def compile_report(cls, report_metadata: ReportMetadata, user: UserModel) -> None:
compiled_filters: list[FilterValue] = []
old_filters = copy.deepcopy(report_metadata["filter_by"])
for filter in old_filters:
if filter["field_name"] == "initiated_by_me":
compiled_filters.append(
{"field_name": "process_initiator_username", "field_value": user.username, "operator": "equals"}
)
else:
compiled_filters.append(filter)
report_metadata["filter_by"] = compiled_filters
@classmethod
def report_with_identifier(
@ -236,7 +196,6 @@ class ProcessInstanceReportService:
report_id: Optional[int] = None,
report_identifier: Optional[str] = None,
) -> ProcessInstanceReportModel:
"""Report_with_filter."""
if report_id is not None:
process_instance_report = ProcessInstanceReportModel.query.filter_by(
id=report_id, created_by_id=user.id
@ -258,6 +217,7 @@ class ProcessInstanceReportService:
raise ProcessInstanceReportNotFoundError(
f"Could not find a report with identifier '{report_identifier}' for user '{user.username}'"
)
cls.compile_report(report_metadata, user=user)
process_instance_report = ProcessInstanceReportModel(
identifier=report_identifier,
@ -267,150 +227,15 @@ class ProcessInstanceReportService:
return process_instance_report # type: ignore
@classmethod
def filter_by_to_dict(cls, process_instance_report: ProcessInstanceReportModel) -> dict[str, str]:
"""Filter_by_to_dict."""
metadata = process_instance_report.report_metadata
filter_by = metadata.get("filter_by", [])
filters = {d["field_name"]: d["field_value"] for d in filter_by if "field_name" in d and "field_value" in d}
return filters
@classmethod
def filter_from_metadata(cls, process_instance_report: ProcessInstanceReportModel) -> ProcessInstanceReportFilter:
"""Filter_from_metadata."""
filters = cls.filter_by_to_dict(process_instance_report)
def bool_value(key: str) -> Optional[bool]:
"""Bool_value."""
if key not in filters:
return None
# bool returns True if not an empty string so check explicitly for false
if filters[key] in ["false", "False"]:
return False
return bool(filters[key])
def int_value(key: str) -> Optional[int]:
"""Int_value."""
return int(filters[key]) if key in filters else None
def list_value(key: str) -> Optional[list[str]]:
return filters[key].split(",") if key in filters else None
process_model_identifier = filters.get("process_model_identifier")
user_group_identifier = filters.get("user_group_identifier")
start_from = int_value("start_from")
start_to = int_value("start_to")
end_from = int_value("end_from")
end_to = int_value("end_to")
process_status = list_value("process_status")
initiated_by_me = bool_value("initiated_by_me")
has_terminal_status = bool_value("has_terminal_status")
has_active_status = bool_value("has_active_status")
with_tasks_completed_by_me = bool_value("with_tasks_completed_by_me")
with_tasks_i_can_complete = bool_value("with_tasks_i_can_complete")
with_tasks_assigned_to_my_group = bool_value("with_tasks_assigned_to_my_group")
with_relation_to_me = bool_value("with_relation_to_me")
process_initiator_username = filters.get("process_initiator_username")
report_column_list = list_value("report_column_list")
report_filter_by_list = list_value("report_filter_by_list")
oldest_open_human_task_fields = list_value("oldest_open_human_task_fields")
report_filter = ProcessInstanceReportFilter(
process_model_identifier=process_model_identifier,
user_group_identifier=user_group_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
process_status=process_status,
initiated_by_me=initiated_by_me,
has_terminal_status=has_terminal_status,
has_active_status=has_active_status,
with_tasks_completed_by_me=with_tasks_completed_by_me,
with_tasks_i_can_complete=with_tasks_i_can_complete,
with_tasks_assigned_to_my_group=with_tasks_assigned_to_my_group,
with_relation_to_me=with_relation_to_me,
process_initiator_username=process_initiator_username,
report_column_list=report_column_list,
report_filter_by_list=report_filter_by_list,
oldest_open_human_task_fields=oldest_open_human_task_fields,
)
return report_filter
@classmethod
def filter_from_metadata_with_overrides(
cls,
process_instance_report: ProcessInstanceReportModel,
process_model_identifier: Optional[str] = None,
user_group_identifier: Optional[str] = None,
start_from: Optional[int] = None,
start_to: Optional[int] = None,
end_from: Optional[int] = None,
end_to: Optional[int] = None,
process_status: Optional[str] = None,
initiated_by_me: Optional[bool] = None,
has_terminal_status: Optional[bool] = None,
has_active_status: Optional[bool] = None,
with_tasks_completed_by_me: Optional[bool] = None,
with_tasks_i_can_complete: Optional[bool] = None,
with_tasks_assigned_to_my_group: Optional[bool] = None,
with_relation_to_me: Optional[bool] = None,
process_initiator_username: Optional[str] = None,
report_column_list: Optional[list] = None,
report_filter_by_list: Optional[list] = None,
oldest_open_human_task_fields: Optional[list] = None,
) -> ProcessInstanceReportFilter:
"""Filter_from_metadata_with_overrides."""
report_filter = cls.filter_from_metadata(process_instance_report)
if process_model_identifier is not None:
report_filter.process_model_identifier = process_model_identifier
if user_group_identifier is not None:
report_filter.user_group_identifier = user_group_identifier
if start_from is not None:
report_filter.start_from = start_from
if start_to is not None:
report_filter.start_to = start_to
if end_from is not None:
report_filter.end_from = end_from
if end_to is not None:
report_filter.end_to = end_to
if process_status is not None:
report_filter.process_status = process_status.split(",")
if initiated_by_me is not None:
report_filter.initiated_by_me = initiated_by_me
if has_terminal_status is not None:
report_filter.has_terminal_status = has_terminal_status
if has_active_status is not None:
report_filter.has_active_status = has_active_status
if with_tasks_completed_by_me is not None:
report_filter.with_tasks_completed_by_me = with_tasks_completed_by_me
if with_tasks_i_can_complete is not None:
report_filter.with_tasks_i_can_complete = with_tasks_i_can_complete
if process_initiator_username is not None:
report_filter.process_initiator_username = process_initiator_username
if report_column_list is not None:
report_filter.report_column_list = report_column_list
if report_filter_by_list is not None:
report_filter.report_filter_by_list = report_filter_by_list
if oldest_open_human_task_fields is not None:
report_filter.oldest_open_human_task_fields = oldest_open_human_task_fields
if with_tasks_assigned_to_my_group is not None:
report_filter.with_tasks_assigned_to_my_group = with_tasks_assigned_to_my_group
if with_relation_to_me is not None:
report_filter.with_relation_to_me = with_relation_to_me
return report_filter
@classmethod
def add_metadata_columns_to_process_instance(
cls,
process_instance_sqlalchemy_rows: list[sqlalchemy.engine.row.Row], # type: ignore
metadata_columns: list[dict],
metadata_columns: list[ReportMetadataColumn],
) -> list[dict]:
"""Add_metadata_columns_to_process_instance."""
results = []
cls.non_metadata_columns()
for process_instance_row in process_instance_sqlalchemy_rows:
process_instance_mapping = process_instance_row._mapping
process_instance_dict = process_instance_row[0].serialized
@ -424,9 +249,14 @@ class ProcessInstanceReportService:
return results
@classmethod
def add_human_task_fields(
cls, process_instance_dicts: list[dict], oldest_open_human_task_fields: list
) -> list[dict]:
def add_human_task_fields(cls, process_instance_dicts: list[dict]) -> list[dict]:
fields_to_return = [
"task_id",
"task_title",
"task_name",
"potential_owner_usernames",
"assigned_user_group_identifier",
]
for process_instance_dict in process_instance_dicts:
assigned_user = aliased(UserModel)
human_task_query = (
@ -452,7 +282,7 @@ class ProcessInstanceReportService:
.first()
)
if human_task is not None:
for field in oldest_open_human_task_fields:
for field in fields_to_return:
process_instance_dict[field] = getattr(human_task, field)
return process_instance_dicts
@ -477,9 +307,17 @@ class ProcessInstanceReportService:
return [i.name for i in model.__table__.columns]
@classmethod
def builtin_column_options(cls) -> list[dict]:
def process_instance_stock_columns(cls) -> list[str]:
return cls.get_column_names_for_model(ProcessInstanceModel)
@classmethod
def non_metadata_columns(cls) -> list[str]:
return cls.process_instance_stock_columns() + ["process_initiator_username"]
@classmethod
def builtin_column_options(cls) -> list[ReportMetadataColumn]:
"""Builtin_column_options."""
return [
return_value: list[ReportMetadataColumn] = [
{"Header": "Id", "accessor": "id", "filterable": False},
{
"Header": "Process",
@ -495,26 +333,47 @@ class ProcessInstanceReportService:
},
{"Header": "Status", "accessor": "status", "filterable": False},
]
return return_value
@classmethod
def get_filter_value(cls, filters: list[FilterValue], filter_key: str) -> Any:
for filter in filters:
if filter["field_name"] == filter_key and filter["field_value"] is not None:
return filter["field_value"]
@classmethod
def check_filter_value(cls, filters: list[FilterValue], filter_key: str) -> Generator:
value = cls.get_filter_value(filters, filter_key)
if value is not None:
yield value
@classmethod
def add_or_update_filter(cls, filters: list[FilterValue], new_filter: FilterValue) -> None:
filter_found = False
for filter in filters:
if filter["field_name"] == new_filter["field_name"]:
filter["field_value"] = new_filter["field_value"]
filter_found = True
if filter_found is False:
filters.append(new_filter)
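get_filter_value, check_filter_value, and add_or_update_filter treat the report's filter_by list as a small keyed collection: look up the first non-None value, iterate only when a value is present, and upsert by field_name. A brief standalone illustration with plain dicts standing in for FilterValue entries (the field names and values are made up for the example):

filters = [
    {"field_name": "process_status", "field_value": "ready", "operator": "equals"},
]

# get_filter_value-style lookup: the first matching non-None field_value wins
status = next((f["field_value"] for f in filters if f["field_name"] == "process_status"), None)
assert status == "ready"

# check_filter_value-style usage: the loop body only runs when the filter is present,
# so the generator doubles as an inline "if set" guard
for value in (
    f["field_value"]
    for f in filters
    if f["field_name"] == "start_from" and f["field_value"] is not None
):
    raise AssertionError("never reached: start_from is not in the filter list")

# add_or_update_filter-style upsert: update in place if the field exists, append otherwise
new_filter = {"field_name": "process_status", "field_value": "complete", "operator": "equals"}
found = False
for f in filters:
    if f["field_name"] == new_filter["field_name"]:
        f["field_value"] = new_filter["field_value"]
        found = True
if not found:
    filters.append(new_filter)
assert filters[0]["field_value"] == "complete"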
@classmethod
def run_process_instance_report(
cls,
report_filter: ProcessInstanceReportFilter,
process_instance_report: ProcessInstanceReportModel,
report_metadata: ReportMetadata,
user: UserModel,
page: int = 1,
per_page: int = 100,
) -> dict:
"""Run_process_instance_report."""
process_instance_query = ProcessInstanceModel.query
# Always eager-load the process_initiator user for good performance at serialization time.
process_instance_query = process_instance_query.options(selectinload(ProcessInstanceModel.process_initiator))
filters = report_metadata["filter_by"]
if report_filter.process_model_identifier is not None:
for value in cls.check_filter_value(filters, "process_model_identifier"):
process_model = ProcessModelService.get_process_model(
f"{report_filter.process_model_identifier}",
f"{value}",
)
process_instance_query = process_instance_query.filter_by(process_model_identifier=process_model.id)
# This can never happen, since the class always has the columns it defines; the check only appeases mypy.
@ -527,55 +386,46 @@ class ProcessInstanceReportService:
)
)
if report_filter.start_from is not None:
for value in cls.check_filter_value(filters, "start_from"):
process_instance_query = process_instance_query.filter(ProcessInstanceModel.start_in_seconds >= value)
for value in cls.check_filter_value(filters, "start_to"):
process_instance_query = process_instance_query.filter(ProcessInstanceModel.start_in_seconds <= value)
for value in cls.check_filter_value(filters, "end_from"):
process_instance_query = process_instance_query.filter(ProcessInstanceModel.end_in_seconds >= value)
for value in cls.check_filter_value(filters, "end_to"):
process_instance_query = process_instance_query.filter(ProcessInstanceModel.end_in_seconds <= value)
process_status = cls.get_filter_value(filters, "process_status")
if process_status is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.start_in_seconds >= report_filter.start_from
)
if report_filter.start_to is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.start_in_seconds <= report_filter.start_to
)
if report_filter.end_from is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.end_in_seconds >= report_filter.end_from
)
if report_filter.end_to is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.end_in_seconds <= report_filter.end_to
)
if report_filter.process_status is not None:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore
ProcessInstanceModel.status.in_(process_status.split(",")) # type: ignore
)
if report_filter.initiated_by_me is True:
process_instance_query = process_instance_query.filter_by(process_initiator=user)
if report_filter.has_terminal_status is True:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.status.in_(ProcessInstanceModel.terminal_statuses()) # type: ignore
)
elif report_filter.has_terminal_status is False:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.status.not_in(ProcessInstanceModel.terminal_statuses()) # type: ignore
)
if report_filter.has_active_status is True:
has_active_status = cls.get_filter_value(filters, "has_active_status")
if has_active_status:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.status.in_(ProcessInstanceModel.active_statuses()) # type: ignore
)
if report_filter.process_initiator_username is not None:
initiator = UserModel.query.filter_by(username=report_filter.process_initiator_username).first()
for value in cls.check_filter_value(filters, "process_initiator_username"):
initiator = UserModel.query.filter_by(username=value).first()
process_initiator_id = -1
if initiator:
process_initiator_id = initiator.id
process_instance_query = process_instance_query.filter_by(process_initiator_id=process_initiator_id)
with_tasks_completed_by_me = cls.get_filter_value(filters, "with_tasks_completed_by_me")
with_tasks_i_can_complete = cls.get_filter_value(filters, "with_tasks_i_can_complete")
user_group_identifier = cls.get_filter_value(filters, "user_group_identifier")
# set only by built-in reports, for the "for-me" paths
with_relation_to_me = cls.get_filter_value(filters, "with_relation_to_me")
if (
not report_filter.with_tasks_completed_by_me
and not report_filter.with_tasks_assigned_to_my_group
and not report_filter.with_tasks_i_can_complete
and report_filter.with_relation_to_me is True
not with_tasks_completed_by_me
and not user_group_identifier
and not with_tasks_i_can_complete
and with_relation_to_me is True
):
process_instance_query = process_instance_query.outerjoin(HumanTaskModel).outerjoin(
HumanTaskUserModel,
@ -591,7 +441,7 @@ class ProcessInstanceReportService:
)
)
if report_filter.with_tasks_completed_by_me is True:
if with_tasks_completed_by_me is True:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.process_initiator_id != user.id
)
@ -603,7 +453,9 @@ class ProcessInstanceReportService:
),
)
if report_filter.with_tasks_i_can_complete is True:
# this excludes some tasks you can complete, because that's the way the requirements were described.
# if it's assigned to one of your groups, it does not get returned by this query.
if with_tasks_i_can_complete is True:
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.process_initiator_id != user.id
)
@ -612,26 +464,27 @@ class ProcessInstanceReportService:
and_(
HumanTaskModel.process_instance_id == ProcessInstanceModel.id,
HumanTaskModel.lane_assignment_id.is_(None), # type: ignore
HumanTaskModel.completed.is_(False), # type: ignore
),
).join(
HumanTaskUserModel,
and_(HumanTaskUserModel.human_task_id == HumanTaskModel.id, HumanTaskUserModel.user_id == user.id),
)
if report_filter.has_active_status:
process_instance_query = process_instance_query.filter(
HumanTaskModel.completed.is_(False) # type: ignore
)
if report_filter.with_tasks_assigned_to_my_group is True:
if user_group_identifier is not None:
group_model_join_conditions = [GroupModel.id == HumanTaskModel.lane_assignment_id]
if report_filter.user_group_identifier:
group_model_join_conditions.append(GroupModel.identifier == report_filter.user_group_identifier)
if user_group_identifier:
group_model_join_conditions.append(GroupModel.identifier == user_group_identifier)
process_instance_query = process_instance_query.join(HumanTaskModel)
if report_filter.has_active_status:
process_instance_query = process_instance_query.filter(
HumanTaskModel.completed.is_(False) # type: ignore
)
if process_status is not None:
non_active_statuses = [
s for s in process_status.split(",") if s not in ProcessInstanceModel.active_statuses()
]
if len(non_active_statuses) == 0:
process_instance_query = process_instance_query.filter(
HumanTaskModel.completed.is_(False) # type: ignore
)
process_instance_query = process_instance_query.join(GroupModel, and_(*group_model_join_conditions))
process_instance_query = process_instance_query.join(
@ -641,26 +494,19 @@ class ProcessInstanceReportService:
process_instance_query = process_instance_query.filter(UserGroupAssignmentModel.user_id == user.id)
instance_metadata_aliases = {}
stock_columns = cls.get_column_names_for_model(ProcessInstanceModel)
if isinstance(report_filter.report_column_list, list):
process_instance_report.report_metadata["columns"] = report_filter.report_column_list
if isinstance(report_filter.report_filter_by_list, list):
process_instance_report.report_metadata["filter_by"] = report_filter.report_filter_by_list
if report_metadata["columns"] is None or len(report_metadata["columns"]) < 1:
report_metadata["columns"] = cls.builtin_column_options()
for column in process_instance_report.report_metadata["columns"]:
if column["accessor"] in stock_columns:
for column in report_metadata["columns"]:
if column["accessor"] in cls.non_metadata_columns():
continue
instance_metadata_alias = aliased(ProcessInstanceMetadataModel)
instance_metadata_aliases[column["accessor"]] = instance_metadata_alias
filter_for_column = None
if "filter_by" in process_instance_report.report_metadata:
if "filter_by" in report_metadata:
filter_for_column = next(
(
f
for f in process_instance_report.report_metadata["filter_by"]
if f["field_name"] == column["accessor"]
),
(f for f in report_metadata["filter_by"] if f["field_name"] == column["accessor"]),
None,
)
isouter = True
@ -676,12 +522,12 @@ class ProcessInstanceReportService:
).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"]))
order_by_query_array = []
order_by_array = process_instance_report.report_metadata["order_by"]
order_by_array = report_metadata["order_by"]
if len(order_by_array) < 1:
order_by_array = ProcessInstanceReportModel.default_order_by()
for order_by_option in order_by_array:
attribute = re.sub("^-", "", order_by_option)
if attribute in stock_columns:
if attribute in cls.process_instance_stock_columns():
if order_by_option.startswith("-"):
order_by_query_array.append(getattr(ProcessInstanceModel, attribute).desc())
else:
@ -698,16 +544,16 @@ class ProcessInstanceReportService:
.order_by(*order_by_query_array)
.paginate(page=page, per_page=per_page, error_out=False)
)
results = cls.add_metadata_columns_to_process_instance(
process_instances.items, process_instance_report.report_metadata["columns"]
)
results = cls.add_metadata_columns_to_process_instance(process_instances.items, report_metadata["columns"])
if report_filter.oldest_open_human_task_fields:
results = cls.add_human_task_fields(results, report_filter.oldest_open_human_task_fields)
for value in cls.check_filter_value(filters, "with_oldest_open_task"):
if value is True:
results = cls.add_human_task_fields(results)
report_metadata["filter_by"] = filters
response_json = {
"report": process_instance_report,
"report_metadata": report_metadata,
"results": results,
"filters": report_filter.to_dict(),
"pagination": {
"count": len(results),
"total": process_instances.total,

View File

@ -8,21 +8,19 @@ from typing import TypedDict
from typing import Union
from uuid import UUID
from flask import current_app
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.exceptions import WorkflowException # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.task import TaskStateNames
from sqlalchemy.dialects.mysql import insert as mysql_insert
from sqlalchemy.dialects.postgresql import insert as postgres_insert
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError
from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
from spiffworkflow_backend.models.json_data import JsonDataDict
from spiffworkflow_backend.models.json_data import JsonDataModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
@ -38,11 +36,6 @@ class StartAndEndTimes(TypedDict):
end_in_seconds: Optional[float]
class JsonDataDict(TypedDict):
hash: str
data: dict
class TaskModelError(Exception):
"""Copied from SpiffWorkflow.exceptions.WorkflowTaskException.
@ -130,7 +123,7 @@ class TaskService:
db.session.bulk_save_objects(self.bpmn_processes.values())
db.session.bulk_save_objects(self.task_models.values())
db.session.bulk_save_objects(self.process_instance_events.values())
self.__class__.insert_or_update_json_data_records(self.json_data_dicts)
JsonDataModel.insert_or_update_json_data_records(self.json_data_dicts)
def process_parents_and_children_and_save_to_database(
self,
@ -483,10 +476,6 @@ class TaskService:
bpmn_process.json_data_hash = bpmn_process_data_hash
return json_data_dict
@classmethod
def insert_or_update_json_data_dict(cls, json_data_dict: JsonDataDict) -> None:
TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict})
@classmethod
def update_task_data_on_task_model_and_return_dict_if_updated(
cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str
@ -610,21 +599,6 @@ class TaskService:
new_properties_json["state"] = getattr(TaskState, state)
task_model.properties_json = new_properties_json
@classmethod
def insert_or_update_json_data_records(
cls, json_data_hash_to_json_data_dict_mapping: dict[str, JsonDataDict]
) -> None:
list_of_dicts = [*json_data_hash_to_json_data_dict_mapping.values()]
if len(list_of_dicts) > 0:
on_duplicate_key_stmt = None
if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "mysql":
insert_stmt = mysql_insert(JsonDataModel).values(list_of_dicts)
on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(data=insert_stmt.inserted.data)
else:
insert_stmt = postgres_insert(JsonDataModel).values(list_of_dicts)
on_duplicate_key_stmt = insert_stmt.on_conflict_do_nothing(index_elements=["hash"])
db.session.execute(on_duplicate_key_stmt)
@classmethod
def get_extensions_from_task_model(cls, task_model: TaskModel) -> dict:
task_definition = task_model.task_definition

View File

@ -19,6 +19,7 @@ from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_report import ReportMetadata
from spiffworkflow_backend.models.process_model import NotificationType
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
@ -379,3 +380,26 @@ class BaseTest:
},
)
return process_model
def post_to_process_instance_list(
self,
client: FlaskClient,
user: UserModel,
report_metadata: Optional[ReportMetadata] = None,
param_string: Optional[str] = "",
) -> TestResponse:
report_metadata_to_use = report_metadata
if report_metadata_to_use is None:
report_metadata_to_use = self.empty_report_metadata_body()
response = client.post(
f"/v1.0/process-instances{param_string}",
headers=self.logged_in_headers(user),
content_type="application/json",
data=json.dumps({"report_metadata": report_metadata_to_use}),
)
assert response.status_code == 200
assert response.json is not None
return response
def empty_report_metadata_body(self) -> ReportMetadata:
return {"filter_by": [], "columns": [], "order_by": []}

View File

@ -25,9 +25,8 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.process_instance_report import ProcessInstanceReportModel
from spiffworkflow_backend.models.process_instance_report import ReportMetadata
from spiffworkflow_backend.models.process_model import NotificationType
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
@ -793,7 +792,6 @@ class TestProcessApi(BaseTest):
content_type="multipart/form-data",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 400
assert response.json is not None
assert response.json["error_code"] == "no_file_given"
@ -1762,12 +1760,7 @@ class TestProcessApi(BaseTest):
headers = self.logged_in_headers(with_super_admin_user)
self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers)
response = client.get(
"/v1.0/process-instances",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
response = self.post_to_process_instance_list(client, with_super_admin_user)
assert len(response.json["results"]) == 1
assert response.json["pagination"]["count"] == 1
assert response.json["pagination"]["pages"] == 1
@ -1808,23 +1801,13 @@ class TestProcessApi(BaseTest):
self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers)
self.create_process_instance_from_process_model_id_with_api(client, process_model_identifier, headers)
response = client.get(
"/v1.0/process-instances?per_page=2&page=3",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
response = self.post_to_process_instance_list(client, with_super_admin_user, param_string="?per_page=2&page=3")
assert len(response.json["results"]) == 1
assert response.json["pagination"]["count"] == 1
assert response.json["pagination"]["pages"] == 3
assert response.json["pagination"]["total"] == 5
response = client.get(
"/v1.0/process-instances?per_page=2&page=1",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
response = self.post_to_process_instance_list(client, with_super_admin_user, param_string="?per_page=2&page=1")
assert len(response.json["results"]) == 2
assert response.json["pagination"]["count"] == 2
assert response.json["pagination"]["pages"] == 3
@ -1868,31 +1851,64 @@ class TestProcessApi(BaseTest):
db.session.commit()
# Without filtering we should get all 5 instances
response = client.get(
f"/v1.0/process-instances?process_model_identifier={process_model_identifier}",
headers=self.logged_in_headers(with_super_admin_user),
report_metadata_body: ReportMetadata = {
"filter_by": [
{
"field_name": "process_model_identifier",
"field_value": process_model_identifier,
"operator": "equals",
}
],
"columns": [],
"order_by": [],
}
response = self.post_to_process_instance_list(
client, with_super_admin_user, report_metadata=report_metadata_body
)
assert response.json is not None
results = response.json["results"]
assert len(results) == 5
# filter by each status in turn
# we should get exactly 1 instance each time
for i in range(5):
response = client.get(
f"/v1.0/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}&process_model_identifier={process_model_identifier}",
headers=self.logged_in_headers(with_super_admin_user),
report_metadata_body = {
"filter_by": [
{
"field_name": "process_model_identifier",
"field_value": process_model_identifier,
"operator": "equals",
},
{
"field_name": "process_status",
"field_value": ProcessInstanceStatus[statuses[i]].value,
"operator": "equals",
},
],
"columns": [],
"order_by": [],
}
response = self.post_to_process_instance_list(
client, with_super_admin_user, report_metadata=report_metadata_body
)
assert response.json is not None
results = response.json["results"]
assert len(results) == 1
assert results[0]["status"] == ProcessInstanceStatus[statuses[i]].value
response = client.get(
f"/v1.0/process-instances?process_status=not_started,complete&process_model_identifier={process_model_identifier}",
headers=self.logged_in_headers(with_super_admin_user),
report_metadata_body = {
"filter_by": [
{
"field_name": "process_model_identifier",
"field_value": process_model_identifier,
"operator": "equals",
},
{"field_name": "process_status", "field_value": "not_started,complete", "operator": "equals"},
],
"columns": [],
"order_by": [],
}
response = self.post_to_process_instance_list(
client, with_super_admin_user, report_metadata=report_metadata_body
)
assert response.json is not None
results = response.json["results"]
assert len(results) == 2
assert results[0]["status"] in ["complete", "not_started"]
@ -1900,11 +1916,14 @@ class TestProcessApi(BaseTest):
# filter by start/end seconds
# start > 1000 - this should eliminate the first
response = client.get(
"/v1.0/process-instances?start_from=1001",
headers=self.logged_in_headers(with_super_admin_user),
report_metadata_body = {
"filter_by": [{"field_name": "start_from", "field_value": 1001, "operator": "equals"}],
"columns": [],
"order_by": [],
}
response = self.post_to_process_instance_list(
client, with_super_admin_user, report_metadata=report_metadata_body
)
assert response.json is not None
results = response.json["results"]
assert len(results) == 4
for i in range(4):
@ -1916,33 +1935,51 @@ class TestProcessApi(BaseTest):
)
# start > 2000, end < 5000 - this should eliminate the first 2 and the last
response = client.get(
"/v1.0/process-instances?start_from=2001&end_to=5999",
headers=self.logged_in_headers(with_super_admin_user),
report_metadata_body = {
"filter_by": [
{"field_name": "start_from", "field_value": 2001, "operator": "equals"},
{"field_name": "end_to", "field_value": 5999, "operator": "equals"},
],
"columns": [],
"order_by": [],
}
response = self.post_to_process_instance_list(
client, with_super_admin_user, report_metadata=report_metadata_body
)
assert response.json is not None
results = response.json["results"]
assert len(results) == 2
assert json.loads(results[0]["bpmn_version_control_identifier"]) in (2, 3)
assert json.loads(results[1]["bpmn_version_control_identifier"]) in (2, 3)
# start > 1000, start < 4000 - this should eliminate the first and the last 2
response = client.get(
"/v1.0/process-instances?start_from=1001&start_to=3999",
headers=self.logged_in_headers(with_super_admin_user),
report_metadata_body = {
"filter_by": [
{"field_name": "start_from", "field_value": 1001, "operator": "equals"},
{"field_name": "start_to", "field_value": 3999, "operator": "equals"},
],
"columns": [],
"order_by": [],
}
response = self.post_to_process_instance_list(
client, with_super_admin_user, report_metadata=report_metadata_body
)
assert response.json is not None
results = response.json["results"]
assert len(results) == 2
assert json.loads(results[0]["bpmn_version_control_identifier"]) in (1, 2)
assert json.loads(results[1]["bpmn_version_control_identifier"]) in (1, 2)
# end > 2000, end < 6000 - this should eliminate the first and the last
response = client.get(
"/v1.0/process-instances?end_from=2001&end_to=5999",
headers=self.logged_in_headers(with_super_admin_user),
report_metadata_body = {
"filter_by": [
{"field_name": "end_from", "field_value": 2001, "operator": "equals"},
{"field_name": "end_to", "field_value": 5999, "operator": "equals"},
],
"columns": [],
"order_by": [],
}
response = self.post_to_process_instance_list(
client, with_super_admin_user, report_metadata=report_metadata_body
)
assert response.json is not None
results = response.json["results"]
assert len(results) == 3
for i in range(3):
@ -1975,8 +2012,8 @@ class TestProcessApi(BaseTest):
self.logged_in_headers(with_super_admin_user)
report_identifier = "testreport"
report_metadata = {"order_by": ["month"]}
ProcessInstanceReportModel.create_with_attributes(
report_metadata: ReportMetadata = {"order_by": ["month"], "filter_by": [], "columns": []}
ProcessInstanceReportModel.create_report(
identifier=report_identifier,
report_metadata=report_metadata,
user=with_super_admin_user,
@ -1991,129 +2028,6 @@ class TestProcessApi(BaseTest):
assert response.json[0]["identifier"] == report_identifier
assert response.json[0]["report_metadata"]["order_by"] == ["month"]
# def test_process_instance_report_show_with_default_list(
# self,
# app: Flask,
# client: FlaskClient,
# with_db_and_bpmn_file_cleanup: None,
# with_super_admin_user: UserModel,
# setup_process_instances_for_reports: list[ProcessInstanceModel],
# ) -> None:
# """Test_process_instance_report_show_with_default_list."""
# process_group_id = "runs_without_input"
# process_model_id = "sample"
# process_model_identifier = f"{process_group_id}/{process_model_id}"
#
# report_metadata = {
# "columns": [
# {"Header": "id", "accessor": "id"},
# {
# "Header": "process_model_identifier",
# "accessor": "process_model_identifier",
# },
# {"Header": "process_group_id", "accessor": "process_group_identifier"},
# {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
# {"Header": "status", "accessor": "status"},
# {"Header": "Name", "accessor": "name"},
# {"Header": "Status", "accessor": "status"},
# ],
# "order_by": ["test_score"],
# "filter_by": [
# {"field_name": "grade_level", "operator": "equals", "field_value": 2}
# ],
# }
#
# report = ProcessInstanceReportModel.create_with_attributes(
# identifier="sure",
# report_metadata=report_metadata,
# user=with_super_admin_user,
# )
#
# response = client.get(
# f"/v1.0/process-instances/reports/{report.id}",
# headers=self.logged_in_headers(with_super_admin_user),
# )
# assert response.status_code == 200
# assert response.json is not None
# assert len(response.json["results"]) == 2
# assert response.json["pagination"]["count"] == 2
# assert response.json["pagination"]["pages"] == 1
# assert response.json["pagination"]["total"] == 2
#
# process_instance_dict = response.json["results"][0]
# assert type(process_instance_dict["id"]) is int
# assert (
# process_instance_dict["process_model_identifier"]
# == process_model_identifier
# )
# assert type(process_instance_dict["start_in_seconds"]) is int
# assert process_instance_dict["start_in_seconds"] > 0
# assert process_instance_dict["status"] == "complete"
#
# def test_process_instance_report_show_with_dynamic_filter_and_query_param(
# self,
# app: Flask,
# client: FlaskClient,
# with_db_and_bpmn_file_cleanup: None,
# with_super_admin_user: UserModel,
# setup_process_instances_for_reports: list[ProcessInstanceModel],
# ) -> None:
# """Test_process_instance_report_show_with_default_list."""
# report_metadata = {
# "filter_by": [
# {
# "field_name": "grade_level",
# "operator": "equals",
# "field_value": "{{grade_level}}",
# }
# ],
# }
#
# report = ProcessInstanceReportModel.create_with_attributes(
# identifier="sure",
# report_metadata=report_metadata,
# user=with_super_admin_user,
# )
#
# response = client.get(
# f"/v1.0/process-instances/reports/{report.id}?grade_level=1",
# headers=self.logged_in_headers(with_super_admin_user),
# )
# assert response.status_code == 200
# assert response.json is not None
# assert len(response.json["results"]) == 1
def test_process_instance_report_show_with_bad_identifier(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
setup_process_instances_for_reports: list[ProcessInstanceModel],
) -> None:
"""Test_process_instance_report_show_with_bad_identifier."""
response = client.get(
"/v1.0/process-instances/reports/13000000?grade_level=1",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 404
data = json.loads(response.get_data(as_text=True))
assert data["error_code"] == "unknown_process_instance_report"
def setup_testing_instance(
self,
client: FlaskClient,
process_model_id: str,
with_super_admin_user: UserModel,
) -> Any:
"""Setup_testing_instance."""
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id_with_api(client, process_model_id, headers)
process_instance = response.json
assert isinstance(process_instance, dict)
process_instance_id = process_instance["id"]
return process_instance_id
def test_error_handler(
self,
app: Flask,
@ -2135,7 +2049,7 @@ class TestProcessApi(BaseTest):
bpmn_file_location=bpmn_file_location,
)
process_instance_id = self.setup_testing_instance(client, process_model_identifier, with_super_admin_user)
process_instance_id = self._setup_testing_instance(client, process_model_identifier, with_super_admin_user)
process = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first()
assert process is not None
@ -2176,7 +2090,7 @@ class TestProcessApi(BaseTest):
bpmn_file_location=bpmn_file_location,
)
process_instance_id = self.setup_testing_instance(client, process_model_identifier, with_super_admin_user)
process_instance_id = self._setup_testing_instance(client, process_model_identifier, with_super_admin_user)
process_model = ProcessModelService.get_process_model(process_model_identifier)
ProcessModelService.update_process_model(
process_model,
@ -2684,8 +2598,9 @@ class TestProcessApi(BaseTest):
content_type="application/json",
data=json.dumps(data),
)
print("test_script_unit_test_run")
# TODO: fix this test. I'm not sure it ever worked since it used to NOT check the status code
# and only printed out the test name.
assert response.status_code == 404
def test_send_event(
self,
@ -3061,7 +2976,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_process_instance_list_with_report_metadata."""
process_model = load_test_spec(
process_model_id="save_process_instance_metadata/save_process_instance_metadata",
bpmn_file_name="save_process_instance_metadata.bpmn",
@ -3078,30 +2992,26 @@ class TestProcessApi(BaseTest):
).all()
assert len(process_instance_metadata) == 3
report_metadata = {
report_metadata: ReportMetadata = {
"columns": [
{"Header": "ID", "accessor": "id"},
{"Header": "Status", "accessor": "status"},
{"Header": "Key One", "accessor": "key1"},
{"Header": "Key Two", "accessor": "key2"},
{"Header": "ID", "accessor": "id", "filterable": False},
{"Header": "Status", "accessor": "status", "filterable": False},
{"Header": "Key One", "accessor": "key1", "filterable": False},
{"Header": "Key Two", "accessor": "key2", "filterable": False},
],
"order_by": ["status"],
"filter_by": [],
}
process_instance_report = ProcessInstanceReportModel.create_with_attributes(
process_instance_report = ProcessInstanceReportModel.create_report(
identifier="sure",
report_metadata=report_metadata,
user=with_super_admin_user,
)
response = client.get(
f"/v1.0/process-instances?report_identifier={process_instance_report.identifier}",
headers=self.logged_in_headers(with_super_admin_user),
response = self.post_to_process_instance_list(
client, with_super_admin_user, report_metadata=process_instance_report.get_report_metadata()
)
assert response.json is not None
assert response.status_code == 200
assert len(response.json["results"]) == 1
assert response.json["results"][0]["status"] == "complete"
assert response.json["results"][0]["id"] == process_instance.id
@ -3130,11 +3040,11 @@ class TestProcessApi(BaseTest):
self.create_process_instance_from_process_model(process_model=process_model, user=user_one)
self.create_process_instance_from_process_model(process_model=process_model, user=with_super_admin_user)
dne_report_metadata = {
dne_report_metadata: ReportMetadata = {
"columns": [
{"Header": "ID", "accessor": "id"},
{"Header": "Status", "accessor": "status"},
{"Header": "Process Initiator", "accessor": "username"},
{"Header": "ID", "accessor": "id", "filterable": False},
{"Header": "Status", "accessor": "status", "filterable": False},
{"Header": "Process Initiator", "accessor": "username", "filterable": False},
],
"order_by": ["status"],
"filter_by": [
@ -3146,11 +3056,11 @@ class TestProcessApi(BaseTest):
],
}
user_one_report_metadata = {
user_one_report_metadata: ReportMetadata = {
"columns": [
{"Header": "ID", "accessor": "id"},
{"Header": "Status", "accessor": "status"},
{"Header": "Process Initiator", "accessor": "username"},
{"Header": "ID", "accessor": "id", "filterable": False},
{"Header": "Status", "accessor": "status", "filterable": False},
{"Header": "Process Initiator", "accessor": "username", "filterable": False},
],
"order_by": ["status"],
"filter_by": [
@ -3161,33 +3071,27 @@ class TestProcessApi(BaseTest):
}
],
}
process_instance_report_dne = ProcessInstanceReportModel.create_with_attributes(
process_instance_report_dne = ProcessInstanceReportModel.create_report(
identifier="dne_report",
report_metadata=dne_report_metadata,
user=user_one,
)
process_instance_report_user_one = ProcessInstanceReportModel.create_with_attributes(
process_instance_report_user_one = ProcessInstanceReportModel.create_report(
identifier="user_one_report",
report_metadata=user_one_report_metadata,
user=user_one,
)
response = client.get(
f"/v1.0/process-instances?report_identifier={process_instance_report_user_one.identifier}",
headers=self.logged_in_headers(user_one),
response = self.post_to_process_instance_list(
client, user_one, report_metadata=process_instance_report_user_one.get_report_metadata()
)
assert response.json is not None
assert response.status_code == 200
assert len(response.json["results"]) == 2
assert response.json["results"][0]["process_initiator_username"] == user_one.username
assert response.json["results"][1]["process_initiator_username"] == user_one.username
response = client.get(
f"/v1.0/process-instances?report_identifier={process_instance_report_dne.identifier}",
headers=self.logged_in_headers(user_one),
response = self.post_to_process_instance_list(
client, user_one, report_metadata=process_instance_report_dne.get_report_metadata()
)
assert response.json is not None
assert response.status_code == 200
assert len(response.json["results"]) == 0
def test_can_get_process_instance_report_column_list(
@ -3197,7 +3101,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_process_instance_list_with_report_metadata."""
process_model = self.create_process_model_with_metadata()
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=with_super_admin_user
@ -3308,49 +3211,44 @@ class TestProcessApi(BaseTest):
processor.do_engine_steps(save=True)
assert process_instance_two.status == "complete"
report_metadata = {
report_metadata: ReportMetadata = {
"columns": [
{"Header": "id", "accessor": "id"},
{"Header": "Time", "accessor": "time_ns"},
{"Header": "id", "accessor": "id", "filterable": True},
{"Header": "Time", "accessor": "time_ns", "filterable": True},
],
"order_by": ["time_ns"],
"filter_by": [],
}
report_one = ProcessInstanceReportModel.create_with_attributes(
report_one = ProcessInstanceReportModel.create_report(
identifier="report_one",
report_metadata=report_metadata,
user=with_super_admin_user,
)
response = client.get(
f"/v1.0/process-instances?report_id={report_one.id}",
headers=self.logged_in_headers(with_super_admin_user),
response = self.post_to_process_instance_list(
client, with_super_admin_user, report_metadata=report_one.get_report_metadata()
)
assert response.status_code == 200
assert response.json is not None
assert len(response.json["results"]) == 2
assert response.json["results"][0]["id"] == process_instance_one.id
assert response.json["results"][1]["id"] == process_instance_two.id
report_metadata = {
"columns": [
{"Header": "id", "accessor": "id"},
{"Header": "Time", "accessor": "time_ns"},
{"Header": "id", "accessor": "id", "filterable": True},
{"Header": "Time", "accessor": "time_ns", "filterable": True},
],
"order_by": ["-time_ns"],
"filter_by": [],
}
report_two = ProcessInstanceReportModel.create_with_attributes(
report_two = ProcessInstanceReportModel.create_report(
identifier="report_two",
report_metadata=report_metadata,
user=with_super_admin_user,
)
response = client.get(
f"/v1.0/process-instances?report_id={report_two.id}",
headers=self.logged_in_headers(with_super_admin_user),
response = self.post_to_process_instance_list(
client, with_super_admin_user, report_metadata=report_two.get_report_metadata()
)
assert response.status_code == 200
assert response.json is not None
assert len(response.json["results"]) == 2
assert response.json["results"][1]["id"] == process_instance_one.id
assert response.json["results"][0]["id"] == process_instance_two.id
@ -3380,3 +3278,16 @@ class TestProcessApi(BaseTest):
assert response.status_code == 200
assert response.json is not None
assert response.json["process_data_value"] == "hey"
def _setup_testing_instance(
self,
client: FlaskClient,
process_model_id: str,
with_super_admin_user: UserModel,
) -> Any:
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id_with_api(client, process_model_id, headers)
process_instance = response.json
assert isinstance(process_instance, dict)
process_instance_id = process_instance["id"]
return process_instance_id

View File

@ -125,7 +125,7 @@
# substitution_variables: Optional[dict] = None,
# ) -> list[dict]:
# """Do_report_with_metadata_and_instances."""
# process_instance_report = ProcessInstanceReportModel.create_with_attributes(
# process_instance_report = ProcessInstanceReportModel.create_report(
# identifier="sure",
# report_metadata=report_metadata,
# user=BaseTest.find_or_create_user(),

View File

@ -1,6 +1,4 @@
"""Test_process_instance_report_service."""
from typing import Optional
from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
@ -9,738 +7,13 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_report_service import (
ProcessInstanceReportFilter,
)
from spiffworkflow_backend.services.process_instance_report_service import (
ProcessInstanceReportService,
)
from spiffworkflow_backend.services.user_service import UserService
class TestProcessInstanceReportFilter(BaseTest):
"""TestProcessInstanceReportFilter."""
def test_empty_filter_to_dict(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
d = ProcessInstanceReportFilter().to_dict()
assert d == {}
def test_string_value_filter_to_dict(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
d = ProcessInstanceReportFilter(process_model_identifier="bob").to_dict()
assert d == {"process_model_identifier": "bob"}
def test_int_value_filter_to_dict(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
d = ProcessInstanceReportFilter(
start_from=1,
start_to=2,
end_from=3,
end_to=4,
).to_dict()
assert d == {
"start_from": "1",
"start_to": "2",
"end_from": "3",
"end_to": "4",
}
def test_list_single_value_filter_to_dict(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
d = ProcessInstanceReportFilter(process_status=["bob"]).to_dict()
assert d == {"process_status": "bob"}
def test_list_multiple_value_filter_to_dict(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
d = ProcessInstanceReportFilter(process_status=["joe", "bob", "sue"]).to_dict()
assert d == {"process_status": "joe,bob,sue"}
class TestProcessInstanceReportService(BaseTest):
"""TestProcessInstanceReportService."""
def _filter_from_metadata(self, report_metadata: dict) -> ProcessInstanceReportFilter:
"""Docstring."""
report = ProcessInstanceReportModel(
identifier="test",
created_by_id=1,
report_metadata=report_metadata,
)
return ProcessInstanceReportService.filter_from_metadata(report)
def _filter_from_metadata_with_overrides(
self,
report_metadata: dict,
process_model_identifier: Optional[str] = None,
start_from: Optional[int] = None,
start_to: Optional[int] = None,
end_from: Optional[int] = None,
end_to: Optional[int] = None,
process_status: Optional[str] = None,
) -> ProcessInstanceReportFilter:
"""Docstring."""
report = ProcessInstanceReportModel(
identifier="test",
created_by_id=1,
report_metadata=report_metadata,
)
return ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=report,
process_model_identifier=process_model_identifier,
start_from=start_from,
start_to=start_to,
end_from=end_from,
end_to=end_to,
process_status=process_status,
)
def _filter_by_dict_from_metadata(self, report_metadata: dict) -> dict[str, str]:
"""Docstring."""
report = ProcessInstanceReportModel(
identifier="test",
created_by_id=1,
report_metadata=report_metadata,
)
return ProcessInstanceReportService.filter_by_to_dict(report)
def test_filter_by_to_dict_no_filter_by(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
filters = self._filter_by_dict_from_metadata(
{
"columns": [],
}
)
assert filters == {}
def test_filter_by_to_dict_empty_filter_by(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
filters = self._filter_by_dict_from_metadata(
{
"columns": [],
"filter_by": [],
}
)
assert filters == {}
def test_filter_by_to_dict_single_filter_by(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
filters = self._filter_by_dict_from_metadata(
{
"columns": [],
"filter_by": [{"field_name": "end_to", "field_value": "1234"}],
}
)
assert filters == {"end_to": "1234"}
def test_filter_by_to_dict_mulitple_filter_by(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
filters = self._filter_by_dict_from_metadata(
{
"columns": [],
"filter_by": [
{"field_name": "end_to", "field_value": "1234"},
{"field_name": "end_from", "field_value": "4321"},
],
}
)
assert filters == {"end_to": "1234", "end_from": "4321"}
def test_report_with_no_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
}
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_with_empty_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
"filter_by": [],
}
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_with_unknown_filter_field_name(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
"filter_by": [{"field_name": "bob", "field_value": "joe"}],
}
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_with_unknown_filter_keys(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
"filter_by": [{"_name": "bob", "_value": "joe"}],
}
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_with_process_model_identifier_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
"filter_by": [{"field_name": "process_model_identifier", "field_value": "bob"}],
}
)
assert report_filter.process_model_identifier == "bob"
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_with_start_from_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
"filter_by": [{"field_name": "start_from", "field_value": "1234"}],
}
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from == 1234
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_with_start_to_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
"filter_by": [{"field_name": "start_to", "field_value": "1234"}],
}
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to == 1234
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_with_end_from_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
"filter_by": [{"field_name": "end_from", "field_value": "1234"}],
}
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from == 1234
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_with_end_to_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
"filter_by": [{"field_name": "end_to", "field_value": "1234"}],
}
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to == 1234
assert report_filter.process_status is None
def test_report_with_single_startus_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
"filter_by": [{"field_name": "process_status", "field_value": "ready"}],
}
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status == ["ready"]
def test_report_with_multiple_startus_filters(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
"filter_by": [
{
"field_name": "process_status",
"field_value": "ready,completed,other",
}
],
}
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status == ["ready", "completed", "other"]
def test_report_with_multiple_filters(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata(
{
"columns": [],
"filter_by": [
{"field_name": "start_from", "field_value": "44"},
{"field_name": "end_from", "field_value": "55"},
{"field_name": "process_status", "field_value": "ready"},
],
}
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from == 44
assert report_filter.start_to is None
assert report_filter.end_from == 55
assert report_filter.end_to is None
assert report_filter.process_status == ["ready"]
def test_report_no_override_with_no_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata_with_overrides(
{
"columns": [],
},
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_override_with_no_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata_with_overrides(
{
"columns": [],
},
end_to=54321,
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to == 54321
assert report_filter.process_status is None
def test_report_override_process_model_identifier_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata_with_overrides(
{
"columns": [],
"filter_by": [{"field_name": "process_model_identifier", "field_value": "bob"}],
},
process_model_identifier="joe",
)
assert report_filter.process_model_identifier == "joe"
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_override_start_from_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata_with_overrides(
{
"columns": [],
"filter_by": [{"field_name": "start_from", "field_value": "123"}],
},
start_from=321,
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from == 321
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_override_start_to_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata_with_overrides(
{
"columns": [],
"filter_by": [{"field_name": "start_to", "field_value": "123"}],
},
start_to=321,
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to == 321
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_override_end_from_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata_with_overrides(
{
"columns": [],
"filter_by": [{"field_name": "end_from", "field_value": "123"}],
},
end_from=321,
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from == 321
assert report_filter.end_to is None
assert report_filter.process_status is None
def test_report_override_end_to_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata_with_overrides(
{
"columns": [],
"filter_by": [{"field_name": "end_to", "field_value": "123"}],
},
end_to=321,
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to == 321
assert report_filter.process_status is None
def test_report_override_process_status_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata_with_overrides(
{
"columns": [],
"filter_by": [{"field_name": "process_status", "field_value": "joe,bob"}],
},
process_status="sue",
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status == ["sue"]
def test_report_override_multiple_process_status_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata_with_overrides(
{
"columns": [],
"filter_by": [{"field_name": "process_status", "field_value": "sue"}],
},
process_status="joe,bob",
)
assert report_filter.process_model_identifier is None
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status == ["joe", "bob"]
def test_report_override_does_not_override_other_filters(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata_with_overrides(
{
"columns": [],
"filter_by": [
{"field_name": "process_model_identifier", "field_value": "sue"},
{"field_name": "process_status", "field_value": "sue"},
],
},
process_status="joe,bob",
)
assert report_filter.process_model_identifier == "sue"
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status == ["joe", "bob"]
def test_report_override_of_none_does_not_override_filter(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Docstring."""
report_filter = self._filter_from_metadata_with_overrides(
{
"columns": [],
"filter_by": [
{"field_name": "process_model_identifier", "field_value": "sue"},
{"field_name": "process_status", "field_value": "sue"},
],
},
process_status=None,
)
assert report_filter.process_model_identifier == "sue"
assert report_filter.start_from is None
assert report_filter.start_to is None
assert report_filter.end_from is None
assert report_filter.end_to is None
assert report_filter.process_status == ["sue"]
def test_can_filter_by_completed_instances_initiated_by_me(
self,
app: Flask,
@ -768,13 +41,8 @@ class TestProcessInstanceReportService(BaseTest):
user=user_one,
report_identifier="system_report_completed_instances_initiated_by_me",
)
report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model.id,
)
response_json = ProcessInstanceReportService.run_process_instance_report(
report_filter=report_filter,
process_instance_report=process_instance_report,
report_metadata=process_instance_report.report_metadata,
user=user_one,
)
@ -850,13 +118,8 @@ class TestProcessInstanceReportService(BaseTest):
user=user_one,
report_identifier="system_report_completed_instances_with_tasks_completed_by_me",
)
report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model.id,
)
response_json = ProcessInstanceReportService.run_process_instance_report(
report_filter=report_filter,
process_instance_report=process_instance_report,
report_metadata=process_instance_report.report_metadata,
user=user_one,
)
@ -934,15 +197,13 @@ class TestProcessInstanceReportService(BaseTest):
process_instance_report = ProcessInstanceReportService.report_with_identifier(
user=user_one,
report_identifier="system_report_completed_instances_with_tasks_completed_by_my_groups",
report_identifier="system_report_completed_instances",
)
report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model.id,
process_instance_report.report_metadata["filter_by"].append(
{"field_name": "user_group_identifier", "field_value": user_one.groups[0].identifier}
)
response_json = ProcessInstanceReportService.run_process_instance_report(
report_filter=report_filter,
process_instance_report=process_instance_report,
report_metadata=process_instance_report.report_metadata,
user=user_one,
)
@ -1026,14 +287,12 @@ class TestProcessInstanceReportService(BaseTest):
UserService.add_user_to_human_tasks_if_appropriate(user_one)
process_instance_report = ProcessInstanceReportService.report_with_identifier(user=user_one)
report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides(
process_instance_report=process_instance_report,
process_model_identifier=process_model.id,
with_relation_to_me=True,
report_metadata = process_instance_report.report_metadata
report_metadata["filter_by"].append(
{"field_name": "with_relation_to_me", "field_value": True, "operator": "equals"}
)
response_json = ProcessInstanceReportService.run_process_instance_report(
report_filter=report_filter,
process_instance_report=process_instance_report,
report_metadata=report_metadata,
user=user_one,
)

View File

@ -27,6 +27,7 @@ module.exports = {
rules: {
// according to https://github.com/typescript-eslint/typescript-eslint/issues/2621, you should turn off the eslint core rule and turn on the typescript-eslint rule,
// but it is not clear which of the above "extends" statements might be bringing in the eslint core rule
'max-len': ['error', { code: 200, ignoreUrls: true }],
'no-shadow': 'off',
'@typescript-eslint/no-shadow': ['error'],
'jest/expect-expect': 'off',

View File

@ -1,5 +1,6 @@
import { format } from 'date-fns';
import { DATE_FORMAT, PROCESS_STATUSES } from '../../src/config';
import { titleizeString } from '../../src/helpers';
const filterByDate = (fromDate) => {
cy.get('#date-picker-start-from').clear().type(format(fromDate, DATE_FORMAT));
@ -160,7 +161,7 @@ describe('process-instances', () => {
cy.getBySel('process-instance-list-link').click();
cy.getBySel('process-instance-show-link-id').first().click();
cy.getBySel('process-instance-log-list-link').click();
cy.getBySel('process-instance-log-detailed').click();
cy.getBySel('process-instance-log-events').click();
cy.contains('process_model_one');
cy.contains('task_completed');
cy.basicPaginationTest();
@ -172,14 +173,19 @@ describe('process-instances', () => {
cy.contains('All Process Instances');
cy.assertAtLeastOneItemInPaginatedResults();
cy.getBySel('filter-section-expand-toggle').click();
const statusSelect = '#process-instance-status-select';
PROCESS_STATUSES.forEach((processStatus) => {
if (!['all', 'waiting'].includes(processStatus)) {
cy.get(statusSelect).click();
cy.get(statusSelect).contains(processStatus).click();
cy.get(statusSelect).contains(titleizeString(processStatus)).click();
cy.get(statusSelect).click();
cy.getBySel('filter-button').click();
cy.url().should('include', `status=${processStatus}`);
// make sure that there is 1 status item selected in the multiselect
cy.get(`${statusSelect} .cds--tag`).contains('1');
cy.assertAtLeastOneItemInPaginatedResults();
cy.getBySel(`process-instance-status-${processStatus}`);

View File

@ -1,9 +1,6 @@
import { slowCypressDown } from 'cypress-slow-down';
import { modifyProcessIdentifierForPathParam } from '../../src/helpers';
import { miscDisplayName } from '../support/helpers';
// slowCypressDown(500);
describe('process-models', () => {
beforeEach(() => {
cy.login();
@ -15,7 +12,7 @@ describe('process-models', () => {
const groupDisplayName = 'Acceptance Tests Group One';
const deleteProcessModelButtonId = 'delete-process-model-button';
const saveChangesButtonText = 'Save Changes';
const fileNameInputSelector = 'input[name=file_name]';
const fileNameInputSelector = 'input#process_model_file_name';
it('can perform crud operations', () => {
const uuid = () => Cypress._.random(0, 1e6);
@ -36,7 +33,8 @@ describe('process-models', () => {
cy.contains(`Process Model: ${modelDisplayName}`);
cy.getBySel('edit-process-model-button').click();
cy.get('input[name=display_name]').clear().type(newModelDisplayName);
cy.get('input[name=display_name]').clear();
cy.get('input[name=display_name]').type(newModelDisplayName);
cy.contains('Submit').click();
cy.contains(`Process Model: ${newModelDisplayName}`);
@ -50,7 +48,6 @@ describe('process-models', () => {
const uuid = () => Cypress._.random(0, 1e6);
const id = uuid();
const directParentGroupId = 'acceptance-tests-group-one';
const directParentGroupName = 'Acceptance Tests Group One';
const groupId = `misc/${directParentGroupId}`;
const modelDisplayName = `Test Model 2 ${id}`;
const modelId = `test-model-2-${id}`;
@ -58,7 +55,7 @@ describe('process-models', () => {
const bpmnFileName = `bpmn_test_file_${id}`;
const dmnFileName = `dmn_test_file_${id}`;
const jsonFileName = `json_test_file_${id}`;
const decision_acceptance_test_id = `decision_acceptance_test_${id}`;
const decisionAcceptanceTestId = `decision_acceptance_test_${id}`;
cy.contains(miscDisplayName).click();
cy.contains(groupDisplayName).click();
@ -79,9 +76,10 @@ describe('process-models', () => {
// add new bpmn file
cy.contains('New BPMN File').click();
cy.contains(/^Process Model File$/);
cy.get('g[data-element-id=StartEvent_1]').click().should('exist');
cy.get('g[data-element-id=StartEvent_1]').click();
cy.contains('General').click();
cy.get('#bio-properties-panel-name').clear().type('Start Event Name');
cy.get('#bio-properties-panel-name').clear();
cy.get('#bio-properties-panel-name').type('Start Event Name');
cy.wait(500);
cy.contains('Save').click();
cy.contains('Start Event Name');
@ -96,11 +94,10 @@ describe('process-models', () => {
// add new dmn file
cy.contains('New DMN File').click();
cy.contains(/^Process Model File$/);
cy.get('g[data-element-id=decision_1]').click().should('exist');
cy.get('g[data-element-id=decision_1]').click();
cy.contains('General').click();
cy.get('#bio-properties-panel-id')
.clear()
.type(decision_acceptance_test_id);
cy.get('#bio-properties-panel-id').clear();
cy.get('#bio-properties-panel-id').type(decisionAcceptanceTestId);
cy.contains('General').click();
cy.contains('Save').click();
cy.get(fileNameInputSelector).type(dmnFileName);
@ -135,7 +132,7 @@ describe('process-models', () => {
cy.get('.tile-process-group-content-container').should('exist');
});
it.only('can upload and run a bpmn file', () => {
it('can upload and run a bpmn file', () => {
const uuid = () => Cypress._.random(0, 1e6);
const id = uuid();
const directParentGroupId = 'acceptance-tests-group-one';
@ -192,7 +189,8 @@ describe('process-models', () => {
});
it('can allow searching for model', () => {
cy.getBySel('process-model-selection').click().type('model-3');
cy.getBySel('process-model-selection').click();
cy.getBySel('process-model-selection').type('model-3');
cy.contains('acceptance-tests-group-one/acceptance-tests-model-3').click();
cy.contains('Acceptance Tests Model 3');
});

View File

@ -1,13 +1,14 @@
const submitInputIntoFormField = (taskName, fieldKey, fieldValue) => {
cy.contains(`Task: ${taskName}`, { timeout: 10000 });
cy.get(fieldKey).clear().type(fieldValue);
cy.get(fieldKey).clear();
cy.get(fieldKey).type(fieldValue);
cy.contains('Submit').click();
};
const checkFormFieldIsReadOnly = (formName, fieldKey) => {
cy.contains(`Task: ${formName}`);
cy.get(fieldKey).invoke('attr', 'disabled').should('exist');
};
// const checkFormFieldIsReadOnly = (formName, fieldKey) => {
// cy.contains(`Task: ${formName}`);
// cy.get(fieldKey).invoke('attr', 'disabled').should('exist');
// };
const checkTaskHasClass = (taskName, className) => {
cy.get(`g[data-element-id=${taskName}]`).should('have.class', className);
@ -38,38 +39,26 @@ describe('tasks', () => {
cy.navigateToProcessModel(groupDisplayName, modelDisplayName);
cy.runPrimaryBpmnFile(true);
submitInputIntoFormField(
'get_user_generated_number_one',
'#root_user_generated_number_1',
2
);
submitInputIntoFormField(
'get_user_generated_number_two',
'#root_user_generated_number_2',
3
);
submitInputIntoFormField('get_form_num_one', '#root_form_num_1', 2);
submitInputIntoFormField('get_form_num_two', '#root_form_num_2', 3);
cy.contains('Task: get_user_generated_number_three');
cy.contains('Task: get_form_num_three');
// TODO: remove this if we decide to completely kill form navigation
// cy.getBySel('form-nav-form2').click();
// checkFormFieldIsReadOnly(
// 'get_user_generated_number_two',
// '#root_user_generated_number_2'
// 'get_form_num_two',
// '#root_form_num_2'
// );
// cy.getBySel('form-nav-form1').click();
// checkFormFieldIsReadOnly(
// 'get_user_generated_number_one',
// '#root_user_generated_number_1'
// 'get_form_num_one',
// '#root_form_num_1'
// );
//
// cy.getBySel('form-nav-form3').click();
submitInputIntoFormField(
'get_user_generated_number_three',
'#root_user_generated_number_3',
4
);
submitInputIntoFormField('get_form_num_three', '#root_form_num_3', 4);
cy.contains('Task: get_user_generated_number_four');
cy.contains('Task: get_form_num_four');
cy.navigateToProcessModel(groupDisplayName, modelDisplayName);
cy.getBySel('process-instance-list-link').click();
cy.assertAtLeastOneItemInPaginatedResults();
@ -79,10 +68,10 @@ describe('tasks', () => {
cy.contains('Process Instance Id: ');
cy.get(`g[data-element-id=form3]`).click();
cy.contains('"user_generated_number_1": 2');
cy.contains('"user_generated_number_2": 3');
cy.contains('"user_generated_number_3": 4');
cy.contains('"user_generated_number_4": 5').should('not.exist');
cy.contains('"form_num_1": 2');
cy.contains('"form_num_2": 3');
cy.contains('"form_num_3": 4');
cy.contains('"form_num_4": 5').should('not.exist');
checkTaskHasClass('form1', completedTaskClassName);
checkTaskHasClass('form2', completedTaskClassName);
checkTaskHasClass('form3', completedTaskClassName);
@ -97,11 +86,7 @@ describe('tasks', () => {
// FIXME: this will probably need a better way to link to the proper form that we want
cy.contains('Go').click();
submitInputIntoFormField(
'get_user_generated_number_four',
'#root_user_generated_number_4',
5
);
submitInputIntoFormField('get_form_num_four', '#root_form_num_4', 5);
cy.url().should('include', '/tasks');
cy.navigateToProcessModel(groupDisplayName, modelDisplayName);

View File

@ -6,48 +6,23 @@ import {
Modal,
// @ts-ignore
} from '@carbon/react';
import {
ReportFilter,
ProcessInstanceReport,
ProcessModel,
ReportColumn,
ReportMetadata,
User,
} from '../interfaces';
import { ProcessInstanceReport } from '../interfaces';
import HttpService from '../services/HttpService';
type OwnProps = {
onSuccess: (..._args: any[]) => any;
columnArray: ReportColumn[];
orderBy: string;
processModelSelection: ProcessModel | null;
processInitiatorSelection: User | null;
processStatusSelection: string[];
startFromSeconds: string | null;
startToSeconds: string | null;
endFromSeconds: string | null;
endToSeconds: string | null;
buttonText?: string;
buttonClassName?: string;
processInstanceReportSelection?: ProcessInstanceReport | null;
reportMetadata: ReportMetadata;
getReportMetadataCallback: Function;
};
export default function ProcessInstanceListSaveAsReport({
onSuccess,
columnArray,
orderBy,
processModelSelection,
processInitiatorSelection,
processInstanceReportSelection,
processStatusSelection,
startFromSeconds,
startToSeconds,
endFromSeconds,
endToSeconds,
buttonClassName,
buttonText = 'Save as Perspective',
reportMetadata,
getReportMetadataCallback,
}: OwnProps) {
const [identifier, setIdentifier] = useState<string>(
processInstanceReportSelection?.identifier || ''
@ -75,73 +50,10 @@ export default function ProcessInstanceListSaveAsReport({
const addProcessInstanceReport = (event: any) => {
event.preventDefault();
// TODO: make a field to set this
let orderByArray = ['-start_in_seconds', '-id'];
if (orderBy) {
orderByArray = orderBy.split(',').filter((n) => n);
const reportMetadata = getReportMetadataCallback();
if (!reportMetadata) {
return;
}
const filterByArray: any = [];
if (processModelSelection) {
filterByArray.push({
field_name: 'process_model_identifier',
field_value: processModelSelection.id,
});
}
if (processInitiatorSelection) {
filterByArray.push({
field_name: 'process_initiator_username',
field_value: processInitiatorSelection.username,
});
}
if (processStatusSelection.length > 0) {
filterByArray.push({
field_name: 'process_status',
field_value: processStatusSelection.join(','),
operator: 'in',
});
}
if (startFromSeconds) {
filterByArray.push({
field_name: 'start_from',
field_value: startFromSeconds,
});
}
if (startToSeconds) {
filterByArray.push({
field_name: 'start_to',
field_value: startToSeconds,
});
}
if (endFromSeconds) {
filterByArray.push({
field_name: 'end_from',
field_value: endFromSeconds,
});
}
if (endToSeconds) {
filterByArray.push({
field_name: 'end_to',
field_value: endToSeconds,
});
}
reportMetadata.filter_by.forEach((reportFilter: ReportFilter) => {
columnArray.forEach((reportColumn: ReportColumn) => {
if (
reportColumn.accessor === reportFilter.field_name &&
reportColumn.filterable
) {
filterByArray.push(reportFilter);
}
});
});
let path = `/process-instances/reports`;
let httpMethod = 'POST';
@ -156,11 +68,7 @@ export default function ProcessInstanceListSaveAsReport({
httpMethod,
postBody: {
identifier,
report_metadata: {
columns: columnArray,
order_by: orderByArray,
filter_by: filterByArray,
},
report_metadata: reportMetadata,
},
});
handleSaveFormClose();

View File

@ -5,7 +5,6 @@ import {
FormLabel,
// @ts-ignore
} from '@carbon/react';
import { useSearchParams } from 'react-router-dom';
import { truncateString } from '../helpers';
import { ProcessInstanceReport } from '../interfaces';
import HttpService from '../services/HttpService';
@ -14,32 +13,42 @@ type OwnProps = {
onChange: (..._args: any[]) => any;
selectedItem?: ProcessInstanceReport | null;
titleText?: string;
selectedReportId?: string | null;
handleSetSelectedReportCallback?: Function;
};
export default function ProcessInstanceReportSearch({
selectedItem,
onChange,
selectedReportId,
handleSetSelectedReportCallback,
titleText = 'Process instance perspectives',
}: OwnProps) {
const [processInstanceReports, setProcessInstanceReports] = useState<
ProcessInstanceReport[] | null
>(null);
const [searchParams] = useSearchParams();
const reportId = searchParams.get('report_id');
useEffect(() => {
const selectedReportIdAsNumber = Number(selectedReportId);
function setProcessInstanceReportsFromResult(
result: ProcessInstanceReport[]
) {
setProcessInstanceReports(result);
if (selectedReportId && handleSetSelectedReportCallback) {
result.forEach((processInstanceReport: ProcessInstanceReport) => {
if (processInstanceReport.id === selectedReportIdAsNumber) {
handleSetSelectedReportCallback(processInstanceReport);
}
});
}
}
HttpService.makeCallToBackend({
path: `/process-instances/reports`,
successCallback: setProcessInstanceReportsFromResult,
});
}, [reportId]);
}, [handleSetSelectedReportCallback, selectedReportId]);
const reportSelectionString = (
processInstanceReport: ProcessInstanceReport

View File

@ -41,6 +41,10 @@ export const capitalizeFirstLetter = (string: any) => {
return string.charAt(0).toUpperCase() + string.slice(1);
};
export const titleizeString = (string: any) => {
return capitalizeFirstLetter((string || '').replaceAll('_', ' '));
};
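// Illustrative usage only (example inputs assumed, not part of this commit):
//   titleizeString('not_started')          => 'Not started'
//   titleizeString('user_input_required')  => 'User input required'
// It swaps underscores for spaces and capitalizes only the first letter, which is
// what the Cypress status-filter spec above relies on when matching multiselect labels.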
export const convertDateToSeconds = (
date: any,
onChangeFunction: any = null

View File

@ -105,10 +105,12 @@ h2 {
}
.cds--btn--ghost.button-link:hover {
color: #0062fe;
background-color: white;
padding-left: 0;
}
.cds--btn--ghost.button-link:visited:hover {
color: #0062fe;
background-color: white;
padding-left: 0;
}
@ -449,3 +451,7 @@ svg.notification-icon {
.user_instructions_4 {
filter: opacity(10%);
}
.float-right {
float: right;
}

View File

@ -163,7 +163,8 @@ export interface MessageInstance {
export interface ReportFilter {
field_name: string;
field_value: string;
// use any here so field_value can hold either a string or a boolean
field_value: any;
operator?: string;
}
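// For illustration only (values assumed from the tests and pages above, not part of this commit):
// with field_value typed as any, a filter entry can carry either a string or a boolean, e.g.
//   const groupFilter: ReportFilter = { field_name: 'user_group_identifier', field_value: 'group-one' };
//   const relationFilter: ReportFilter = { field_name: 'with_relation_to_me', field_value: true, operator: 'equals' };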
@ -335,3 +336,5 @@ export interface ProcessModelCaller {
display_name: string;
process_model_id: string;
}
export interface UserGroup {}

View File

@ -8,7 +8,6 @@ import ProcessGroupEdit from './ProcessGroupEdit';
import ProcessModelShow from './ProcessModelShow';
import ProcessModelEditDiagram from './ProcessModelEditDiagram';
import ProcessInstanceList from './ProcessInstanceList';
import ProcessInstanceReportShow from './ProcessInstanceReportShow';
import ProcessModelNew from './ProcessModelNew';
import ProcessModelEdit from './ProcessModelEdit';
import ProcessInstanceShow from './ProcessInstanceShow';
@ -88,10 +87,6 @@ export default function AdminRoutes() {
path="process-instances/reports"
element={<ProcessInstanceReportList />}
/>
<Route
path="process-instances/reports/:report_identifier"
element={<ProcessInstanceReportShow />}
/>
<Route
path="process-instances/reports/new"
element={<ProcessInstanceReportNew />}

View File

@ -29,10 +29,12 @@ export default function CompletedInstances() {
paginationQueryParamPrefix="group_completed_instances"
paginationClassName="with-large-bottom-margin"
perPageOptions={[2, 5, 25]}
reportIdentifier="system_report_completed_instances_with_tasks_completed_by_my_groups"
reportIdentifier="system_report_completed_instances"
showReports={false}
textToShowIfEmpty="This group has no completed instances at this time."
additionalParams={`user_group_identifier=${userGroup}`}
additionalReportFilters={[
{ field_name: 'user_group_identifier', field_value: userGroup },
]}
showActionsColumn
/>
</>

View File

@ -20,44 +20,55 @@ export default function InProgressInstances() {
return userGroups.map((userGroup: string) => {
const titleText = `This is a list of instances with tasks that are waiting for the ${userGroup} group.`;
const headerElement = (
<h2 title={titleText} className="process-instance-table-header">
Waiting for <strong>{userGroup}</strong>
</h2>
);
return (
<>
<h2 title={titleText} className="process-instance-table-header">
Waiting for <strong>{userGroup}</strong>
</h2>
<ProcessInstanceListTable
filtersEnabled={false}
paginationQueryParamPrefix={`waiting_for_${slugifyString(
userGroup
).replace('-', '_')}`}
paginationClassName="with-large-bottom-margin"
perPageOptions={[2, 5, 25]}
reportIdentifier="system_report_in_progress_instances_with_tasks_for_my_group"
showReports={false}
textToShowIfEmpty="This group has no instances waiting on it at this time."
additionalParams={`user_group_identifier=${userGroup}`}
canCompleteAllTasks
showActionsColumn
autoReload
/>
</>
<ProcessInstanceListTable
headerElement={headerElement}
showLinkToReport
filtersEnabled={false}
paginationQueryParamPrefix={`waiting_for_${slugifyString(
userGroup
).replace('-', '_')}`}
paginationClassName="with-large-bottom-margin"
perPageOptions={[2, 5, 25]}
reportIdentifier="system_report_in_progress_instances_with_tasks"
showReports={false}
textToShowIfEmpty="This group has no instances waiting on it at this time."
additionalReportFilters={[
{ field_name: 'user_group_identifier', field_value: userGroup },
]}
canCompleteAllTasks
showActionsColumn
autoReload={false}
/>
);
});
};
const startedByMeTitleText =
'This is a list of open instances that you started.';
const startedByMeHeaderElement = (
<h2 title={startedByMeTitleText} className="process-instance-table-header">
Started by me
</h2>
);
const waitingForMeTitleText =
'This is a list of instances that have tasks that you can complete.';
const waitingForMeHeaderElement = (
<h2 title={waitingForMeTitleText} className="process-instance-table-header">
Waiting for me
</h2>
);
return (
<>
<h2
title={startedByMeTitleText}
className="process-instance-table-header"
>
Started by me
</h2>
<ProcessInstanceListTable
headerElement={startedByMeHeaderElement}
filtersEnabled={false}
paginationQueryParamPrefix="open_instances_started_by_me"
perPageOptions={[2, 5, 25]}
@ -65,16 +76,13 @@ export default function InProgressInstances() {
showReports={false}
textToShowIfEmpty="There are no open instances you started at this time."
paginationClassName="with-large-bottom-margin"
showLinkToReport
showActionsColumn
autoReload
autoReload={false}
/>
<h2
title={waitingForMeTitleText}
className="process-instance-table-header"
>
Waiting for me
</h2>
<ProcessInstanceListTable
headerElement={waitingForMeHeaderElement}
showLinkToReport
filtersEnabled={false}
paginationQueryParamPrefix="waiting_for_me"
perPageOptions={[2, 5, 25]}

View File

@ -494,7 +494,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) {
<TabList aria-label="List of tabs">
<Tab
title="Only show a subset of the logs, and show fewer columns"
data-qa="process-instance-log-simple"
data-qa="process-instance-log-milestones"
onClick={() => {
resetFilters();
searchParams.set('events', 'false');

View File

@ -1,97 +0,0 @@
import { useEffect, useState } from 'react';
import { useParams, useSearchParams } from 'react-router-dom';
// @ts-ignore
import { Button, Table } from '@carbon/react';
import ProcessBreadcrumb from '../components/ProcessBreadcrumb';
import PaginationForTable from '../components/PaginationForTable';
import HttpService from '../services/HttpService';
import { getPageInfoFromSearchParams } from '../helpers';
const PER_PAGE_FOR_PROCESS_INSTANCE_REPORT = 500;
export default function ProcessInstanceReport() {
const params = useParams();
const [searchParams] = useSearchParams();
const [processInstances, setProcessInstances] = useState([]);
const [reportMetadata, setReportMetadata] = useState({});
const [pagination, setPagination] = useState(null);
useEffect(() => {
const processResult = (result: any) => {
const processInstancesFromApi = result.results;
setProcessInstances(processInstancesFromApi);
setReportMetadata(result.report_metadata);
setPagination(result.pagination);
};
function getProcessInstances() {
const { page, perPage } = getPageInfoFromSearchParams(
searchParams,
PER_PAGE_FOR_PROCESS_INSTANCE_REPORT
);
let query = `?page=${page}&per_page=${perPage}`;
searchParams.forEach((value, key) => {
if (key !== 'page' && key !== 'per_page') {
query += `&${key}=${value}`;
}
});
HttpService.makeCallToBackend({
path: `/process-instances/reports/${params.report_identifier}${query}`,
successCallback: processResult,
});
}
getProcessInstances();
}, [searchParams, params]);
const buildTable = () => {
const headers = (reportMetadata as any).columns.map((column: any) => {
return <th>{(column as any).Header}</th>;
});
const rows = processInstances.map((row) => {
const currentRow = (reportMetadata as any).columns.map((column: any) => {
return <td>{(row as any)[column.accessor]}</td>;
});
return <tr key={(row as any).id}>{currentRow}</tr>;
});
return (
<Table striped bordered>
<thead>
<tr>{headers}</tr>
</thead>
<tbody>{rows}</tbody>
</Table>
);
};
if (pagination) {
const { page, perPage } = getPageInfoFromSearchParams(
searchParams,
PER_PAGE_FOR_PROCESS_INSTANCE_REPORT
);
return (
<main>
<ProcessBreadcrumb
hotCrumbs={[['Process Groups', '/admin'], ['Process Instance']]}
/>
<h1>Process Instance Perspective: {params.report_identifier}</h1>
<Button
href={`/admin/process-instances/reports/${params.report_identifier}/edit`}
>
Edit process instance perspective
</Button>
<PaginationForTable
page={page}
perPage={perPage}
pagination={pagination}
tableToDisplay={buildTable()}
/>
</main>
);
}
return null;
}

View File

@ -174,6 +174,7 @@ export default function ReactFormEditor() {
<span>
<input
name="file_name"
id="process_model_file_name"
type="text"
value={newFileName}
onChange={(e) => setNewFileName(e.target.value)}