Merge remote-tracking branch 'origin/feature/add_process_model_metadata_to_reporting' into new_report

This commit is contained in:
jasquat 2022-11-29 16:40:44 -05:00
commit 5cd7abc18f
7 changed files with 2061 additions and 17 deletions

1908
spiffworkflow-backend/: Normal file

File diff suppressed because it is too large Load Diff

View File

@ -204,18 +204,8 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
user: UserModel,
) -> ProcessInstanceReportModel:
"""Create_with_attributes."""
# Merge resolution: kept main's simpler constructor below; the HEAD-side
# process-model lookup (process_group/process_model identifier columns) was dropped.
process_instance_report = cls(
identifier=identifier,
created_by_id=user.id,
report_metadata=report_metadata,
)

View File

@ -30,6 +30,8 @@ from SpiffWorkflow.task import TaskState
from sqlalchemy import and_
from sqlalchemy import asc
from sqlalchemy import desc
from sqlalchemy import func
from sqlalchemy.orm import aliased
from sqlalchemy.orm import joinedload
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
@ -52,6 +54,9 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSche
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
@ -928,19 +933,32 @@ def process_instance_list(
UserGroupAssignmentModel.user_id == g.user.id
)
stock_columns = ProcessInstanceReportService.get_column_names_for_model(
ProcessInstanceModel
)
for column in process_instance_report.report_metadata["columns"]:
if column["accessor"] in stock_columns:
continue
instance_metadata_alias = aliased(ProcessInstanceMetadataModel)
process_instance_query = process_instance_query.outerjoin(
instance_metadata_alias,
and_(
ProcessInstanceModel.id == instance_metadata_alias.process_instance_id,
instance_metadata_alias.key == column["accessor"],
),
).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"]))
process_instances = (
process_instance_query.group_by(ProcessInstanceModel.id)
.add_columns(ProcessInstanceModel.id)
.order_by(
ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
)
.paginate(page=page, per_page=per_page, error_out=False)
)
results = list(
map(
ProcessInstanceService.serialize_flat_with_task_data,
process_instances.items,
)
results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(
process_instances.items, process_instance_report.report_metadata["columns"]
)
report_metadata = process_instance_report.report_metadata

View File

@ -235,8 +235,9 @@ class AuthenticationService:
refresh_token_object: RefreshTokenModel = RefreshTokenModel.query.filter(
RefreshTokenModel.user_id == user_id
).first()
assert refresh_token_object # noqa: S101
return refresh_token_object.token
if refresh_token_object:
return refresh_token_object.token
return None
@classmethod
def get_auth_token_from_refresh_token(cls, refresh_token: str) -> dict:

View File

@ -2,6 +2,9 @@
from dataclasses import dataclass
from typing import Optional
import sqlalchemy
from flask_bpmn.models.db import db
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
@ -241,3 +244,27 @@ class ProcessInstanceReportService:
)
return report_filter
@classmethod
def add_metadata_columns_to_process_instance(
    cls,
    process_instance_sqlalchemy_rows: list[sqlalchemy.engine.row.Row],  # type: ignore
    metadata_columns: list[dict],
) -> list[dict]:
    """Flatten report query rows into plain dicts.

    Each row bundles a ProcessInstanceModel (under the "ProcessInstanceModel"
    key) with extra labeled metadata columns. The serialized instance dict is
    extended with each metadata accessor it does not already contain, so stock
    model columns are never overwritten.
    """
    flattened = []
    for row in process_instance_sqlalchemy_rows:
        instance_dict = row["ProcessInstanceModel"].serialized
        for column in metadata_columns:
            accessor = column["accessor"]
            if accessor in instance_dict:
                continue  # stock column wins over a metadata value
            instance_dict[accessor] = row[accessor]
        flattened.append(instance_dict)
    return flattened
@classmethod
def get_column_names_for_model(cls, model: db.Model) -> list[str]:  # type: ignore
    """Return the names of all columns mapped on *model*'s table."""
    names = []
    for column in model.__table__.columns:
        names.append(column.name)
    return names

View File

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Test fixture: start -> script task -> end. The script task calls
     save_process_instance_metadata({"key1": "value1", "key2": "value2"})
     so running this process persists two metadata rows for the instance. -->
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="Process_zqd49ox" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0fmt4q1</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_0fmt4q1" sourceRef="StartEvent_1" targetRef="save_metadata" />
    <bpmn:scriptTask id="save_metadata" name="Save Metadata">
      <bpmn:incoming>Flow_0fmt4q1</bpmn:incoming>
      <bpmn:outgoing>Flow_0hhrkce</bpmn:outgoing>
      <bpmn:script>save_process_instance_metadata({"key1": "value1", "key2": "value2"})</bpmn:script>
    </bpmn:scriptTask>
    <bpmn:endEvent id="Event_10onn1h">
      <bpmn:incoming>Flow_0hhrkce</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_0hhrkce" sourceRef="save_metadata" targetRef="Event_10onn1h" />
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_zqd49ox">
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_0e0gdj6_di" bpmnElement="save_metadata">
        <dc:Bounds x="270" y="137" width="100" height="80" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_10onn1h_di" bpmnElement="Event_10onn1h">
        <dc:Bounds x="432" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_0fmt4q1_di" bpmnElement="Flow_0fmt4q1">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="270" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0hhrkce_di" bpmnElement="Flow_0hhrkce">
        <di:waypoint x="370" y="177" />
        <di:waypoint x="432" y="177" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -20,6 +20,9 @@ from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
@ -2544,3 +2547,60 @@ class TestProcessApi(BaseTest):
# make sure the new subgroup does exist
new_process_group = ProcessModelService.get_process_group(new_sub_path)
assert new_process_group.id == new_sub_path
def test_can_get_process_instance_list_with_report_metadata(
    self,
    app: Flask,
    client: FlaskClient,
    with_db_and_bpmn_file_cleanup: None,
    with_super_admin_user: UserModel,
) -> None:
    """Run a process that saves instance metadata, then list instances through a report mixing stock and metadata columns."""
    # The fixture's script task calls save_process_instance_metadata with
    # key1/key2, so completing the instance should persist two metadata rows.
    process_model = load_test_spec(
        process_model_id="test-process-instance-metadata-report",
        bpmn_file_name="process_instance_metadata.bpmn",
        process_model_source_directory="test-process-instance-metadata-report",
    )
    process_instance = self.create_process_instance_from_process_model(
        process_model=process_model, user=with_super_admin_user
    )
    processor = ProcessInstanceProcessor(process_instance)
    processor.do_engine_steps(save=True)
    process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by(
        process_instance_id=process_instance.id
    ).all()
    assert len(process_instance_metadata) == 2
    # "id" and "status" are stock ProcessInstanceModel columns; "key1" must be
    # joined in from the instance metadata by the list endpoint.
    report_metadata = {
        "columns": [
            {"Header": "ID", "accessor": "id"},
            {"Header": "Status", "accessor": "status"},
            {"Header": "Key One", "accessor": "key1"},
            # NOTE(review): key2 is disabled here and in the assertion below —
            # presumably a second metadata join wasn't working yet; confirm and re-enable.
            # {"Header": "Key Two", "accessor": "key2"},
        ],
        "order_by": ["status"],
        "filter_by": [],
    }
    process_instance_report = ProcessInstanceReportModel.create_with_attributes(
        identifier="sure",
        report_metadata=report_metadata,
        user=with_super_admin_user,
    )
    # Fetch the list endpoint filtered by the report identifier created above.
    response = client.get(
        f"/v1.0/process-instances?report_identifier={process_instance_report.identifier}",
        headers=self.logged_in_headers(with_super_admin_user),
    )
    assert response.json is not None
    assert response.status_code == 200
    assert len(response.json["results"]) == 1
    assert response.json["results"][0]["status"] == "complete"
    assert response.json["results"][0]["id"] == process_instance.id
    assert response.json["results"][0]["key1"] == "value1"
    # assert response.json["results"][0]["key2"] == "value2"
    assert response.json["pagination"]["count"] == 1
    assert response.json["pagination"]["pages"] == 1
    assert response.json["pagination"]["total"] == 1