merged main into branch and resolved conflicts w/ burnettk

This commit is contained in:
jasquat 2022-06-21 17:40:17 -04:00
commit 8eef5f5c36
5 changed files with 161 additions and 20 deletions

View File

@ -310,6 +310,37 @@ paths:
description: The page number to return. Defaults to page 1.
schema:
type: integer
- name: start_from
in: query
required: false
description: For filtering - beginning of start window - in seconds since epoch
schema:
type: integer
- name: start_till
in: query
required: false
description: For filtering - end of start window - in seconds since epoch
schema:
type: integer
- name: end_from
in: query
required: false
description: For filtering - beginning of end window - in seconds since epoch
schema:
type: integer
- name: end_till
in: query
required: false
description: For filtering - end of end window - in seconds since epoch
schema:
type: integer
- name: process_status
in: query
required: false
description: For filtering - not_started, user_input_required, waiting, complete, or erroring
schema:
type: string
# process_instance_list
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list
summary: Returns a list of process instances for a given process model

View File

@ -19,9 +19,9 @@ class GroupModel(FlaskBpmnGroupModel):
__tablename__ = "group"
__table_args__ = {"extend_existing": True}
new_name_two = db.Column(db.String(255))  # type: ignore
user_group_assignments = relationship(UserGroupAssignmentModel, cascade="delete") user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")
users = relationship(  # type: ignore
UserModel, "UserModel",
viewonly=True,
secondary="user_group_assignment",
overlaps="user_group_assignments,users",

View File

@ -2,11 +2,12 @@
from __future__ import annotations
from dataclasses import dataclass
from dataclasses import field
from typing import Optional from typing import Dict, Optional, Union
from typing import Any
import marshmallow
from marshmallow import Schema
from marshmallow.decorators import post_load
from spiffworkflow_backend.models.file import File
@ -64,11 +65,11 @@ class ProcessModelInfoSchema(Schema):
is_review = marshmallow.fields.Boolean(allow_none=True)
process_group_id = marshmallow.fields.String(allow_none=True)
libraries = marshmallow.fields.List(marshmallow.fields.String(), allow_none=True)
# files = marshmallow.fields.List(marshmallow.fields.Nested("FileSchema")) files = marshmallow.fields.List(marshmallow.fields.Nested("FileSchema"))
# @post_load @post_load
# def make_spec( def make_spec(
# self, data: Dict[str, Union[str, bool, int]], **_ self, data: Dict[str, Union[str, bool, int]], **_
# ) -> ProcessModelInfo: ) -> ProcessModelInfo:
# """Make_spec.""" """Make_spec."""
# return ProcessModelInfo(**data) return ProcessModelInfo(**data)

View File

@ -273,8 +273,16 @@ def process_instance_create(
def process_instance_list( def process_instance_list(
process_group_id: str, process_model_id: str, page: int = 1, per_page: int = 100 process_group_id,
) -> flask.wrappers.Response: process_model_id,
page=1,
per_page=100,
start_from=None,
start_till=None,
end_from=None,
end_till=None,
process_status=None,
):
"""Process_instance_list.""" """Process_instance_list."""
process_model = ProcessModelService().get_process_model( process_model = ProcessModelService().get_process_model(
process_model_id, group_id=process_group_id process_model_id, group_id=process_group_id
@ -288,13 +296,23 @@ def process_instance_list(
) )
) )
process_instances = ( results = ProcessInstanceModel.query.filter_by(
ProcessInstanceModel.query.filter_by(process_model_identifier=process_model.id) process_model_identifier=process_model.id
.order_by(
ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc()
)
.paginate(page, per_page, False)
) )
if start_from is not None:
results = results.filter(ProcessInstanceModel.start_in_seconds >= start_from)
if start_till is not None:
results = results.filter(ProcessInstanceModel.start_in_seconds <= start_till)
if end_from is not None:
results = results.filter(ProcessInstanceModel.end_in_seconds >= end_from)
if end_till is not None:
results = results.filter(ProcessInstanceModel.end_in_seconds <= end_till)
if process_status is not None:
results = results.filter(ProcessInstanceModel.status == process_status)
process_instances = results.order_by(
ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc()
).paginate(page, per_page, False)
serialized_results = []
for process_instance in process_instances.items:

View File

@ -1,6 +1,7 @@
"""Test Process Api Blueprint.""" """Test Process Api Blueprint."""
import io import io
import json import json
import time
from typing import Dict from typing import Dict
from typing import Optional from typing import Optional
from typing import Union from typing import Union
@ -17,6 +18,7 @@ from spiffworkflow_backend.models.file import FileType
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@ -637,6 +639,95 @@ def test_process_instance_list_with_paginated_items(
assert response.json["pagination"]["total"] == 5 assert response.json["pagination"]["total"] == 5
def test_process_instance_list_filter(
    app: Flask, client: FlaskClient, with_bpmn_file_cleanup: None
) -> None:
    """Verify the start/end-window and status filters of the process-instance list endpoint.

    Seeds five instances (one per status) with staggered start/end timestamps,
    then asserts each query-string filter returns exactly the expected subset.
    (Indentation reconstructed from a flattened diff scrape — structure inferred
    from the code's own syntax.)
    """
    # Start from a clean table so counts below are deterministic.
    db.session.query(ProcessInstanceModel).delete()
    db.session.commit()

    test_process_group_id = "runs_without_input"
    test_process_model_id = "sample"
    user = find_or_create_user()
    load_test_spec(app, test_process_model_id, process_group_id=test_process_group_id)

    statuses = ("not_started", "user_input_required", "waiting", "complete", "erroring")
    # Create 5 instances with different status and different
    # start_in_seconds/end_in_seconds. Instance i: start = 1000*(i+1),
    # end = 1000*(i+1) + 1000; bpmn_json records i so results can be identified.
    for i in range(5):
        process_instance = ProcessInstanceModel(
            status=ProcessInstanceStatus[statuses[i]],
            process_initiator=user,
            process_model_identifier=test_process_model_id,
            process_group_identifier=test_process_group_id,
            updated_at_in_seconds=round(time.time()),
            start_in_seconds=(1000 * i) + 1000,
            end_in_seconds=(1000 * i) + 2000,
            bpmn_json=json.dumps({"i": i}),
        )
        db.session.add(process_instance)
        # NOTE(review): commit placement inside the loop inferred — the scrape
        # lost indentation; per-iteration commit is behaviorally equivalent here.
        db.session.commit()

    # Without filtering we should get all 5 instances.
    response = client.get(
        f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances",
        headers=logged_in_headers(user),
    )
    results = response.json["results"]
    assert len(results) == 5

    # Filter for each of the statuses; we should get exactly 1 instance each time.
    for i in range(5):
        response = client.get(
            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}",
            headers=logged_in_headers(user),
        )
        results = response.json["results"]
        assert len(results) == 1
        assert results[0]["status"] == ProcessInstanceStatus[statuses[i]].value

    # Filter by start/end seconds.
    # start > 1000 - this should eliminate the first.
    response = client.get(
        f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001",
        headers=logged_in_headers(user),
    )
    results = response.json["results"]
    assert len(results) == 4
    for i in range(4):
        assert json.loads(results[i]["bpmn_json"])["i"] in (1, 2, 3, 4)

    # start > 2000, end < 6000 - this should eliminate the first 2 and the last.
    response = client.get(
        f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=2001&end_till=5999",
        headers=logged_in_headers(user),
    )
    results = response.json["results"]
    assert len(results) == 2
    assert json.loads(results[0]["bpmn_json"])["i"] in (2, 3)
    assert json.loads(results[1]["bpmn_json"])["i"] in (2, 3)

    # start > 1000, start < 4000 - this should eliminate the first and the last 2.
    response = client.get(
        f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001&start_till=3999",
        headers=logged_in_headers(user),
    )
    results = response.json["results"]
    assert len(results) == 2
    assert json.loads(results[0]["bpmn_json"])["i"] in (1, 2)
    assert json.loads(results[1]["bpmn_json"])["i"] in (1, 2)

    # end > 2000, end < 6000 - this should eliminate the first and the last.
    response = client.get(
        f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?end_from=2001&end_till=5999",
        headers=logged_in_headers(user),
    )
    results = response.json["results"]
    assert len(results) == 3
    for i in range(3):
        assert json.loads(results[i]["bpmn_json"])["i"] in (1, 2, 3)
def test_process_instance_report_with_default_list(
app: Flask, client: FlaskClient, with_bpmn_file_cleanup: None
) -> None:
@ -707,10 +798,10 @@ def create_process_model(
process_model_service = ProcessModelService() process_model_service = ProcessModelService()
if process_group_id is None: if process_group_id is None:
process_group = ProcessGroup( process_group_tmp = ProcessGroup(
id="test_cat", display_name="Test Category", display_order=0, admin=False id="test_cat", display_name="Test Category", display_order=0, admin=False
) )
process_model_service.add_process_group(process_group) process_group = process_model_service.add_process_group(process_group_tmp)
else: else:
process_group = ProcessModelService().get_process_group(process_group_id) process_group = ProcessModelService().get_process_group(process_group_id)