Merge pull request #97 from sartography/feature/process_instance_search

Feature/process instance search
Kevin Burnett, 2022-09-23 19:40:32 +00:00 (committed by GitHub)
commit efd630d51a
11 changed files with 137 additions and 67 deletions


@@ -1,8 +1,8 @@
 """empty message
-Revision ID: 6c705aca6530
+Revision ID: 86bdc3330645
 Revises:
-Create Date: 2022-09-22 08:16:09.094932
+Create Date: 2022-09-22 18:01:10.013335
 """
 from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
 # revision identifiers, used by Alembic.
-revision = '6c705aca6530'
+revision = '86bdc3330645'
 down_revision = None
 branch_labels = None
 depends_on = None
@@ -227,6 +227,7 @@ def upgrade():
     sa.Column('process_instance_id', sa.Integer(), nullable=False),
     sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=False),
     sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False),
+    sa.Column('bpmn_task_name', sa.String(length=255), nullable=True),
     sa.Column('bpmn_task_type', sa.String(length=255), nullable=True),
     sa.Column('spiff_task_guid', sa.String(length=50), nullable=False),
     sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
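
This migration recreates the schema from scratch (down_revision is None). On an existing database, the new column could also be introduced incrementally; a minimal sketch of such a follow-up migration, assuming the table is named spiff_logging and using placeholder revision identifiers:

    # Hypothetical incremental migration; table name and revision ids are assumptions.
    import sqlalchemy as sa
    from alembic import op

    revision = "aaaaaaaaaaaa"        # placeholder
    down_revision = "86bdc3330645"   # the revision generated in this PR
    branch_labels = None
    depends_on = None


    def upgrade() -> None:
        # add the new, nullable task-name column alongside the existing log fields
        op.add_column(
            "spiff_logging",
            sa.Column("bpmn_task_name", sa.String(length=255), nullable=True),
        )


    def downgrade() -> None:
        op.drop_column("spiff_logging", "bpmn_task_name")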

poetry.lock (generated)

@@ -1866,7 +1866,7 @@ pytz = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "082d116990fcada49c3287f585cfd3a46c55de0f"
+resolved_reference = "dec9b4b942378d030ae73f1365dfbf108e6f7f8c"
 [[package]]
 name = "sqlalchemy"


@@ -238,11 +238,11 @@ paths:
             schema:
               $ref: "#/components/schemas/WorkflowSpecCategory"
-  /process-groups/{process_group_id}/process-models:
+  /process-models:
     parameters:
-      - name: process_group_id
-        in: path
-        required: true
+      - name: process_group_identifier
+        in: query
+        required: false
         description: The group containing the models we want to return
         schema:
           type: string
@@ -273,8 +273,6 @@ paths:
               type: array
               items:
                 $ref: "#/components/schemas/WorkflowSpec"
-  /process-models:
     # process_model_add
     post:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_add
@@ -404,17 +402,17 @@ paths:
             schema:
               $ref: "#/components/schemas/WorkflowSpec"
-  /process-models/{process_group_id}/{process_model_id}/process-instances:
+  /process-instances:
     parameters:
-      - name: process_group_id
-        in: path
-        required: true
+      - name: process_group_identifier
+        in: query
+        required: false
         description: The unique id of an existing process group
         schema:
           type: string
-      - name: process_model_id
-        in: path
-        required: true
+      - name: process_model_identifier
+        in: query
+        required: false
         description: The unique id of an existing workflow specification.
         schema:
           type: string
@@ -475,6 +473,20 @@ paths:
               type: array
               items:
                 $ref: "#/components/schemas/Workflow"
+  /process-models/{process_group_id}/{process_model_id}/process-instances:
+    parameters:
+      - name: process_group_id
+        in: path
+        required: true
+        description: The unique id of an existing process group
+        schema:
+          type: string
+      - name: process_model_id
+        in: path
+        required: true
+        description: The unique id of an existing workflow specification.
+        schema:
+          type: string
     # process_instance_create
     post:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_create
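
The net effect of these spec changes is that model listing and instance search move to top-level paths with optional query parameters instead of path segments. A client-side sketch of the new calls; the base URL and token handling are assumptions, not part of this change:

    # Hypothetical client usage of the reworked endpoints.
    import requests

    base_url = "http://localhost:7000/v1.0"        # assumption: local backend
    headers = {"Authorization": "Bearer <token>"}  # assumption: bearer auth token

    # list process models, optionally scoped to a group
    models = requests.get(
        f"{base_url}/process-models",
        params={"process_group_identifier": "my-group", "per_page": 10},
        headers=headers,
    )

    # search process instances across all models, filtered by status and start time
    instances = requests.get(
        f"{base_url}/process-instances",
        params={"process_status": "not_started,complete", "start_from": 1001},
        headers=headers,
    )
    print(models.json(), instances.json())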


@@ -1,15 +1,22 @@
 """Message_correlation."""
 from dataclasses import dataclass
+from typing import TYPE_CHECKING
 from flask_bpmn.models.db import db
 from flask_bpmn.models.db import SpiffworkflowBaseDBModel
 from sqlalchemy import ForeignKey
+from sqlalchemy.orm import relationship
 from spiffworkflow_backend.models.message_correlation_property import (
     MessageCorrelationPropertyModel,
 )
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+if TYPE_CHECKING:
+    from spiffworkflow_backend.models.message_correlation_message_instance import (  # noqa: F401
+        MessageCorrelationMessageInstanceModel,
+    )
 @dataclass
 class MessageCorrelationModel(SpiffworkflowBaseDBModel):
@@ -36,3 +43,7 @@ class MessageCorrelationModel(SpiffworkflowBaseDBModel):
     value = db.Column(db.String(255), nullable=False, index=True)
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)
+    message_correlations_message_instances = relationship(
+        "MessageCorrelationMessageInstanceModel", cascade="delete"
+    )
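
The import under TYPE_CHECKING is only evaluated by type checkers, so the string-named relationship can be resolved without a circular import at runtime, while cascade="delete" removes the dependent rows when the parent is deleted. A generic sketch of the cascade behavior with illustrative Parent/Child models (not the real ones):

    # Illustrative models only; shows relationship(..., cascade="delete") semantics.
    from sqlalchemy import Column, ForeignKey, Integer, create_engine
    from sqlalchemy.orm import Session, declarative_base, relationship

    Base = declarative_base()


    class Parent(Base):
        __tablename__ = "parent"
        id = Column(Integer, primary_key=True)
        # deleting a Parent also issues DELETEs for its Child rows
        children = relationship("Child", cascade="delete")


    class Child(Base):
        __tablename__ = "child"
        id = Column(Integer, primary_key=True)
        parent_id = Column(ForeignKey("parent.id"), nullable=False)


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        c1, c2 = Child(), Child()
        parent = Parent(children=[c1, c2])
        session.add_all([parent, c1, c2])
        session.commit()
        print(session.query(Child).count())  # 2
        session.delete(parent)
        session.commit()
        print(session.query(Child).count())  # 0, deletes cascaded from Parent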


@@ -3,6 +3,7 @@ import enum
 from dataclasses import dataclass
 from typing import Any
 from typing import Optional
+from typing import TYPE_CHECKING
 from flask_bpmn.models.db import db
 from flask_bpmn.models.db import SpiffworkflowBaseDBModel
@@ -15,6 +16,11 @@ from sqlalchemy.orm.events import event
 from spiffworkflow_backend.models.message_model import MessageModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+if TYPE_CHECKING:
+    from spiffworkflow_backend.models.message_correlation_message_instance import (  # noqa: F401
+        MessageCorrelationMessageInstanceModel,
+    )
 class MessageTypes(enum.Enum):
     """MessageTypes."""
@@ -42,6 +48,9 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
     process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False)  # type: ignore
     message_model_id: int = db.Column(ForeignKey(MessageModel.id), nullable=False)
     message_model = relationship("MessageModel")
+    message_correlations_message_instances = relationship(
+        "MessageCorrelationMessageInstanceModel", cascade="delete"
+    )
     message_type: str = db.Column(db.String(20), nullable=False)
     payload: str = db.Column(db.JSON)


@@ -95,6 +95,8 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     active_tasks = relationship("ActiveTaskModel", cascade="delete")  # type: ignore
     task_events = relationship("TaskEventModel", cascade="delete")  # type: ignore
     spiff_logs = relationship("SpiffLoggingModel", cascade="delete")  # type: ignore
+    message_instances = relationship("MessageInstanceModel", cascade="delete")  # type: ignore
+    message_correlations = relationship("MessageCorrelationModel", cascade="delete")  # type: ignore
     bpmn_json: str | None = deferred(db.Column(db.JSON))  # type: ignore
     start_in_seconds: int | None = db.Column(db.Integer)


@@ -19,6 +19,7 @@ class SpiffLoggingModel(SpiffworkflowBaseDBModel):
     process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False)  # type: ignore
     bpmn_process_identifier: str = db.Column(db.String(255), nullable=False)
     bpmn_task_identifier: str = db.Column(db.String(255), nullable=False)
+    bpmn_task_name: str = db.Column(db.String(255), nullable=True)
     bpmn_task_type: str = db.Column(db.String(255), nullable=True)
     spiff_task_guid: str = db.Column(db.String(50), nullable=False)
     timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)


@@ -202,10 +202,12 @@ def process_model_show(process_group_id: str, process_model_id: str) -> Any:
 def process_model_list(
-    process_group_id: str, page: int = 1, per_page: int = 100
+    process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
 ) -> flask.wrappers.Response:
     """Process model list!"""
-    process_models = ProcessModelService().get_process_models(process_group_id)
+    process_models = ProcessModelService().get_process_models(
+        process_group_id=process_group_identifier
+    )
     batch = ProcessModelService().get_batch(
         process_models, page=page, per_page=per_page
     )
@@ -530,8 +532,8 @@ def message_start(
 def process_instance_list(
-    process_group_id: str,
-    process_model_id: str,
+    process_group_identifier: Optional[str] = None,
+    process_model_identifier: Optional[str] = None,
     page: int = 1,
     per_page: int = 100,
     start_from: Optional[int] = None,
@@ -541,9 +543,13 @@ def process_instance_list(
     process_status: Optional[str] = None,
 ) -> flask.wrappers.Response:
     """Process_instance_list."""
-    process_model = get_process_model(process_model_id, process_group_id)
-    results = ProcessInstanceModel.query.filter_by(
+    process_instance_query = ProcessInstanceModel.query
+    if process_model_identifier is not None and process_group_identifier is not None:
+        process_model = get_process_model(
+            process_model_identifier, process_group_identifier
+        )
+        process_instance_query = process_instance_query.filter_by(
         process_model_identifier=process_model.id
     )
@@ -561,17 +567,28 @@ def process_instance_list(
     )
     if start_from is not None:
-        results = results.filter(ProcessInstanceModel.start_in_seconds >= start_from)
+        process_instance_query = process_instance_query.filter(
+            ProcessInstanceModel.start_in_seconds >= start_from
+        )
     if start_till is not None:
-        results = results.filter(ProcessInstanceModel.start_in_seconds <= start_till)
+        process_instance_query = process_instance_query.filter(
+            ProcessInstanceModel.start_in_seconds <= start_till
+        )
     if end_from is not None:
-        results = results.filter(ProcessInstanceModel.end_in_seconds >= end_from)
+        process_instance_query = process_instance_query.filter(
+            ProcessInstanceModel.end_in_seconds >= end_from
+        )
     if end_till is not None:
-        results = results.filter(ProcessInstanceModel.end_in_seconds <= end_till)
+        process_instance_query = process_instance_query.filter(
+            ProcessInstanceModel.end_in_seconds <= end_till
+        )
     if process_status is not None:
-        results = results.filter(ProcessInstanceModel.status == process_status)
+        process_status_array = process_status.split(",")
+        process_instance_query = process_instance_query.filter(
+            ProcessInstanceModel.status.in_(process_status_array)  # type: ignore
+        )
-    process_instances = results.order_by(
+    process_instances = process_instance_query.order_by(
         ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc()  # type: ignore
     ).paginate(page, per_page, False)
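
The handler builds the query incrementally, adding each filter only when the corresponding parameter was supplied, and turns the comma-separated process_status value into an IN clause. The same pattern in isolation, run against an in-memory SQLite database with an illustrative Instance model (not the real ProcessInstanceModel):

    # Stand-alone sketch of the optional-filter pattern; model and column names are illustrative.
    from typing import Optional

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    class Instance(Base):
        __tablename__ = "instance"
        id = Column(Integer, primary_key=True)
        status = Column(String(20))
        start_in_seconds = Column(Integer)


    def build_query(
        session: Session,
        process_status: Optional[str] = None,
        start_from: Optional[int] = None,
    ):
        query = session.query(Instance)
        if start_from is not None:
            query = query.filter(Instance.start_in_seconds >= start_from)
        if process_status is not None:
            # "not_started,complete" becomes status IN ('not_started', 'complete')
            query = query.filter(Instance.status.in_(process_status.split(",")))
        return query


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add_all(
            [
                Instance(status="complete", start_in_seconds=1000),
                Instance(status="not_started", start_in_seconds=3000),
                Instance(status="error", start_in_seconds=5000),
            ]
        )
        session.commit()
        rows = build_query(session, process_status="not_started,complete", start_from=1001).all()
        print([row.status for row in rows])  # ['not_started']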


@@ -182,6 +182,7 @@ class DBHandler(logging.Handler):
             bpmn_process_identifier = record.workflow  # type: ignore
             spiff_task_guid = str(record.task_id)  # type: ignore
             bpmn_task_identifier = str(record.task_spec)  # type: ignore
+            bpmn_task_name = record.task_name if hasattr(record, "task_name") else None  # type: ignore
             bpmn_task_type = record.task_type if hasattr(record, "task_type") else None  # type: ignore
             timestamp = record.created
             message = record.msg if hasattr(record, "msg") else None
@@ -190,6 +191,7 @@ class DBHandler(logging.Handler):
                 process_instance_id=record.process_instance_id,  # type: ignore
                 bpmn_process_identifier=bpmn_process_identifier,
                 spiff_task_guid=spiff_task_guid,
+                bpmn_task_name=bpmn_task_name,
                 bpmn_task_identifier=bpmn_task_identifier,
                 bpmn_task_type=bpmn_task_type,
                 message=message,
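
bpmn_task_name is read defensively because not every record carries it; fields like this typically arrive on the LogRecord via the logging extra mechanism. A small generic sketch of that handler pattern (CollectingHandler and the field names are illustrative, not the real DBHandler):

    # Minimal illustration of optional LogRecord attributes.
    import logging


    class CollectingHandler(logging.Handler):
        def __init__(self) -> None:
            super().__init__()
            self.rows = []

        def emit(self, record: logging.LogRecord) -> None:
            # attributes supplied through `extra` may or may not be present
            task_name = record.task_name if hasattr(record, "task_name") else None
            self.rows.append({"message": record.getMessage(), "task_name": task_name})


    logger = logging.getLogger("demo")
    logger.setLevel(logging.INFO)
    handler = CollectingHandler()
    logger.addHandler(handler)

    logger.info("task completed", extra={"task_name": "Review Request"})
    logger.info("no task attached")
    print(handler.rows)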


@@ -357,7 +357,7 @@ class ProcessInstanceProcessor:
     @staticmethod
     def __get_bpmn_process_instance(
         process_instance_model: ProcessInstanceModel,
-        spec: WorkflowSpec = None,
+        spec: Optional[WorkflowSpec] = None,
         validate_only: bool = False,
         subprocesses: Optional[IdToBpmnProcessSpecMapping] = None,
     ) -> BpmnWorkflow:
@@ -368,12 +368,17 @@ class ProcessInstanceProcessor:
             original_spiff_logger_log_level = spiff_logger.level
             spiff_logger.setLevel(logging.WARNING)
+            try:
                 bpmn_process_instance = (
                     ProcessInstanceProcessor._serializer.deserialize_json(
                         process_instance_model.bpmn_json
                     )
                 )
+            except Exception as err:
+                raise (err)
+            finally:
                 spiff_logger.setLevel(original_spiff_logger_log_level)
             bpmn_process_instance.script_engine = (
                 ProcessInstanceProcessor._script_engine
             )
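
Wrapping the deserialization in try/finally guarantees the SpiffWorkflow logger's level is restored even when deserialization raises; the except/raise keeps the original error visible to callers. The pattern in a standalone sketch (do_noisy_deserialize is a stand-in, not the real serializer call):

    # Generic sketch: temporarily raise a logger's level and always restore it.
    import logging

    spiff_logger = logging.getLogger("spiff")


    def do_noisy_deserialize() -> dict:
        # stand-in for ProcessInstanceProcessor._serializer.deserialize_json(...)
        spiff_logger.info("chatty detail, suppressed while the level is WARNING")
        return {"spec": {}, "data": {}}


    original_level = spiff_logger.level
    spiff_logger.setLevel(logging.WARNING)
    try:
        bpmn_process_instance = do_noisy_deserialize()
    except Exception as err:
        # nothing to clean up here; re-raise so callers see the original failure
        raise err
    finally:
        # runs on both the success and the failure path
        spiff_logger.setLevel(original_level)
    print(bpmn_process_instance)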


@@ -215,7 +215,7 @@ class TestProcessApi(BaseTest):
         # get all models
         response = client.get(
-            f"/v1.0/process-groups/{group_id}/process-models",
+            f"/v1.0/process-models?process_group_identifier={group_id}",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -226,7 +226,7 @@ class TestProcessApi(BaseTest):
         # get first page, 1 per page
         response = client.get(
-            f"/v1.0/process-groups/{group_id}/process-models?page=1&per_page=1",
+            f"/v1.0/process-models?page=1&per_page=1&process_group_identifier={group_id}",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -238,7 +238,7 @@ class TestProcessApi(BaseTest):
         # get second page, 1 per page
         response = client.get(
-            f"/v1.0/process-groups/{group_id}/process-models?page=2&per_page=1",
+            f"/v1.0/process-models?page=2&per_page=1&process_group_identifier={group_id}",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -250,7 +250,7 @@ class TestProcessApi(BaseTest):
         # get first page, 3 per page
         response = client.get(
-            f"/v1.0/process-groups/{group_id}/process-models?page=1&per_page=3",
+            f"/v1.0/process-models?page=1&per_page=3&process_group_identifier={group_id}",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -262,7 +262,7 @@ class TestProcessApi(BaseTest):
         # get second page, 3 per page
         response = client.get(
-            f"/v1.0/process-groups/{group_id}/process-models?page=2&per_page=3",
+            f"/v1.0/process-models?page=2&per_page=3&process_group_identifier={group_id}",
             headers=self.logged_in_headers(user),
         )
         # there should only be 2 left
@@ -436,11 +436,11 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         data = {"key1": "THIS DATA"}
         user = self.find_or_create_user()
         response = client.put(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             data=data,
             follow_redirects=True,
             content_type="multipart/form-data",
@@ -457,11 +457,11 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         data = {"file": (io.BytesIO(b""), "random_fact.svg")}
         user = self.find_or_create_user()
         response = client.put(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             data=data,
             follow_redirects=True,
             content_type="multipart/form-data",
@@ -478,12 +478,12 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         original_file = self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         new_file_contents = b"THIS_IS_NEW_DATA"
         data = {"file": (io.BytesIO(new_file_contents), "random_fact.svg")}
         user = self.find_or_create_user()
         response = client.put(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             data=data,
             follow_redirects=True,
             content_type="multipart/form-data",
@@ -495,7 +495,7 @@ class TestProcessApi(BaseTest):
         assert response.json["ok"]
         response = client.get(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             headers=self.logged_in_headers(user),
         )
         assert response.status_code == 200
@@ -509,10 +509,10 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         user = self.find_or_create_user()
         response = client.delete(
-            f"/v1.0/process-models/INCORRECT-NON-EXISTENT-GROUP/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/INCORRECT-NON-EXISTENT-GROUP/{process_model.id}/files/random_fact.svg",
             follow_redirects=True,
             headers=self.logged_in_headers(user),
         )
@@ -527,10 +527,10 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         user = self.find_or_create_user()
         response = client.delete(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact_DOES_NOT_EXIST.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact_DOES_NOT_EXIST.svg",
             follow_redirects=True,
             headers=self.logged_in_headers(user),
         )
@@ -545,10 +545,10 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         user = self.find_or_create_user()
         response = client.delete(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             follow_redirects=True,
             headers=self.logged_in_headers(user),
         )
@@ -558,7 +558,7 @@ class TestProcessApi(BaseTest):
         assert response.json["ok"]
         response = client.get(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             headers=self.logged_in_headers(user),
         )
         assert response.status_code == 404
@@ -586,9 +586,9 @@ class TestProcessApi(BaseTest):
     ) -> None:
         """Test_get_workflow_from_workflow_spec."""
         user = self.find_or_create_user()
-        spec = load_test_spec("hello_world")
+        process_model = load_test_spec("hello_world")
         response = client.post(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/process-instances",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/process-instances",
             headers=self.logged_in_headers(user),
         )
         assert response.status_code == 201
@@ -935,7 +935,7 @@ class TestProcessApi(BaseTest):
         )
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances",
+            "/v1.0/process-instances",
             headers=self.logged_in_headers(user),
         )
         assert response.status_code == 200
@@ -983,7 +983,7 @@ class TestProcessApi(BaseTest):
         )
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances?per_page=2&page=3",
+            "/v1.0/process-instances?per_page=2&page=3",
            headers=self.logged_in_headers(user),
         )
         assert response.status_code == 200
@@ -994,7 +994,7 @@ class TestProcessApi(BaseTest):
         assert response.json["pagination"]["total"] == 5
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances?per_page=2&page=1",
+            "/v1.0/process-instances?per_page=2&page=1",
             headers=self.logged_in_headers(user),
         )
         assert response.status_code == 200
@@ -1031,7 +1031,7 @@ class TestProcessApi(BaseTest):
         # Without filtering we should get all 5 instances
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances",
+            f"/v1.0/process-instances?process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -1042,7 +1042,7 @@ class TestProcessApi(BaseTest):
         # we should get 1 instance each time
         for i in range(5):
             response = client.get(
-                f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}",
+                f"/v1.0/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}&process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
                headers=self.logged_in_headers(user),
             )
             assert response.json is not None
@@ -1050,10 +1050,20 @@ class TestProcessApi(BaseTest):
             assert len(results) == 1
             assert results[0]["status"] == ProcessInstanceStatus[statuses[i]].value
+        response = client.get(
+            f"/v1.0/process-instances?process_status=not_started,complete&process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
+            headers=self.logged_in_headers(user),
+        )
+        assert response.json is not None
+        results = response.json["results"]
+        assert len(results) == 2
+        assert results[0]["status"] in ['complete', 'not_started']
+        assert results[1]["status"] in ['complete', 'not_started']
         # filter by start/end seconds
         # start > 1000 - this should eliminate the first
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001",
+            "/v1.0/process-instances?start_from=1001",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -1064,7 +1074,7 @@ class TestProcessApi(BaseTest):
         # start > 2000, end < 5000 - this should eliminate the first 2 and the last
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=2001&end_till=5999",
+            "/v1.0/process-instances?start_from=2001&end_till=5999",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -1075,7 +1085,7 @@ class TestProcessApi(BaseTest):
         # start > 1000, start < 4000 - this should eliminate the first and the last 2
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001&start_till=3999",
+            "/v1.0/process-instances?start_from=1001&start_till=3999",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -1086,7 +1096,7 @@ class TestProcessApi(BaseTest):
         # end > 2000, end < 6000 - this should eliminate the first and the last
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?end_from=2001&end_till=5999",
+            "/v1.0/process-instances?end_from=2001&end_till=5999",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None