Merge branch 'main' into feature/secrets

# Conflicts:
#	migrations/versions/6c705aca6530_.py
#	migrations/versions/86bdc3330645_.py
#	migrations/versions/ade9bce1c592_.py
mike cullerton 2022-09-26 10:39:21 -04:00
commit a0d00c3efa
13 changed files with 153 additions and 78 deletions


@@ -1,8 +1,8 @@
 """empty message
-Revision ID: ade9bce1c592
+Revision ID: 8fca9cdfb5be
 Revises:
-Create Date: 2022-09-23 13:38:48.460126
+Create Date: 2022-09-26 10:38:30.015462
 """
 from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
 # revision identifiers, used by Alembic.
-revision = 'ade9bce1c592'
+revision = '8fca9cdfb5be'
 down_revision = None
 branch_labels = None
 depends_on = None
@@ -227,6 +227,7 @@ def upgrade():
     sa.Column('process_instance_id', sa.Integer(), nullable=False),
     sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=False),
     sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False),
+    sa.Column('bpmn_task_name', sa.String(length=255), nullable=True),
     sa.Column('bpmn_task_type', sa.String(length=255), nullable=True),
     sa.Column('spiff_task_guid', sa.String(length=50), nullable=False),
     sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
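Note: this regenerated base revision folds the new column into the initial create_table call. For an installation that already has the table, the equivalent incremental migration would look roughly like the sketch below; the spiff_logging table name and the revision identifiers are assumptions for illustration, not part of this commit.

    """Add bpmn_task_name to spiff_logging (illustrative sketch, not from this commit)."""
    import sqlalchemy as sa
    from alembic import op

    # Placeholder revision identifiers; a real migration would use generated ids.
    revision = "000000000000"
    down_revision = "8fca9cdfb5be"
    branch_labels = None
    depends_on = None


    def upgrade():
        # Nullable, so existing spiff_logging rows need no backfill.
        op.add_column(
            "spiff_logging",
            sa.Column("bpmn_task_name", sa.String(length=255), nullable=True),
        )


    def downgrade():
        op.drop_column("spiff_logging", "bpmn_task_name")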

poetry.lock (generated)

@@ -1866,7 +1866,7 @@ pytz = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "082d116990fcada49c3287f585cfd3a46c55de0f"
+resolved_reference = "dec9b4b942378d030ae73f1365dfbf108e6f7f8c"
 [[package]]
 name = "sqlalchemy"


@@ -238,11 +238,11 @@ paths:
               schema:
                 $ref: "#/components/schemas/WorkflowSpecCategory"
-  /process-groups/{process_group_id}/process-models:
+  /process-models:
     parameters:
-      - name: process_group_id
-        in: path
-        required: true
+      - name: process_group_identifier
+        in: query
+        required: false
         description: The group containing the models we want to return
         schema:
           type: string
@@ -273,8 +273,6 @@ paths:
                 type: array
                 items:
                   $ref: "#/components/schemas/WorkflowSpec"
-  /process-models:
     # process_model_add
     post:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_add
@@ -404,17 +402,17 @@ paths:
               schema:
                 $ref: "#/components/schemas/WorkflowSpec"
-  /process-models/{process_group_id}/{process_model_id}/process-instances:
+  /process-instances:
     parameters:
-      - name: process_group_id
-        in: path
-        required: true
+      - name: process_group_identifier
+        in: query
+        required: false
         description: The unique id of an existing process group
         schema:
          type: string
-      - name: process_model_id
-        in: path
-        required: true
+      - name: process_model_identifier
+        in: query
+        required: false
         description: The unique id of an existing workflow specification.
         schema:
           type: string
@@ -475,6 +473,20 @@ paths:
                 type: array
                 items:
                   $ref: "#/components/schemas/Workflow"
+  /process-models/{process_group_id}/{process_model_id}/process-instances:
+    parameters:
+      - name: process_group_id
+        in: path
+        required: true
+        description: The unique id of an existing process group
+        schema:
+          type: string
+      - name: process_model_id
+        in: path
+        required: true
+        description: The unique id of an existing workflow specification.
+        schema:
+          type: string
     # process_instance_create
     post:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_create
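The path-parameter endpoints above become flat endpoints that take optional query parameters, so callers select a group, model, or status set through the query string instead of the URL path. A minimal client-side sketch of calling the new /v1.0/process-instances endpoint; the base URL, the bearer token, and the requests dependency are assumptions for illustration:

    import requests

    BASE_URL = "http://localhost:7000/v1.0"  # placeholder backend URL
    HEADERS = {"Authorization": "Bearer <token>"}  # placeholder auth header


    def list_process_instances(process_group=None, process_model=None, statuses=None):
        """Call the flat /process-instances endpoint with optional query filters."""
        params = {}
        if process_group is not None:
            params["process_group_identifier"] = process_group
        if process_model is not None:
            params["process_model_identifier"] = process_model
        if statuses:
            # the backend accepts a comma-separated list of statuses
            params["process_status"] = ",".join(statuses)
        response = requests.get(f"{BASE_URL}/process-instances", params=params, headers=HEADERS)
        response.raise_for_status()
        return response.json()["results"]


    if __name__ == "__main__":
        print(list_process_instances(statuses=["not_started", "complete"]))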


@@ -1,15 +1,22 @@
 """Message_correlation."""
 from dataclasses import dataclass
+from typing import TYPE_CHECKING
 from flask_bpmn.models.db import db
 from flask_bpmn.models.db import SpiffworkflowBaseDBModel
 from sqlalchemy import ForeignKey
+from sqlalchemy.orm import relationship
 from spiffworkflow_backend.models.message_correlation_property import (
     MessageCorrelationPropertyModel,
 )
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+if TYPE_CHECKING:
+    from spiffworkflow_backend.models.message_correlation_message_instance import (  # noqa: F401
+        MessageCorrelationMessageInstanceModel,
+    )
 @dataclass
 class MessageCorrelationModel(SpiffworkflowBaseDBModel):
@@ -36,3 +43,7 @@ class MessageCorrelationModel(SpiffworkflowBaseDBModel):
     value = db.Column(db.String(255), nullable=False, index=True)
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)
+    message_correlations_message_instances = relationship(
+        "MessageCorrelationMessageInstanceModel", cascade="delete"
+    )


@@ -3,6 +3,7 @@ import enum
 from dataclasses import dataclass
 from typing import Any
 from typing import Optional
+from typing import TYPE_CHECKING
 from flask_bpmn.models.db import db
 from flask_bpmn.models.db import SpiffworkflowBaseDBModel
@@ -15,6 +16,11 @@ from sqlalchemy.orm.events import event
 from spiffworkflow_backend.models.message_model import MessageModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+if TYPE_CHECKING:
+    from spiffworkflow_backend.models.message_correlation_message_instance import (  # noqa: F401
+        MessageCorrelationMessageInstanceModel,
+    )
 class MessageTypes(enum.Enum):
     """MessageTypes."""
@@ -42,6 +48,9 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
     process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore
     message_model_id: int = db.Column(ForeignKey(MessageModel.id), nullable=False)
     message_model = relationship("MessageModel")
+    message_correlations_message_instances = relationship(
+        "MessageCorrelationMessageInstanceModel", cascade="delete"
+    )
     message_type: str = db.Column(db.String(20), nullable=False)
     payload: str = db.Column(db.JSON)
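Both message models above follow the same pattern: the related class is imported only under TYPE_CHECKING, so the two modules never import each other at runtime, while relationship() names its target as a string that SQLAlchemy resolves from the model registry once everything is loaded, and cascade="delete" removes the children along with the parent. A stripped-down, self-contained sketch of that pattern (Parent and Child are made-up stand-ins, not models from this code base):

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base, relationship

    Base = declarative_base()


    class Parent(Base):
        __tablename__ = "parent"
        id = Column(Integer, primary_key=True)
        # The target is given as a string, so this module does not have to import
        # Child at runtime; an `if TYPE_CHECKING:` import can still provide the
        # name for annotations and linters (hence the `# noqa: F401` in the diff).
        children = relationship("Child", cascade="delete")


    class Child(Base):
        __tablename__ = "child"
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey("parent.id"), nullable=False)
        name = Column(String(255))


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add_all([Parent(id=1), Child(parent_id=1, name="a"), Child(parent_id=1, name="b")])
        session.commit()
        session.delete(session.get(Parent, 1))  # cascade="delete" removes both children
        session.commit()
        assert session.query(Child).count() == 0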


@@ -95,6 +95,8 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     active_tasks = relationship("ActiveTaskModel", cascade="delete") # type: ignore
     task_events = relationship("TaskEventModel", cascade="delete") # type: ignore
     spiff_logs = relationship("SpiffLoggingModel", cascade="delete") # type: ignore
+    message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore
+    message_correlations = relationship("MessageCorrelationModel", cascade="delete") # type: ignore
     bpmn_json: str | None = deferred(db.Column(db.JSON)) # type: ignore
     start_in_seconds: int | None = db.Column(db.Integer)


@@ -19,6 +19,7 @@ class SpiffLoggingModel(SpiffworkflowBaseDBModel):
     process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore
     bpmn_process_identifier: str = db.Column(db.String(255), nullable=False)
     bpmn_task_identifier: str = db.Column(db.String(255), nullable=False)
+    bpmn_task_name: str = db.Column(db.String(255), nullable=True)
     bpmn_task_type: str = db.Column(db.String(255), nullable=True)
     spiff_task_guid: str = db.Column(db.String(50), nullable=False)
     timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)


@@ -202,10 +202,12 @@ def process_model_show(process_group_id: str, process_model_id: str) -> Any:
 def process_model_list(
-    process_group_id: str, page: int = 1, per_page: int = 100
+    process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
 ) -> flask.wrappers.Response:
     """Process model list!"""
-    process_models = ProcessModelService().get_process_models(process_group_id)
+    process_models = ProcessModelService().get_process_models(
+        process_group_id=process_group_identifier
+    )
     batch = ProcessModelService().get_batch(
         process_models, page=page, per_page=per_page
     )
@@ -530,8 +532,8 @@ def message_start(
 def process_instance_list(
-    process_group_id: str,
-    process_model_id: str,
+    process_group_identifier: Optional[str] = None,
+    process_model_identifier: Optional[str] = None,
     page: int = 1,
     per_page: int = 100,
     start_from: Optional[int] = None,
@@ -541,11 +543,15 @@ def process_instance_list(
     process_status: Optional[str] = None,
 ) -> flask.wrappers.Response:
     """Process_instance_list."""
-    process_model = get_process_model(process_model_id, process_group_id)
+    process_instance_query = ProcessInstanceModel.query
+    if process_model_identifier is not None and process_group_identifier is not None:
+        process_model = get_process_model(
+            process_model_identifier, process_group_identifier
+        )
-    results = ProcessInstanceModel.query.filter_by(
+        process_instance_query = process_instance_query.filter_by(
             process_model_identifier=process_model.id
         )
     # this can never happen. obviously the class has the columns it defines. this is just to appease mypy.
     if (
@@ -561,17 +567,28 @@ def process_instance_list(
         )
     if start_from is not None:
-        results = results.filter(ProcessInstanceModel.start_in_seconds >= start_from)
+        process_instance_query = process_instance_query.filter(
+            ProcessInstanceModel.start_in_seconds >= start_from
+        )
     if start_till is not None:
-        results = results.filter(ProcessInstanceModel.start_in_seconds <= start_till)
+        process_instance_query = process_instance_query.filter(
+            ProcessInstanceModel.start_in_seconds <= start_till
+        )
     if end_from is not None:
-        results = results.filter(ProcessInstanceModel.end_in_seconds >= end_from)
+        process_instance_query = process_instance_query.filter(
+            ProcessInstanceModel.end_in_seconds >= end_from
+        )
     if end_till is not None:
-        results = results.filter(ProcessInstanceModel.end_in_seconds <= end_till)
+        process_instance_query = process_instance_query.filter(
+            ProcessInstanceModel.end_in_seconds <= end_till
+        )
     if process_status is not None:
-        results = results.filter(ProcessInstanceModel.status == process_status)
+        process_status_array = process_status.split(",")
+        process_instance_query = process_instance_query.filter(
+            ProcessInstanceModel.status.in_(process_status_array) # type: ignore
+        )
-    process_instances = results.order_by(
+    process_instances = process_instance_query.order_by(
         ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
     ).paginate(page, per_page, False)
@@ -1088,7 +1105,7 @@ def get_spiff_task_from_process_instance(
 #
 # Methods for secrets CRUD - maybe move somewhere else:
 #
-def get_secret(key: str) -> str | None:
+def get_secret(key: str) -> Optional[str]:
     """Get_secret."""
     return SecretService.get_secret(key)
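The refactor above turns process_instance_list into an incrementally built query: start from ProcessInstanceModel.query, attach one filter per optional argument, and split the comma-separated process_status into an IN clause. A self-contained sketch of that pattern against an in-memory SQLite database (Instance is a made-up stand-in for ProcessInstanceModel):

    from typing import Optional

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    class Instance(Base):  # stand-in for ProcessInstanceModel
        __tablename__ = "instance"
        id = Column(Integer, primary_key=True)
        status = Column(String(50), nullable=False)
        start_in_seconds = Column(Integer)


    def filtered_instances(
        session: Session,
        start_from: Optional[int] = None,
        process_status: Optional[str] = None,
    ):
        """Build the query incrementally, mirroring process_instance_list."""
        query = session.query(Instance)
        if start_from is not None:
            query = query.filter(Instance.start_in_seconds >= start_from)
        if process_status is not None:
            # "not_started,complete" -> IN ('not_started', 'complete')
            query = query.filter(Instance.status.in_(process_status.split(",")))
        return query.order_by(Instance.start_in_seconds.desc()).all()


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add_all(
            [
                Instance(status="not_started", start_in_seconds=1000),
                Instance(status="complete", start_in_seconds=2000),
                Instance(status="error", start_in_seconds=3000),
            ]
        )
        session.commit()
        rows = filtered_instances(session, start_from=1001, process_status="not_started,complete")
        assert [r.status for r in rows] == ["complete"]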


@@ -1,6 +1,7 @@
 """Logging_service."""
 import json
 import logging
+import re
 from typing import Any
 from typing import Optional
@@ -113,6 +114,8 @@ def setup_logger(app: Flask) -> None:
         "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
     )
+    app.logger.debug("Printing log to create app logger")
     # the json formatter is nice for real environments but makes
     # debugging locally a little more difficult
     if app.env != "development":
@@ -133,14 +136,15 @@ def setup_logger(app: Flask) -> None:
     spiff_logger_filehandler = None
     if app.config["SPIFFWORKFLOW_BACKEND_LOG_TO_FILE"]:
         spiff_logger_filehandler = logging.FileHandler(
-            f"{app.root_path}/../../log/{app.env}.log"
+            f"{app.instance_path}/../../log/{app.env}.log"
         )
         spiff_logger_filehandler.setLevel(spiff_log_level)
         spiff_logger_filehandler.setFormatter(log_formatter)
     # make all loggers act the same
     for name in logging.root.manager.loggerDict:
-        if "spiff" not in name:
+        # use a regex so spiffworkflow_backend isn't filtered out
+        if not re.match(r"^spiff\b", name):
             the_logger = logging.getLogger(name)
             the_logger.setLevel(log_level)
             if spiff_logger_filehandler:
@@ -178,6 +182,7 @@ class DBHandler(logging.Handler):
         bpmn_process_identifier = record.workflow # type: ignore
         spiff_task_guid = str(record.task_id) # type: ignore
         bpmn_task_identifier = str(record.task_spec) # type: ignore
+        bpmn_task_name = record.task_name if hasattr(record, "task_name") else None # type: ignore
        bpmn_task_type = record.task_type if hasattr(record, "task_type") else None # type: ignore
         timestamp = record.created
         message = record.msg if hasattr(record, "msg") else None
@@ -186,6 +191,7 @@ class DBHandler(logging.Handler):
             process_instance_id=record.process_instance_id, # type: ignore
             bpmn_process_identifier=bpmn_process_identifier,
             spiff_task_guid=spiff_task_guid,
+            bpmn_task_name=bpmn_task_name,
             bpmn_task_identifier=bpmn_task_identifier,
             bpmn_task_type=bpmn_task_type,
             message=message,
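One detail worth noting in the logger loop above: the old check, `"spiff" not in name`, also skipped the application's own spiffworkflow_backend loggers, while the new `^spiff\b` regex only matches logger names whose first dotted segment is exactly "spiff" (the SpiffWorkflow library loggers). A quick standalone check of that behaviour:

    import re


    def is_spiff_library_logger(name: str) -> bool:
        # Mirrors the new check in setup_logger(): treat "spiff" and "spiff.*"
        # as library loggers, but not "spiffworkflow_backend.*".
        return re.match(r"^spiff\b", name) is not None


    assert is_spiff_library_logger("spiff")
    assert is_spiff_library_logger("spiff.metrics")
    assert not is_spiff_library_logger("spiffworkflow_backend.routes")
    assert not is_spiff_library_logger("werkzeug")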


@@ -357,7 +357,7 @@ class ProcessInstanceProcessor:
     @staticmethod
     def __get_bpmn_process_instance(
         process_instance_model: ProcessInstanceModel,
-        spec: WorkflowSpec = None,
+        spec: Optional[WorkflowSpec] = None,
         validate_only: bool = False,
         subprocesses: Optional[IdToBpmnProcessSpecMapping] = None,
     ) -> BpmnWorkflow:
@@ -368,12 +368,17 @@ class ProcessInstanceProcessor:
             original_spiff_logger_log_level = spiff_logger.level
             spiff_logger.setLevel(logging.WARNING)
-            bpmn_process_instance = (
-                ProcessInstanceProcessor._serializer.deserialize_json(
-                    process_instance_model.bpmn_json
-                )
-            )
-            spiff_logger.setLevel(original_spiff_logger_log_level)
+            try:
+                bpmn_process_instance = (
+                    ProcessInstanceProcessor._serializer.deserialize_json(
+                        process_instance_model.bpmn_json
+                    )
+                )
+            except Exception as err:
+                raise (err)
+            finally:
+                spiff_logger.setLevel(original_spiff_logger_log_level)
             bpmn_process_instance.script_engine = (
                 ProcessInstanceProcessor._script_engine
             )
@@ -563,10 +568,14 @@ class ProcessInstanceProcessor:
                 bpmn_process_identifier
             )
             new_bpmn_files.add(new_bpmn_file_full_path)
+            dmn_file_glob = os.path.join(
+                os.path.dirname(new_bpmn_file_full_path), "*.dmn"
+            )
+            parser.add_dmn_files_by_glob(dmn_file_glob)
             processed_identifiers.add(bpmn_process_identifier)
-        for new_bpmn_file_full_path in new_bpmn_files:
-            parser.add_bpmn_file(new_bpmn_file_full_path)
+        if new_bpmn_files:
+            parser.add_bpmn_files(new_bpmn_files)
         ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files(
             parser, processed_identifiers
         )
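The try/except/finally around deserialize_json guarantees the spiff logger is restored to its original level even when deserialization fails (the except block only re-raises the caught error, so a plain try/finally would behave the same). The general shape of the pattern, with a hypothetical helper name:

    import logging

    spiff_logger = logging.getLogger("spiff")


    def deserialize_quietly(serializer, bpmn_json):
        """Silence the spiff logger during deserialization, then always restore it."""
        original_level = spiff_logger.level
        spiff_logger.setLevel(logging.WARNING)
        try:
            return serializer.deserialize_json(bpmn_json)
        finally:
            # Runs on success and on exception, so the level never stays at WARNING.
            spiff_logger.setLevel(original_level)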


@@ -54,7 +54,7 @@ class SecretService:
         return secret_model
     @staticmethod
-    def get_secret(key: str) -> str | None:
+    def get_secret(key: str) -> Optional[str]:
         """Get_secret."""
         secret: SecretModel = (
             db.session.query(SecretModel).filter(SecretModel.key == key).first()


@@ -215,7 +215,7 @@ class TestProcessApi(BaseTest):
         # get all models
         response = client.get(
-            f"/v1.0/process-groups/{group_id}/process-models",
+            f"/v1.0/process-models?process_group_identifier={group_id}",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -226,7 +226,7 @@ class TestProcessApi(BaseTest):
         # get first page, 1 per page
         response = client.get(
-            f"/v1.0/process-groups/{group_id}/process-models?page=1&per_page=1",
+            f"/v1.0/process-models?page=1&per_page=1&process_group_identifier={group_id}",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -238,7 +238,7 @@ class TestProcessApi(BaseTest):
         # get second page, 1 per page
         response = client.get(
-            f"/v1.0/process-groups/{group_id}/process-models?page=2&per_page=1",
+            f"/v1.0/process-models?page=2&per_page=1&process_group_identifier={group_id}",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -250,7 +250,7 @@ class TestProcessApi(BaseTest):
         # get first page, 3 per page
         response = client.get(
-            f"/v1.0/process-groups/{group_id}/process-models?page=1&per_page=3",
+            f"/v1.0/process-models?page=1&per_page=3&process_group_identifier={group_id}",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -262,7 +262,7 @@ class TestProcessApi(BaseTest):
         # get second page, 3 per page
         response = client.get(
-            f"/v1.0/process-groups/{group_id}/process-models?page=2&per_page=3",
+            f"/v1.0/process-models?page=2&per_page=3&process_group_identifier={group_id}",
             headers=self.logged_in_headers(user),
         )
         # there should only be 2 left
@@ -436,11 +436,11 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         data = {"key1": "THIS DATA"}
         user = self.find_or_create_user()
         response = client.put(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             data=data,
             follow_redirects=True,
             content_type="multipart/form-data",
@@ -457,11 +457,11 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         data = {"file": (io.BytesIO(b""), "random_fact.svg")}
         user = self.find_or_create_user()
         response = client.put(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             data=data,
             follow_redirects=True,
             content_type="multipart/form-data",
@@ -478,12 +478,12 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         original_file = self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         new_file_contents = b"THIS_IS_NEW_DATA"
         data = {"file": (io.BytesIO(new_file_contents), "random_fact.svg")}
         user = self.find_or_create_user()
         response = client.put(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             data=data,
             follow_redirects=True,
             content_type="multipart/form-data",
@@ -495,7 +495,7 @@ class TestProcessApi(BaseTest):
         assert response.json["ok"]
         response = client.get(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             headers=self.logged_in_headers(user),
         )
         assert response.status_code == 200
@@ -509,10 +509,10 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         user = self.find_or_create_user()
         response = client.delete(
-            f"/v1.0/process-models/INCORRECT-NON-EXISTENT-GROUP/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/INCORRECT-NON-EXISTENT-GROUP/{process_model.id}/files/random_fact.svg",
             follow_redirects=True,
             headers=self.logged_in_headers(user),
         )
@@ -527,10 +527,10 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         user = self.find_or_create_user()
         response = client.delete(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact_DOES_NOT_EXIST.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact_DOES_NOT_EXIST.svg",
             follow_redirects=True,
             headers=self.logged_in_headers(user),
         )
@@ -545,10 +545,10 @@ class TestProcessApi(BaseTest):
         """Test_process_model_file_update."""
         self.create_spec_file(client)
-        spec = load_test_spec("random_fact")
+        process_model = load_test_spec("random_fact")
         user = self.find_or_create_user()
         response = client.delete(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             follow_redirects=True,
             headers=self.logged_in_headers(user),
         )
@@ -558,7 +558,7 @@ class TestProcessApi(BaseTest):
         assert response.json["ok"]
         response = client.get(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
             headers=self.logged_in_headers(user),
         )
         assert response.status_code == 404
@@ -586,9 +586,9 @@ class TestProcessApi(BaseTest):
     ) -> None:
         """Test_get_workflow_from_workflow_spec."""
         user = self.find_or_create_user()
-        spec = load_test_spec("hello_world")
+        process_model = load_test_spec("hello_world")
         response = client.post(
-            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/process-instances",
+            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/process-instances",
             headers=self.logged_in_headers(user),
         )
         assert response.status_code == 201
@@ -935,7 +935,7 @@ class TestProcessApi(BaseTest):
         )
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances",
+            "/v1.0/process-instances",
             headers=self.logged_in_headers(user),
         )
         assert response.status_code == 200
@@ -983,7 +983,7 @@ class TestProcessApi(BaseTest):
         )
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances?per_page=2&page=3",
+            "/v1.0/process-instances?per_page=2&page=3",
             headers=self.logged_in_headers(user),
         )
         assert response.status_code == 200
@@ -994,7 +994,7 @@ class TestProcessApi(BaseTest):
         assert response.json["pagination"]["total"] == 5
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances?per_page=2&page=1",
+            "/v1.0/process-instances?per_page=2&page=1",
             headers=self.logged_in_headers(user),
         )
         assert response.status_code == 200
@@ -1031,7 +1031,7 @@ class TestProcessApi(BaseTest):
         # Without filtering we should get all 5 instances
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances",
+            f"/v1.0/process-instances?process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -1042,7 +1042,7 @@ class TestProcessApi(BaseTest):
         # we should get 1 instance each time
         for i in range(5):
             response = client.get(
-                f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}",
+                f"/v1.0/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}&process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
                 headers=self.logged_in_headers(user),
             )
             assert response.json is not None
@@ -1050,10 +1050,20 @@ class TestProcessApi(BaseTest):
             assert len(results) == 1
             assert results[0]["status"] == ProcessInstanceStatus[statuses[i]].value
+        response = client.get(
+            f"/v1.0/process-instances?process_status=not_started,complete&process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
+            headers=self.logged_in_headers(user),
+        )
+        assert response.json is not None
+        results = response.json["results"]
+        assert len(results) == 2
+        assert results[0]["status"] in ["complete", "not_started"]
+        assert results[1]["status"] in ["complete", "not_started"]
         # filter by start/end seconds
         # start > 1000 - this should eliminate the first
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001",
+            "/v1.0/process-instances?start_from=1001",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -1064,7 +1074,7 @@ class TestProcessApi(BaseTest):
         # start > 2000, end < 5000 - this should eliminate the first 2 and the last
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=2001&end_till=5999",
+            "/v1.0/process-instances?start_from=2001&end_till=5999",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -1075,7 +1085,7 @@ class TestProcessApi(BaseTest):
         # start > 1000, start < 4000 - this should eliminate the first and the last 2
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001&start_till=3999",
+            "/v1.0/process-instances?start_from=1001&start_till=3999",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None
@@ -1086,7 +1096,7 @@ class TestProcessApi(BaseTest):
         # end > 2000, end < 6000 - this should eliminate the first and the last
         response = client.get(
-            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?end_from=2001&end_till=5999",
+            "/v1.0/process-instances?end_from=2001&end_till=5999",
             headers=self.logged_in_headers(user),
         )
         assert response.json is not None


@@ -94,7 +94,7 @@ class TestSecretService(SecretServiceTestHelpers):
         self.add_test_secret(user)
         with pytest.raises(ApiError) as ae:
             self.add_test_secret(user)
-        assert "Duplicate entry" in ae.value.message
+        assert ae.value.code == "create_secret_error"
     def test_get_secret(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None:
         """Test_get_secret."""
@@ -151,6 +151,7 @@ class TestSecretService(SecretServiceTestHelpers):
         with pytest.raises(ApiError) as ae:
             SecretService.update_secret(secret.key + "x", "some_new_value", user.id)
         assert "Resource does not exist" in ae.value.message
+        assert ae.value.code == "update_secret_error"
     def test_delete_secret(
         self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
@@ -244,8 +245,6 @@ class TestSecretService(SecretServiceTestHelpers):
                 allowed_relative_path=process_model_relative_path,
             )
         assert "Resource already exists" in ae.value.message
-        assert "IntegrityError" in ae.value.message
-        assert "Duplicate entry" in ae.value.message
     def test_secret_add_allowed_process_bad_user_fails(
         self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
@@ -286,7 +285,6 @@ class TestSecretService(SecretServiceTestHelpers):
                 allowed_relative_path=process_model_relative_path,
             )
         assert "Resource does not exist" in ae.value.message
-        print("test_secret_add_allowed_process_bad_secret")
     def test_secret_delete_allowed_process(
         self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
@@ -438,7 +436,6 @@ class TestSecretServiceApi(SecretServiceTestHelpers):
             headers=self.logged_in_headers(user),
         )
         assert secret_response.status_code == 404
-        print("test_delete_secret_bad_key")
     def test_add_secret_allowed_process(
         self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None