pyl w/ burnettk

jasquat 2023-01-19 15:37:55 -05:00
parent 972682260b
commit 0bf13094af
18 changed files with 85 additions and 96 deletions

View File

@@ -17,21 +17,21 @@ def setup_database_uri(app: Flask) -> None:
     if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
         database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
         if app.config.get("SPIFF_DATABASE_TYPE") == "sqlite":
-            app.config[
-                "SQLALCHEMY_DATABASE_URI"
-            ] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
+            app.config["SQLALCHEMY_DATABASE_URI"] = (
+                f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
+            )
         elif app.config.get("SPIFF_DATABASE_TYPE") == "postgres":
-            app.config[
-                "SQLALCHEMY_DATABASE_URI"
-            ] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
+            app.config["SQLALCHEMY_DATABASE_URI"] = (
+                f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
+            )
         else:
             # use pswd to trick flake8 with hardcoded passwords
             db_pswd = os.environ.get("DB_PASSWORD")
             if db_pswd is None:
                 db_pswd = ""
-            app.config[
-                "SQLALCHEMY_DATABASE_URI"
-            ] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
+            app.config["SQLALCHEMY_DATABASE_URI"] = (
+                f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
+            )
     else:
         app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get(
             "SPIFFWORKFLOW_BACKEND_DATABASE_URI"

View File

@@ -15,7 +15,7 @@ from flask import jsonify
 from flask import make_response
 from sentry_sdk import capture_exception
 from sentry_sdk import set_tag
-from SpiffWorkflow.exceptions import WorkflowException
+from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowTaskException
 from SpiffWorkflow.specs.base import TaskSpec  # type: ignore
 from SpiffWorkflow.task import Task  # type: ignore
@@ -41,7 +41,7 @@ class ApiError(Exception):
     task_data: dict | str | None = field(default_factory=dict)
     task_id: str = ""
     task_name: str = ""
-    task_trace: list | None = field(default_factory=dict)
+    task_trace: list | None = field(default_factory=list)

     def __str__(self) -> str:
         """Instructions to print instance as a string."""
@@ -65,7 +65,7 @@ class ApiError(Exception):
         offset: int = 0,
         error_type: str = "",
         error_line: str = "",
-        task_trace: dict | None = None,
+        task_trace: list | None = None,
     ) -> ApiError:
         """Constructs an API Error with details pulled from the current task."""
         instance = cls(error_code, message, status_code=status_code)
@@ -166,7 +166,7 @@ def set_user_sentry_context() -> None:
     set_tag("username", username)


-@api_error_blueprint.app_errorhandler(Exception)
+@api_error_blueprint.app_errorhandler(Exception)  # type: ignore
 def handle_exception(exception: Exception) -> flask.wrappers.Response:
     """Handles unexpected exceptions."""
     set_user_sentry_context()

View File

@@ -6,10 +6,10 @@ import time
 from typing import Any

 from flask_migrate import Migrate  # type: ignore
-from flask_sqlalchemy import SQLAlchemy  # type: ignore
-from sqlalchemy import event  # type: ignore
-from sqlalchemy.engine.base import Connection  # type: ignore
-from sqlalchemy.orm.mapper import Mapper  # type: ignore
+from flask_sqlalchemy import SQLAlchemy
+from sqlalchemy import event
+from sqlalchemy.engine.base import Connection
+from sqlalchemy.orm.mapper import Mapper

 db = SQLAlchemy()
 migrate = Migrate()
@@ -59,8 +59,7 @@ class SpiffworkflowBaseDBModel(db.Model):  # type: ignore
 def update_created_modified_on_create_listener(
     mapper: Mapper, _connection: Connection, target: SpiffworkflowBaseDBModel
 ) -> None:
-    """Event listener that runs before a record is updated, and sets the create/modified field accordingly.
-    """
+    """Event listener that runs before a record is updated, and sets the create/modified field accordingly."""
     if "created_at_in_seconds" in mapper.columns.keys():
         target.created_at_in_seconds = round(time.time())
     if "updated_at_in_seconds" in mapper.columns.keys():
@@ -70,8 +69,7 @@ def update_created_modified_on_create_listener(
 def update_modified_on_update_listener(
     mapper: Mapper, _connection: Connection, target: SpiffworkflowBaseDBModel
 ) -> None:
-    """Event listener that runs before a record is updated, and sets the modified field accordingly.
-    """
+    """Event listener that runs before a record is updated, and sets the modified field accordingly."""
     if "updated_at_in_seconds" in mapper.columns.keys():
         if db.session.is_modified(target, include_collections=False):
             target.updated_at_in_seconds = round(time.time())
@@ -83,5 +81,5 @@ def add_listeners() -> None:
     This should be called after importing all subclasses
     """
     for cls in SpiffworkflowBaseDBModel._all_subclasses():
-        event.listen(cls, "before_insert", update_created_modified_on_create_listener)
-        event.listen(cls, "before_update", update_modified_on_update_listener)
+        event.listen(cls, "before_insert", update_created_modified_on_create_listener)  # type: ignore
+        event.listen(cls, "before_update", update_modified_on_update_listener)  # type: ignore

View File

@@ -38,8 +38,7 @@ class SpecReference:
 class SpecReferenceCache(SpiffworkflowBaseDBModel):
-    """A cache of information about all the Processes and Decisions defined in all files.
-    """
+    """A cache of information about all the Processes and Decisions defined in all files."""

     __tablename__ = "spec_reference_cache"
     __table_args__ = (
View File

@@ -27,8 +27,7 @@ class ScriptUnauthorizedForUserError(Exception):
 class Script:
-    """Provides an abstract class that defines how scripts should work, this must be extended in all Script Tasks.
-    """
+    """Provides an abstract class that defines how scripts should work, this must be extended in all Script Tasks."""

     @abstractmethod
     def get_description(self) -> str:

View File

@@ -60,8 +60,7 @@ class AuthenticationService:
     @classmethod
     def open_id_endpoint_for_name(cls, name: str) -> str:
-        """All openid systems provide a mapping of static names to the full path of that endpoint.
-        """
+        """All openid systems provide a mapping of static names to the full path of that endpoint."""
         openid_config_url = f"{cls.server_url()}/.well-known/openid-configuration"
         if name not in AuthenticationService.ENDPOINT_CACHE:
             response = requests.get(openid_config_url)
@@ -201,8 +200,7 @@ class AuthenticationService:
     @classmethod
     def get_auth_token_from_refresh_token(cls, refresh_token: str) -> dict:
-        """Converts a refresh token to an Auth Token by calling the openid's auth endpoint.
-        """
+        """Converts a refresh token to an Auth Token by calling the openid's auth endpoint."""
         backend_basic_auth_string = f"{cls.client_id()}:{cls.secret_key()}"
         backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii")
         backend_basic_auth = base64.b64encode(backend_basic_auth_bytes)

View File

@@ -171,8 +171,7 @@ class AuthorizationService:
     @classmethod
     def delete_all_permissions(cls) -> None:
-        """Delete_all_permissions_and_recreate. EXCEPT For permissions for the current user?
-        """
+        """Delete_all_permissions_and_recreate. EXCEPT For permissions for the current user?"""
         for model in [PermissionAssignmentModel, PermissionTargetModel]:
             db.session.query(model).delete()
@@ -283,9 +282,9 @@ class AuthorizationService:
         """Find_or_create_permission_target."""
         uri_with_percent = re.sub(r"\*", "%", uri)
         target_uri_normalized = uri_with_percent.removeprefix(V1_API_PATH_PREFIX)
-        permission_target: Optional[
-            PermissionTargetModel
-        ] = PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first()
+        permission_target: Optional[PermissionTargetModel] = (
+            PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first()
+        )
         if permission_target is None:
             permission_target = PermissionTargetModel(uri=target_uri_normalized)
             db.session.add(permission_target)
@@ -300,13 +299,13 @@ class AuthorizationService:
         permission: str,
     ) -> PermissionAssignmentModel:
         """Create_permission_for_principal."""
-        permission_assignment: Optional[
-            PermissionAssignmentModel
-        ] = PermissionAssignmentModel.query.filter_by(
-            principal_id=principal.id,
-            permission_target_id=permission_target.id,
-            permission=permission,
-        ).first()
+        permission_assignment: Optional[PermissionAssignmentModel] = (
+            PermissionAssignmentModel.query.filter_by(
+                principal_id=principal.id,
+                permission_target_id=permission_target.id,
+                permission=permission,
+            ).first()
+        )
         if permission_assignment is None:
             permission_assignment = PermissionAssignmentModel(
                 principal_id=principal.id,
@@ -435,8 +434,10 @@ class AuthorizationService:
         except jwt.InvalidTokenError as exception:
             raise ApiError(
                 "token_invalid",
-                "The Authentication token you provided is invalid. You need a new"
-                " token. ",
+                (
+                    "The Authentication token you provided is invalid. You need a new"
+                    " token. "
+                ),
             ) from exception

     @staticmethod

View File

@@ -15,13 +15,11 @@ class BackgroundProcessingService:
         self.app = app

     def process_waiting_process_instances(self) -> None:
-        """Since this runs in a scheduler, we need to specify the app context as well.
-        """
+        """Since this runs in a scheduler, we need to specify the app context as well."""
         with self.app.app_context():
             ProcessInstanceService.do_waiting()

     def process_message_instances_with_app_context(self) -> None:
-        """Since this runs in a scheduler, we need to specify the app context as well.
-        """
+        """Since this runs in a scheduler, we need to specify the app context as well."""
         with self.app.app_context():
             MessageService.process_message_instances()

View File

@@ -41,8 +41,7 @@ class ErrorHandlingService:
     def handle_error(
         self, _processor: ProcessInstanceProcessor, _error: Union[ApiError, Exception]
     ) -> None:
-        """On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception.
-        """
+        """On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception."""
         process_model = ProcessModelService.get_process_model(
             _processor.process_model_identifier
         )

View File

@@ -151,8 +151,7 @@ class FileSystemService:
     @staticmethod
     def _get_files(file_path: str, file_name: Optional[str] = None) -> List[File]:
-        """Returns an array of File objects at the given path, can be restricted to just one file.
-        """
+        """Returns an array of File objects at the given path, can be restricted to just one file."""
         files = []
         items = os.scandir(file_path)
         for item in items:

View File

@@ -52,8 +52,7 @@ class JsonFormatter(logging.Formatter):
         self.datefmt = None

     def usesTime(self) -> bool:
-        """Overwritten to look for the attribute in the format dict values instead of the fmt string.
-        """
+        """Overwritten to look for the attribute in the format dict values instead of the fmt string."""
         return "asctime" in self.fmt_dict.values()

     # we are overriding a method that returns a string and returning a dict, hence the Any

View File

@@ -23,7 +23,7 @@ import dateparser
 import pytz
 from flask import current_app
 from lxml import etree  # type: ignore
-from lxml.etree import XMLSyntaxError
+from lxml.etree import XMLSyntaxError  # type: ignore
 from RestrictedPython import safe_globals  # type: ignore
 from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore
 from SpiffWorkflow.bpmn.PythonScriptEngine import Box  # type: ignore
@@ -38,7 +38,7 @@ from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
 from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
-from SpiffWorkflow.exceptions import WorkflowTaskException  # type: ignore
+from SpiffWorkflow.exceptions import WorkflowTaskException
 from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
 from SpiffWorkflow.spiff.serializer.task_spec_converters import BoundaryEventConverter  # type: ignore
 from SpiffWorkflow.spiff.serializer.task_spec_converters import (
@@ -242,7 +242,7 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
         except WorkflowException as e:
             raise e
         except Exception as e:
-            raise self.create_task_exec_exception(task, script, e)
+            raise self.create_task_exec_exception(task, script, e) from e

     def call_service(
         self,
@@ -299,8 +299,7 @@ class ProcessInstanceProcessor:
     def __init__(
         self, process_instance_model: ProcessInstanceModel, validate_only: bool = False
     ) -> None:
-        """Create a Workflow Processor based on the serialized information available in the process_instance model.
-        """
+        """Create a Workflow Processor based on the serialized information available in the process_instance model."""
         tld = current_app.config["THREAD_LOCAL_DATA"]
         tld.process_instance_id = process_instance_model.id
         tld.spiff_step = process_instance_model.spiff_step
@@ -406,8 +405,10 @@ class ProcessInstanceProcessor:
             raise (
                 ApiError(
                     "process_model_not_found",
-                    "The given process model was not found:"
-                    f" {process_model_identifier}.",
+                    (
+                        "The given process model was not found:"
+                        f" {process_model_identifier}."
+                    ),
                 )
             )
         spec_files = SpecFileService.get_files(process_model_info)
@@ -537,9 +538,11 @@ class ProcessInstanceProcessor:
                     potential_owner_ids.append(lane_owner_user.id)
             self.raise_if_no_potential_owners(
                 potential_owner_ids,
-                "No users found in task data lane owner list for lane:"
-                f" {task_lane}. The user list used:"
-                f" {task.data['lane_owners'][task_lane]}",
+                (
+                    "No users found in task data lane owner list for lane:"
+                    f" {task_lane}. The user list used:"
+                    f" {task.data['lane_owners'][task_lane]}"
+                ),
             )
         else:
             group_model = GroupModel.query.filter_by(identifier=task_lane).first()
@@ -692,9 +695,9 @@ class ProcessInstanceProcessor:
             ):
                 continue
-            subprocesses_by_child_task_ids[
-                task_id
-            ] = subprocesses_by_child_task_ids[subprocess_id]
+            subprocesses_by_child_task_ids[task_id] = (
+                subprocesses_by_child_task_ids[subprocess_id]
+            )
         self.get_highest_level_calling_subprocesses_by_child_task_ids(
             subprocesses_by_child_task_ids, task_typename_by_task_id
         )
@@ -1014,8 +1017,7 @@ class ProcessInstanceProcessor:
     def get_spec(
         files: List[File], process_model_info: ProcessModelInfo
     ) -> Tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]:
-        """Returns a SpiffWorkflow specification for the given process_instance spec, using the files provided.
-        """
+        """Returns a SpiffWorkflow specification for the given process_instance spec, using the files provided."""
         parser = ProcessInstanceProcessor.get_parser()

         for file in files:
@@ -1031,7 +1033,7 @@ class ProcessInstanceProcessor:
                 raise ApiError(
                     error_code="invalid_xml",
                     message=f"'{file.name}' is not a valid xml file." + str(xse),
-                )
+                ) from xse
         if (
             process_model_info.primary_process_id is None
             or process_model_info.primary_process_id == ""
@@ -1108,8 +1110,10 @@ class ProcessInstanceProcessor:
         if not bpmn_message.correlations:
             raise ApiError(
                 "message_correlations_missing",
-                "Could not find any message correlations bpmn_message:"
-                f" {bpmn_message.name}",
+                (
+                    "Could not find any message correlations bpmn_message:"
+                    f" {bpmn_message.name}"
+                ),
             )

         message_correlations = []
@@ -1129,8 +1133,10 @@ class ProcessInstanceProcessor:
             if message_correlation_property is None:
                 raise ApiError(
                     "message_correlations_missing_from_process",
-                    "Could not find a known message correlation with"
-                    f" identifier:{message_correlation_property_identifier}",
+                    (
+                        "Could not find a known message correlation with"
+                        f" identifier:{message_correlation_property_identifier}"
+                    ),
                 )
             message_correlations.append(
                 {
@@ -1193,8 +1199,10 @@ class ProcessInstanceProcessor:
         if message_model is None:
             raise ApiError(
                 "invalid_message_name",
-                "Invalid message name:"
-                f" {waiting_task.task_spec.event_definition.name}.",
+                (
+                    "Invalid message name:"
+                    f" {waiting_task.task_spec.event_definition.name}."
+                ),
             )

         # Ensure we are only creating one message instance for each waiting message
@@ -1479,8 +1487,7 @@ class ProcessInstanceProcessor:
         return self.bpmn_process_instance.get_ready_user_tasks()  # type: ignore

     def get_current_user_tasks(self) -> list[SpiffTask]:
-        """Return a list of all user tasks that are READY or COMPLETE and are parallel to the READY Task.
-        """
+        """Return a list of all user tasks that are READY or COMPLETE and are parallel to the READY Task."""
         ready_tasks = self.bpmn_process_instance.get_ready_user_tasks()
         additional_tasks = []
         if len(ready_tasks) > 0:
@@ -1537,8 +1544,7 @@ class ProcessInstanceProcessor:
         return None

     def find_spec_and_field(self, spec_name: str, field_id: Union[str, int]) -> Any:
-        """Tracks down a form field by name in the process_instance spec(s), Returns a tuple of the task, and form.
-        """
+        """Tracks down a form field by name in the process_instance spec(s), Returns a tuple of the task, and form."""
         process_instances = [self.bpmn_process_instance]
         for task in self.bpmn_process_instance.get_ready_user_tasks():
             if task.process_instance not in process_instances:

View File

@@ -312,9 +312,9 @@ class ProcessInstanceReportService:
             process_instance_dict = process_instance["ProcessInstanceModel"].serialized
             for metadata_column in metadata_columns:
                 if metadata_column["accessor"] not in process_instance_dict:
-                    process_instance_dict[
-                        metadata_column["accessor"]
-                    ] = process_instance[metadata_column["accessor"]]
+                    process_instance_dict[metadata_column["accessor"]] = (
+                        process_instance[metadata_column["accessor"]]
+                    )
             results.append(process_instance_dict)

         return results

View File

@@ -224,8 +224,7 @@ class ProcessInstanceService:
     @staticmethod
     def extract_form_data(latest_data: dict, task: SpiffTask) -> dict:
-        """Extracts data from the latest_data that is directly related to the form that is being submitted.
-        """
+        """Extracts data from the latest_data that is directly related to the form that is being submitted."""
         data = {}

         if hasattr(task.task_spec, "form"):

View File

@@ -5,7 +5,6 @@ from datetime import datetime
 from typing import List
 from typing import Optional

-from flask_bpmn.models.db import db
 from lxml import etree  # type: ignore
 from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator  # type: ignore
@@ -336,8 +335,7 @@ class SpecFileService(FileSystemService):
     @staticmethod
     def update_message_cache(ref: SpecReference) -> None:
-        """Assure we have a record in the database of all possible message ids and names.
-        """
+        """Assure we have a record in the database of all possible message ids and names."""
         for message_model_identifier in ref.messages.keys():
             message_model = MessageModel.query.filter_by(
                 identifier=message_model_identifier

View File

@@ -40,8 +40,7 @@ def load_test_spec(
     bpmn_file_name: Optional[str] = None,
     process_model_source_directory: Optional[str] = None,
 ) -> ProcessModelInfo:
-    """Loads a bpmn file into the process model dir based on a directory in tests/data.
-    """
+    """Loads a bpmn file into the process model dir based on a directory in tests/data."""
     if process_model_source_directory is None:
         raise Exception("You must inclode a `process_model_source_directory`.")

View File

@@ -3103,8 +3103,7 @@ class TestProcessApi(BaseTest):
         with_db_and_bpmn_file_cleanup: None,
         with_super_admin_user: UserModel,
     ) -> None:
-        """Test_can_get_process_instance_list_with_report_metadata_and_process_initator.
-        """
+        """Test_can_get_process_instance_list_with_report_metadata_and_process_initator."""
        user_one = self.create_user_with_permission(username="user_one")

        process_model = load_test_spec(

View File

@@ -5,7 +5,6 @@ import sys
 import pytest
 from flask import Flask
 from flask.testing import FlaskClient
-from flask_bpmn.models.db import db
 from lxml import etree  # type: ignore
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@@ -139,8 +138,7 @@ class TestSpecFileService(BaseTest):
         with_db_and_bpmn_file_cleanup: None,
         with_super_admin_user: UserModel,
     ) -> None:
-        """When a BPMN processes identifier is changed in a file, the old id is removed from the cache.
-        """
+        """When a BPMN processes identifier is changed in a file, the old id is removed from the cache."""
         old_identifier = "ye_old_identifier"
         process_id_lookup = SpecReferenceCache(
             identifier=old_identifier,