Merge remote-tracking branch 'origin/main' into feature/typeahead_in_custom_widgets

commit 27bd66d93b
Author: jasquat
Date: 2023-06-01 10:43:37 -04:00
123 changed files with 69 additions and 695 deletions

View File

@@ -49,7 +49,7 @@ function run_pre_commmit() {
 for react_project in "${react_projects[@]}" ; do
   # if pre, only do stuff when there are changes
-  if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$react_project")" ]]; then
+  if [[ -n "$(git status --porcelain "$react_project")" ]]; then
     pushd "$react_project"
     npm run lint:fix
     popd
@@ -57,6 +57,7 @@ for react_project in "${react_projects[@]}" ; do
 done
 for python_project in "${python_projects[@]}" ; do
+  # if pre, only do stuff when there are changes
   if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$python_project")" ]]; then
     pushd "$python_project"
     run_autofixers || run_autofixers

View File

@@ -35,37 +35,44 @@ matches_filename_pattern() {
 remove_useless_comments() {
   local file_name="$1"
-  echo "grepping"
-  matches=$(ggrep --group-separator=HOTSEP -B 1 -E '^\s*"""' "$file_name" || echo '')
-  if [[ -n "$matches" ]]; then
-    matches="${matches}\nHOTSEP"
-    echo -e "$matches"
-    while read -d'HOTSEP' -r match || [ -n "$match" ]; do
-      echo "match: ${match}"
-      if [[ -n "$match" ]]; then
-        code_line_of_match=$(head -n 1 <<< "$match")
-        comment_line_of_match=$(sed -n '2 p' <<< "$match")
-        echo "code_line_of_match: ${code_line_of_match}"
-        comment_line_of_match=$(sed -n '2 p' <<< "$match")
-        echo "comment_line_of_match: ${comment_line_of_match}"
-        comment_contents=$(hot_sed -E 's/^\s*"""(.*)\.""".*$/\1/' <<< "$comment_line_of_match")
-        echo "comment_contents: ${comment_contents}"
-        if grep -Eiq "^\s*(def|class) ${comment_contents}\(" <<< "$code_line_of_match"; then
-          # Remove line from file matching comment_line
-          hot_sed -i "/${comment_line_of_match}/d" "$file_name"
-        fi
-      fi
-    done <<< $matches
-  fi
+  # echo "grepping"
+  # matches=$(grep --group-separator=HOTSEP -B 1 -E '^\s*"""' "$file_name" || echo '')
+  # if [[ -n "$matches" ]]; then
+  #   matches="${matches}\nHOTSEP"
+  #   echo -e "$matches"
+  #   while read -d'HOTSEP' -r match || [ -n "$match" ]; do
+  #     echo "match: ${match}"
+  #     if [[ -n "$match" ]]; then
+  #       code_line_of_match=$(head -n 1 <<< "$match")
+  #       echo "code_line_of_match: ${code_line_of_match}"
+  #       comment_line_of_match=$(sed -n '2 p' <<< "$match")
+  #       echo "comment_line_of_match: ${comment_line_of_match}"
+  #       comment_contents=$(hot_sed -E 's/^\s*"""(.*)\.""".*$/\1/' <<< "$comment_line_of_match")
+  #       echo "comment_contents: ${comment_contents}"
+  #       if grep -Eiq "^class.*${comment_contents}\(Exception\)" <<< "$code_line_of_match"; then
+  #         hot_sed -i "s/^(\s*)[^\s]*${comment_line_of_match}.*/\1pass/" "$file_name"
+  #       fi
+  #       # if grep -Eiq "^\s*(def|class) ${comment_contents}\(" <<< "$code_line_of_match"; then
+  #       #   # Remove line from file matching comment_line
+  #       #   hot_sed -i "/${comment_line_of_match}/d" "$file_name"
+  #       # fi
+  #     fi
+  #   done <<< $matches
+  # fi
+  # matches=$(grep -E '\s*(def|class) ' "$file_name" || echo '')
+  # if [[ -n "$matches" ]]; then
+  # fi
+  sed -Ei 's/^(\s*)"""[A-Z]\w*Error\."""/\1pass/' "$file_name"
+  sed -Ei '/^\s*"""[A-Z]\w*\."""/d' "$file_name"
 }
 # Process each Python file in the "src" and "tests" directories
 for file in $(find src tests -type f -name '*.py'); do
   # Read the first line of the file
-  if grep -Eq '/logging_service' <<< "$file"; then
+  # if grep -Eq '/logging_service' <<< "$file"; then
   echo "processing file that we hand picked for debugging: ${file}"
   remove_useless_comments "$file"
-  fi
+  # fi
   # this is already done
   # if [ -s "$file" ]; then
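Note (not part of the diff): the two sed rules that replace the commented-out grep/while approach only touch one-line docstrings that merely restate a class or function name. A minimal before/after sketch, assuming a hypothetical model file:

# before
class WidgetModel(SpiffworkflowBaseDBModel):
    """WidgetModel."""
    id: int = db.Column(db.Integer, primary_key=True)

class WidgetNotFoundError(Exception):
    """WidgetNotFoundError."""

# after: the first rule rewrites a name-only "SomethingError." docstring to `pass`
# so the otherwise-empty class still parses; the second rule deletes other
# name-only docstrings outright
class WidgetModel(SpiffworkflowBaseDBModel):
    id: int = db.Column(db.Integer, primary_key=True)

class WidgetNotFoundError(Exception):
    pass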

View File

@@ -33,10 +33,7 @@ from spiffworkflow_backend.services.background_processing_service import Backgro
 class MyJSONEncoder(DefaultJSONProvider):
-    """MyJSONEncoder."""
     def default(self, obj: Any) -> Any:
-        """Default."""
         if hasattr(obj, "serialized"):
             return obj.serialized
         elif isinstance(obj, sqlalchemy.engine.row.Row):  # type: ignore
@@ -56,13 +53,11 @@ class MyJSONEncoder(DefaultJSONProvider):
         return super().default(obj)
     def dumps(self, obj: Any, **kwargs: Any) -> Any:
-        """Dumps."""
         kwargs.setdefault("default", self.default)
         return super().dumps(obj, **kwargs)
 def start_scheduler(app: flask.app.Flask, scheduler_class: BaseScheduler = BackgroundScheduler) -> None:
-    """Start_scheduler."""
     scheduler = scheduler_class()
     # TODO: polling intervals for messages job
@@ -120,7 +115,6 @@ class NoOpCipher:
 def create_app() -> flask.app.Flask:
-    """Create_app."""
     faulthandler.enable()
     # We need to create the sqlite database in a known location.
@@ -207,7 +201,6 @@ def _setup_prometheus_metrics(app: flask.app.Flask, connexion_app: connexion.app
 def get_hacked_up_app_for_script() -> flask.app.Flask:
-    """Get_hacked_up_app_for_script."""
     os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "local_development"
     flask_env_key = "FLASK_SESSION_SECRET_KEY"
     os.environ[flask_env_key] = "whatevs"
@@ -245,13 +238,11 @@ def traces_sampler(sampling_context: Any) -> Any:
 def configure_sentry(app: flask.app.Flask) -> None:
-    """Configure_sentry."""
     import sentry_sdk
     from sentry_sdk.integrations.flask import FlaskIntegration
     # get rid of NotFound errors
     def before_send(event: Any, hint: Any) -> Any:
-        """Before_send."""
         if "exc_info" in hint:
             _exc_type, exc_value, _tb = hint["exc_info"]
             # NotFound is mostly from web crawlers
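For orientation (not visible in this hunk): a provider subclass like MyJSONEncoder only takes effect once it is attached to the Flask app. A minimal sketch, assuming Flask 2.2+'s JSON provider API; whether create_app wires it exactly this way is not shown in the diff:

from typing import Any

import flask
from flask.json.provider import DefaultJSONProvider


class MyJSONEncoder(DefaultJSONProvider):
    def default(self, obj: Any) -> Any:
        # objects exposing a `serialized` property are rendered as that value
        if hasattr(obj, "serialized"):
            return obj.serialized
        return super().default(obj)


app = flask.Flask(__name__)
app.json = MyJSONEncoder(app)  # jsonify() and automatic JSON responses now use default() above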

View File

@@ -59,7 +59,6 @@ def setup_database_configs(app: Flask) -> None:
 def load_config_file(app: Flask, env_config_module: str) -> None:
-    """Load_config_file."""
     try:
         app.config.from_object(env_config_module)
         print(f"loaded config: {env_config_module}")
@@ -119,7 +118,6 @@ def _check_for_incompatible_frontend_and_backend_urls(app: Flask) -> None:
 def setup_config(app: Flask) -> None:
-    """Setup_config."""
     # ensure the instance folder exists
     try:
         os.makedirs(app.instance_path)

View File

@@ -1,4 +1,3 @@
-"""Development."""
 from os import environ
 SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(

View File

@@ -216,7 +216,6 @@ class ApiError(Exception):
 def set_user_sentry_context() -> None:
-    """Set_user_sentry_context."""
     try:
         username = g.user.username
     except Exception:

View File

@@ -1,5 +1,2 @@
-"""Process_entity_not_found."""
 class ProcessEntityNotFoundError(Exception):
-    """ProcessEntityNotFoundError."""
+    pass

View File

@@ -10,14 +10,10 @@ IdToProcessGroupMapping = NewType("IdToProcessGroupMapping", dict[str, "ProcessG
 class ProcessGroupLite(TypedDict):
-    """ProcessGroupLite."""
     id: str
     display_name: str
 class ProcessGroupLitesWithCache(TypedDict):
-    """ProcessGroupLitesWithCache."""
     cache: dict[str, "ProcessGroup"]
     process_groups: list[ProcessGroupLite]

View File

@@ -1,4 +1,3 @@
-"""Message_correlation."""
 from dataclasses import dataclass
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel

View File

@@ -15,8 +15,6 @@ migrate = Migrate()
 class SpiffworkflowBaseDBModel(db.Model):  # type: ignore
-    """SpiffworkflowBaseDBModel."""
     __abstract__ = True
     @classmethod
@@ -39,7 +37,6 @@ class SpiffworkflowBaseDBModel(db.Model):  # type: ignore
         return result
     def validate_enum_field(self, key: str, value: Any, enum_variable: enum.EnumMeta) -> Any:
-        """Validate_enum_field."""
         try:
             m_type = getattr(enum_variable, value, None)
         except Exception as e:

View File

@@ -10,8 +10,6 @@ from spiffworkflow_backend.models.spec_reference import SpecReference
 class FileType(SpiffEnum):
-    """FileType."""
     bpmn = "bpmn"
     csv = "csv"
     dmn = "dmn"
@@ -62,8 +60,6 @@ CONTENT_TYPES = {
 @dataclass(order=True)
 class File:
-    """File."""
     sort_index: str = field(init=False)
     content_type: str
@@ -89,7 +85,6 @@ class File:
         last_modified: datetime,
         file_size: int,
     ) -> File:
-        """From_file_system."""
         instance = cls(
             name=file_name,
             content_type=content_type,

View File

@@ -21,8 +21,6 @@ if TYPE_CHECKING:
 @dataclass
 class HumanTaskModel(SpiffworkflowBaseDBModel):
-    """HumanTaskModel."""
     __tablename__ = "human_task"
     id: int = db.Column(db.Integer, primary_key=True)
@@ -65,7 +63,6 @@ class HumanTaskModel(SpiffworkflowBaseDBModel):
     @classmethod
     def to_task(cls, task: HumanTaskModel) -> Task:
-        """To_task."""
         can_complete = False
         for user in task.human_task_users:
             if user.user_id == g.user.id:

View File

@@ -13,8 +13,6 @@ from spiffworkflow_backend.models.user import UserModel
 @dataclass
 class HumanTaskUserModel(SpiffworkflowBaseDBModel):
-    """HumanTaskUserModel."""
     __tablename__ = "human_task_user"
     __table_args__ = (

View File

@@ -23,15 +23,11 @@ if TYPE_CHECKING:
 class MessageTypes(enum.Enum):
-    """MessageTypes."""
     send = "send"
     receive = "receive"
 class MessageStatuses(enum.Enum):
-    """MessageStatuses."""
     ready = "ready"
     running = "running"
     completed = "completed"
@@ -67,12 +63,10 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
     @validates("message_type")
     def validate_message_type(self, key: str, value: Any) -> Any:
-        """Validate_message_type."""
         return self.validate_enum_field(key, value, MessageTypes)
     @validates("status")
     def validate_status(self, key: str, value: Any) -> Any:
-        """Validate_status."""
         return self.validate_enum_field(key, value, MessageStatuses)
     def correlates(self, other: Any, expression_engine: PythonScriptEngine) -> bool:
@@ -151,7 +145,6 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
 def ensure_failure_cause_is_set_if_message_instance_failed(
     session: Any, _flush_context: Any | None, _instances: Any | None
 ) -> None:
-    """Ensure_failure_cause_is_set_if_message_instance_failed."""
     for instance in session.new:
         if isinstance(instance, MessageInstanceModel):
             if instance.status == "failed" and instance.failure_cause is None:
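Aside (an assumption, not shown in this hunk): the (session, _flush_context, _instances) signature of ensure_failure_cause_is_set_if_message_instance_failed matches SQLAlchemy's before_flush session event, so it is presumably registered along these lines:

from sqlalchemy import event
from sqlalchemy.orm import Session

# run the guard before every flush so a failed MessageInstanceModel without a
# failure_cause raises instead of being written to the database
event.listen(Session, "before_flush", ensure_failure_cause_is_set_if_message_instance_failed)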

View File

@@ -1,4 +1,3 @@
-"""Message_correlation."""
 from dataclasses import dataclass
 from sqlalchemy import ForeignKey

View File

@@ -1,11 +1,8 @@
-"""Message_correlation_property."""
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
 from spiffworkflow_backend.models.db import db
 class MessageTriggerableProcessModel(SpiffworkflowBaseDBModel):
-    """MessageTriggerableProcessModel."""
     __tablename__ = "message_triggerable_process_model"
     id = db.Column(db.Integer, primary_key=True)

View File

@@ -1,4 +1,3 @@
-"""PermissionAssignment."""
 import enum
 from typing import Any
@@ -12,16 +11,12 @@ from spiffworkflow_backend.models.principal import PrincipalModel
 class PermitDeny(enum.Enum):
-    """PermitDeny."""
     # permit, aka grant
     permit = "permit"
     deny = "deny"
 class Permission(enum.Enum):
-    """Permission."""
     # from original requirements
     # instantiate = 1
     # administer = 2
@@ -34,8 +29,6 @@ class Permission(enum.Enum):
 class PermissionAssignmentModel(SpiffworkflowBaseDBModel):
-    """PermissionAssignmentModel."""
     __tablename__ = "permission_assignment"
     __table_args__ = (
         db.UniqueConstraint(
@@ -53,10 +46,8 @@ class PermissionAssignmentModel(SpiffworkflowBaseDBModel):
     @validates("grant_type")
     def validate_grant_type(self, key: str, value: str) -> Any:
-        """Validate_grant_type."""
         return self.validate_enum_field(key, value, PermitDeny)
     @validates("permission")
     def validate_permission(self, key: str, value: str) -> Any:
-        """Validate_permission."""
         return self.validate_enum_field(key, value, Permission)

View File

@@ -1,4 +1,3 @@
-"""PermissionTarget."""
 import re
 from dataclasses import dataclass
@@ -9,13 +8,11 @@ from spiffworkflow_backend.models.db import db
 class InvalidPermissionTargetUriError(Exception):
-    """InvalidPermissionTargetUriError."""
+    pass
 @dataclass
 class PermissionTargetModel(SpiffworkflowBaseDBModel):
-    """PermissionTargetModel."""
     URI_ALL = "/%"
     __tablename__ = "permission_target"
@@ -32,7 +29,6 @@ class PermissionTargetModel(SpiffworkflowBaseDBModel):
     @validates("uri")
     def validate_uri(self, key: str, value: str) -> str:
-        """Validate_uri."""
         if re.search(r"%.", value):
             raise InvalidPermissionTargetUriError(f"Wildcard must appear at end: {value}")
         return value

View File

@@ -11,17 +11,15 @@ from spiffworkflow_backend.models.user import UserModel
 class DataValidityError(Exception):
-    """DataValidityError."""
+    pass
 class MissingPrincipalError(DataValidityError):
-    """MissingPrincipalError."""
+    pass
 @dataclass
 class PrincipalModel(SpiffworkflowBaseDBModel):
-    """PrincipalModel."""
     __tablename__ = "principal"
     __table_args__ = (CheckConstraint("NOT(user_id IS NULL AND group_id IS NULL)"),)

View File

@@ -1,4 +1,3 @@
-"""ProcessCaller_model."""
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
 from spiffworkflow_backend.models.db import db

View File

@@ -16,8 +16,6 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
 @dataclass(order=True)
 class ProcessGroup:
-    """ProcessGroup."""
     sort_index: str = field(init=False)
     id: str  # A unique string name, lower case, under scores (ie, 'my_group')
@@ -43,22 +41,16 @@ class ProcessGroup:
     @property
     def serialized(self) -> dict:
-        """Serialized."""
         original_dict = dataclasses.asdict(self)
         return {x: original_dict[x] for x in original_dict if x not in ["sort_index"]}
     # for use with os.path.join, so it can work on windows
     def id_for_file_path(self) -> str:
-        """Id_for_file_path."""
         return self.id.replace("/", os.sep)
 class ProcessGroupSchema(Schema):
-    """ProcessGroupSchema."""
     class Meta:
-        """Meta."""
         model = ProcessGroup
         fields = [
             "id",
@@ -79,5 +71,4 @@ class ProcessGroupSchema(Schema):
     @post_load
     def make_process_group(self, data: dict[str, str | bool | int], **kwargs: dict) -> ProcessGroup:
-        """Make_process_group."""
         return ProcessGroup(**data)  # type: ignore

View File

@@ -23,20 +23,18 @@ from spiffworkflow_backend.models.user import UserModel
 class ProcessInstanceNotFoundError(Exception):
-    """ProcessInstanceNotFoundError."""
+    pass
 class ProcessInstanceTaskDataCannotBeUpdatedError(Exception):
-    """ProcessInstanceTaskDataCannotBeUpdatedError."""
+    pass
 class ProcessInstanceCannotBeDeletedError(Exception):
-    """ProcessInstanceCannotBeDeletedError."""
+    pass
 class ProcessInstanceStatus(SpiffEnum):
-    """ProcessInstanceStatus."""
     not_started = "not_started"
     user_input_required = "user_input_required"
     waiting = "waiting"
@@ -47,8 +45,6 @@ class ProcessInstanceStatus(SpiffEnum):
 class ProcessInstanceModel(SpiffworkflowBaseDBModel):
-    """ProcessInstanceModel."""
     __tablename__ = "process_instance"
     __allow_unmapped__ = True
     id: int = db.Column(db.Integer, primary_key=True)
@@ -144,11 +140,9 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     @validates("status")
     def validate_status(self, key: str, value: Any) -> Any:
-        """Validate_status."""
         return self.validate_enum_field(key, value, ProcessInstanceStatus)
     def can_submit_task(self) -> bool:
-        """Can_submit_task."""
         return not self.has_terminal_status() and self.status != "suspended"
     def can_receive_message(self) -> bool:
@@ -156,7 +150,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
         return not self.has_terminal_status() and self.status != "suspended"
     def has_terminal_status(self) -> bool:
-        """Has_terminal_status."""
         return self.status in self.terminal_statuses()
     @classmethod
@@ -174,11 +167,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
 class ProcessInstanceModelSchema(Schema):
-    """ProcessInstanceModelSchema."""
     class Meta:
-        """Meta."""
         model = ProcessInstanceModel
         fields = [
             "id",
@@ -196,13 +185,10 @@ class ProcessInstanceModelSchema(Schema):
     status = marshmallow.fields.Method("get_status", dump_only=True)
     def get_status(self, obj: ProcessInstanceModel) -> str:
-        """Get_status."""
         return obj.status
 class ProcessInstanceApi:
-    """ProcessInstanceApi."""
     def __init__(
         self,
         id: int,
@@ -222,11 +208,7 @@ class ProcessInstanceApi:
 class ProcessInstanceApiSchema(Schema):
-    """ProcessInstanceApiSchema."""
     class Meta:
-        """Meta."""
         model = ProcessInstanceApi
         fields = [
             "id",
@@ -243,7 +225,6 @@ class ProcessInstanceApiSchema(Schema):
     @marshmallow.post_load
     def make_process_instance(self, data: dict[str, Any], **kwargs: dict) -> ProcessInstanceApi:
-        """Make_process_instance."""
         keys = [
             "id",
             "status",

View File

@@ -9,8 +9,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 @dataclass
 class ProcessInstanceQueueModel(SpiffworkflowBaseDBModel):
-    """ProcessInstanceQueueModel."""
     __tablename__ = "process_instance_queue"
     id: int = db.Column(db.Integer, primary_key=True)

View File

@@ -1,4 +1,3 @@
-"""Process_instance."""
 from __future__ import annotations
 import sys
@@ -48,20 +47,16 @@ class Report(TypedDict):
 class ProcessInstanceReportAlreadyExistsError(Exception):
-    """ProcessInstanceReportAlreadyExistsError."""
+    pass
 class ProcessInstanceReportResult(TypedDict):
-    """ProcessInstanceReportResult."""
     report_metadata: ReportMetadata
     results: list[dict]
 # https://stackoverflow.com/a/56842689/6090676
 class Reversor:
-    """Reversor."""
     def __init__(self, obj: Any):
         """__init__."""
         self.obj = obj
@@ -77,8 +72,6 @@ class Reversor:
 @dataclass
 class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
-    """ProcessInstanceReportModel."""
     __tablename__ = "process_instance_report"
     __table_args__ = (
         db.UniqueConstraint(
@@ -105,7 +98,6 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
     @classmethod
     def default_order_by(cls) -> list[str]:
-        """Default_order_by."""
         return ["-start_in_seconds", "-id"]
     @classmethod

View File

@@ -15,16 +15,12 @@ from spiffworkflow_backend.models.file import File
 class NotificationType(enum.Enum):
-    """NotificationType."""
     fault = "fault"
     suspend = "suspend"
 @dataclass(order=True)
 class ProcessModelInfo:
-    """ProcessModelInfo."""
     sort_index: str = field(init=False)
     id: str
@@ -60,12 +56,10 @@ class ProcessModelInfo:
     # this is because we have to store ids in the database, and we want the same
     # database snapshot to work on any OS.
     def id_for_file_path(self) -> str:
-        """Id_for_file_path."""
         return self.id.replace("/", os.sep)
     @classmethod
     def modify_process_identifier_for_path_param(cls, identifier: str) -> str:
-        """Identifier."""
         if "\\" in identifier:
             raise Exception(f"Found backslash in identifier: {identifier}")
@@ -73,11 +67,7 @@ class ProcessModelInfo:
 class ProcessModelInfoSchema(Schema):
-    """ProcessModelInfoSchema."""
     class Meta:
-        """Meta."""
         model = ProcessModelInfo
     id = marshmallow.fields.String(required=True)
@@ -99,5 +89,4 @@ class ProcessModelInfoSchema(Schema):
     @post_load
     def make_spec(self, data: dict[str, str | bool | int | NotificationType], **_: Any) -> ProcessModelInfo:
-        """Make_spec."""
         return ProcessModelInfo(**data)  # type: ignore

View File

@@ -6,8 +6,6 @@ from spiffworkflow_backend.models.db import db
 @dataclass
 class ProcessModelCycleModel(SpiffworkflowBaseDBModel):
-    """ProcessInstanceQueueModel."""
     __tablename__ = "process_model_cycle"
     id: int = db.Column(db.Integer, primary_key=True)

View File

@@ -12,8 +12,6 @@ from spiffworkflow_backend.models.db import db
 @dataclass()
 class RefreshTokenModel(SpiffworkflowBaseDBModel):
-    """RefreshTokenModel."""
     __tablename__ = "refresh_token"
     id: int = db.Column(db.Integer, primary_key=True)

View File

@@ -11,8 +11,6 @@ from spiffworkflow_backend.models.user import UserModel
 @dataclass()
 class SecretModel(SpiffworkflowBaseDBModel):
-    """SecretModel."""
     __tablename__ = "secret"
     id: int = db.Column(db.Integer, primary_key=True)
     key: str = db.Column(db.String(50), unique=True, nullable=False)
@@ -32,10 +30,6 @@ class SecretModel(SpiffworkflowBaseDBModel):
 class SecretModelSchema(Schema):
-    """SecretModelSchema."""
     class Meta:
-        """Meta."""
         model = SecretModel
         fields = ["key", "value", "user_id"]

View File

@@ -1,4 +1,3 @@
-"""Message_model."""
 from dataclasses import dataclass
 from flask_marshmallow import Schema  # type: ignore
@@ -10,7 +9,7 @@ from spiffworkflow_backend.models.db import db
 class SpecReferenceNotFoundError(Exception):
-    """SpecReferenceNotFoundError."""
+    pass
 @dataclass()
@@ -56,7 +55,6 @@ class SpecReferenceCache(SpiffworkflowBaseDBModel):
     @classmethod
     def from_spec_reference(cls, ref: SpecReference) -> "SpecReferenceCache":
-        """From_spec_reference."""
         return cls(
             identifier=ref.identifier,
             display_name=ref.display_name,
@@ -71,11 +69,7 @@ class SpecReferenceCache(SpiffworkflowBaseDBModel):
 class SpecReferenceSchema(Schema):  # type: ignore
-    """FileSchema."""
     class Meta:
-        """Meta."""
         model = SpecReference
         fields = [
             "identifier",

View File

@@ -25,8 +25,6 @@ class TaskNotFoundError(Exception):
 class MultiInstanceType(enum.Enum):
-    """MultiInstanceType."""
     none = "none"
     looping = "looping"
     parallel = "parallel"
@@ -93,8 +91,6 @@ class TaskModel(SpiffworkflowBaseDBModel):
 class Task:
-    """Task."""
     HUMAN_TASK_TYPES = ["User Task", "Manual Task"]
     def __init__(
@@ -212,38 +208,22 @@ class Task:
 class OptionSchema(Schema):
-    """OptionSchema."""
     class Meta:
-        """Meta."""
         fields = ["id", "name", "data"]
 class ValidationSchema(Schema):
-    """ValidationSchema."""
     class Meta:
-        """Meta."""
         fields = ["name", "config"]
 class FormFieldPropertySchema(Schema):
-    """FormFieldPropertySchema."""
     class Meta:
-        """Meta."""
         fields = ["id", "value"]
 class FormFieldSchema(Schema):
-    """FormFieldSchema."""
     class Meta:
-        """Meta."""
         fields = [
             "id",
             "type",
@@ -269,11 +249,7 @@ class FormFieldSchema(Schema):
 class TaskSchema(Schema):
-    """TaskSchema."""
     class Meta:
-        """Meta."""
         fields = [
             "id",
             "name",
@@ -304,5 +280,4 @@ class TaskSchema(Schema):
     @marshmallow.post_load
     def make_task(self, data: dict[str, Any], **kwargs: dict) -> Task:
-        """Make_task."""
         return Task(**data)

View File

@@ -94,11 +94,7 @@ class UserModel(SpiffworkflowBaseDBModel):
 class UserModelSchema(Schema):
-    """UserModelSchema."""
     class Meta:
-        """Meta."""
         model = UserModel
         # load_instance = True
         # include_relationships = False

View File

@@ -1,4 +1,3 @@
-"""UserGroupAssignment."""
 from sqlalchemy import ForeignKey
 from sqlalchemy.orm import relationship
@@ -9,8 +8,6 @@ from spiffworkflow_backend.models.user import UserModel
 class UserGroupAssignmentModel(SpiffworkflowBaseDBModel):
-    """UserGroupAssignmentModel."""
     __tablename__ = "user_group_assignment"
     __table_args__ = (db.UniqueConstraint("user_id", "group_id", name="user_group_assignment_unique"),)

View File

@@ -1,4 +1,3 @@
-"""UserGroupAssignment."""
 from sqlalchemy import ForeignKey
 from sqlalchemy.orm import relationship
@@ -24,7 +23,6 @@ class UserGroupAssignmentWaitingModel(SpiffworkflowBaseDBModel):
     group = relationship("GroupModel", overlaps="groups,user_group_assignments_waiting,users")  # type: ignore
     def is_match_all(self) -> bool:
-        """Is_match_all."""
         if self.username == self.MATCH_ALL_USERS:
             return True
         return False

View File

@@ -6,6 +6,5 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 def status() -> Response:
-    """Status."""
     ProcessInstanceModel.query.filter().first()
     return make_response({"ok": True}, 200)

View File

@@ -21,7 +21,6 @@ def message_instance_list(
     page: int = 1,
     per_page: int = 100,
 ) -> flask.wrappers.Response:
-    """Message_instance_list."""
     # to make sure the process instance exists
     message_instances_query = MessageInstanceModel.query

View File

@@ -122,7 +122,6 @@ def token() -> dict:
 @openid_blueprint.route("/end_session", methods=["GET"])
 def end_session() -> Response:
-    """Logout."""
     redirect_url = request.args.get("post_logout_redirect_uri", "http://localhost")
     request.args.get("id_token_hint")
     return redirect(redirect_url)
@@ -130,7 +129,6 @@ def end_session() -> Response:
 @openid_blueprint.route("/refresh", methods=["POST"])
 def refresh() -> str:
-    """Refresh."""
     return ""

View File

@@ -30,7 +30,6 @@ process_api_blueprint = Blueprint("process_api", __name__)
 def permissions_check(body: dict[str, dict[str, list[str]]]) -> flask.wrappers.Response:
-    """Permissions_check."""
     if "requests_to_check" not in body:
         raise (
             ApiError(
@@ -82,7 +81,6 @@ def _process_data_fetcher(
     process_data_identifier: str,
     download_file_data: bool,
 ) -> flask.wrappers.Response:
-    """Process_data_show."""
     if download_file_data:
         file_data = ProcessInstanceFileDataModel.query.filter_by(
             digest=process_data_identifier,
@@ -128,7 +126,6 @@ def process_data_show(
     process_data_identifier: str,
     modified_process_model_identifier: str,
 ) -> flask.wrappers.Response:
-    """Process_data_show."""
     return _process_data_fetcher(
         process_instance_id,
         process_data_identifier,
@@ -141,7 +138,6 @@ def process_data_file_download(
     process_data_identifier: str,
     modified_process_model_identifier: str,
 ) -> flask.wrappers.Response:
-    """Process_data_file_download."""
     return _process_data_fetcher(
         process_instance_id,
         process_data_identifier,
@@ -155,7 +151,6 @@ def process_data_file_download(
 # test with: ngrok http 7000
 # where 7000 is the port the app is running on locally
 def github_webhook_receive(body: dict) -> Response:
-    """Github_webhook_receive."""
     auth_header = request.headers.get("X-Hub-Signature-256")
     AuthorizationService.verify_sha256_token(auth_header)
     result = GitService.handle_web_hook(body)
@@ -163,7 +158,6 @@ def github_webhook_receive(body: dict) -> Response:
 def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any:
-    """Get_required_parameter_or_raise."""
     return_value = None
     if parameter in post_body:
         return_value = post_body[parameter]
@@ -181,7 +175,6 @@ def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any])
 def _commit_and_push_to_git(message: str) -> None:
-    """Commit_and_push_to_git."""
     if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE"]:
         git_output = GitService.commit(message=message)
         current_app.logger.info(f"git output: {git_output}")
@@ -190,14 +183,12 @@ def _commit_and_push_to_git(message: str) -> None:
 def _un_modify_modified_process_model_id(modified_process_model_identifier: str) -> str:
-    """Un_modify_modified_process_model_id."""
     return modified_process_model_identifier.replace(":", "/")
 def _find_process_instance_by_id_or_raise(
     process_instance_id: int,
 ) -> ProcessInstanceModel:
-    """Find_process_instance_by_id_or_raise."""
     process_instance_query = ProcessInstanceModel.query.filter_by(id=process_instance_id)
     # we had a frustrating session trying to do joins and access columns from two tables. here's some notes for our future selves:
@@ -222,7 +213,6 @@ def _find_process_instance_by_id_or_raise(
 # process_model_id uses forward slashes on all OSes
 # this seems to return an object where process_model.id has backslashes on windows
 def _get_process_model(process_model_id: str) -> ProcessModelInfo:
-    """Get_process_model."""
     process_model = None
     try:
         process_model = ProcessModelService.get_process_model(process_model_id)
@@ -239,7 +229,6 @@ def _get_process_model(process_model_id: str) -> ProcessModelInfo:
 def _find_principal_or_raise() -> PrincipalModel:
-    """Find_principal_or_raise."""
     principal = PrincipalModel.query.filter_by(user_id=g.user.id).first()
     if principal is None:
         raise (

View File

@@ -19,7 +19,6 @@ from spiffworkflow_backend.services.process_model_service import ProcessModelWit
 def process_group_create(body: dict) -> flask.wrappers.Response:
-    """Add_process_group."""
     process_group = ProcessGroup(**body)
     if ProcessModelService.is_process_model_identifier(process_group.id):
@@ -42,7 +41,6 @@ def process_group_create(body: dict) -> flask.wrappers.Response:
 def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response:
-    """Process_group_delete."""
     process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
     try:
@@ -101,7 +99,6 @@ def process_group_list(
 def process_group_show(
     modified_process_group_id: str,
 ) -> Any:
-    """Process_group_show."""
     process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
     try:
         process_group = ProcessModelService.get_process_group(process_group_id)
@@ -119,7 +116,6 @@ def process_group_show(
 def process_group_move(modified_process_group_identifier: str, new_location: str) -> flask.wrappers.Response:
-    """Process_group_move."""
     original_process_group_id = _un_modify_modified_process_model_id(modified_process_group_identifier)
     new_process_group = ProcessModelService.process_group_move(original_process_group_id, new_location)
     _commit_and_push_to_git(

View File

@@ -58,7 +58,6 @@ from spiffworkflow_backend.services.task_service import TaskService
 def process_instance_create(
     modified_process_model_identifier: str,
 ) -> flask.wrappers.Response:
-    """Create_process_instance."""
     process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier)
     process_model = _get_process_model(process_model_identifier)
@@ -86,7 +85,6 @@ def process_instance_run(
     modified_process_model_identifier: str,
     process_instance_id: int,
 ) -> flask.wrappers.Response:
-    """Process_instance_run."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     if process_instance.status != "not_started":
         raise ApiError(
@@ -140,7 +138,6 @@ def process_instance_terminate(
     process_instance_id: int,
     modified_process_model_identifier: str,
 ) -> flask.wrappers.Response:
-    """Process_instance_run."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     processor = ProcessInstanceProcessor(process_instance)
@@ -161,7 +158,6 @@ def process_instance_suspend(
     process_instance_id: int,
     modified_process_model_identifier: str,
 ) -> flask.wrappers.Response:
-    """Process_instance_suspend."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     processor = ProcessInstanceProcessor(process_instance)
@@ -182,7 +178,6 @@ def process_instance_resume(
     process_instance_id: int,
     modified_process_model_identifier: str,
 ) -> flask.wrappers.Response:
-    """Process_instance_resume."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     processor = ProcessInstanceProcessor(process_instance)
@@ -297,7 +292,6 @@ def process_instance_show_for_me(
     process_instance_id: int,
     process_identifier: str | None = None,
 ) -> flask.wrappers.Response:
-    """Process_instance_show_for_me."""
     process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
     return _get_process_instance(
         process_instance=process_instance,
@@ -311,7 +305,6 @@ def process_instance_show(
     process_instance_id: int,
     process_identifier: str | None = None,
 ) -> flask.wrappers.Response:
-    """Create_process_instance."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     return _get_process_instance(
         process_instance=process_instance,
@@ -323,7 +316,6 @@ def process_instance_show(
 def process_instance_delete(
     process_instance_id: int, modified_process_model_identifier: str
 ) -> flask.wrappers.Response:
-    """Create_process_instance."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     if not process_instance.has_terminal_status():
@@ -362,7 +354,6 @@ def process_instance_report_update(
     report_id: int,
     body: dict[str, Any],
 ) -> flask.wrappers.Response:
-    """Process_instance_report_update."""
     process_instance_report = ProcessInstanceReportModel.query.filter_by(
         id=report_id,
         created_by_id=g.user.id,
@@ -383,7 +374,6 @@ def process_instance_report_update(
 def process_instance_report_delete(
     report_id: int,
 ) -> flask.wrappers.Response:
-    """Process_instance_report_delete."""
     process_instance_report = ProcessInstanceReportModel.query.filter_by(
         id=report_id,
         created_by_id=g.user.id,
@@ -408,7 +398,6 @@ def process_instance_task_list_without_task_data_for_me(
     bpmn_process_guid: str | None = None,
     to_task_guid: str | None = None,
 ) -> flask.wrappers.Response:
-    """Process_instance_task_list_without_task_data_for_me."""
     process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
     return process_instance_task_list(
         _modified_process_model_identifier=modified_process_model_identifier,
@@ -426,7 +415,6 @@ def process_instance_task_list_without_task_data(
     bpmn_process_guid: str | None = None,
     to_task_guid: str | None = None,
 ) -> flask.wrappers.Response:
-    """Process_instance_task_list_without_task_data."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     return process_instance_task_list(
         _modified_process_model_identifier=modified_process_model_identifier,
@@ -444,7 +432,6 @@ def process_instance_task_list(
     to_task_guid: str | None = None,
     most_recent_tasks_only: bool = False,
 ) -> flask.wrappers.Response:
-    """Process_instance_task_list."""
     bpmn_process_ids = []
     if bpmn_process_guid:
         bpmn_process = BpmnProcessModel.query.filter_by(guid=bpmn_process_guid).first()
@@ -592,7 +579,6 @@ def process_instance_reset(
 def process_instance_find_by_id(
     process_instance_id: int,
 ) -> flask.wrappers.Response:
-    """Process_instance_find_by_id."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     modified_process_model_identifier = ProcessModelInfo.modify_process_identifier_for_path_param(
         process_instance.process_model_identifier

View File

@@ -39,7 +39,6 @@ from spiffworkflow_backend.services.spec_file_service import SpecFileService
 def process_model_create(
     modified_process_group_id: str, body: dict[str, str | bool | int | None | list]
 ) -> flask.wrappers.Response:
-    """Process_model_create."""
     body_include_list = [
         "id",
         "display_name",
@@ -88,7 +87,6 @@ def process_model_create(
 def process_model_delete(
     modified_process_model_identifier: str,
 ) -> flask.wrappers.Response:
-    """Process_model_delete."""
     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     try:
         ProcessModelService.process_model_delete(process_model_identifier)
@@ -107,7 +105,6 @@ def process_model_update(
     modified_process_model_identifier: str,
     body: dict[str, str | bool | int | None | list],
 ) -> Any:
-    """Process_model_update."""
     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     body_include_list = [
         "display_name",
@@ -143,7 +140,6 @@ def process_model_update(
 def process_model_show(modified_process_model_identifier: str, include_file_references: bool = False) -> Any:
-    """Process_model_show."""
     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     process_model = _get_process_model(process_model_identifier)
     files = sorted(
@@ -167,7 +163,6 @@ def process_model_show(modified_process_model_identifier: str, include_file_refe
 def process_model_move(modified_process_model_identifier: str, new_location: str) -> flask.wrappers.Response:
-    """Process_model_move."""
     original_process_model_id = _un_modify_modified_process_model_id(modified_process_model_identifier)
     new_process_model = ProcessModelService.process_model_move(original_process_model_id, new_location)
     _commit_and_push_to_git(
@@ -179,7 +174,6 @@ def process_model_move(modified_process_model_identifier: str, new_location: str
 def process_model_publish(
     modified_process_model_identifier: str, branch_to_update: str | None = None
 ) -> flask.wrappers.Response:
-    """Process_model_publish."""
     if branch_to_update is None:
         branch_to_update = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"]
     if branch_to_update is None:
@@ -241,7 +235,6 @@ def process_model_file_update(
 def process_model_file_delete(modified_process_model_identifier: str, file_name: str) -> flask.wrappers.Response:
-    """Process_model_file_delete."""
     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     process_model = _get_process_model(process_model_identifier)
@@ -276,7 +269,6 @@ def process_model_file_delete(modified_process_model_identifier: str, file_name:
 def process_model_file_create(
     modified_process_model_identifier: str,
 ) -> flask.wrappers.Response:
-    """Process_model_file_create."""
     message = f"User: {g.user.username} added process model file"
     return _create_or_update_process_model_file(modified_process_model_identifier, message, 201)
@@ -331,7 +323,6 @@ def process_model_test_run(
 def process_model_create_with_natural_language(
     modified_process_group_id: str, body: dict[str, str]
 ) -> flask.wrappers.Response:
-    """Process_model_create_with_natural_language."""
     pattern = re.compile(
         r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that" r" collects (?P<columns>.*)"
     )
@@ -448,7 +439,6 @@ def process_model_create_with_natural_language(
 def _get_file_from_request() -> FileStorage:
-    """Get_file_from_request."""
     request_file: FileStorage | None = connexion.request.files.get("file")
     if not request_file:
         raise ApiError(

View File

@ -21,7 +21,6 @@ from spiffworkflow_backend.services.spec_file_service import SpecFileService
def script_unit_test_create( def script_unit_test_create(
modified_process_model_identifier: str, body: dict[str, str | bool | int] modified_process_model_identifier: str, body: dict[str, str | bool | int]
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Script_unit_test_create."""
bpmn_task_identifier = _get_required_parameter_or_raise("bpmn_task_identifier", body) bpmn_task_identifier = _get_required_parameter_or_raise("bpmn_task_identifier", body)
input_json = _get_required_parameter_or_raise("input_json", body) input_json = _get_required_parameter_or_raise("input_json", body)
expected_output_json = _get_required_parameter_or_raise("expected_output_json", body) expected_output_json = _get_required_parameter_or_raise("expected_output_json", body)
@ -95,7 +94,6 @@ def script_unit_test_create(
def script_unit_test_run( def script_unit_test_run(
modified_process_model_identifier: str, body: dict[str, str | bool | int] modified_process_model_identifier: str, body: dict[str, str | bool | int]
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Script_unit_test_run."""
# FIXME: We should probably clear this somewhere else but this works # FIXME: We should probably clear this somewhere else but this works
current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None

View File

@ -14,7 +14,6 @@ from spiffworkflow_backend.services.user_service import UserService
def secret_show(key: str) -> Response: def secret_show(key: str) -> Response:
"""Secret_show."""
secret = SecretService.get_secret(key) secret = SecretService.get_secret(key)
# normal serialization does not include the secret value, but this is the one endpoint where we want to return the goods # normal serialization does not include the secret value, but this is the one endpoint where we want to return the goods
@ -28,7 +27,6 @@ def secret_list(
page: int = 1, page: int = 1,
per_page: int = 100, per_page: int = 100,
) -> Response: ) -> Response:
"""Secret_list."""
secrets = ( secrets = (
SecretModel.query.order_by(SecretModel.key) SecretModel.query.order_by(SecretModel.key)
.join(UserModel) .join(UserModel)

View File

@ -15,13 +15,11 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskServi
def service_task_list() -> flask.wrappers.Response: def service_task_list() -> flask.wrappers.Response:
"""Service_task_list."""
available_connectors = ServiceTaskService.available_connectors() available_connectors = ServiceTaskService.available_connectors()
return Response(json.dumps(available_connectors), status=200, mimetype="application/json") return Response(json.dumps(available_connectors), status=200, mimetype="application/json")
def authentication_list() -> flask.wrappers.Response: def authentication_list() -> flask.wrappers.Response:
"""Authentication_list."""
available_authentications = ServiceTaskService.authentication_list() available_authentications = ServiceTaskService.authentication_list()
response_json = { response_json = {
"results": available_authentications, "results": available_authentications,
@ -36,7 +34,6 @@ def authentication_callback(
service: str, service: str,
auth_method: str, auth_method: str,
) -> werkzeug.wrappers.Response: ) -> werkzeug.wrappers.Response:
"""Authentication_callback."""
verify_token(request.args.get("token"), force_run=True) verify_token(request.args.get("token"), force_run=True)
response = request.args["response"] response = request.args["response"]
SecretService.update_secret(f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True) SecretService.update_secret(f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True)

View File

@ -61,15 +61,11 @@ from spiffworkflow_backend.services.task_service import TaskService
class TaskDataSelectOption(TypedDict): class TaskDataSelectOption(TypedDict):
"""TaskDataSelectOption."""
value: str value: str
label: str label: str
class ReactJsonSchemaSelectOption(TypedDict): class ReactJsonSchemaSelectOption(TypedDict):
"""ReactJsonSchemaSelectOption."""
type: str type: str
title: str title: str
enum: list[str] enum: list[str]
@ -78,7 +74,6 @@ class ReactJsonSchemaSelectOption(TypedDict):
def task_list_my_tasks( def task_list_my_tasks(
process_instance_id: int | None = None, page: int = 1, per_page: int = 100 process_instance_id: int | None = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Task_list_my_tasks."""
principal = _find_principal_or_raise() principal = _find_principal_or_raise()
assigned_user = aliased(UserModel) assigned_user = aliased(UserModel)
process_initiator_user = aliased(UserModel) process_initiator_user = aliased(UserModel)
@ -142,12 +137,10 @@ def task_list_my_tasks(
def task_list_for_my_open_processes(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: def task_list_for_my_open_processes(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
"""Task_list_for_my_open_processes."""
return _get_tasks(page=page, per_page=per_page) return _get_tasks(page=page, per_page=per_page)
def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
"""Task_list_for_me."""
return _get_tasks( return _get_tasks(
processes_started_by_user=False, processes_started_by_user=False,
has_lane_assignment_id=False, has_lane_assignment_id=False,
@ -159,7 +152,6 @@ def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Respo
def task_list_for_my_groups( def task_list_for_my_groups(
user_group_identifier: str | None = None, page: int = 1, per_page: int = 100 user_group_identifier: str | None = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Task_list_for_my_groups."""
return _get_tasks( return _get_tasks(
user_group_identifier=user_group_identifier, user_group_identifier=user_group_identifier,
processes_started_by_user=False, processes_started_by_user=False,
@ -575,7 +567,6 @@ def task_submit(
body: dict[str, Any], body: dict[str, Any],
save_as_draft: bool = False, save_as_draft: bool = False,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Task_submit_user_data."""
with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"): with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"):
return _task_submit_shared(process_instance_id, task_guid, body, save_as_draft) return _task_submit_shared(process_instance_id, task_guid, body, save_as_draft)
@ -587,7 +578,6 @@ def _get_tasks(
per_page: int = 100, per_page: int = 100,
user_group_identifier: str | None = None, user_group_identifier: str | None = None,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Get_tasks."""
user_id = g.user.id user_id = g.user.id
# use distinct to ensure we only get one row per human task otherwise # use distinct to ensure we only get one row per human task otherwise
@ -680,7 +670,6 @@ def _get_tasks(
def _prepare_form_data(form_file: str, task_model: TaskModel, process_model: ProcessModelInfo) -> dict: def _prepare_form_data(form_file: str, task_model: TaskModel, process_model: ProcessModelInfo) -> dict:
"""Prepare_form_data."""
if task_model.data is None: if task_model.data is None:
return {} return {}
@ -707,7 +696,6 @@ def _prepare_form_data(form_file: str, task_model: TaskModel, process_model: Pro
def _render_jinja_template(unprocessed_template: str, task_model: TaskModel) -> str: def _render_jinja_template(unprocessed_template: str, task_model: TaskModel) -> str:
"""Render_jinja_template."""
jinja_environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True) jinja_environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True)
try: try:
template = jinja_environment.from_string(unprocessed_template) template = jinja_environment.from_string(unprocessed_template)
@ -736,7 +724,6 @@ def _get_spiff_task_from_process_instance(
process_instance: ProcessInstanceModel, process_instance: ProcessInstanceModel,
processor: ProcessInstanceProcessor | None = None, processor: ProcessInstanceProcessor | None = None,
) -> SpiffTask: ) -> SpiffTask:
"""Get_spiff_task_from_process_instance."""
if processor is None: if processor is None:
processor = ProcessInstanceProcessor(process_instance) processor = ProcessInstanceProcessor(process_instance)
task_uuid = uuid.UUID(task_guid) task_uuid = uuid.UUID(task_guid)
@ -755,7 +742,6 @@ def _get_spiff_task_from_process_instance(
# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches # originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches
def _update_form_schema_with_task_data_as_needed(in_dict: dict, task_model: TaskModel) -> None: def _update_form_schema_with_task_data_as_needed(in_dict: dict, task_model: TaskModel) -> None:
"""Update_nested."""
if task_model.data is None: if task_model.data is None:
return None return None
@ -787,7 +773,6 @@ def _update_form_schema_with_task_data_as_needed(in_dict: dict, task_model: Task
def map_function( def map_function(
task_data_select_option: TaskDataSelectOption, task_data_select_option: TaskDataSelectOption,
) -> ReactJsonSchemaSelectOption: ) -> ReactJsonSchemaSelectOption:
"""Map_function."""
return { return {
"type": "string", "type": "string",
"enum": [task_data_select_option["value"]], "enum": [task_data_select_option["value"]],

View File

@ -296,7 +296,6 @@ def login_with_access_token(access_token: str) -> Response:
def login_api() -> Response: def login_api() -> Response:
"""Login_api."""
redirect_url = "/v1.0/login_api_return" redirect_url = "/v1.0/login_api_return"
state = AuthenticationService.generate_state(redirect_url) state = AuthenticationService.generate_state(redirect_url)
login_redirect_url = AuthenticationService().get_login_redirect_url(state.decode("UTF-8"), redirect_url) login_redirect_url = AuthenticationService().get_login_redirect_url(state.decode("UTF-8"), redirect_url)
@ -316,7 +315,6 @@ def login_api_return(code: str, state: str, session_state: str) -> str:
def logout(id_token: str, redirect_url: str | None) -> Response: def logout(id_token: str, redirect_url: str | None) -> Response:
"""Logout."""
if redirect_url is None: if redirect_url is None:
redirect_url = "" redirect_url = ""
tld = current_app.config["THREAD_LOCAL_DATA"] tld = current_app.config["THREAD_LOCAL_DATA"]
@ -325,13 +323,11 @@ def logout(id_token: str, redirect_url: str | None) -> Response:
def logout_return() -> Response: def logout_return() -> Response:
"""Logout_return."""
frontend_url = str(current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"]) frontend_url = str(current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"])
return redirect(f"{frontend_url}/") return redirect(f"{frontend_url}/")
def get_decoded_token(token: str) -> dict | None: def get_decoded_token(token: str) -> dict | None:
"""Get_token_type."""
try: try:
decoded_token = jwt.decode(token, options={"verify_signature": False}) decoded_token = jwt.decode(token, options={"verify_signature": False})
except Exception as e: except Exception as e:
@ -348,7 +344,6 @@ def get_decoded_token(token: str) -> dict | None:
def get_scope(token: str) -> str: def get_scope(token: str) -> str:
"""Get_scope."""
scope = "" scope = ""
decoded_token = jwt.decode(token, options={"verify_signature": False}) decoded_token = jwt.decode(token, options={"verify_signature": False})
if "scope" in decoded_token: if "scope" in decoded_token:
@ -357,7 +352,6 @@ def get_scope(token: str) -> str:
def get_user_from_decoded_internal_token(decoded_token: dict) -> UserModel | None: def get_user_from_decoded_internal_token(decoded_token: dict) -> UserModel | None:
"""Get_user_from_decoded_internal_token."""
sub = decoded_token["sub"] sub = decoded_token["sub"]
parts = sub.split("::") parts = sub.split("::")
service = parts[0].split(":")[1] service = parts[0].split(":")[1]

View File

@ -1,4 +1,3 @@
"""Main."""
import json import json
from typing import Any from typing import Any
from typing import Final from typing import Final
@ -64,7 +63,6 @@ user_blueprint = Blueprint("main", __name__)
# #
@user_blueprint.route("/user/<username>", methods=["DELETE"]) @user_blueprint.route("/user/<username>", methods=["DELETE"])
def delete_user(username: str) -> flask.wrappers.Response: def delete_user(username: str) -> flask.wrappers.Response:
"""Delete_user."""
user = UserModel.query.filter_by(username=username).first() user = UserModel.query.filter_by(username=username).first()
if user is None: if user is None:
raise ( raise (
@ -83,7 +81,6 @@ def delete_user(username: str) -> flask.wrappers.Response:
@user_blueprint.route("/group/<group_name>", methods=["GET"]) @user_blueprint.route("/group/<group_name>", methods=["GET"])
def create_group(group_name: str) -> flask.wrappers.Response: def create_group(group_name: str) -> flask.wrappers.Response:
"""Create_group."""
group = GroupModel.query.filter_by(name=group_name).first() group = GroupModel.query.filter_by(name=group_name).first()
if group is not None: if group is not None:
raise ( raise (
@ -106,7 +103,6 @@ def create_group(group_name: str) -> flask.wrappers.Response:
@user_blueprint.route("/group/<group_name>", methods=["DELETE"]) @user_blueprint.route("/group/<group_name>", methods=["DELETE"])
def delete_group(group_name: str) -> flask.wrappers.Response: def delete_group(group_name: str) -> flask.wrappers.Response:
"""Delete_group."""
group = GroupModel.query.filter_by(name=group_name).first() group = GroupModel.query.filter_by(name=group_name).first()
if group is None: if group is None:
raise ( raise (
@ -125,7 +121,6 @@ def delete_group(group_name: str) -> flask.wrappers.Response:
@user_blueprint.route("/assign_user_to_group", methods=["POST"]) @user_blueprint.route("/assign_user_to_group", methods=["POST"])
def assign_user_to_group() -> flask.wrappers.Response: def assign_user_to_group() -> flask.wrappers.Response:
"""Assign_user_to_group."""
user = get_user_from_request() user = get_user_from_request()
group = get_group_from_request() group = get_group_from_request()
@ -152,7 +147,6 @@ def assign_user_to_group() -> flask.wrappers.Response:
@user_blueprint.route("/remove_user_from_group", methods=["POST"]) @user_blueprint.route("/remove_user_from_group", methods=["POST"])
def remove_user_from_group() -> flask.wrappers.Response: def remove_user_from_group() -> flask.wrappers.Response:
"""Remove_user_from_group."""
user = get_user_from_request() user = get_user_from_request()
group = get_group_from_request() group = get_group_from_request()
@ -177,14 +171,12 @@ def remove_user_from_group() -> flask.wrappers.Response:
def get_value_from_request_json(key: str) -> Any: def get_value_from_request_json(key: str) -> Any:
"""Get_value_from_request_json."""
if request.json is None: if request.json is None:
return None return None
return request.json.get(key) return request.json.get(key)
def get_user_from_request() -> Any: def get_user_from_request() -> Any:
"""Get_user_from_request."""
user_id = get_value_from_request_json("user_id") user_id = get_value_from_request_json("user_id")
if user_id is None: if user_id is None:
@ -209,7 +201,6 @@ def get_user_from_request() -> Any:
def get_group_from_request() -> Any: def get_group_from_request() -> Any:
"""Get_group_from_request."""
group_id = get_value_from_request_json("group_id") group_id = get_value_from_request_json("group_id")
if group_id is None: if group_id is None:

View File

@ -23,7 +23,6 @@ def user_exists_by_username(body: dict[str, Any]) -> flask.wrappers.Response:
def user_search(username_prefix: str) -> flask.wrappers.Response: def user_search(username_prefix: str) -> flask.wrappers.Response:
"""User_search."""
found_users = UserModel.query.filter(UserModel.username.like(f"{username_prefix}%")).all() # type: ignore found_users = UserModel.query.filter(UserModel.username.like(f"{username_prefix}%")).all() # type: ignore
response_json = { response_json = {
@ -34,7 +33,6 @@ def user_search(username_prefix: str) -> flask.wrappers.Response:
def user_group_list_for_current_user() -> flask.wrappers.Response: def user_group_list_for_current_user() -> flask.wrappers.Response:
"""User_group_list_for_current_user."""
groups = g.user.groups groups = g.user.groups
# TODO: filter out the default group and have a way to know what is the default group # TODO: filter out the default group and have a way to know what is the default group
group_identifiers = [ group_identifiers = [

View File

@ -9,10 +9,7 @@ from sqlalchemy import or_
class DeleteProcessInstancesWithCriteria(Script): class DeleteProcessInstancesWithCriteria(Script):
"""DeleteProcessInstancesWithCriteria."""
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return "Delete process instances that match the provided criteria," return "Delete process instances that match the provided criteria,"
def run( def run(
@ -21,7 +18,6 @@ class DeleteProcessInstancesWithCriteria(Script):
*args: Any, *args: Any,
**kwargs: Any, **kwargs: Any,
) -> Any: ) -> Any:
"""Run."""
criteria_list = args[0] criteria_list = args[0]
delete_criteria = [] delete_criteria = []

View File

@ -5,20 +5,16 @@ from spiffworkflow_backend.scripts.script import Script
class FactService(Script): class FactService(Script):
"""FactService."""
@staticmethod @staticmethod
def requires_privileged_permissions() -> bool: def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions.""" """We have deemed this function safe to run without elevated permissions."""
return False return False
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Just your basic class that can pull in data from a few api endpoints and return """Just your basic class that can pull in data from a few api endpoints and
do a basic task.""" do a basic task."""
def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any: def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
"""Run."""
if "type" not in kwargs: if "type" not in kwargs:
raise Exception("Please specify a 'type' of fact as a keyword argument.") raise Exception("Please specify a 'type' of fact as a keyword argument.")
else: else:

View File

@ -1,4 +1,3 @@
"""Get_env."""
from collections import OrderedDict from collections import OrderedDict
from typing import Any from typing import Any
@ -11,10 +10,7 @@ from spiffworkflow_backend.scripts.script import Script
class GetAllPermissions(Script): class GetAllPermissions(Script):
"""GetAllPermissions."""
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Get all permissions currently in the system.""" return """Get all permissions currently in the system."""
def run( def run(
@ -23,7 +19,6 @@ class GetAllPermissions(Script):
*args: Any, *args: Any,
**kwargs: Any, **kwargs: Any,
) -> Any: ) -> Any:
"""Run."""
permission_assignments = ( permission_assignments = (
PermissionAssignmentModel.query.join( PermissionAssignmentModel.query.join(
PrincipalModel, PrincipalModel,
@ -46,14 +41,12 @@ class GetAllPermissions(Script):
permissions.setdefault((pa.group_identifier, pa.uri), []).append(pa.permission) permissions.setdefault((pa.group_identifier, pa.uri), []).append(pa.permission)
def replace_suffix(string: str, old: str, new: str) -> str: def replace_suffix(string: str, old: str, new: str) -> str:
"""Replace_suffix."""
if string.endswith(old): if string.endswith(old):
return string[: -len(old)] + new return string[: -len(old)] + new
return string return string
# sort list of strings based on a specific order # sort list of strings based on a specific order
def sort_by_order(string_list: list, order: list) -> list: def sort_by_order(string_list: list, order: list) -> list:
"""Sort_by_order."""
return sorted(string_list, key=lambda x: order.index(x)) return sorted(string_list, key=lambda x: order.index(x))
return [ return [

View File

@ -14,11 +14,9 @@ class GetCurrentUser(Script):
return False return False
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Return the current user.""" return """Return the current user."""
def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any: def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
"""Run."""
# dump the user using our json encoder and then load it back up as a dict # dump the user using our json encoder and then load it back up as a dict
# to remove unwanted field types # to remove unwanted field types
user_as_json_string = current_app.json.dumps(g.user) user_as_json_string = current_app.json.dumps(g.user)

View File

@ -10,20 +10,16 @@ class TaskNotGivenToScriptError(Exception):
class GetDataSizes(Script): class GetDataSizes(Script):
"""GetDataSizes."""
@staticmethod @staticmethod
def requires_privileged_permissions() -> bool: def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions.""" """We have deemed this function safe to run without elevated permissions."""
return False return False
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Returns a dictionary of information about the size of task data and return """Returns a dictionary of information about the size of task data and
the python environment for the currently running process.""" the python environment for the currently running process."""
def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any: def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
"""Run."""
if script_attributes_context.task is None: if script_attributes_context.task is None:
raise TaskNotGivenToScriptError( raise TaskNotGivenToScriptError(
"The task was not given to script 'get_data_sizes'. " "The task was not given to script 'get_data_sizes'. "

View File

@ -7,15 +7,12 @@ from spiffworkflow_backend.scripts.script import Script
class GetEncodedFileData(Script): class GetEncodedFileData(Script):
"""GetEncodedFileData."""
@staticmethod @staticmethod
def requires_privileged_permissions() -> bool: def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions.""" """We have deemed this function safe to run without elevated permissions."""
return False return False
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Returns a string which is the encoded file data. This is a very expensive call.""" return """Returns a string which is the encoded file data. This is a very expensive call."""
def run( def run(
@ -24,7 +21,6 @@ class GetEncodedFileData(Script):
*args: Any, *args: Any,
**kwargs: Any, **kwargs: Any,
) -> Any: ) -> Any:
"""Run."""
# example input: # example input:
# "data:some/mimetype;name=testing.txt;base64,spifffiledatadigest+7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf" # noqa: B950,E501 # "data:some/mimetype;name=testing.txt;base64,spifffiledatadigest+7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf" # noqa: B950,E501
digest_reference = args[0] digest_reference = args[0]

View File

@ -5,17 +5,13 @@ from spiffworkflow_backend.scripts.script import Script
class GetEnv(Script): class GetEnv(Script):
"""GetEnv."""
@staticmethod @staticmethod
def requires_privileged_permissions() -> bool: def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions.""" """We have deemed this function safe to run without elevated permissions."""
return False return False
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Returns the current environment - ie testing, staging, production.""" return """Returns the current environment - ie testing, staging, production."""
def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any: def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
"""Run."""
return script_attributes_context.environment_identifier return script_attributes_context.environment_identifier

View File

@ -1,4 +1,3 @@
"""Get_env."""
from typing import Any from typing import Any
from flask import current_app from flask import current_app
@ -7,17 +6,13 @@ from spiffworkflow_backend.scripts.script import Script
class GetFrontendUrl(Script): class GetFrontendUrl(Script):
"""GetFrontendUrl."""
@staticmethod @staticmethod
def requires_privileged_permissions() -> bool: def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions.""" """We have deemed this function safe to run without elevated permissions."""
return False return False
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Return the url to the frontend.""" return """Return the url to the frontend."""
def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any: def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
"""Run."""
return current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"] return current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"]

View File

@ -1,4 +1,3 @@
"""Get_env."""
from typing import Any from typing import Any
from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.group import GroupModel
@ -8,15 +7,12 @@ from spiffworkflow_backend.scripts.script import Script
class GetGroupMembers(Script): class GetGroupMembers(Script):
"""GetGroupMembers."""
@staticmethod @staticmethod
def requires_privileged_permissions() -> bool: def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions.""" """We have deemed this function safe to run without elevated permissions."""
return False return False
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Return the list of usernames of the users in the given group.""" return """Return the list of usernames of the users in the given group."""
def run( def run(
@ -25,7 +21,6 @@ class GetGroupMembers(Script):
*args: Any, *args: Any,
**kwargs: Any, **kwargs: Any,
) -> Any: ) -> Any:
"""Run."""
group_identifier = args[0] group_identifier = args[0]
group = GroupModel.query.filter_by(identifier=group_identifier).first() group = GroupModel.query.filter_by(identifier=group_identifier).first()
if group is None: if group is None:

View File

@ -22,7 +22,6 @@ class GetLastUserCompletingTask(Script):
*_args: Any, *_args: Any,
**kwargs: Any, **kwargs: Any,
) -> Any: ) -> Any:
"""Run."""
# dump the user using our json encoder and then load it back up as a dict # dump the user using our json encoder and then load it back up as a dict
# to remove unwanted field types # to remove unwanted field types
if len(_args) == 2: if len(_args) == 2:

View File

@ -8,20 +8,16 @@ from spiffworkflow_backend.scripts.script import Script
class GetLocaltime(Script): class GetLocaltime(Script):
"""GetLocaltime."""
@staticmethod @staticmethod
def requires_privileged_permissions() -> bool: def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions.""" """We have deemed this function safe to run without elevated permissions."""
return False return False
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Converts a Datetime object into a Datetime object for a specific timezone. return """Converts a Datetime object into a Datetime object for a specific timezone.
Defaults to US/Eastern.""" Defaults to US/Eastern."""
def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> datetime: def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> datetime:
"""Run."""
if len(args) > 0 or "datetime" in kwargs: if len(args) > 0 or "datetime" in kwargs:
if "datetime" in kwargs: if "datetime" in kwargs:
date_time = kwargs["datetime"] date_time = kwargs["datetime"]

View File

@ -22,7 +22,6 @@ class GetProcessInitiatorUser(Script):
*_args: Any, *_args: Any,
**kwargs: Any, **kwargs: Any,
) -> Any: ) -> Any:
"""Run."""
process_instance = ( process_instance = (
ProcessInstanceModel.query.filter_by(id=script_attributes_context.process_instance_id) ProcessInstanceModel.query.filter_by(id=script_attributes_context.process_instance_id)
.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)

View File

@ -6,12 +6,8 @@ from spiffworkflow_backend.services.secret_service import SecretService
class GetSecret(Script): class GetSecret(Script):
"""GetSecret."""
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Returns the value for a previously configured secret.""" return """Returns the value for a previously configured secret."""
def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any: def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
"""Run."""
return SecretService.get_secret(args[0]).value return SecretService.get_secret(args[0]).value

View File

@ -1,4 +1,3 @@
"""Get_process_info."""
from typing import Any from typing import Any
from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext
@ -6,19 +5,15 @@ from spiffworkflow_backend.scripts.script import Script
class GetToplevelProcessInfo(Script): class GetToplevelProcessInfo(Script):
"""GetProcessInfo."""
@staticmethod @staticmethod
def requires_privileged_permissions() -> bool: def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions.""" """We have deemed this function safe to run without elevated permissions."""
return False return False
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Returns a dictionary of information about the currently running process.""" return """Returns a dictionary of information about the currently running process."""
def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any: def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
"""Run."""
return { return {
"process_instance_id": script_attributes_context.process_instance_id, "process_instance_id": script_attributes_context.process_instance_id,
"process_model_identifier": script_attributes_context.process_model_identifier, "process_model_identifier": script_attributes_context.process_model_identifier,

View File

@ -7,15 +7,12 @@ from spiffworkflow_backend.scripts.script import Script
class GetMarkdownFileDownloadLink(Script): class GetMarkdownFileDownloadLink(Script):
"""GetMarkdownFileDownloadLink."""
@staticmethod @staticmethod
def requires_privileged_permissions() -> bool: def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions.""" """We have deemed this function safe to run without elevated permissions."""
return False return False
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Returns a string which is a string in markdown format.""" return """Returns a string which is a string in markdown format."""
def run( def run(
@ -24,7 +21,6 @@ class GetMarkdownFileDownloadLink(Script):
*args: Any, *args: Any,
**kwargs: Any, **kwargs: Any,
) -> Any: ) -> Any:
"""Run."""
# example input: # example input:
# "data:some/mimetype;name=testing.txt;base64,spifffiledatadigest+7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf" # noqa: B950,E501 # "data:some/mimetype;name=testing.txt;base64,spifffiledatadigest+7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf" # noqa: B950,E501
digest_reference = args[0] digest_reference = args[0]

View File

@ -1,4 +1,3 @@
"""Get_env."""
from typing import Any from typing import Any
from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext
@ -7,10 +6,7 @@ from spiffworkflow_backend.services.authorization_service import AuthorizationSe
class RefreshPermissions(Script): class RefreshPermissions(Script):
"""RefreshPermissions."""
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
return """Add permissions using a dict. return """Add permissions using a dict.
group_info: [ group_info: [
{ {

View File

@ -20,7 +20,7 @@ SCRIPT_SUB_CLASSES = None
class ScriptUnauthorizedForUserError(Exception): class ScriptUnauthorizedForUserError(Exception):
"""ScriptUnauthorizedForUserError.""" pass
class ProcessInstanceIdMissingError(Exception): class ProcessInstanceIdMissingError(Exception):
@ -36,7 +36,6 @@ class Script:
@abstractmethod @abstractmethod
def get_description(self) -> str: def get_description(self) -> str:
"""Get_description."""
raise ApiError("invalid_script", "This script does not supply a description.") raise ApiError("invalid_script", "This script does not supply a description.")
@abstractmethod @abstractmethod
@ -46,7 +45,6 @@ class Script:
*args: Any, *args: Any,
**kwargs: Any, **kwargs: Any,
) -> Any: ) -> Any:
"""Run."""
raise ApiError( raise ApiError(
"invalid_script", "invalid_script",
"This is an internal error. The script you are trying to execute '%s' " % self.__class__.__name__ "This is an internal error. The script you are trying to execute '%s' " % self.__class__.__name__
@ -108,7 +106,6 @@ class Script:
instance = subclass() instance = subclass()
def check_script_permission() -> None: def check_script_permission() -> None:
"""Check_script_permission."""
if subclass.requires_privileged_permissions(): if subclass.requires_privileged_permissions():
script_function_name = get_script_function_name(subclass) script_function_name = get_script_function_name(subclass)
uri = f"/can-run-privileged-script/{script_function_name}" uri = f"/can-run-privileged-script/{script_function_name}"
@ -132,7 +129,6 @@ class Script:
) )
def run_script_if_allowed(*ar: Any, **kw: Any) -> Any: def run_script_if_allowed(*ar: Any, **kw: Any) -> Any:
"""Run_script_if_allowed."""
check_script_permission() check_script_permission()
return subclass.run( return subclass.run(
instance, instance,
@ -144,7 +140,6 @@ class Script:
return run_script_if_allowed return run_script_if_allowed
def get_script_function_name(subclass: type[Script]) -> str: def get_script_function_name(subclass: type[Script]) -> str:
"""Get_script_function_name."""
return subclass.__module__.split(".")[-1] return subclass.__module__.split(".")[-1]
execlist = {} execlist = {}
@ -158,7 +153,6 @@ class Script:
@classmethod @classmethod
def get_all_subclasses(cls) -> list[type[Script]]: def get_all_subclasses(cls) -> list[type[Script]]:
"""Get_all_subclasses."""
# This is expensive to generate, never changes after we load up. # This is expensive to generate, never changes after we load up.
global SCRIPT_SUB_CLASSES # noqa: PLW0603, allow global for performance global SCRIPT_SUB_CLASSES # noqa: PLW0603, allow global for performance
if not SCRIPT_SUB_CLASSES: if not SCRIPT_SUB_CLASSES:

View File

@ -10,7 +10,6 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]: def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:
"""Load_fixtures."""
current_app.logger.debug("load_acceptance_test_fixtures() start") current_app.logger.debug("load_acceptance_test_fixtures() start")
test_process_model_id = "misc/acceptance-tests-group-one/acceptance-tests-model-1" test_process_model_id = "misc/acceptance-tests-group-one/acceptance-tests-model-1"
user = BaseTest.find_or_create_user() user = BaseTest.find_or_create_user()

View File

@ -54,8 +54,6 @@ class AuthenticationProviderTypes(enum.Enum):
class AuthenticationService: class AuthenticationService:
"""AuthenticationService."""
ENDPOINT_CACHE: dict = {} # We only need to find the openid endpoints once, then we can cache them. ENDPOINT_CACHE: dict = {} # We only need to find the openid endpoints once, then we can cache them.
@staticmethod @staticmethod
@ -89,11 +87,9 @@ class AuthenticationService:
@staticmethod @staticmethod
def get_backend_url() -> str: def get_backend_url() -> str:
"""Get_backend_url."""
return str(current_app.config["SPIFFWORKFLOW_BACKEND_URL"]) return str(current_app.config["SPIFFWORKFLOW_BACKEND_URL"])
def logout(self, id_token: str, redirect_url: str | None = None) -> Response: def logout(self, id_token: str, redirect_url: str | None = None) -> Response:
"""Logout."""
if redirect_url is None: if redirect_url is None:
redirect_url = f"{self.get_backend_url()}/v1.0/logout_return" redirect_url = f"{self.get_backend_url()}/v1.0/logout_return"
request_url = ( request_url = (
@ -106,12 +102,10 @@ class AuthenticationService:
@staticmethod @staticmethod
def generate_state(redirect_url: str) -> bytes: def generate_state(redirect_url: str) -> bytes:
"""Generate_state."""
state = base64.b64encode(bytes(str({"redirect_url": redirect_url}), "UTF-8")) state = base64.b64encode(bytes(str({"redirect_url": redirect_url}), "UTF-8"))
return state return state
def get_login_redirect_url(self, state: str, redirect_url: str = "/v1.0/login_return") -> str: def get_login_redirect_url(self, state: str, redirect_url: str = "/v1.0/login_return") -> str:
"""Get_login_redirect_url."""
return_redirect_url = f"{self.get_backend_url()}{redirect_url}" return_redirect_url = f"{self.get_backend_url()}{redirect_url}"
login_redirect_url = ( login_redirect_url = (
self.open_id_endpoint_for_name("authorization_endpoint") self.open_id_endpoint_for_name("authorization_endpoint")
@ -124,7 +118,6 @@ class AuthenticationService:
return login_redirect_url return login_redirect_url
def get_auth_token_object(self, code: str, redirect_url: str = "/v1.0/login_return") -> dict: def get_auth_token_object(self, code: str, redirect_url: str = "/v1.0/login_return") -> dict:
"""Get_auth_token_object."""
backend_basic_auth_string = f"{self.client_id()}:{self.secret_key()}" backend_basic_auth_string = f"{self.client_id()}:{self.secret_key()}"
backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii")
backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) backend_basic_auth = base64.b64encode(backend_basic_auth_bytes)
@ -200,7 +193,6 @@ class AuthenticationService:
@staticmethod @staticmethod
def store_refresh_token(user_id: int, refresh_token: str) -> None: def store_refresh_token(user_id: int, refresh_token: str) -> None:
"""Store_refresh_token."""
refresh_token_model = RefreshTokenModel.query.filter(RefreshTokenModel.user_id == user_id).first() refresh_token_model = RefreshTokenModel.query.filter(RefreshTokenModel.user_id == user_id).first()
if refresh_token_model: if refresh_token_model:
refresh_token_model.token = refresh_token refresh_token_model.token = refresh_token
@ -217,7 +209,6 @@ class AuthenticationService:
@staticmethod @staticmethod
def get_refresh_token(user_id: int) -> str | None: def get_refresh_token(user_id: int) -> str | None:
"""Get_refresh_token."""
refresh_token_object: RefreshTokenModel = RefreshTokenModel.query.filter( refresh_token_object: RefreshTokenModel = RefreshTokenModel.query.filter(
RefreshTokenModel.user_id == user_id RefreshTokenModel.user_id == user_id
).first() ).first()

View File

@ -5,11 +5,8 @@ from spiffworkflow_backend.services.spec_file_service import SpecFileService
class DataSetupService: class DataSetupService:
"""DataSetupService."""
@classmethod @classmethod
def run_setup(cls) -> list: def run_setup(cls) -> list:
"""Run_setup."""
return cls.save_all_process_models() return cls.save_all_process_models()
@classmethod @classmethod

View File

@ -13,11 +13,11 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
class FileSystemService: class FileSystemService:
"""FileSystemService."""
""" Simple Service meant for extension that provides some useful """Simple Service meant for extension that provides some useful
methods for dealing with the File system. methods for dealing with the File system.
""" """
PROCESS_GROUP_JSON_FILE = "process_group.json" PROCESS_GROUP_JSON_FILE = "process_group.json"
PROCESS_MODEL_JSON_FILE = "process_model.json" PROCESS_MODEL_JSON_FILE = "process_model.json"
@ -25,7 +25,6 @@ class FileSystemService:
@staticmethod @staticmethod
@contextmanager @contextmanager
def cd(newdir: str) -> Generator: def cd(newdir: str) -> Generator:
"""Cd."""
prevdir = os.getcwd() prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir)) os.chdir(os.path.expanduser(newdir))
try: try:
@ -35,14 +34,12 @@ class FileSystemService:
@staticmethod @staticmethod
def root_path() -> str: def root_path() -> str:
"""Root_path."""
dir_name = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] dir_name = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]
# ensure this is a string - thanks mypy... # ensure this is a string - thanks mypy...
return os.path.abspath(os.path.join(dir_name, "")) return os.path.abspath(os.path.join(dir_name, ""))
@staticmethod @staticmethod
def id_string_to_relative_path(id_string: str) -> str: def id_string_to_relative_path(id_string: str) -> str:
"""Id_string_to_relative_path."""
return id_string.replace("/", os.sep) return id_string.replace("/", os.sep)
@classmethod @classmethod
@ -56,7 +53,6 @@ class FileSystemService:
@staticmethod @staticmethod
def full_path_from_relative_path(relative_path: str) -> str: def full_path_from_relative_path(relative_path: str) -> str:
"""Full_path_from_relative_path."""
return os.path.join(FileSystemService.root_path(), relative_path) return os.path.join(FileSystemService.root_path(), relative_path)
@staticmethod @staticmethod
@ -81,13 +77,11 @@ class FileSystemService:
@staticmethod @staticmethod
def full_path_to_process_model_file(process_model: ProcessModelInfo) -> str: def full_path_to_process_model_file(process_model: ProcessModelInfo) -> str:
"""Full_path_to_process_model_file."""
return os.path.join( return os.path.join(
FileSystemService.process_model_full_path(process_model), process_model.primary_file_name # type: ignore FileSystemService.process_model_full_path(process_model), process_model.primary_file_name # type: ignore
) )
def next_display_order(self, process_model: ProcessModelInfo) -> int: def next_display_order(self, process_model: ProcessModelInfo) -> int:
"""Next_display_order."""
path = self.process_group_path_for_spec(process_model) path = self.process_group_path_for_spec(process_model)
if os.path.exists(path): if os.path.exists(path):
return len(next(os.walk(path))[1]) return len(next(os.walk(path))[1])
@ -96,20 +90,17 @@ class FileSystemService:
@staticmethod @staticmethod
def write_file_data_to_system(file_path: str, file_data: bytes) -> None: def write_file_data_to_system(file_path: str, file_data: bytes) -> None:
"""Write_file_data_to_system."""
os.makedirs(os.path.dirname(file_path), exist_ok=True) os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, "wb") as f_handle: with open(file_path, "wb") as f_handle:
f_handle.write(file_data) f_handle.write(file_data)
@staticmethod @staticmethod
def get_extension(file_name: str) -> str: def get_extension(file_name: str) -> str:
"""Get_extension."""
_, file_extension = os.path.splitext(file_name) _, file_extension = os.path.splitext(file_name)
return file_extension.lower().strip()[1:] return file_extension.lower().strip()[1:]
@staticmethod @staticmethod
def assert_valid_file_name(file_name: str) -> None: def assert_valid_file_name(file_name: str) -> None:
"""Assert_valid_file_name."""
file_extension = FileSystemService.get_extension(file_name) file_extension = FileSystemService.get_extension(file_name)
if file_extension not in FileType.list(): if file_extension not in FileType.list():
raise ApiError( raise ApiError(
@ -134,7 +125,6 @@ class FileSystemService:
@staticmethod @staticmethod
def file_type(file_name: str) -> FileType: def file_type(file_name: str) -> FileType:
"""File_type."""
extension = FileSystemService.get_extension(file_name) extension = FileSystemService.get_extension(file_name)
return FileType[extension] return FileType[extension]
@ -157,7 +147,6 @@ class FileSystemService:
@staticmethod @staticmethod
def to_file_object(file_name: str, file_path: str) -> File: def to_file_object(file_name: str, file_path: str) -> File:
"""To_file_object."""
file_type = FileSystemService.file_type(file_name) file_type = FileSystemService.file_type(file_name)
content_type = CONTENT_TYPES[file_type.name] content_type = CONTENT_TYPES[file_type.name]
last_modified = FileSystemService._last_modified(file_path) last_modified = FileSystemService._last_modified(file_path)
@ -167,7 +156,6 @@ class FileSystemService:
@staticmethod @staticmethod
def to_file_object_from_dir_entry(item: os.DirEntry) -> File: def to_file_object_from_dir_entry(item: os.DirEntry) -> File:
"""To_file_object_from_dir_entry."""
extension = FileSystemService.get_extension(item.name) extension = FileSystemService.get_extension(item.name)
try: try:
file_type = FileType[extension] file_type = FileType[extension]

View File

@ -12,28 +12,25 @@ from spiffworkflow_backend.services.file_system_service import FileSystemService
class MissingGitConfigsError(Exception): class MissingGitConfigsError(Exception):
"""MissingGitConfigsError.""" pass
class InvalidGitWebhookBodyError(Exception): class InvalidGitWebhookBodyError(Exception):
"""InvalidGitWebhookBodyError.""" pass
class GitCloneUrlMismatchError(Exception): class GitCloneUrlMismatchError(Exception):
"""GitCloneUrlMismatchError.""" pass
class GitCommandError(Exception): class GitCommandError(Exception):
"""GitCommandError.""" pass
# TOOD: check for the existence of git and configs on bootup if publishing is enabled # TOOD: check for the existence of git and configs on bootup if publishing is enabled
class GitService: class GitService:
"""GitService."""
@classmethod @classmethod
def get_current_revision(cls) -> str: def get_current_revision(cls) -> str:
"""Get_current_revision."""
bpmn_spec_absolute_dir = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] bpmn_spec_absolute_dir = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]
# The value includes a carriage return character at the end, so we don't grab the last character # The value includes a carriage return character at the end, so we don't grab the last character
with FileSystemService.cd(bpmn_spec_absolute_dir): with FileSystemService.cd(bpmn_spec_absolute_dir):
@ -46,7 +43,6 @@ class GitService:
revision: str, revision: str,
file_name: str | None = None, file_name: str | None = None,
) -> str: ) -> str:
"""Get_instance_file_contents_for_revision."""
bpmn_spec_absolute_dir = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] bpmn_spec_absolute_dir = current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]
process_model_relative_path = FileSystemService.process_model_relative_path(process_model) process_model_relative_path = FileSystemService.process_model_relative_path(process_model)
file_name_to_use = file_name file_name_to_use = file_name
@ -67,7 +63,6 @@ class GitService:
repo_path: str | None = None, repo_path: str | None = None,
branch_name: str | None = None, branch_name: str | None = None,
) -> str: ) -> str:
"""Commit."""
cls.check_for_basic_configs() cls.check_for_basic_configs()
branch_name_to_use = branch_name branch_name_to_use = branch_name
if branch_name_to_use is None: if branch_name_to_use is None:
@ -89,7 +84,6 @@ class GitService:
@classmethod @classmethod
def check_for_basic_configs(cls) -> None: def check_for_basic_configs(cls) -> None:
"""Check_for_basic_configs."""
if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH"] is None: if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH"] is None:
raise MissingGitConfigsError( raise MissingGitConfigsError(
"Missing config for SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH. " "Missing config for SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH. "
@ -98,7 +92,6 @@ class GitService:
@classmethod @classmethod
def check_for_publish_configs(cls) -> None: def check_for_publish_configs(cls) -> None:
"""Check_for_configs."""
cls.check_for_basic_configs() cls.check_for_basic_configs()
if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"] is None: if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"] is None:
raise MissingGitConfigsError( raise MissingGitConfigsError(
@ -113,14 +106,12 @@ class GitService:
@classmethod @classmethod
def run_shell_command_as_boolean(cls, command: list[str]) -> bool: def run_shell_command_as_boolean(cls, command: list[str]) -> bool:
"""Run_shell_command_as_boolean."""
# we know result will be a bool here # we know result will be a bool here
result: bool = cls.run_shell_command(command, return_success_state=True) # type: ignore result: bool = cls.run_shell_command(command, return_success_state=True) # type: ignore
return result return result
@classmethod @classmethod
def run_shell_command_to_get_stdout(cls, command: list[str]) -> str: def run_shell_command_to_get_stdout(cls, command: list[str]) -> str:
"""Run_shell_command_to_get_stdout."""
# we know result will be a CompletedProcess here # we know result will be a CompletedProcess here
result: subprocess.CompletedProcess[bytes] = cls.run_shell_command( result: subprocess.CompletedProcess[bytes] = cls.run_shell_command(
command, return_success_state=False command, return_success_state=False
@ -131,7 +122,6 @@ class GitService:
def run_shell_command( def run_shell_command(
cls, command: list[str], return_success_state: bool = False cls, command: list[str], return_success_state: bool = False
) -> subprocess.CompletedProcess[bytes] | bool: ) -> subprocess.CompletedProcess[bytes] | bool:
"""Run_shell_command."""
my_env = os.environ.copy() my_env = os.environ.copy()
my_env["GIT_COMMITTER_NAME"] = current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME") or "unknown" my_env["GIT_COMMITTER_NAME"] = current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME") or "unknown"
@ -162,7 +152,6 @@ class GitService:
# only supports github right now # only supports github right now
@classmethod @classmethod
def handle_web_hook(cls, webhook: dict) -> bool: def handle_web_hook(cls, webhook: dict) -> bool:
"""Handle_web_hook."""
cls.check_for_publish_configs() cls.check_for_publish_configs()
if "repository" not in webhook or "clone_url" not in webhook["repository"]: if "repository" not in webhook or "clone_url" not in webhook["repository"]:
@ -203,7 +192,6 @@ class GitService:
@classmethod @classmethod
def publish(cls, process_model_id: str, branch_to_update: str) -> str: def publish(cls, process_model_id: str, branch_to_update: str) -> str:
"""Publish."""
cls.check_for_publish_configs() cls.check_for_publish_configs()
source_process_model_root = FileSystemService.root_path() source_process_model_root = FileSystemService.root_path()
source_process_model_path = os.path.join(source_process_model_root, process_model_id) source_process_model_path = os.path.join(source_process_model_root, process_model_id)

View File

@ -19,7 +19,7 @@ from flask.app import Flask
class InvalidLogLevelError(Exception): class InvalidLogLevelError(Exception):
"""InvalidLogLevelError.""" pass
# originally from https://stackoverflow.com/a/70223539/6090676 # originally from https://stackoverflow.com/a/70223539/6090676
@ -87,7 +87,6 @@ class JsonFormatter(logging.Formatter):
def setup_logger(app: Flask) -> None: def setup_logger(app: Flask) -> None:
"""Setup_logger."""
upper_log_level_string = app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"].upper() upper_log_level_string = app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"].upper()
log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]

View File

@ -10,12 +10,10 @@ from spiffworkflow_backend.services.process_instance_service import ProcessInsta
class MessageServiceError(Exception): class MessageServiceError(Exception):
"""MessageServiceError.""" pass
class MessageService: class MessageService:
"""MessageService."""
@classmethod @classmethod
def correlate_send_message(cls, message_instance_send: MessageInstanceModel) -> MessageInstanceModel | None: def correlate_send_message(cls, message_instance_send: MessageInstanceModel) -> MessageInstanceModel | None:
"""Connects the given send message to a 'receive' message if possible. """Connects the given send message to a 'receive' message if possible.

View File

@ -95,31 +95,28 @@ def _import(name: str, glbls: dict[str, Any], *args: Any) -> None:
class PotentialOwnerIdList(TypedDict): class PotentialOwnerIdList(TypedDict):
"""PotentialOwnerIdList."""
potential_owner_ids: list[int] potential_owner_ids: list[int]
lane_assignment_id: int | None lane_assignment_id: int | None
class ProcessInstanceProcessorError(Exception): class ProcessInstanceProcessorError(Exception):
"""ProcessInstanceProcessorError.""" pass
class NoPotentialOwnersForTaskError(Exception): class NoPotentialOwnersForTaskError(Exception):
"""NoPotentialOwnersForTaskError.""" pass
class PotentialOwnerUserNotFoundError(Exception): class PotentialOwnerUserNotFoundError(Exception):
"""PotentialOwnerUserNotFoundError.""" pass
class MissingProcessInfoError(Exception): class MissingProcessInfoError(Exception):
"""MissingProcessInfoError.""" pass
class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # type: ignore class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # type: ignore
def __init__(self, environment_globals: dict[str, Any]): def __init__(self, environment_globals: dict[str, Any]):
"""BoxedTaskDataBasedScriptEngineEnvironment."""
self._last_result: dict[str, Any] = {} self._last_result: dict[str, Any] = {}
super().__init__(environment_globals) super().__init__(environment_globals)
@ -159,7 +156,6 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state" PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state"
def __init__(self, environment_globals: dict[str, Any]): def __init__(self, environment_globals: dict[str, Any]):
"""NonTaskDataBasedScriptEngineEnvironment."""
self.state: dict[str, Any] = {} self.state: dict[str, Any] = {}
self.non_user_defined_keys = set([*environment_globals.keys()] + ["__builtins__"]) self.non_user_defined_keys = set([*environment_globals.keys()] + ["__builtins__"])
super().__init__(environment_globals) super().__init__(environment_globals)
@ -368,8 +364,6 @@ IdToBpmnProcessSpecMapping = NewType("IdToBpmnProcessSpecMapping", dict[str, Bpm
class ProcessInstanceProcessor: class ProcessInstanceProcessor:
"""ProcessInstanceProcessor."""
_script_engine = CustomBpmnScriptEngine() _script_engine = CustomBpmnScriptEngine()
SERIALIZER_VERSION = "1.0-spiffworkflow-backend" SERIALIZER_VERSION = "1.0-spiffworkflow-backend"
@ -453,7 +447,6 @@ class ProcessInstanceProcessor:
def get_process_model_and_subprocesses( def get_process_model_and_subprocesses(
cls, process_model_identifier: str cls, process_model_identifier: str
) -> tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]: ) -> tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]:
"""Get_process_model_and_subprocesses."""
process_model_info = ProcessModelService.get_process_model(process_model_identifier) process_model_info = ProcessModelService.get_process_model(process_model_identifier)
if process_model_info is None: if process_model_info is None:
raise ( raise (
@ -467,7 +460,6 @@ class ProcessInstanceProcessor:
@classmethod @classmethod
def get_bpmn_process_instance_from_process_model(cls, process_model_identifier: str) -> BpmnWorkflow: def get_bpmn_process_instance_from_process_model(cls, process_model_identifier: str) -> BpmnWorkflow:
"""Get_all_bpmn_process_identifiers_for_process_model."""
(bpmn_process_spec, subprocesses) = cls.get_process_model_and_subprocesses( (bpmn_process_spec, subprocesses) = cls.get_process_model_and_subprocesses(
process_model_identifier, process_model_identifier,
) )
@ -684,7 +676,6 @@ class ProcessInstanceProcessor:
return spiff_bpmn_process_dict return spiff_bpmn_process_dict
def current_user(self) -> Any: def current_user(self) -> Any:
"""Current_user."""
current_user = None current_user = None
if UserService.has_user(): if UserService.has_user():
current_user = UserService.current_user() current_user = UserService.current_user()
@ -702,7 +693,6 @@ class ProcessInstanceProcessor:
spec: BpmnProcessSpec, spec: BpmnProcessSpec,
subprocesses: IdToBpmnProcessSpecMapping | None = None, subprocesses: IdToBpmnProcessSpecMapping | None = None,
) -> BpmnWorkflow: ) -> BpmnWorkflow:
"""Get_bpmn_process_instance_from_workflow_spec."""
bpmn_process_instance = BpmnWorkflow( bpmn_process_instance = BpmnWorkflow(
spec, spec,
subprocess_specs=subprocesses, subprocess_specs=subprocesses,
@ -752,18 +742,15 @@ class ProcessInstanceProcessor:
) )
def slam_in_data(self, data: dict) -> None: def slam_in_data(self, data: dict) -> None:
"""Slam_in_data."""
self.bpmn_process_instance.data = DeepMerge.merge(self.bpmn_process_instance.data, data) self.bpmn_process_instance.data = DeepMerge.merge(self.bpmn_process_instance.data, data)
self.save() self.save()
def raise_if_no_potential_owners(self, potential_owner_ids: list[int], message: str) -> None: def raise_if_no_potential_owners(self, potential_owner_ids: list[int], message: str) -> None:
"""Raise_if_no_potential_owners."""
if not potential_owner_ids: if not potential_owner_ids:
raise NoPotentialOwnersForTaskError(message) raise NoPotentialOwnersForTaskError(message)
def get_potential_owner_ids_from_task(self, task: SpiffTask) -> PotentialOwnerIdList: def get_potential_owner_ids_from_task(self, task: SpiffTask) -> PotentialOwnerIdList:
"""Get_potential_owner_ids_from_task."""
task_spec = task.task_spec task_spec = task.task_spec
task_lane = "process_initiator" task_lane = "process_initiator"
if task_spec.lane is not None and task_spec.lane != "": if task_spec.lane is not None and task_spec.lane != "":
@ -803,7 +790,6 @@ class ProcessInstanceProcessor:
} }
def extract_metadata(self, process_model_info: ProcessModelInfo) -> None: def extract_metadata(self, process_model_info: ProcessModelInfo) -> None:
"""Extract_metadata."""
metadata_extraction_paths = process_model_info.metadata_extraction_paths metadata_extraction_paths = process_model_info.metadata_extraction_paths
if metadata_extraction_paths is None: if metadata_extraction_paths is None:
return return
@ -1156,7 +1142,6 @@ class ProcessInstanceProcessor:
@staticmethod @staticmethod
def get_parser() -> MyCustomParser: def get_parser() -> MyCustomParser:
"""Get_parser."""
parser = MyCustomParser() parser = MyCustomParser()
return parser return parser
@ -1164,7 +1149,6 @@ class ProcessInstanceProcessor:
def backfill_missing_spec_reference_records( def backfill_missing_spec_reference_records(
bpmn_process_identifier: str, bpmn_process_identifier: str,
) -> str | None: ) -> str | None:
"""Backfill_missing_spec_reference_records."""
process_models = ProcessModelService.get_process_models(recursive=True) process_models = ProcessModelService.get_process_models(recursive=True)
for process_model in process_models: for process_model in process_models:
try: try:
@ -1181,7 +1165,6 @@ class ProcessInstanceProcessor:
def bpmn_file_full_path_from_bpmn_process_identifier( def bpmn_file_full_path_from_bpmn_process_identifier(
bpmn_process_identifier: str, bpmn_process_identifier: str,
) -> str: ) -> str:
"""Bpmn_file_full_path_from_bpmn_process_identifier."""
if bpmn_process_identifier is None: if bpmn_process_identifier is None:
raise ValueError( raise ValueError(
"bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None" "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None"
@ -1213,7 +1196,6 @@ class ProcessInstanceProcessor:
parser: SpiffBpmnParser, parser: SpiffBpmnParser,
processed_identifiers: set[str] | None = None, processed_identifiers: set[str] | None = None,
) -> None: ) -> None:
"""Update_spiff_parser_with_all_process_dependency_files."""
if processed_identifiers is None: if processed_identifiers is None:
processed_identifiers = set() processed_identifiers = set()
processor_dependencies = parser.get_process_dependencies() processor_dependencies = parser.get_process_dependencies()
@ -1292,7 +1274,6 @@ class ProcessInstanceProcessor:
@staticmethod @staticmethod
def status_of(bpmn_process_instance: BpmnWorkflow) -> ProcessInstanceStatus: def status_of(bpmn_process_instance: BpmnWorkflow) -> ProcessInstanceStatus:
"""Status_of."""
if bpmn_process_instance.is_completed(): if bpmn_process_instance.is_completed():
return ProcessInstanceStatus.complete return ProcessInstanceStatus.complete
user_tasks = bpmn_process_instance.get_ready_user_tasks() user_tasks = bpmn_process_instance.get_ready_user_tasks()
@ -1312,7 +1293,6 @@ class ProcessInstanceProcessor:
return ProcessInstanceStatus.waiting return ProcessInstanceStatus.waiting
def get_status(self) -> ProcessInstanceStatus: def get_status(self) -> ProcessInstanceStatus:
"""Get_status."""
the_status = self.status_of(self.bpmn_process_instance) the_status = self.status_of(self.bpmn_process_instance)
# current_app.logger.debug(f"the_status: {the_status} for instance {self.process_instance_model.id}") # current_app.logger.debug(f"the_status: {the_status} for instance {self.process_instance_model.id}")
return the_status return the_status
@ -1430,7 +1410,6 @@ class ProcessInstanceProcessor:
return 0 return 0
def check_task_data_size(self) -> None: def check_task_data_size(self) -> None:
"""CheckTaskDataSize."""
task_data_len = self.get_task_data_size(self.bpmn_process_instance) task_data_len = self.get_task_data_size(self.bpmn_process_instance)
# Not sure what the number here should be but this now matches the mysql # Not sure what the number here should be but this now matches the mysql
@ -1446,13 +1425,11 @@ class ProcessInstanceProcessor:
) )
def serialize(self) -> dict: def serialize(self) -> dict:
"""Serialize."""
self.check_task_data_size() self.check_task_data_size()
self.preserve_script_engine_state() self.preserve_script_engine_state()
return self._serializer.workflow_to_dict(self.bpmn_process_instance) # type: ignore return self._serializer.workflow_to_dict(self.bpmn_process_instance) # type: ignore
def next_user_tasks(self) -> list[SpiffTask]: def next_user_tasks(self) -> list[SpiffTask]:
"""Next_user_tasks."""
return self.bpmn_process_instance.get_ready_user_tasks() # type: ignore return self.bpmn_process_instance.get_ready_user_tasks() # type: ignore
def next_task(self) -> SpiffTask: def next_task(self) -> SpiffTask:
@ -1523,7 +1500,6 @@ class ProcessInstanceProcessor:
return next_task_to_return return next_task_to_return
def completed_user_tasks(self) -> list[SpiffTask]: def completed_user_tasks(self) -> list[SpiffTask]:
"""Completed_user_tasks."""
user_tasks = self.bpmn_process_instance.get_tasks(TaskState.COMPLETED) user_tasks = self.bpmn_process_instance.get_tasks(TaskState.COMPLETED)
user_tasks.reverse() user_tasks.reverse()
user_tasks = list( user_tasks = list(
@ -1545,7 +1521,6 @@ class ProcessInstanceProcessor:
return task_json return task_json
def complete_task(self, spiff_task: SpiffTask, human_task: HumanTaskModel, user: UserModel) -> None: def complete_task(self, spiff_task: SpiffTask, human_task: HumanTaskModel, user: UserModel) -> None:
"""Complete_task."""
task_model = TaskModel.query.filter_by(guid=human_task.task_id).first() task_model = TaskModel.query.filter_by(guid=human_task.task_id).first()
if task_model is None: if task_model is None:
raise TaskNotFoundError( raise TaskNotFoundError(
@ -1581,7 +1556,6 @@ class ProcessInstanceProcessor:
self.save() self.save()
def get_data(self) -> dict[str, Any]: def get_data(self) -> dict[str, Any]:
"""Get_data."""
return self.bpmn_process_instance.data # type: ignore return self.bpmn_process_instance.data # type: ignore
def get_current_data(self) -> dict[str, Any]: def get_current_data(self) -> dict[str, Any]:
@ -1606,11 +1580,9 @@ class ProcessInstanceProcessor:
return {} return {}
def get_process_instance_id(self) -> int: def get_process_instance_id(self) -> int:
"""Get_process_instance_id."""
return self.process_instance_model.id return self.process_instance_model.id
def get_ready_user_tasks(self) -> list[SpiffTask]: def get_ready_user_tasks(self) -> list[SpiffTask]:
"""Get_ready_user_tasks."""
return self.bpmn_process_instance.get_ready_user_tasks() # type: ignore return self.bpmn_process_instance.get_ready_user_tasks() # type: ignore
def get_current_user_tasks(self) -> list[SpiffTask]: def get_current_user_tasks(self) -> list[SpiffTask]:
@ -1624,7 +1596,6 @@ class ProcessInstanceProcessor:
return ready_tasks + additional_tasks # type: ignore return ready_tasks + additional_tasks # type: ignore
def get_all_user_tasks(self) -> list[SpiffTask]: def get_all_user_tasks(self) -> list[SpiffTask]:
"""Get_all_user_tasks."""
all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
return [t for t in all_tasks if t.task_spec.manual] return [t for t in all_tasks if t.task_spec.manual]
@ -1633,12 +1604,10 @@ class ProcessInstanceProcessor:
return [t for t in all_tasks if t.task_spec.manual and t.state in [TaskState.COMPLETED, TaskState.CANCELLED]] return [t for t in all_tasks if t.task_spec.manual and t.state in [TaskState.COMPLETED, TaskState.CANCELLED]]
def get_all_waiting_tasks(self) -> list[SpiffTask]: def get_all_waiting_tasks(self) -> list[SpiffTask]:
"""Get_all_ready_or_waiting_tasks."""
all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
return [t for t in all_tasks if t.state in [TaskState.WAITING]] return [t for t in all_tasks if t.state in [TaskState.WAITING]]
def get_all_ready_or_waiting_tasks(self) -> list[SpiffTask]: def get_all_ready_or_waiting_tasks(self) -> list[SpiffTask]:
"""Get_all_ready_or_waiting_tasks."""
all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
return [t for t in all_tasks if t.state in [TaskState.WAITING, TaskState.READY]] return [t for t in all_tasks if t.state in [TaskState.WAITING, TaskState.READY]]
@ -1649,7 +1618,6 @@ class ProcessInstanceProcessor:
def get_task_by_bpmn_identifier( def get_task_by_bpmn_identifier(
cls, bpmn_task_identifier: str, bpmn_process_instance: BpmnWorkflow cls, bpmn_task_identifier: str, bpmn_process_instance: BpmnWorkflow
) -> SpiffTask | None: ) -> SpiffTask | None:
"""Get_task_by_id."""
all_tasks = bpmn_process_instance.get_tasks(TaskState.ANY_MASK) all_tasks = bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
for task in all_tasks: for task in all_tasks:
if task.task_spec.name == bpmn_task_identifier: if task.task_spec.name == bpmn_task_identifier:
@ -1691,7 +1659,6 @@ class ProcessInstanceProcessor:
db.session.commit() db.session.commit()
def suspend(self) -> None: def suspend(self) -> None:
"""Suspend."""
self.process_instance_model.status = ProcessInstanceStatus.suspended.value self.process_instance_model.status = ProcessInstanceStatus.suspended.value
db.session.add(self.process_instance_model) db.session.add(self.process_instance_model)
ProcessInstanceTmpService.add_event_to_process_instance( ProcessInstanceTmpService.add_event_to_process_instance(
@ -1700,7 +1667,6 @@ class ProcessInstanceProcessor:
db.session.commit() db.session.commit()
def resume(self) -> None: def resume(self) -> None:
"""Resume."""
self.process_instance_model.status = ProcessInstanceStatus.waiting.value self.process_instance_model.status = ProcessInstanceStatus.waiting.value
db.session.add(self.process_instance_model) db.session.add(self.process_instance_model)
ProcessInstanceTmpService.add_event_to_process_instance( ProcessInstanceTmpService.add_event_to_process_instance(

View File

@ -36,8 +36,6 @@ class ProcessInstanceReportMetadataInvalidError(Exception):
class ProcessInstanceReportService: class ProcessInstanceReportService:
"""ProcessInstanceReportService."""
@classmethod @classmethod
def system_metadata_map(cls, metadata_key: str) -> ReportMetadata | None: def system_metadata_map(cls, metadata_key: str) -> ReportMetadata | None:
# TODO replace with system reports that are loaded on launch (or similar) # TODO replace with system reports that are loaded on launch (or similar)
@ -231,7 +229,6 @@ class ProcessInstanceReportService:
process_instance_sqlalchemy_rows: list[sqlalchemy.engine.row.Row], # type: ignore process_instance_sqlalchemy_rows: list[sqlalchemy.engine.row.Row], # type: ignore
metadata_columns: list[ReportMetadataColumn], metadata_columns: list[ReportMetadataColumn],
) -> list[dict]: ) -> list[dict]:
"""Add_metadata_columns_to_process_instance."""
results = [] results = []
cls.non_metadata_columns() cls.non_metadata_columns()
for process_instance_row in process_instance_sqlalchemy_rows: for process_instance_row in process_instance_sqlalchemy_rows:
@ -301,7 +298,6 @@ class ProcessInstanceReportService:
@classmethod @classmethod
def get_column_names_for_model(cls, model: type[SpiffworkflowBaseDBModel]) -> list[str]: def get_column_names_for_model(cls, model: type[SpiffworkflowBaseDBModel]) -> list[str]:
"""Get_column_names_for_model."""
return [i.name for i in model.__table__.columns] return [i.name for i in model.__table__.columns]
@classmethod @classmethod

View File

@ -38,8 +38,6 @@ from spiffworkflow_backend.specs.start_event import StartConfiguration
class ProcessInstanceService: class ProcessInstanceService:
"""ProcessInstanceService."""
FILE_DATA_DIGEST_PREFIX = "spifffiledatadigest+" FILE_DATA_DIGEST_PREFIX = "spifffiledatadigest+"
TASK_STATE_LOCKED = "locked" TASK_STATE_LOCKED = "locked"
@ -65,7 +63,6 @@ class ProcessInstanceService:
user: UserModel, user: UserModel,
start_configuration: StartConfiguration | None = None, start_configuration: StartConfiguration | None = None,
) -> tuple[ProcessInstanceModel, StartConfiguration]: ) -> tuple[ProcessInstanceModel, StartConfiguration]:
"""Get_process_instance_from_spec."""
db.session.commit() db.session.commit()
try: try:
current_git_revision = GitService.get_current_revision() current_git_revision = GitService.get_current_revision()
@ -96,7 +93,6 @@ class ProcessInstanceService:
process_model_identifier: str, process_model_identifier: str,
user: UserModel, user: UserModel,
) -> ProcessInstanceModel: ) -> ProcessInstanceModel:
"""Create_process_instance_from_process_model_identifier."""
process_model = ProcessModelService.get_process_model(process_model_identifier) process_model = ProcessModelService.get_process_model(process_model_identifier)
process_instance_model, (cycle_count, _, duration_in_seconds) = cls.create_process_instance( process_instance_model, (cycle_count, _, duration_in_seconds) = cls.create_process_instance(
process_model, user process_model, user
@ -190,7 +186,6 @@ class ProcessInstanceService:
@classmethod @classmethod
def do_waiting(cls, status_value: str) -> None: def do_waiting(cls, status_value: str) -> None:
"""Do_waiting."""
run_at_in_seconds_threshold = round(time.time()) run_at_in_seconds_threshold = round(time.time())
process_instance_ids_to_check = ProcessInstanceQueueService.peek_many( process_instance_ids_to_check = ProcessInstanceQueueService.peek_many(
status_value, run_at_in_seconds_threshold status_value, run_at_in_seconds_threshold
@ -275,13 +270,11 @@ class ProcessInstanceService:
return process_instance_api return process_instance_api
def get_process_instance(self, process_instance_id: int) -> Any: def get_process_instance(self, process_instance_id: int) -> Any:
"""Get_process_instance."""
result = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first() result = db.session.query(ProcessInstanceModel).filter(ProcessInstanceModel.id == process_instance_id).first()
return result return result
@staticmethod @staticmethod
def get_users_assigned_to_task(processor: ProcessInstanceProcessor, spiff_task: SpiffTask) -> list[int]: def get_users_assigned_to_task(processor: ProcessInstanceProcessor, spiff_task: SpiffTask) -> list[int]:
"""Get_users_assigned_to_task."""
if processor.process_instance_model.process_initiator_id is None: if processor.process_instance_model.process_initiator_id is None:
raise ApiError.from_task( raise ApiError.from_task(
error_code="invalid_workflow", error_code="invalid_workflow",
@ -464,7 +457,6 @@ class ProcessInstanceService:
@staticmethod @staticmethod
def create_dot_dict(data: dict) -> dict[str, Any]: def create_dot_dict(data: dict) -> dict[str, Any]:
"""Create_dot_dict."""
dot_dict: dict[str, Any] = {} dot_dict: dict[str, Any] = {}
for key, value in data.items(): for key, value in data.items():
ProcessInstanceService.set_dot_value(key, value, dot_dict) ProcessInstanceService.set_dot_value(key, value, dot_dict)
@ -472,7 +464,6 @@ class ProcessInstanceService:
@staticmethod @staticmethod
def get_dot_value(path: str, source: dict) -> Any: def get_dot_value(path: str, source: dict) -> Any:
"""Get_dot_value."""
# Given a path in dot notation, such as 'fruit.type', tries to find that value in # Given a path in dot notation, such as 'fruit.type', tries to find that value in
# the source, by looking deep in the dictionary. # the source, by looking deep in the dictionary.
paths = path.split(".") # [a,b,c] paths = path.split(".") # [a,b,c]
@ -491,7 +482,6 @@ class ProcessInstanceService:
@staticmethod @staticmethod
def set_dot_value(path: str, value: Any, target: dict) -> dict: def set_dot_value(path: str, value: Any, target: dict) -> dict:
"""Set_dot_value."""
# Given a path in dot notation, such as "fruit.type", and a value "apple", will # Given a path in dot notation, such as "fruit.type", and a value "apple", will
# set the value in the target dictionary, as target["fruit"]["type"]="apple" # set the value in the target dictionary, as target["fruit"]["type"]="apple"
destination = target destination = target
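The comments above describe dot-notation access into nested dictionaries. A minimal standalone sketch of that behavior, based only on what the comments state (these helpers are illustrative, not the ProcessInstanceService code itself):

from typing import Any


def get_dot_value(path: str, source: dict) -> Any:
    # Walk one path segment at a time; give up with None if a segment is missing.
    current: Any = source
    for segment in path.split("."):
        if not isinstance(current, dict) or segment not in current:
            return None
        current = current[segment]
    return current


def set_dot_value(path: str, value: Any, target: dict) -> dict:
    # Create intermediate dictionaries as needed, then assign at the final segment.
    destination = target
    *parents, last = path.split(".")
    for segment in parents:
        destination = destination.setdefault(segment, {})
    destination[last] = value
    return target


data: dict[str, Any] = {}
set_dot_value("fruit.type", "apple", data)
assert get_dot_value("fruit.type", data) == "apple"
assert data == {"fruit": {"type": "apple"}}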

View File

@ -23,17 +23,16 @@ T = TypeVar("T")
class ProcessModelWithInstancesNotDeletableError(Exception): class ProcessModelWithInstancesNotDeletableError(Exception):
"""ProcessModelWithInstancesNotDeletableError.""" pass
class ProcessModelService(FileSystemService): class ProcessModelService(FileSystemService):
"""ProcessModelService."""
"""This is a way of persisting json files to the file system in a way that mimics the data """This is a way of persisting json files to the file system in a way that mimics the data
as it would have been stored in the database. This is specific to Workflow Specifications, and as it would have been stored in the database. This is specific to Workflow Specifications, and
Workflow Specification process_groups. Workflow Specification process_groups.
We do this so we can easily drop in a new configuration on the file system, and change all We do this so we can easily drop in a new configuration on the file system, and change all
the workflow process_models at once, or manage those files in a git repository. """ the workflow process_models at once, or manage those files in a git repository."""
GROUP_SCHEMA = ProcessGroupSchema() GROUP_SCHEMA = ProcessGroupSchema()
PROCESS_MODEL_SCHEMA = ProcessModelInfoSchema() PROCESS_MODEL_SCHEMA = ProcessModelInfoSchema()
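The class docstring above explains the design: process model metadata lives as JSON files on disk, mimicking database rows, so a whole configuration can be swapped in at once or managed with git. A minimal sketch of that general idea (the record class, file name, and helper names are assumptions for illustration, not the service's actual API):

import json
import os
from dataclasses import asdict, dataclass


@dataclass
class ProcessModelRecord:
    # Hypothetical stand-in for ProcessModelInfo with only the fields the sketch needs.
    id: str
    display_name: str


def save_record(root_path: str, record: ProcessModelRecord) -> None:
    # One "row" per directory: <root>/<id>/process_model.json (file name assumed).
    model_dir = os.path.join(root_path, record.id)
    os.makedirs(model_dir, exist_ok=True)
    with open(os.path.join(model_dir, "process_model.json"), "w") as file_handle:
        json.dump(asdict(record), file_handle, indent=2)


def load_record(root_path: str, model_id: str) -> ProcessModelRecord:
    with open(os.path.join(root_path, model_id, "process_model.json")) as file_handle:
        return ProcessModelRecord(**json.load(file_handle))

Because the whole tree is plain files, checking out a different branch under root_path effectively replaces the "database" contents, which is the advantage the docstring points at.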
@ -45,7 +44,6 @@ class ProcessModelService(FileSystemService):
@classmethod @classmethod
def is_process_group(cls, path: str) -> bool: def is_process_group(cls, path: str) -> bool:
"""Is_group."""
group_json_path = os.path.join(path, cls.PROCESS_GROUP_JSON_FILE) group_json_path = os.path.join(path, cls.PROCESS_GROUP_JSON_FILE)
if os.path.exists(group_json_path): if os.path.exists(group_json_path):
return True return True
@ -53,7 +51,6 @@ class ProcessModelService(FileSystemService):
@classmethod @classmethod
def is_process_group_identifier(cls, process_group_identifier: str) -> bool: def is_process_group_identifier(cls, process_group_identifier: str) -> bool:
"""Is_process_group_identifier."""
if os.path.exists(FileSystemService.root_path()): if os.path.exists(FileSystemService.root_path()):
process_group_path = FileSystemService.full_path_from_id(process_group_identifier) process_group_path = FileSystemService.full_path_from_id(process_group_identifier)
return cls.is_process_group(process_group_path) return cls.is_process_group(process_group_path)
@ -62,7 +59,6 @@ class ProcessModelService(FileSystemService):
@classmethod @classmethod
def is_process_model(cls, path: str) -> bool: def is_process_model(cls, path: str) -> bool:
"""Is_process_model."""
model_json_path = os.path.join(path, cls.PROCESS_MODEL_JSON_FILE) model_json_path = os.path.join(path, cls.PROCESS_MODEL_JSON_FILE)
if os.path.exists(model_json_path): if os.path.exists(model_json_path):
return True return True
@ -70,7 +66,6 @@ class ProcessModelService(FileSystemService):
@classmethod @classmethod
def is_process_model_identifier(cls, process_model_identifier: str) -> bool: def is_process_model_identifier(cls, process_model_identifier: str) -> bool:
"""Is_process_model_identifier."""
if os.path.exists(FileSystemService.root_path()): if os.path.exists(FileSystemService.root_path()):
process_model_path = FileSystemService.full_path_from_id(process_model_identifier) process_model_path = FileSystemService.full_path_from_id(process_model_identifier)
return cls.is_process_model(process_model_path) return cls.is_process_model(process_model_path)
@ -95,12 +90,10 @@ class ProcessModelService(FileSystemService):
@classmethod @classmethod
def add_process_model(cls, process_model: ProcessModelInfo) -> None: def add_process_model(cls, process_model: ProcessModelInfo) -> None:
"""Add_spec."""
cls.save_process_model(process_model) cls.save_process_model(process_model)
@classmethod @classmethod
def update_process_model(cls, process_model: ProcessModelInfo, attributes_to_update: dict) -> None: def update_process_model(cls, process_model: ProcessModelInfo, attributes_to_update: dict) -> None:
"""Update_spec."""
for atu_key, atu_value in attributes_to_update.items(): for atu_key, atu_value in attributes_to_update.items():
if hasattr(process_model, atu_key): if hasattr(process_model, atu_key):
setattr(process_model, atu_key, atu_value) setattr(process_model, atu_key, atu_value)
@ -108,7 +101,6 @@ class ProcessModelService(FileSystemService):
@classmethod @classmethod
def save_process_model(cls, process_model: ProcessModelInfo) -> None: def save_process_model(cls, process_model: ProcessModelInfo) -> None:
"""Save_process_model."""
process_model_path = os.path.abspath( process_model_path = os.path.abspath(
os.path.join(FileSystemService.root_path(), process_model.id_for_file_path()) os.path.join(FileSystemService.root_path(), process_model.id_for_file_path())
) )
@ -149,7 +141,6 @@ class ProcessModelService(FileSystemService):
@classmethod @classmethod
def get_process_model_from_relative_path(cls, relative_path: str) -> ProcessModelInfo: def get_process_model_from_relative_path(cls, relative_path: str) -> ProcessModelInfo:
"""Get_process_model_from_relative_path."""
path = os.path.join(FileSystemService.root_path(), relative_path) path = os.path.join(FileSystemService.root_path(), relative_path)
return cls.__scan_process_model(path) return cls.__scan_process_model(path)
@ -231,7 +222,6 @@ class ProcessModelService(FileSystemService):
def get_parent_group_array_and_cache_it( def get_parent_group_array_and_cache_it(
cls, process_identifier: str, process_group_cache: dict[str, ProcessGroup] cls, process_identifier: str, process_group_cache: dict[str, ProcessGroup]
) -> ProcessGroupLitesWithCache: ) -> ProcessGroupLitesWithCache:
"""Get_parent_group_array."""
full_group_id_path = None full_group_id_path = None
parent_group_array: list[ProcessGroupLite] = [] parent_group_array: list[ProcessGroupLite] = []
for process_group_id_segment in process_identifier.split("/")[0:-1]: for process_group_id_segment in process_identifier.split("/")[0:-1]:
@ -251,7 +241,6 @@ class ProcessModelService(FileSystemService):
@classmethod @classmethod
def get_parent_group_array(cls, process_identifier: str) -> list[ProcessGroupLite]: def get_parent_group_array(cls, process_identifier: str) -> list[ProcessGroupLite]:
"""Get_parent_group_array."""
parent_group_lites_with_cache = cls.get_parent_group_array_and_cache_it(process_identifier, {}) parent_group_lites_with_cache = cls.get_parent_group_array_and_cache_it(process_identifier, {})
return parent_group_lites_with_cache["process_groups"] return parent_group_lites_with_cache["process_groups"]

View File

@ -1,4 +1,3 @@
"""Process_instance_processor."""
import json import json
import sys import sys
import traceback import traceback
@ -14,8 +13,6 @@ PythonScriptContext = dict[str, Any]
@dataclass @dataclass
class ScriptUnitTestResult: class ScriptUnitTestResult:
"""ScriptUnitTestResult."""
result: bool result: bool
context: PythonScriptContext | None = None context: PythonScriptContext | None = None
error: str | None = None error: str | None = None
@ -24,8 +21,6 @@ class ScriptUnitTestResult:
class ScriptUnitTestRunner: class ScriptUnitTestRunner:
"""ScriptUnitTestRunner."""
_script_engine = CustomBpmnScriptEngine() _script_engine = CustomBpmnScriptEngine()
@classmethod @classmethod
@ -35,7 +30,6 @@ class ScriptUnitTestRunner:
input_context: PythonScriptContext, input_context: PythonScriptContext,
expected_output_context: PythonScriptContext, expected_output_context: PythonScriptContext,
) -> ScriptUnitTestResult: ) -> ScriptUnitTestResult:
"""Run_task."""
# make a new variable just for clarity, since we are going to update this dict in place # make a new variable just for clarity, since we are going to update this dict in place
# with the output variables from the script. # with the output variables from the script.
context = input_context.copy() context = input_context.copy()
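The comment above describes the pattern: copy the input context, let the script update the copy in place, then compare against the expected output. A rough standalone sketch of that pattern (how the real runner executes scripts and exactly what it compares are assumptions here):

from typing import Any


def run_script_unit_test(
    script: str,
    input_context: dict[str, Any],
    expected_output_context: dict[str, Any],
) -> bool:
    # Copy so the caller's dict is untouched; the script's assignments land in the copy.
    context = dict(input_context)
    exec(script, {}, context)  # noqa: S102 -- fine for a sketch; a real engine sandboxes this
    return context == expected_output_context


assert run_script_unit_test("y = x + 1", {"x": 1}, {"x": 1, "y": 2})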
@ -86,7 +80,6 @@ class ScriptUnitTestRunner:
task: SpiffTask, task: SpiffTask,
test_identifier: str, test_identifier: str,
) -> ScriptUnitTestResult: ) -> ScriptUnitTestResult:
"""Run_test."""
# this is totally made up, but hopefully resembles what spiffworkflow ultimately does # this is totally made up, but hopefully resembles what spiffworkflow ultimately does
unit_tests = task.task_spec.extensions["unitTests"] unit_tests = task.task_spec.extensions["unitTests"]
unit_test = [unit_test for unit_test in unit_tests if unit_test["id"] == test_identifier][0] unit_test = [unit_test for unit_test in unit_tests if unit_test["id"] == test_identifier][0]

View File

@ -5,8 +5,6 @@ from spiffworkflow_backend.models.secret_model import SecretModel
class SecretService: class SecretService:
"""SecretService."""
CIPHER_ENCODING = "ascii" CIPHER_ENCODING = "ascii"
@classmethod @classmethod
@ -33,7 +31,6 @@ class SecretService:
value: str, value: str,
user_id: int, user_id: int,
) -> SecretModel: ) -> SecretModel:
"""Add_secret."""
value = cls._encrypt(value) value = cls._encrypt(value)
secret_model = SecretModel(key=key, value=value, user_id=user_id) secret_model = SecretModel(key=key, value=value, user_id=user_id)
db.session.add(secret_model) db.session.add(secret_model)
@ -51,7 +48,6 @@ class SecretService:
@staticmethod @staticmethod
def get_secret(key: str) -> SecretModel: def get_secret(key: str) -> SecretModel:
"""Get_secret."""
secret = db.session.query(SecretModel).filter(SecretModel.key == key).first() secret = db.session.query(SecretModel).filter(SecretModel.key == key).first()
if isinstance(secret, SecretModel): if isinstance(secret, SecretModel):
return secret return secret

View File

@ -1,4 +1,3 @@
"""ServiceTask_service."""
import json import json
from typing import Any from typing import Any
@ -13,7 +12,7 @@ from spiffworkflow_backend.services.user_service import UserService
class ConnectorProxyError(Exception): class ConnectorProxyError(Exception):
"""ConnectorProxyError.""" pass
def connector_proxy_url() -> Any: def connector_proxy_url() -> Any:
@ -22,11 +21,8 @@ def connector_proxy_url() -> Any:
class ServiceTaskDelegate: class ServiceTaskDelegate:
"""ServiceTaskDelegate."""
@staticmethod @staticmethod
def check_prefixes(value: Any) -> Any: def check_prefixes(value: Any) -> Any:
"""Check_prefixes."""
if isinstance(value, str): if isinstance(value, str):
secret_prefix = "secret:" # noqa: S105 secret_prefix = "secret:" # noqa: S105
if value.startswith(secret_prefix): if value.startswith(secret_prefix):
@ -123,8 +119,6 @@ class ServiceTaskDelegate:
class ServiceTaskService: class ServiceTaskService:
"""ServiceTaskService."""
@staticmethod @staticmethod
def available_connectors() -> Any: def available_connectors() -> Any:
"""Returns a list of available connectors.""" """Returns a list of available connectors."""

View File

@ -19,19 +19,18 @@ from spiffworkflow_backend.services.process_model_service import ProcessModelSer
class ProcessModelFileNotFoundError(Exception): class ProcessModelFileNotFoundError(Exception):
"""ProcessModelFileNotFoundError.""" pass
class ProcessModelFileInvalidError(Exception): class ProcessModelFileInvalidError(Exception):
"""ProcessModelFileInvalidError.""" pass
class SpecFileService(FileSystemService): class SpecFileService(FileSystemService):
"""SpecFileService."""
"""We store spec files on the file system. This allows us to take advantage of Git for """We store spec files on the file system. This allows us to take advantage of Git for
syncing and versioning. syncing and versioning.
The files are stored in a directory whose path is determined by the category and spec names. The files are stored in a directory whose path is determined by the category and spec names.
""" """
@staticmethod @staticmethod
@ -59,7 +58,6 @@ class SpecFileService(FileSystemService):
def get_references_for_process( def get_references_for_process(
process_model_info: ProcessModelInfo, process_model_info: ProcessModelInfo,
) -> list[SpecReference]: ) -> list[SpecReference]:
"""Get_references_for_process."""
files = SpecFileService.get_files(process_model_info) files = SpecFileService.get_files(process_model_info)
references = [] references = []
for file in files: for file in files:
@ -68,7 +66,6 @@ class SpecFileService(FileSystemService):
@classmethod @classmethod
def get_references_for_file(cls, file: File, process_model_info: ProcessModelInfo) -> list[SpecReference]: def get_references_for_file(cls, file: File, process_model_info: ProcessModelInfo) -> list[SpecReference]:
"""Get_references_for_file."""
full_file_path = SpecFileService.full_file_path(process_model_info, file.name) full_file_path = SpecFileService.full_file_path(process_model_info, file.name)
file_contents: bytes = b"" file_contents: bytes = b""
with open(full_file_path) as f: with open(full_file_path) as f:
@ -77,7 +74,6 @@ class SpecFileService(FileSystemService):
@classmethod @classmethod
def get_etree_from_xml_bytes(cls, binary_data: bytes) -> etree.Element: def get_etree_from_xml_bytes(cls, binary_data: bytes) -> etree.Element:
"""Get_etree_from_xml_bytes."""
etree_xml_parser = etree.XMLParser(resolve_entities=False) etree_xml_parser = etree.XMLParser(resolve_entities=False)
return etree.fromstring(binary_data, parser=etree_xml_parser) # noqa: S320 return etree.fromstring(binary_data, parser=etree_xml_parser) # noqa: S320
@ -147,13 +143,11 @@ class SpecFileService(FileSystemService):
@staticmethod @staticmethod
def add_file(process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes) -> File: def add_file(process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes) -> File:
"""Add_file."""
# Same as update # Same as update
return SpecFileService.update_file(process_model_info, file_name, binary_data) return SpecFileService.update_file(process_model_info, file_name, binary_data)
@classmethod @classmethod
def validate_bpmn_xml(cls, file_name: str, binary_data: bytes) -> None: def validate_bpmn_xml(cls, file_name: str, binary_data: bytes) -> None:
"""Validate_bpmn_xml."""
file_type = FileSystemService.file_type(file_name) file_type = FileSystemService.file_type(file_name)
if file_type.value == FileType.bpmn.value: if file_type.value == FileType.bpmn.value:
BpmnValidator() BpmnValidator()
@ -202,7 +196,6 @@ class SpecFileService(FileSystemService):
@staticmethod @staticmethod
def get_data(process_model_info: ProcessModelInfo, file_name: str) -> bytes: def get_data(process_model_info: ProcessModelInfo, file_name: str) -> bytes:
"""Get_data."""
full_file_path = SpecFileService.full_file_path(process_model_info, file_name) full_file_path = SpecFileService.full_file_path(process_model_info, file_name)
if not os.path.exists(full_file_path): if not os.path.exists(full_file_path):
raise ProcessModelFileNotFoundError( raise ProcessModelFileNotFoundError(
@ -214,24 +207,20 @@ class SpecFileService(FileSystemService):
@staticmethod @staticmethod
def full_file_path(process_model: ProcessModelInfo, file_name: str) -> str: def full_file_path(process_model: ProcessModelInfo, file_name: str) -> str:
"""File_path."""
return os.path.abspath(os.path.join(SpecFileService.process_model_full_path(process_model), file_name)) return os.path.abspath(os.path.join(SpecFileService.process_model_full_path(process_model), file_name))
@staticmethod @staticmethod
def last_modified(process_model: ProcessModelInfo, file_name: str) -> datetime: def last_modified(process_model: ProcessModelInfo, file_name: str) -> datetime:
"""Last_modified."""
full_file_path = SpecFileService.full_file_path(process_model, file_name) full_file_path = SpecFileService.full_file_path(process_model, file_name)
return FileSystemService._last_modified(full_file_path) return FileSystemService._last_modified(full_file_path)
@staticmethod @staticmethod
def timestamp(process_model: ProcessModelInfo, file_name: str) -> float: def timestamp(process_model: ProcessModelInfo, file_name: str) -> float:
"""Timestamp."""
full_file_path = SpecFileService.full_file_path(process_model, file_name) full_file_path = SpecFileService.full_file_path(process_model, file_name)
return FileSystemService._timestamp(full_file_path) return FileSystemService._timestamp(full_file_path)
@staticmethod @staticmethod
def delete_file(process_model: ProcessModelInfo, file_name: str) -> None: def delete_file(process_model: ProcessModelInfo, file_name: str) -> None:
"""Delete_file."""
# Fixme: Remember to remove the lookup files when the process_model file is removed. # Fixme: Remember to remove the lookup files when the process_model file is removed.
# lookup_files = session.query(LookupFileModel).filter_by(file_model_id=file_id).all() # lookup_files = session.query(LookupFileModel).filter_by(file_model_id=file_id).all()
# for lf in lookup_files: # for lf in lookup_files:
@ -242,7 +231,6 @@ class SpecFileService(FileSystemService):
@staticmethod @staticmethod
def delete_all_files(process_model: ProcessModelInfo) -> None: def delete_all_files(process_model: ProcessModelInfo) -> None:
"""Delete_all_files."""
dir_path = SpecFileService.process_model_full_path(process_model) dir_path = SpecFileService.process_model_full_path(process_model)
if os.path.exists(dir_path): if os.path.exists(dir_path):
shutil.rmtree(dir_path) shutil.rmtree(dir_path)
@ -251,7 +239,6 @@ class SpecFileService(FileSystemService):
@staticmethod @staticmethod
def update_caches(ref: SpecReference) -> None: def update_caches(ref: SpecReference) -> None:
"""Update_caches."""
SpecFileService.update_process_cache(ref) SpecFileService.update_process_cache(ref)
SpecFileService.update_process_caller_cache(ref) SpecFileService.update_process_caller_cache(ref)
SpecFileService.update_message_cache(ref) SpecFileService.update_message_cache(ref)
@ -279,14 +266,12 @@ class SpecFileService(FileSystemService):
@staticmethod @staticmethod
def clear_caches() -> None: def clear_caches() -> None:
"""Clear_caches."""
db.session.query(SpecReferenceCache).delete() db.session.query(SpecReferenceCache).delete()
ProcessCallerService.clear_cache() ProcessCallerService.clear_cache()
# fixme: likely the other caches should be cleared as well, but we don't have a clean way to do so yet. # fixme: likely the other caches should be cleared as well, but we don't have a clean way to do so yet.
@staticmethod @staticmethod
def update_process_cache(ref: SpecReference) -> None: def update_process_cache(ref: SpecReference) -> None:
"""Update_process_cache."""
process_id_lookup = ( process_id_lookup = (
SpecReferenceCache.query.filter_by(identifier=ref.identifier).filter_by(type=ref.type).first() SpecReferenceCache.query.filter_by(identifier=ref.identifier).filter_by(type=ref.type).first()
) )
@ -350,7 +335,6 @@ class SpecFileService(FileSystemService):
@staticmethod @staticmethod
def update_correlation_cache(ref: SpecReference) -> None: def update_correlation_cache(ref: SpecReference) -> None:
"""Update_correlation_cache."""
for name in ref.correlations.keys(): for name in ref.correlations.keys():
correlation_property_retrieval_expressions = ref.correlations[name]["retrieval_expressions"] correlation_property_retrieval_expressions = ref.correlations[name]["retrieval_expressions"]

View File

@ -74,19 +74,16 @@ class UserService:
# Returns true if the current user is logged in. # Returns true if the current user is logged in.
@staticmethod @staticmethod
def has_user() -> bool: def has_user() -> bool:
"""Has_user."""
return "token" in g and bool(g.token) and "user" in g and bool(g.user) return "token" in g and bool(g.token) and "user" in g and bool(g.user)
@staticmethod @staticmethod
def current_user() -> Any: def current_user() -> Any:
"""Current_user."""
if not UserService.has_user(): if not UserService.has_user():
raise ApiError("logged_out", "You are no longer logged in.", status_code=401) raise ApiError("logged_out", "You are no longer logged in.", status_code=401)
return g.user return g.user
@staticmethod @staticmethod
def get_principal_by_user_id(user_id: int) -> PrincipalModel: def get_principal_by_user_id(user_id: int) -> PrincipalModel:
"""Get_principal_by_user_id."""
principal = db.session.query(PrincipalModel).filter(PrincipalModel.user_id == user_id).first() principal = db.session.query(PrincipalModel).filter(PrincipalModel.user_id == user_id).first()
if isinstance(principal, PrincipalModel): if isinstance(principal, PrincipalModel):
return principal return principal
@ -97,7 +94,6 @@ class UserService:
@classmethod @classmethod
def create_principal(cls, child_id: int, id_column_name: str = "user_id") -> PrincipalModel: def create_principal(cls, child_id: int, id_column_name: str = "user_id") -> PrincipalModel:
"""Create_principal."""
column = PrincipalModel.__table__.columns[id_column_name] column = PrincipalModel.__table__.columns[id_column_name]
principal: PrincipalModel | None = PrincipalModel.query.filter(column == child_id).first() principal: PrincipalModel | None = PrincipalModel.query.filter(column == child_id).first()
if principal is None: if principal is None:
@ -117,7 +113,6 @@ class UserService:
@classmethod @classmethod
def add_user_to_group(cls, user: UserModel, group: GroupModel) -> None: def add_user_to_group(cls, user: UserModel, group: GroupModel) -> None:
"""Add_user_to_group."""
exists = UserGroupAssignmentModel().query.filter_by(user_id=user.id).filter_by(group_id=group.id).count() exists = UserGroupAssignmentModel().query.filter_by(user_id=user.id).filter_by(group_id=group.id).count()
if not exists: if not exists:
ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id) ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id)
@ -126,7 +121,6 @@ class UserService:
@classmethod @classmethod
def add_waiting_group_assignment(cls, username: str, group: GroupModel) -> None: def add_waiting_group_assignment(cls, username: str, group: GroupModel) -> None:
"""Add_waiting_group_assignment."""
wugam = ( wugam = (
UserGroupAssignmentWaitingModel().query.filter_by(username=username).filter_by(group_id=group.id).first() UserGroupAssignmentWaitingModel().query.filter_by(username=username).filter_by(group_id=group.id).first()
) )
@ -140,7 +134,6 @@ class UserService:
@classmethod @classmethod
def apply_waiting_group_assignments(cls, user: UserModel) -> None: def apply_waiting_group_assignments(cls, user: UserModel) -> None:
"""Apply_waiting_group_assignments."""
waiting = ( waiting = (
UserGroupAssignmentWaitingModel() UserGroupAssignmentWaitingModel()
.query.filter(UserGroupAssignmentWaitingModel.username == user.username) .query.filter(UserGroupAssignmentWaitingModel.username == user.username)
@ -160,7 +153,6 @@ class UserService:
@staticmethod @staticmethod
def get_user_by_service_and_service_id(service: str, service_id: str) -> UserModel | None: def get_user_by_service_and_service_id(service: str, service_id: str) -> UserModel | None:
"""Get_user_by_service_and_service_id."""
user: UserModel = ( user: UserModel = (
UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first() UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
) )
@ -170,7 +162,6 @@ class UserService:
@classmethod @classmethod
def add_user_to_human_tasks_if_appropriate(cls, user: UserModel) -> None: def add_user_to_human_tasks_if_appropriate(cls, user: UserModel) -> None:
"""Add_user_to_human_tasks_if_appropriate."""
group_ids = [g.id for g in user.groups] group_ids = [g.id for g in user.groups]
human_tasks = HumanTaskModel.query.filter( human_tasks = HumanTaskModel.query.filter(
HumanTaskModel.lane_assignment_id.in_(group_ids) # type: ignore HumanTaskModel.lane_assignment_id.in_(group_ids) # type: ignore

View File

@ -388,7 +388,6 @@ class WorkflowExecutionService:
# execution_strategy.spiff_run # execution_strategy.spiff_run
# spiff.[some_run_task_method] # spiff.[some_run_task_method]
def run_and_save(self, exit_at: None = None, save: bool = False) -> None: def run_and_save(self, exit_at: None = None, save: bool = False) -> None:
"""Do_engine_steps."""
with safe_assertion(ProcessInstanceLockService.has_lock(self.process_instance_model.id)) as tripped: with safe_assertion(ProcessInstanceLockService.has_lock(self.process_instance_model.id)) as tripped:
if tripped: if tripped:
raise AssertionError( raise AssertionError(

View File

@ -8,8 +8,6 @@ from spiffworkflow_backend.specs.start_event import StartEvent
class WorkflowService: class WorkflowService:
"""WorkflowService."""
@classmethod @classmethod
def future_start_events(cls, workflow: BpmnWorkflow) -> list[SpiffTask]: def future_start_events(cls, workflow: BpmnWorkflow) -> list[SpiffTask]:
return [t for t in workflow.get_tasks(TaskState.FUTURE) if isinstance(t.task_spec, StartEvent)] return [t for t in workflow.get_tasks(TaskState.FUTURE) if isinstance(t.task_spec, StartEvent)]

View File

@ -34,11 +34,8 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
class BaseTest: class BaseTest:
"""BaseTest."""
@staticmethod @staticmethod
def find_or_create_user(username: str = "test_user_1") -> UserModel: def find_or_create_user(username: str = "test_user_1") -> UserModel:
"""Find_or_create_user."""
user = UserModel.query.filter_by(username=username).first() user = UserModel.query.filter_by(username=username).first()
if isinstance(user, UserModel): if isinstance(user, UserModel):
return user return user
@ -99,7 +96,6 @@ class BaseTest:
process_group_id: str, process_group_id: str,
display_name: str = "", display_name: str = "",
) -> ProcessGroup: ) -> ProcessGroup:
"""Create_process_group."""
process_group = ProcessGroup(id=process_group_id, display_name=display_name, display_order=0, admin=False) process_group = ProcessGroup(id=process_group_id, display_name=display_name, display_order=0, admin=False)
return ProcessModelService.add_process_group(process_group) return ProcessModelService.add_process_group(process_group)
@ -171,7 +167,6 @@ class BaseTest:
raise Exception("You must include the process_model_id, which must be a path to the model") raise Exception("You must include the process_model_id, which must be a path to the model")
def get_test_data_file_full_path(self, file_name: str, process_model_test_data_dir: str) -> str: def get_test_data_file_full_path(self, file_name: str, process_model_test_data_dir: str) -> str:
"""Get_test_data_file_contents."""
return os.path.join( return os.path.join(
current_app.instance_path, current_app.instance_path,
"..", "..",
@ -183,7 +178,6 @@ class BaseTest:
) )
def get_test_data_file_contents(self, file_name: str, process_model_test_data_dir: str) -> bytes: def get_test_data_file_contents(self, file_name: str, process_model_test_data_dir: str) -> bytes:
"""Get_test_data_file_contents."""
file_full_path = self.get_test_data_file_full_path(file_name, process_model_test_data_dir) file_full_path = self.get_test_data_file_full_path(file_name, process_model_test_data_dir)
with open(file_full_path, "rb") as file: with open(file_full_path, "rb") as file:
return file.read() return file.read()
@ -283,7 +277,6 @@ class BaseTest:
status: str | None = "not_started", status: str | None = "not_started",
user: UserModel | None = None, user: UserModel | None = None,
) -> ProcessInstanceModel: ) -> ProcessInstanceModel:
"""Create_process_instance_from_process_model."""
if user is None: if user is None:
user = self.find_or_create_user() user = self.find_or_create_user()
@ -342,7 +335,6 @@ class BaseTest:
target_uri: str, target_uri: str,
expected_result: bool = True, expected_result: bool = True,
) -> None: ) -> None:
"""Assert_user_has_permission."""
has_permission = AuthorizationService.user_has_permission( has_permission = AuthorizationService.user_has_permission(
user=user, user=user,
permission=permission, permission=permission,
@ -351,11 +343,9 @@ class BaseTest:
assert has_permission is expected_result assert has_permission is expected_result
def modify_process_identifier_for_path_param(self, identifier: str) -> str: def modify_process_identifier_for_path_param(self, identifier: str) -> str:
"""Modify_process_identifier_for_path_param."""
return ProcessModelInfo.modify_process_identifier_for_path_param(identifier) return ProcessModelInfo.modify_process_identifier_for_path_param(identifier)
def un_modify_modified_process_identifier_for_path_param(self, modified_identifier: str) -> str: def un_modify_modified_process_identifier_for_path_param(self, modified_identifier: str) -> str:
"""Un_modify_modified_process_model_id."""
return modified_identifier.replace(":", "/") return modified_identifier.replace(":", "/")
def create_process_model_with_metadata(self) -> ProcessModelInfo: def create_process_model_with_metadata(self) -> ProcessModelInfo:

View File

@ -8,8 +8,6 @@ from spiffworkflow_backend.services.spec_file_service import SpecFileService
class ExampleDataLoader: class ExampleDataLoader:
"""ExampleDataLoader."""
@staticmethod @staticmethod
def create_spec( def create_spec(
process_model_id: str, process_model_id: str,

View File

@ -1,5 +1,3 @@
"""User."""
from spiffworkflow_backend.exceptions.process_entity_not_found_error import ProcessEntityNotFoundError from spiffworkflow_backend.exceptions.process_entity_not_found_error import ProcessEntityNotFoundError
from spiffworkflow_backend.models.process_group import ProcessGroup from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.process_model import ProcessModelInfo

View File

@ -7,10 +7,7 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
class TestAuthentication(BaseTest): class TestAuthentication(BaseTest):
"""TestAuthentication."""
def test_get_login_state(self) -> None: def test_get_login_state(self) -> None:
"""Test_get_login_state."""
redirect_url = "http://example.com/" redirect_url = "http://example.com/"
state = AuthenticationService.generate_state(redirect_url) state = AuthenticationService.generate_state(redirect_url)
state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8")) state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))

View File

@ -2,8 +2,7 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
class TestAuthorization(BaseTest): class TestAuthorization(BaseTest):
"""TestAuthorization.""" pass
# def test_get_bearer_token(self, app: Flask) -> None: # def test_get_bearer_token(self, app: Flask) -> None:
# """Test_get_bearer_token.""" # """Test_get_bearer_token."""
# for user_id in ("user_1", "user_2", "admin_1", "admin_2"): # for user_id in ("user_1", "user_2", "admin_1", "admin_2"):

View File

@ -1,4 +1,3 @@
"""Test_various_bpmn_constructs."""
from typing import Any from typing import Any
from flask.app import Flask from flask.app import Flask
@ -53,7 +52,6 @@ class TestForGoodErrors(BaseTest):
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel, with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_task_data_is_set_even_if_process_instance_errors."""
process_model = load_test_spec( process_model = load_test_spec(
process_model_id="group/end_user_instructions_error", process_model_id="group/end_user_instructions_error",
bpmn_file_name="instructions_error.bpmn", bpmn_file_name="instructions_error.bpmn",

View File

@ -13,8 +13,6 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
class TestNestedGroups(BaseTest): class TestNestedGroups(BaseTest):
"""TestNestedGroups."""
def test_delete_group_with_running_instance( def test_delete_group_with_running_instance(
self, self,
app: Flask, app: Flask,
@ -22,7 +20,6 @@ class TestNestedGroups(BaseTest):
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel, with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_delete_group_with_running_instance."""
process_group_id = "test_group" process_group_id = "test_group"
process_model_id = "manual_task" process_model_id = "manual_task"
bpmn_file_name = "manual_task.bpmn" bpmn_file_name = "manual_task.bpmn"
@ -65,7 +62,6 @@ class TestNestedGroups(BaseTest):
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel, with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_delete_group_with_running_instance_in_nested_group."""
process_group_a = ProcessGroup( process_group_a = ProcessGroup(
id="group_a", id="group_a",
display_name="Group A", display_name="Group A",
@ -120,7 +116,6 @@ class TestNestedGroups(BaseTest):
client: FlaskClient, client: FlaskClient,
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
) -> None: ) -> None:
"""Test_nested_groups."""
# /process-groups/{process_group_path}/show # /process-groups/{process_group_path}/show
target_uri = "/v1.0/process-groups/group_a,group_b" target_uri = "/v1.0/process-groups/group_a,group_b"
user = self.find_or_create_user() user = self.find_or_create_user()
@ -134,7 +129,6 @@ class TestNestedGroups(BaseTest):
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel, with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_add_nested_group."""
process_group_a = ProcessGroup( process_group_a = ProcessGroup(
id="group_a", id="group_a",
display_name="Group A", display_name="Group A",
@ -179,7 +173,6 @@ class TestNestedGroups(BaseTest):
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel, with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_process_model_create."""
process_group_a = ProcessGroup( process_group_a = ProcessGroup(
id="group_a", id="group_a",
display_name="Group A", display_name="Group A",
@ -226,7 +219,6 @@ class TestNestedGroups(BaseTest):
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel, with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_process_group_show."""
# target_uri = "/process-groups/{process_group_id}" # target_uri = "/process-groups/{process_group_id}"
# user = self.find_or_create_user("testadmin1") # user = self.find_or_create_user("testadmin1")
# self.add_permissions_to_user( # self.add_permissions_to_user(

View File

@ -1,4 +1,3 @@
"""Test_authentication."""
import base64 import base64
import jwt import jwt
@ -46,7 +45,6 @@ class TestFlaskOpenId(BaseTest):
client: FlaskClient, client: FlaskClient,
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
) -> None: ) -> None:
"""Test_get_token."""
code = "testadmin1:1234123412341234" code = "testadmin1:1234123412341234"
"""It should be possible to get a token.""" """It should be possible to get a token."""

View File

@@ -40,15 +40,12 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
class TestProcessApi(BaseTest):
-"""TestProcessAPi."""
def test_returns_403_if_user_does_not_have_permission(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
-"""Test_returns_403_if_user_does_not_have_permission."""
user = self.find_or_create_user()
response = client.get(
"/v1.0/process-groups",
@@ -75,7 +72,6 @@ class TestProcessApi(BaseTest):
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
-"""Test_permissions_check."""
user = self.find_or_create_user()
self.add_permissions_to_user(user, target_uri="/v1.0/process-groups", permission_names=["read"])
request_body = {
@@ -107,7 +103,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_model_create."""
process_group_id = "test_process_group"
process_group_display_name = "Test Process Group"
# creates the group directory, and the json file
@@ -157,7 +152,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_model_create_with_natural_language."""
process_group_id = "test_process_group"
process_group_description = "Test Process Group"
process_model_id = "sample"
@@ -268,7 +262,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_model_delete."""
process_group_id = "test_process_group"
process_model_id = "sample"
process_model_identifier = f"{process_group_id}/{process_model_id}"
@@ -299,7 +292,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_model_delete_with_instances."""
test_process_group_id = "runs_without_input"
test_process_model_id = "sample"
bpmn_file_name = "sample.bpmn"
@@ -350,7 +342,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_model_update."""
self.create_process_group_with_api(client, with_super_admin_user, "test_process_group", "Test Process Group")
process_model_identifier = "test_process_group/make_cookies"
self.create_process_model_with_api(
@@ -390,7 +381,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_model_list_all."""
group_id = "test_group/test_sub_group"
self.create_process_group_with_api(client, with_super_admin_user, group_id)
@@ -425,7 +415,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_model_list."""
# create a group
group_id = "test_group"
self.create_process_group_with_api(client, with_super_admin_user, group_id)
@@ -592,7 +581,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_add_process_group."""
process_group = ProcessGroup(
id="test",
display_name="Another Test Category",
@@ -629,7 +617,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_group_delete."""
process_group_id = "test"
process_group_display_name = "My Process Group"
@@ -851,7 +838,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_model_file_update."""
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
bad_process_model_identifier = f"x{process_model_identifier}"
modified_bad_process_model_identifier = bad_process_model_identifier.replace("/", ":")
@@ -872,7 +858,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_model_file_update."""
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
modified_process_model_identifier = process_model_identifier.replace("/", ":")
@@ -914,7 +899,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_model_file_update."""
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
modified_process_model_identifier = process_model_identifier.replace("/", ":")
@@ -949,7 +933,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_get_file."""
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
modified_process_model_identifier = process_model_identifier.replace("/", ":")
@@ -969,7 +952,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_get_workflow_from_workflow_spec."""
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
modified_process_model_identifier = process_model_identifier.replace("/", ":")
@@ -1100,7 +1082,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_get_process_group_when_found."""
process_model_identifier = self.create_group_and_model_with_bpmn(client, with_super_admin_user)
process_group_id, process_model_id = os.path.split(process_model_identifier)
@@ -1122,7 +1103,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_get_process_group_show_when_nested."""
self.create_group_and_model_with_bpmn(
client=client,
user=with_super_admin_user,
@@ -1156,7 +1136,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_get_process_model_when_found."""
process_model_identifier = self.create_group_and_model_with_bpmn(
client, with_super_admin_user, bpmn_file_name="random_fact.bpmn"
)
@@ -1181,7 +1160,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_get_process_model_when_not_found."""
process_model_dir_name = "THIS_NO_EXISTS"
group_id = self.create_process_group_with_api(client, with_super_admin_user, "my_group")
bad_process_model_id = f"{group_id}/{process_model_dir_name}"
@@ -1201,7 +1179,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_create."""
test_process_model_id = "runs_without_input/sample"
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id_with_api(client, test_process_model_id, headers)
@@ -1220,7 +1197,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_run."""
# process_model_id = "runs_without_input/sample"
process_model_identifier = self.create_group_and_model_with_bpmn(
client=client,
@@ -1257,7 +1233,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_show."""
process_group_id = "simple_script"
process_model_id = "simple_script"
process_model_identifier = self.create_group_and_model_with_bpmn(
@@ -1296,7 +1271,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_show_with_specified_process_identifier."""
process_model_id = "call_activity_nested"
process_model_identifier = self.create_group_and_model_with_bpmn(
client=client,
@@ -1343,7 +1317,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_message_send_when_starting_process_instance."""
# ensure process model is loaded
process_group_id = "test_message_send"
process_model_id = "message_receiver"
@@ -1391,7 +1364,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_message_send_when_providing_message_to_running_process_instance."""
process_group_id = "test_message_send"
process_model_id = "message_sender"
bpmn_file_name = "message_sender.bpmn"
@@ -1467,7 +1439,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_message_send_when_providing_message_to_running_process_instance."""
process_group_id = "test_message_send"
process_model_id = "message_sender"
bpmn_file_name = "message_sender.bpmn"
@@ -1562,7 +1533,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_message_send_when_providing_message_to_running_process_instance."""
# this task will wait on a catch event
process_group_id = "test_message_send"
process_model_id = "message_sender"
@@ -1625,7 +1595,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_delete."""
process_group_id = "my_process_group"
process_model_id = "sample"
bpmn_file_location = "sample"
@@ -1665,7 +1634,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_run_user_task."""
process_group_id = "my_process_group"
process_model_id = "dynamic_enum_select_fields"
bpmn_file_location = "dynamic_enum_select_fields"
@@ -1807,7 +1775,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_list_with_default_list."""
process_group_id = "runs_without_input"
process_model_id = "sample"
bpmn_file_location = "sample"
@@ -1843,7 +1810,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_list_with_paginated_items."""
process_group_id = "runs_without_input"
process_model_id = "sample"
bpmn_file_name = "sample.bpmn"
@@ -1882,7 +1848,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_list_filter."""
process_group_id = "runs_without_input"
process_model_id = "sample"
bpmn_file_name = "sample.bpmn"
@@ -2058,7 +2023,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_report_list."""
process_group_id = "runs_without_input"
process_model_id = "sample"
bpmn_file_name = "sample.bpmn"
@@ -2097,7 +2061,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_error_handler."""
process_group_id = "data"
process_model_id = "error"
bpmn_file_name = "error.bpmn"
@@ -2138,7 +2101,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_error_handler_suspend."""
process_group_id = "data"
process_model_id = "error"
bpmn_file_name = "error.bpmn"
@@ -2174,7 +2136,6 @@ class TestProcessApi(BaseTest):
assert process.status == "suspended"
def test_error_handler_system_notification(self) -> None:
-"""Test_error_handler_system_notification."""
# TODO: make sure the system notification process is run on exceptions
...
@@ -2185,7 +2146,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_task_data_is_set_even_if_process_instance_errors."""
process_model = load_test_spec(
process_model_id="group/error_with_task_data",
bpmn_file_name="script_error_with_task_data.bpmn",
@@ -2214,7 +2174,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_model_file_create."""
process_group_id = "hello_world"
process_model_id = "hello_world"
file_name = "hello_world.svg"
@@ -2249,7 +2208,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_can_get_message_instances_by_process_instance_id."""
process_group_id = "test_message_send"
process_model_id = "message_receiver"
bpmn_file_name = "message_receiver.bpmn"
@@ -2540,7 +2498,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_suspend."""
bpmn_file_name = "manual_task.bpmn"
bpmn_file_location = "manual_task"
process_model_identifier = self.create_group_and_model_with_bpmn(
@@ -2606,7 +2563,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_script_unit_test_run."""
process_group_id = "test_group"
process_model_id = "simple_script"
bpmn_file_name = "simple_script.bpmn"
@@ -2663,7 +2619,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_script_unit_test_run."""
process_group_id = "test_group"
process_model_id = "process_navigation"
bpmn_file_name = "process_navigation.bpmn"
@@ -2740,7 +2695,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_script_unit_test_run."""
process_group_id = "test_group"
process_model_id = "manual_task"
bpmn_file_name = "manual_task.bpmn"
@@ -2801,7 +2755,6 @@ class TestProcessApi(BaseTest):
assert len(response.json) == 7
def setup_initial_groups_for_move_tests(self, client: FlaskClient, with_super_admin_user: UserModel) -> None:
-"""Setup_initial_groups_for_move_tests."""
groups = ["group_a", "group_b", "group_b/group_bb"]
# setup initial groups
for group in groups:
@@ -2819,7 +2772,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_move_model."""
self.setup_initial_groups_for_move_tests(client, with_super_admin_user)
process_model_id = "test_model"
@@ -2867,7 +2819,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_move_group."""
self.setup_initial_groups_for_move_tests(client, with_super_admin_user)
# add sub group to `group_a`
@@ -3094,7 +3045,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_can_get_process_instance_list_with_report_metadata_and_process_initator."""
user_one = self.create_user_with_permission(username="user_one")
process_model = load_test_spec(
@@ -3267,7 +3217,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_instance_list_can_order_by_metadata."""
process_model = load_test_spec(
"test_group/hello_world",
process_model_source_directory="nested-task-data-structure",
@@ -3339,7 +3288,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_process_data_show."""
process_model = load_test_spec(
"test_group/data_object_test",
process_model_source_directory="data_object_test",
View File
@@ -1,4 +1,3 @@
-"""Test_users_controller."""
from flask.app import Flask
from flask.testing import FlaskClient
from spiffworkflow_backend.models.user import UserModel
@@ -8,8 +7,6 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
class TestProcessInstancesController(BaseTest):
-"""TestProcessInstancesController."""
def test_find_by_id(
self,
app: Flask,
@@ -17,7 +14,6 @@ class TestProcessInstancesController(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_user_search_returns_a_user."""
user_one = self.create_user_with_permission(username="user_one", target_uri="/process-instances/find-by-id/*")
user_two = self.create_user_with_permission(username="user_two", target_uri="/process-instances/find-by-id/*")
View File
@@ -12,8 +12,6 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
class SecretServiceTestHelpers(BaseTest):
-"""SecretServiceTestHelpers."""
test_key = "test_key"
test_value = "test_value"
test_process_group_id = "test"
@@ -23,11 +21,9 @@ class SecretServiceTestHelpers(BaseTest):
test_process_model_description = "Om nom nom delicious cookies"
def add_test_secret(self, user: UserModel) -> SecretModel:
-"""Add_test_secret."""
return SecretService().add_secret(self.test_key, self.test_value, user.id)
def add_test_process(self, client: FlaskClient, user: UserModel) -> ProcessModelInfo:
-"""Add_test_process."""
self.create_process_group_with_api(
client,
user,
@@ -47,15 +43,12 @@ class SecretServiceTestHelpers(BaseTest):
class TestSecretService(SecretServiceTestHelpers):
-"""TestSecretService."""
def test_add_secret(
self,
app: Flask,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_add_secret."""
test_secret = self.add_test_secret(with_super_admin_user)
assert test_secret is not None
@@ -69,7 +62,6 @@ class TestSecretService(SecretServiceTestHelpers):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_add_secret_duplicate_key_fails."""
self.add_test_secret(with_super_admin_user)
with pytest.raises(ApiError) as ae:
self.add_test_secret(with_super_admin_user)
@@ -81,7+73,6 @@ class TestSecretService(SecretServiceTestHelpers):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_get_secret."""
self.add_test_secret(with_super_admin_user)
secret = SecretService().get_secret(self.test_key)
@@ -94,7 +85,6 @@ class TestSecretService(SecretServiceTestHelpers):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_get_secret_bad_service."""
self.add_test_secret(with_super_admin_user)
with pytest.raises(ApiError):
@@ -124,7 +114,6 @@ class TestSecretService(SecretServiceTestHelpers):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_update_secret_bad_secret_fails."""
secret = self.add_test_secret(with_super_admin_user)
with pytest.raises(ApiError) as ae:
SecretService.update_secret(secret.key + "x", "some_new_value", with_super_admin_user.id)
@@ -154,7 +143,6 @@ class TestSecretService(SecretServiceTestHelpers):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_delete_secret_bad_secret_fails."""
self.add_test_secret(with_super_admin_user)
with pytest.raises(ApiError) as ae:
SecretService.delete_secret(self.test_key + "x", with_super_admin_user.id)
View File
@@ -20,7 +20,6 @@ class TestSecretsController(SecretServiceTestHelpers):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_add_secret."""
secret_model = SecretModel(
key=self.test_key,
value=self.test_value,
@@ -66,7 +65,6 @@ class TestSecretsController(SecretServiceTestHelpers):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_update_secret."""
self.add_test_secret(with_super_admin_user)
secret: SecretModel | None = SecretService.get_secret(self.test_key)
assert secret
View File
@@ -6,8 +6,6 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
class TestUsersController(BaseTest):
-"""TestUsersController."""
def test_user_search_returns_a_user(
self,
app: Flask,
@@ -15,7 +13,6 @@ class TestUsersController(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
-"""Test_user_search_returns_a_user."""
self.find_or_create_user(username="aa")
self.find_or_create_user(username="ab")
self.find_or_create_user(username="abc")
View File
@@ -1,4 +1,3 @@
-"""Test_get_localtime."""
import json
from flask import g
View File
@@ -1,4 +1,3 @@
-"""Test_environment_var_script."""
from flask import Flask
from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext
from spiffworkflow_backend.scripts.get_env import GetEnv
View File
@@ -1,4 +1,3 @@
-"""Test_get_localtime."""
from flask.app import Flask
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
@@ -14,7 +13,6 @@ class TestGetProcessInitiatorUser(BaseTest):
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
-"""Test_sets_permission_correctly_on_human_task."""
initiator_user = self.find_or_create_user("initiator_user")
assert initiator_user.principal is not None
AuthorizationService.import_permissions_from_yaml_file()
View File
@@ -1,4 +1,3 @@
-"""Test_get_localtime."""
import pytest
from flask.app import Flask
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
Some files were not shown because too many files have changed in this diff.