upgrade black, give it the preview flag, and let it rip on long strings

burnettk 2022-12-30 23:08:00 -05:00
parent 3c395ca368
commit 09e012674a
39 changed files with 315 additions and 160 deletions


@@ -11,6 +11,12 @@ repos:
         require_serial: true
         # exclude: ^migrations/
         exclude: "/migrations/"
+        # otherwise it will not fix long lines if the long lines contain long strings
+        # https://github.com/psf/black/pull/1132
+        # https://github.com/psf/black/pull/1609
+        args: [--preview]
       - id: check-added-large-files
         files: ^spiffworkflow-backend/
         name: Check for added large files
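The behavior those comments describe can be reproduced with black's Python API. A minimal sketch, assuming black >= 22.1 is installed (the sample source line is made up for illustration; exact output depends on the black version):

import black

# One line that exceeds the default 88-character limit because of its string.
source = 'msg = "Could not find waiting message for identifier %s and process instance %s" % (a, b)\n'

# With preview=True, the long string is split into implicitly concatenated
# pieces, which is the transformation seen throughout the hunks below;
# without it, black leaves the long string alone.
print(black.format_str(source, mode=black.Mode(preview=True)))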

poetry.lock generated

@@ -163,7 +163,7 @@ python-versions = "*"
 [[package]]
 name = "black"
-version = "22.10.0"
+version = "23.1a1"
 description = "The uncompromising code formatter."
 category = "dev"
 optional = false
@@ -614,7 +614,7 @@ werkzeug = "*"
 type = "git"
 url = "https://github.com/sartography/flask-bpmn"
 reference = "main"
-resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4"
+resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b"
 [[package]]
 name = "flask-cors"
@@ -1760,7 +1760,7 @@ lxml = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "bba7ddf5478af579b891ca63c50babbfccf6b7a4"
+resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c"
 [[package]]
 name = "sqlalchemy"
@@ -2182,27 +2182,18 @@ billiard = [
     {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"},
 ]
 black = [
-    {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"},
-    {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"},
-    {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"},
-    {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"},
-    {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"},
-    {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"},
-    {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"},
-    {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"},
-    {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"},
-    {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"},
-    {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"},
-    {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"},
-    {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"},
-    {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"},
-    {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"},
-    {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"},
-    {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"},
-    {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"},
-    {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"},
-    {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"},
-    {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"},
+    {file = "black-23.1a1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fb7641d442ede92538bc70fa0201f884753a7d0f62f26c722b7b00301b95902"},
+    {file = "black-23.1a1-cp310-cp310-win_amd64.whl", hash = "sha256:88288a645402106b8eb9f50d7340ae741e16240bb01c2eed8466549153daa96e"},
+    {file = "black-23.1a1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db1d8027ce7ae53f0ccf02b0be0b8808fefb291d6cb1543420f4165d96d364c"},
+    {file = "black-23.1a1-cp311-cp311-win_amd64.whl", hash = "sha256:88ec25a64063945b4591b6378bead544c5d3260de1c93ad96f3ad2d76ddd76fd"},
+    {file = "black-23.1a1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dff6f0157e47fbbeada046fca144b6557d3be2fb2602d668881cd179f04a352"},
+    {file = "black-23.1a1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca658b69260a18bf7aa0b0a6562dbbd304a737487d1318998aaca5a75901fd2c"},
+    {file = "black-23.1a1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85dede655442f5e246e7abd667fe07e14916897ba52f3640b5489bf11f7dbf67"},
+    {file = "black-23.1a1-cp38-cp38-win_amd64.whl", hash = "sha256:ddbf9da228726d46f45c29024263e160d41030a415097254817d65127012d1a2"},
+    {file = "black-23.1a1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63330069d8ec909cf4e2c4d43a7f00aeb03335430ef9fec6cd2328e6ebde8a77"},
+    {file = "black-23.1a1-cp39-cp39-win_amd64.whl", hash = "sha256:793c9176beb2adf295f6b863d9a4dc953fe2ac359ca3da108d71d14cb2c09e52"},
+    {file = "black-23.1a1-py3-none-any.whl", hash = "sha256:e88e4b633d64b9e7adc4a6b922f52bb204af9f90d7b1e3317e6490f2b598b1ea"},
+    {file = "black-23.1a1.tar.gz", hash = "sha256:0b945a5a1e5a5321f884de0061d5a8585d947c9b608e37b6d26ceee4dfdf4b62"},
 ]
 blinker = [
     {file = "blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"},
@@ -2857,7 +2848,18 @@ psycopg2 = [
     {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"},
 ]
 pyasn1 = [
+    {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
+    {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
+    {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
+    {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
     {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
+    {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
+    {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
+    {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
+    {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
+    {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
+    {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
+    {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
     {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
 ]
 pycodestyle = [
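To double-check which black the lock file actually resolved, one hedged option is to query the installed package at runtime from inside the project's virtualenv:

import black

# Should print 23.1a1 once this lock file has been installed.
print(black.__version__)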
pycodestyle = [ pycodestyle = [


@@ -27,7 +27,6 @@ def main():
     """Main."""
     app = get_hacked_up_app_for_script()
     with app.app_context():
-
         process_model_identifier_ticket = "ticket"
         db.session.query(ProcessInstanceModel).filter(
             ProcessInstanceModel.process_model_identifier


@@ -40,7 +40,8 @@ def hello_world():
         return (
             'Hello, %s, <a href="/private">See private</a> '
             '<a href="/logout">Log out</a>'
-        ) % oidc.user_getfield("preferred_username")
+            % oidc.user_getfield("preferred_username")
+        )
     else:
         return 'Welcome anonymous, <a href="/private">Log in</a>'


@@ -93,7 +93,8 @@ def create_app() -> flask.app.Flask:
     if os.environ.get("FLASK_SESSION_SECRET_KEY") is None:
         raise KeyError(
-            "Cannot find the secret_key from the environment. Please set FLASK_SESSION_SECRET_KEY"
+            "Cannot find the secret_key from the environment. Please set"
+            " FLASK_SESSION_SECRET_KEY"
         )
     app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY")


@@ -17,21 +17,21 @@ def setup_database_uri(app: Flask) -> None:
     if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
         database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
         if app.config.get("SPIFF_DATABASE_TYPE") == "sqlite":
-            app.config[
-                "SQLALCHEMY_DATABASE_URI"
-            ] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
+            app.config["SQLALCHEMY_DATABASE_URI"] = (
+                f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
+            )
         elif app.config.get("SPIFF_DATABASE_TYPE") == "postgres":
-            app.config[
-                "SQLALCHEMY_DATABASE_URI"
-            ] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
+            app.config["SQLALCHEMY_DATABASE_URI"] = (
+                f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
+            )
         else:
             # use pswd to trick flake8 with hardcoded passwords
             db_pswd = os.environ.get("DB_PASSWORD")
             if db_pswd is None:
                 db_pswd = ""
-            app.config[
-                "SQLALCHEMY_DATABASE_URI"
-            ] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
+            app.config["SQLALCHEMY_DATABASE_URI"] = (
+                f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
+            )
     else:
         app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get(
             "SPIFFWORKFLOW_BACKEND_DATABASE_URI"
@@ -91,10 +91,12 @@ def setup_config(app: Flask) -> None:
         app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"],
     )
     print(
-        f"set permissions file name config: {app.config['SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME']}"
+        "set permissions file name config:"
+        f" {app.config['SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME']}"
     )
     print(
-        f"set permissions file name full path: {app.config['PERMISSIONS_FILE_FULLPATH']}"
+        "set permissions file name full path:"
+        f" {app.config['PERMISSIONS_FILE_FULLPATH']}"
     )
 # unversioned (see .gitignore) config that can override everything and include secrets.


@@ -86,5 +86,6 @@ def ensure_failure_cause_is_set_if_message_instance_failed(
     if isinstance(instance, MessageInstanceModel):
         if instance.status == "failed" and instance.failure_cause is None:
             raise ValueError(
-                f"{instance.__class__.__name__}: failure_cause must be set if status is failed"
+                f"{instance.__class__.__name__}: failure_cause must be set if"
+                " status is failed"
             )


@@ -62,7 +62,10 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     active_human_tasks = relationship(
         "HumanTaskModel",
-        primaryjoin="and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id, HumanTaskModel.completed == False)",
+        primaryjoin=(
+            "and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id,"
+            " HumanTaskModel.completed == False)"
+        ),
     )  # type: ignore
     human_tasks = relationship(


@@ -43,8 +43,8 @@ class Task:
     FIELD_TYPE_EMAIL = "email"  # email: Email address
     FIELD_TYPE_URL = "url"  # url: Website address
-    FIELD_PROP_AUTO_COMPLETE_MAX = (
-        "autocomplete_num"  # Not used directly, passed in from the front end.
+    FIELD_PROP_AUTO_COMPLETE_MAX = (  # Not used directly, passed in from the front end.
+        "autocomplete_num"
     )
     # Required field
@@ -77,8 +77,8 @@ class Task:
     # File specific field properties
     FIELD_PROP_DOC_CODE = "doc_code"  # to associate a file upload field with a doc code
-    FIELD_PROP_FILE_DATA = (
-        "file_data"  # to associate a bit of data with a specific file upload file.
+    FIELD_PROP_FILE_DATA = (  # to associate a bit of data with a specific file upload file.
+        "file_data"
     )
     # Additional properties


@@ -131,8 +131,11 @@ def message_start(
         raise (
             ApiError(
                 error_code="cannot_find_waiting_message",
-                message=f"Could not find waiting message for identifier {message_identifier} "
-                f"and process instance {process_instance.id}",
+                message=(
+                    "Could not find waiting message for identifier"
+                    f" {message_identifier} and process instance"
+                    f" {process_instance.id}"
+                ),
                 status_code=400,
             )
         )
@@ -151,7 +154,10 @@ def message_start(
         raise (
             ApiError(
                 error_code="cannot_start_message",
-                message=f"Message with identifier cannot be start with message: {message_identifier}",
+                message=(
+                    "Message with identifier cannot be start with message:"
+                    f" {message_identifier}"
+                ),
                 status_code=400,
             )
         )


@@ -43,7 +43,9 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R
     raise (
         ApiError(
             error_code="could_not_requests_to_check",
-            message="The key 'requests_to_check' not found at root of request body.",
+            message=(
+                "The key 'requests_to_check' not found at root of request body."
+            ),
             status_code=400,
         )
     )
@@ -139,7 +141,8 @@ def task_data_update(
     if process_instance:
         if process_instance.status != "suspended":
             raise ProcessInstanceTaskDataCannotBeUpdatedError(
-                f"The process instance needs to be suspended to udpate the task-data. It is currently: {process_instance.status}"
+                "The process instance needs to be suspended to udpate the task-data."
+                f" It is currently: {process_instance.status}"
             )
         process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json)
@@ -163,12 +166,18 @@ def task_data_update(
         else:
             raise ApiError(
                 error_code="update_task_data_error",
-                message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.",
+                message=(
+                    f"Could not find Task: {task_id} in Instance:"
+                    f" {process_instance_id}."
+                ),
             )
     else:
         raise ApiError(
             error_code="update_task_data_error",
-            message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.",
+            message=(
+                f"Could not update task data for Instance: {process_instance_id}, and"
+                f" Task: {task_id}."
+            ),
         )
     return Response(
         json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
@@ -236,7 +245,9 @@ def manual_complete_task(
     else:
         raise ApiError(
             error_code="complete_task",
-            message=f"Could not complete Task {task_id} in Instance {process_instance_id}",
+            message=(
+                f"Could not complete Task {task_id} in Instance {process_instance_id}"
+            ),
         )
     return Response(
         json.dumps(ProcessInstanceModelSchema().dump(process_instance)),


@@ -124,6 +124,7 @@ def process_group_move(
         original_process_group_id, new_location
     )
     _commit_and_push_to_git(
-        f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}"
+        f"User: {g.user.username} moved process group {original_process_group_id} to"
+        f" {new_process_group.id}"
     )
     return make_response(jsonify(new_process_group), 200)


@@ -94,7 +94,10 @@ def process_instance_run(
     if process_instance.status != "not_started":
         raise ApiError(
             error_code="process_instance_not_runnable",
-            message=f"Process Instance ({process_instance.id}) is currently running or has already run.",
+            message=(
+                f"Process Instance ({process_instance.id}) is currently running or has"
+                " already run."
+            ),
             status_code=400,
         )
@@ -350,8 +353,8 @@ def process_instance_delete(
     if not process_instance.has_terminal_status():
         raise ProcessInstanceCannotBeDeletedError(
-            f"Process instance ({process_instance.id}) cannot be deleted since it does not have a terminal status. "
-            f"Current status is {process_instance.status}."
+            f"Process instance ({process_instance.id}) cannot be deleted since it does"
+            f" not have a terminal status. Current status is {process_instance.status}."
         )
     # (Pdb) db.session.delete
@@ -620,7 +623,8 @@ def _get_process_instance(
     ).first()
     if spec_reference is None:
         raise SpecReferenceNotFoundError(
-            f"Could not find given process identifier in the cache: {process_identifier}"
+            "Could not find given process identifier in the cache:"
+            f" {process_identifier}"
        )
     process_model_with_diagram = ProcessModelService.get_process_model(
@@ -678,7 +682,10 @@ def _find_process_instance_for_me_or_raise(
     raise (
         ApiError(
             error_code="process_instance_cannot_be_found",
-            message=f"Process instance with id {process_instance_id} cannot be found that is associated with you.",
+            message=(
+                f"Process instance with id {process_instance_id} cannot be found"
+                " that is associated with you."
+            ),
             status_code=400,
         )
     )


@@ -40,7 +40,10 @@ def script_unit_test_create(
     if file is None:
         raise ApiError(
             error_code="cannot_find_file",
-            message=f"Could not find the primary bpmn file for process_model: {process_model.id}",
+            message=(
+                "Could not find the primary bpmn file for process_model:"
+                f" {process_model.id}"
+            ),
             status_code=404,
         )


@@ -157,7 +157,10 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
         raise (
             ApiError(
                 error_code="no_human_task",
-                message=f"Cannot find a task to complete for task id '{task_id}' and process instance {process_instance_id}.",
+                message=(
+                    f"Cannot find a task to complete for task id '{task_id}' and"
+                    f" process instance {process_instance_id}."
+                ),
                 status_code=500,
             )
         )
@@ -203,7 +206,10 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
         raise (
             ApiError(
                 error_code="missing_form_file",
-                message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}",
+                message=(
+                    "Cannot find a form file for process_instance_id:"
+                    f" {process_instance_id}, task_id: {task_id}"
+                ),
                 status_code=400,
             )
         )
@@ -221,7 +227,10 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
         raise (
             ApiError(
                 error_code="error_loading_form",
-                message=f"Could not load form schema from: {form_schema_file_name}. Error was: {str(exception)}",
+                message=(
+                    f"Could not load form schema from: {form_schema_file_name}."
+                    f" Error was: {str(exception)}"
+                ),
                 status_code=400,
             )
         ) from exception
@@ -285,8 +294,10 @@ def task_submit(
     if not process_instance.can_submit_task():
         raise ApiError(
             error_code="process_instance_not_runnable",
-            message=f"Process Instance ({process_instance.id}) has status "
-            f"{process_instance.status} which does not allow tasks to be submitted.",
+            message=(
+                f"Process Instance ({process_instance.id}) has status "
+                f"{process_instance.status} which does not allow tasks to be submitted."
+            ),
             status_code=400,
         )
@@ -317,7 +328,10 @@ def task_submit(
         raise (
             ApiError(
                 error_code="no_human_task",
-                message=f"Cannot find a task to complete for task id '{task_id}' and process instance {process_instance_id}.",
+                message=(
+                    f"Cannot find a task to complete for task id '{task_id}' and"
+                    f" process instance {process_instance_id}."
+                ),
                 status_code=500,
             )
         )
@@ -511,7 +525,10 @@ def _update_form_schema_with_task_data_as_needed(
                 raise (
                     ApiError(
                         error_code="missing_task_data_var",
-                        message=f"Task data is missing variable: {task_data_var}",
+                        message=(
+                            "Task data is missing variable:"
+                            f" {task_data_var}"
+                        ),
                         status_code=500,
                     )
                 )


@@ -67,13 +67,16 @@ def verify_token(
                 user_model = get_user_from_decoded_internal_token(decoded_token)
             except Exception as e:
                 current_app.logger.error(
-                    f"Exception in verify_token getting user from decoded internal token. {e}"
+                    "Exception in verify_token getting user from decoded"
+                    f" internal token. {e}"
                 )
         elif "iss" in decoded_token.keys():
             try:
                 if AuthenticationService.validate_id_token(token):
                     user_info = decoded_token
-            except ApiError as ae:  # API Error is only thrown in the token is outdated.
+            except (
+                ApiError
+            ) as ae:  # API Error is only thrown in the token is outdated.
                 # Try to refresh the token
                 user = UserService.get_user_by_service_and_service_id(
                     decoded_token["iss"], decoded_token["sub"]


@@ -26,6 +26,7 @@ user_blueprint = Blueprint("main", __name__)
 # user = UserService.create_user('internal', username)
 # return Response(json.dumps({"id": user.id}), status=201, mimetype=APPLICATION_JSON)
+
 # def _create_user(username):
 # user = UserModel.query.filter_by(username=username).first()
 # if user is not None:


@@ -35,7 +35,10 @@ class FactService(Script):
         if fact == "cat":
             details = "The cat in the hat"  # self.get_cat()
         elif fact == "norris":
-            details = "Chuck Norris doesnt read books. He stares them down until he gets the information he wants."
+            details = (
+                "Chuck Norris doesnt read books. He stares them down until he gets the"
+                " information he wants."
+            )
         elif fact == "buzzword":
             details = "Move the Needle."  # self.get_buzzword()
         else:


@@ -32,7 +32,8 @@ class GetGroupMembers(Script):
         group = GroupModel.query.filter_by(identifier=group_identifier).first()
         if group is None:
             raise GroupNotFoundError(
-                f"Script 'get_group_members' could not find group with identifier '{group_identifier}'."
+                "Script 'get_group_members' could not find group with identifier"
+                f" '{group_identifier}'."
             )
         usernames = [u.username for u in group.users]


@@ -28,5 +28,7 @@ class GetProcessInfo(Script):
         """Run."""
         return {
             "process_instance_id": script_attributes_context.process_instance_id,
-            "process_model_identifier": script_attributes_context.process_model_identifier,
+            "process_model_identifier": (
+                script_attributes_context.process_model_identifier
+            ),
         }


@@ -98,8 +98,9 @@ class Script:
                 ).first()
                 if process_instance is None:
                     raise ProcessInstanceNotFoundError(
-                        f"Could not find a process instance with id '{script_attributes_context.process_instance_id}' "
-                        f"when running script '{script_function_name}'"
+                        "Could not find a process instance with id"
+                        f" '{script_attributes_context.process_instance_id}' when"
+                        f" running script '{script_function_name}'"
                     )
                 user = process_instance.process_initiator
                 has_permission = AuthorizationService.user_has_permission(
@@ -107,7 +108,8 @@ class Script:
                 )
                 if not has_permission:
                     raise ScriptUnauthorizedForUserError(
-                        f"User {user.username} does not have access to run privileged script '{script_function_name}'"
+                        f"User {user.username} does not have access to run"
+                        f" privileged script '{script_function_name}'"
                     )
             def run_script_if_allowed(*ar: Any, **kw: Any) -> Any:
@@ -149,7 +151,7 @@ class Script:
         """_get_all_subclasses."""
         # hackish mess to make sure we have all the modules loaded for the scripts
         pkg_dir = os.path.dirname(__file__)
-        for (_module_loader, name, _ispkg) in pkgutil.iter_modules([pkg_dir]):
+        for _module_loader, name, _ispkg in pkgutil.iter_modules([pkg_dir]):
             importlib.import_module("." + name, __package__)
         """Returns a list of all classes that extend this class."""


@@ -29,7 +29,6 @@ def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:
     # suspended - 6 hours ago
     process_instances = []
     for i in range(len(statuses)):
-
         process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
             test_process_model_id, user
         )


@@ -128,7 +128,8 @@ class AuthorizationService:
                 # to check for exact matches as well
                 # see test_user_can_access_base_path_when_given_wildcard_permission unit test
                 text(
-                    f"'{target_uri_normalized}' = replace(replace(permission_target.uri, '/%', ''), ':%', '')"
+                    f"'{target_uri_normalized}' ="
+                    " replace(replace(permission_target.uri, '/%', ''), ':%', '')"
                 ),
             )
         )
@@ -200,7 +201,8 @@ class AuthorizationService:
         if current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"] is None:
             raise (
                 PermissionsFileNotSetError(
-                    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in order to import permissions"
+                    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in"
+                    " order to import permissions"
                 )
             )
@@ -280,9 +282,9 @@ class AuthorizationService:
         """Find_or_create_permission_target."""
         uri_with_percent = re.sub(r"\*", "%", uri)
         target_uri_normalized = uri_with_percent.removeprefix(V1_API_PATH_PREFIX)
-        permission_target: Optional[
-            PermissionTargetModel
-        ] = PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first()
+        permission_target: Optional[PermissionTargetModel] = (
+            PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first()
+        )
         if permission_target is None:
             permission_target = PermissionTargetModel(uri=target_uri_normalized)
             db.session.add(permission_target)
@@ -297,13 +299,13 @@ class AuthorizationService:
         permission: str,
     ) -> PermissionAssignmentModel:
         """Create_permission_for_principal."""
-        permission_assignment: Optional[
-            PermissionAssignmentModel
-        ] = PermissionAssignmentModel.query.filter_by(
-            principal_id=principal.id,
-            permission_target_id=permission_target.id,
-            permission=permission,
-        ).first()
+        permission_assignment: Optional[PermissionAssignmentModel] = (
+            PermissionAssignmentModel.query.filter_by(
+                principal_id=principal.id,
+                permission_target_id=permission_target.id,
+                permission=permission,
+            ).first()
+        )
         if permission_assignment is None:
             permission_assignment = PermissionAssignmentModel(
                 principal_id=principal.id,
@@ -403,7 +405,10 @@ class AuthorizationService:
             raise ApiError(
                 error_code="unauthorized",
-                message=f"User {g.user.username} is not authorized to perform requested action: {permission_string} - {request.path}",
+                message=(
+                    f"User {g.user.username} is not authorized to perform requested action:"
+                    f" {permission_string} - {request.path}"
+                ),
                 status_code=403,
             )
@@ -482,7 +487,10 @@ class AuthorizationService:
         except jwt.InvalidTokenError as exception:
             raise ApiError(
                 "token_invalid",
-                "The Authentication token you provided is invalid. You need a new token. ",
+                (
+                    "The Authentication token you provided is invalid. You need a new"
+                    " token. "
+                ),
             ) from exception
     @staticmethod
@@ -504,8 +512,9 @@ class AuthorizationService:
         if user not in human_task.potential_owners:
             raise UserDoesNotHaveAccessToTaskError(
-                f"User {user.username} does not have access to update task'{spiff_task.task_spec.name}'"
-                f" for process instance '{process_instance_id}'"
+                f"User {user.username} does not have access to update"
+                f" task'{spiff_task.task_spec.name}' for process instance"
+                f" '{process_instance_id}'"
             )
         return True
@@ -723,8 +732,9 @@ class AuthorizationService:
             )
         else:
             raise InvalidPermissionError(
-                f"Target uri '{target}' with permission set '{permission_set}' is invalid. "
-                f"The target uri must either be a macro of PG, PM, BASIC, or ALL or an api uri."
+                f"Target uri '{target}' with permission set '{permission_set}' is"
+                " invalid. The target uri must either be a macro of PG, PM, BASIC, or"
+                " ALL or an api uri."
             )
         return permissions_to_assign


@@ -173,13 +173,15 @@ class GitService:
         if "repository" not in webhook or "clone_url" not in webhook["repository"]:
             raise InvalidGitWebhookBodyError(
-                f"Cannot find required keys of 'repository:clone_url' from webhook body: {webhook}"
+                "Cannot find required keys of 'repository:clone_url' from webhook"
+                f" body: {webhook}"
             )
         clone_url = webhook["repository"]["clone_url"]
         if clone_url != current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"]:
             raise GitCloneUrlMismatchError(
-                f"Configured clone url does not match clone url from webhook: {clone_url}"
+                "Configured clone url does not match clone url from webhook:"
+                f" {clone_url}"
             )
         if "ref" not in webhook:
@@ -189,8 +191,8 @@ class GitService:
         if current_app.config["GIT_BRANCH"] is None:
             raise MissingGitConfigsError(
-                "Missing config for GIT_BRANCH. "
-                "This is required for updating the repository as a result of the webhook"
+                "Missing config for GIT_BRANCH. This is required for updating the"
+                " repository as a result of the webhook"
             )
         ref = webhook["ref"]


@@ -122,7 +122,8 @@ def setup_logger(app: Flask) -> None:
     if upper_log_level_string not in log_levels:
         raise InvalidLogLevelError(
-            f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}"
+            f"Log level given is invalid: '{upper_log_level_string}'. Valid options are"
+            f" {log_levels}"
        )
     log_level = getattr(logging, upper_log_level_string)
@@ -176,7 +177,8 @@ def setup_logger(app: Flask) -> None:
         spiff_logger = logging.getLogger("spiff")
         spiff_logger.setLevel(spiff_log_level)
         spiff_formatter = logging.Formatter(
-            "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s | %(process)s | %(processName)s | %(process_instance_id)s"
+            "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s |"
+            " %(process)s | %(processName)s | %(process_instance_id)s"
         )
         # if you add a handler to spiff, it will be used/inherited by spiff.metrics


@@ -145,8 +145,11 @@ class MessageService:
         if process_instance_receive is None:
             raise MessageServiceError(
                 (
-                    f"Process instance cannot be found for queued message: {message_instance_receive.id}."
-                    f"Tried with id {message_instance_receive.process_instance_id}",
+                    (
+                        "Process instance cannot be found for queued message:"
+                        f" {message_instance_receive.id}.Tried with id"
+                        f" {message_instance_receive.process_instance_id}"
+                    ),
                 )
             )
@@ -182,7 +185,6 @@ class MessageService:
         )
         for message_instance_receive in message_instances_receive:
-
             # sqlalchemy supports select / where statements like active record apparantly
             # https://docs.sqlalchemy.org/en/14/core/tutorial.html#conjunctions
             message_correlation_select = (


@@ -215,14 +215,14 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
         except Exception as exception:
             if task is None:
                 raise ProcessInstanceProcessorError(
-                    "Error evaluating expression: "
-                    "'%s', exception: %s" % (expression, str(exception)),
+                    "Error evaluating expression: '%s', exception: %s"
+                    % (expression, str(exception)),
                 ) from exception
             else:
                 raise WorkflowTaskExecException(
                     task,
-                    "Error evaluating expression "
-                    "'%s', %s" % (expression, str(exception)),
+                    "Error evaluating expression '%s', %s"
+                    % (expression, str(exception)),
                 ) from exception
     def execute(
@@ -300,9 +300,7 @@ class ProcessInstanceProcessor:
         tld.spiff_step = process_instance_model.spiff_step
         # we want this to be the fully qualified path to the process model including all group subcomponents
-        current_app.config[
-            "THREAD_LOCAL_DATA"
-        ].process_model_identifier = (
+        current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
             f"{process_instance_model.process_model_identifier}"
         )
@@ -383,8 +381,10 @@ class ProcessInstanceProcessor:
         except MissingSpecError as ke:
             raise ApiError(
                 error_code="unexpected_process_instance_structure",
-                message="Failed to deserialize process_instance"
-                " '%s' due to a mis-placed or missing task '%s'"
+                message=(
+                    "Failed to deserialize process_instance"
+                    " '%s' due to a mis-placed or missing task '%s'"
+                )
                 % (self.process_model_identifier, str(ke)),
             ) from ke
@@ -400,7 +400,10 @@ class ProcessInstanceProcessor:
             raise (
                 ApiError(
                     "process_model_not_found",
-                    f"The given process model was not found: {process_model_identifier}.",
+                    (
+                        "The given process model was not found:"
+                        f" {process_model_identifier}."
+                    ),
                 )
            )
         spec_files = SpecFileService.get_files(process_model_info)
@@ -530,8 +533,11 @@ class ProcessInstanceProcessor:
                     potential_owner_ids.append(lane_owner_user.id)
                 self.raise_if_no_potential_owners(
                     potential_owner_ids,
-                    f"No users found in task data lane owner list for lane: {task_lane}. "
-                    f"The user list used: {task.data['lane_owners'][task_lane]}",
+                    (
+                        "No users found in task data lane owner list for lane:"
+                        f" {task_lane}. The user list used:"
+                        f" {task.data['lane_owners'][task_lane]}"
+                    ),
                 )
             else:
                 group_model = GroupModel.query.filter_by(identifier=task_lane).first()
@@ -722,7 +728,8 @@ class ProcessInstanceProcessor:
         if payload is not None:
             event_definition.payload = payload
         current_app.logger.info(
-            f"Event of type {event_definition.event_type} sent to process instance {self.process_instance_model.id}"
+            f"Event of type {event_definition.event_type} sent to process instance"
+            f" {self.process_instance_model.id}"
         )
         self.bpmn_process_instance.catch(event_definition)
         self.do_engine_steps(save=True)
@@ -732,12 +739,14 @@ class ProcessInstanceProcessor:
         spiff_task = self.bpmn_process_instance.get_task(UUID(task_id))
         if execute:
             current_app.logger.info(
-                f"Manually executing Task {spiff_task.task_spec.name} of process instance {self.process_instance_model.id}"
+                f"Manually executing Task {spiff_task.task_spec.name} of process"
+                f" instance {self.process_instance_model.id}"
             )
             spiff_task.complete()
         else:
             current_app.logger.info(
-                f"Skipping Task {spiff_task.task_spec.name} of process instance {self.process_instance_model.id}"
+                f"Skipping Task {spiff_task.task_spec.name} of process instance"
+                f" {self.process_instance_model.id}"
            )
             spiff_task._set_state(TaskState.COMPLETED)
             for child in spiff_task.children:
@@ -781,7 +790,8 @@ class ProcessInstanceProcessor:
         """Bpmn_file_full_path_from_bpmn_process_identifier."""
         if bpmn_process_identifier is None:
             raise ValueError(
-                "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None"
+                "bpmn_file_full_path_from_bpmn_process_identifier:"
+                " bpmn_process_identifier is unexpectedly None"
             )
         spec_reference = SpecReferenceCache.query.filter_by(
@@ -803,7 +813,10 @@ class ProcessInstanceProcessor:
             raise (
                 ApiError(
                     error_code="could_not_find_bpmn_process_identifier",
-                    message="Could not find the the given bpmn process identifier from any sources: %s"
+                    message=(
+                        "Could not find the the given bpmn process identifier from any"
+                        " sources: %s"
+                    )
                     % bpmn_process_identifier,
                )
            )
@@ -827,7 +840,6 @@ class ProcessInstanceProcessor:
         new_bpmn_files = set()
         for bpmn_process_identifier in processor_dependencies_new:
-
             # ignore identifiers that spiff already knows about
             if bpmn_process_identifier in bpmn_process_identifiers_in_parser:
                 continue
@@ -870,7 +882,10 @@ class ProcessInstanceProcessor:
             raise (
                 ApiError(
                     error_code="no_primary_bpmn_error",
-                    message="There is no primary BPMN process id defined for process_model %s"
+                    message=(
+                        "There is no primary BPMN process id defined for"
+                        " process_model %s"
+                    )
                     % process_model_info.id,
                )
            )
@@ -931,7 +946,10 @@ class ProcessInstanceProcessor:
         if not bpmn_message.correlations:
             raise ApiError(
                 "message_correlations_missing",
-                f"Could not find any message correlations bpmn_message: {bpmn_message.name}",
+                (
+                    "Could not find any message correlations bpmn_message:"
+                    f" {bpmn_message.name}"
+                ),
             )
         message_correlations = []
@@ -951,12 +969,16 @@ class ProcessInstanceProcessor:
             if message_correlation_property is None:
                 raise ApiError(
                     "message_correlations_missing_from_process",
-                    "Could not find a known message correlation with identifier:"
-                    f"{message_correlation_property_identifier}",
+                    (
+                        "Could not find a known message correlation with"
+                        f" identifier:{message_correlation_property_identifier}"
+                    ),
                )
             message_correlations.append(
                 {
-                    "message_correlation_property": message_correlation_property,
+                    "message_correlation_property": (
+                        message_correlation_property
+                    ),
                     "name": message_correlation_key,
                     "value": message_correlation_property_value,
                 }
@@ -1013,7 +1035,10 @@ class ProcessInstanceProcessor:
         if message_model is None:
             raise ApiError(
                 "invalid_message_name",
-                f"Invalid message name: {waiting_task.task_spec.event_definition.name}.",
+                (
+                    "Invalid message name:"
+                    f" {waiting_task.task_spec.event_definition.name}."
+                ),
            )
         # Ensure we are only creating one message instance for each waiting message

@@ -283,9 +283,9 @@ class ProcessInstanceReportService:
             process_instance_dict = process_instance["ProcessInstanceModel"].serialized
             for metadata_column in metadata_columns:
                 if metadata_column["accessor"] not in process_instance_dict:
-                    process_instance_dict[
-                        metadata_column["accessor"]
-                    ] = process_instance[metadata_column["accessor"]]
+                    process_instance_dict[metadata_column["accessor"]] = (
+                        process_instance[metadata_column["accessor"]]
+                    )
             results.append(process_instance_dict)
         return results


@@ -85,7 +85,8 @@ class ProcessInstanceService:
                 db.session.add(process_instance)
                 db.session.commit()
                 error_message = (
-                    f"Error running waiting task for process_instance {process_instance.id}"
+                    "Error running waiting task for process_instance"
+                    f" {process_instance.id}"
                     + f"({process_instance.process_model_identifier}). {str(e)}"
                 )
                 current_app.logger.error(error_message)
@@ -178,7 +179,10 @@ class ProcessInstanceService:
                 else:
                     raise ApiError.from_task(
                         error_code="task_lane_user_error",
-                        message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it."
+                        message=(
+                            "Spiff Task %s lane user dict must have a key called"
+                            " 'value' with the user's uid in it."
+                        )
                         % spiff_task.task_spec.name,
                         task=spiff_task,
                     )


@@ -146,7 +146,10 @@ class ProcessModelService(FileSystemService):
         if len(instances) > 0:
             raise ApiError(
                 error_code="existing_instances",
-                message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.",
+                message=(
+                    f"We cannot delete the model `{process_model_id}`, there are"
+                    " existing instances that depend on it."
+                ),
             )
         process_model = self.get_process_model(process_model_id)
         path = self.workflow_path(process_model)
@@ -339,8 +342,11 @@ class ProcessModelService(FileSystemService):
         if len(problem_models) > 0:
             raise ApiError(
                 error_code="existing_instances",
-                message=f"We cannot delete the group `{process_group_id}`, "
-                f"there are models with existing instances inside the group. {problem_models}",
+                message=(
+                    f"We cannot delete the group `{process_group_id}`, there are"
+                    " models with existing instances inside the group."
+                    f" {problem_models}"
+                ),
             )
         shutil.rmtree(path)
         self.cleanup_process_group_display_order()
@@ -392,7 +398,10 @@ class ProcessModelService(FileSystemService):
             if process_group is None:
                 raise ApiError(
                     error_code="process_group_could_not_be_loaded_from_disk",
-                    message=f"We could not load the process_group from disk from: {dir_path}",
+                    message=(
+                        "We could not load the process_group from disk from:"
+                        f" {dir_path}"
+                    ),
                 )
         else:
             process_group_id = dir_path.replace(FileSystemService.root_path(), "")
@@ -457,7 +466,10 @@ class ProcessModelService(FileSystemService):
             if process_model_info is None:
                 raise ApiError(
                     error_code="process_model_could_not_be_loaded_from_disk",
-                    message=f"We could not load the process_model from disk with data: {data}",
+                    message=(
+                        "We could not load the process_model from disk with data:"
+                        f" {data}"
+                    ),
                 )
         else:
             if name is None:


@@ -112,7 +112,10 @@ class ScriptUnitTestRunner:
         except json.decoder.JSONDecodeError as ex:
             return ScriptUnitTestResult(
                 result=False,
-                error=f"Failed to parse expectedOutputJson: {unit_test['expectedOutputJson']}: {str(ex)}",
+                error=(
+                    "Failed to parse expectedOutputJson:"
+                    f" {unit_test['expectedOutputJson']}: {str(ex)}"
+                ),
             )
         script = task.task_spec.script


@@ -44,8 +44,10 @@ class SecretService:
         except Exception as e:
             raise ApiError(
                 error_code="create_secret_error",
-                message=f"There was an error creating a secret with key: {key} and value ending with: {value[:-4]}. "
-                f"Original error is {e}",
+                message=(
+                    f"There was an error creating a secret with key: {key} and value"
+                    f" ending with: {value[:-4]}. Original error is {e}"
+                ),
             ) from e
         return secret_model
@@ -89,7 +91,9 @@ class SecretService:
         else:
             raise ApiError(
                 error_code="update_secret_error",
-                message=f"Cannot update secret with key: {key}. Resource does not exist.",
+                message=(
+                    f"Cannot update secret with key: {key}. Resource does not exist."
+                ),
                 status_code=404,
             )
@@ -104,11 +108,16 @@ class SecretService:
             except Exception as e:
                 raise ApiError(
                     error_code="delete_secret_error",
-                    message=f"Could not delete secret with key: {key}. Original error is: {e}",
+                    message=(
+                        f"Could not delete secret with key: {key}. Original error"
+                        f" is: {e}"
+                    ),
                 ) from e
         else:
             raise ApiError(
                 error_code="delete_secret_error",
-                message=f"Cannot delete secret with key: {key}. Resource does not exist.",
+                message=(
+                    f"Cannot delete secret with key: {key}. Resource does not exist."
+                ),
                 status_code=404,
             )


@@ -192,7 +192,8 @@ class SpecFileService(FileSystemService):
         full_file_path = SpecFileService.full_file_path(process_model_info, file_name)
         if not os.path.exists(full_file_path):
             raise ProcessModelFileNotFoundError(
-                f"No file found with name {file_name} in {process_model_info.display_name}"
+                f"No file found with name {file_name} in"
+                f" {process_model_info.display_name}"
             )
         with open(full_file_path, "rb") as f_handle:
             spec_file_data = f_handle.read()
@@ -314,8 +315,9 @@ class SpecFileService(FileSystemService):
             ).first()
             if message_model is None:
                 raise ValidationException(
-                    f"Could not find message model with identifier '{message_model_identifier}'"
-                    f"Required by a Start Event in : {ref.file_name}"
+                    "Could not find message model with identifier"
+                    f" '{message_model_identifier}'Required by a Start Event in :"
+                    f" {ref.file_name}"
                 )
             message_triggerable_process_model = (
                 MessageTriggerableProcessModel.query.filter_by(
@@ -335,7 +337,8 @@ class SpecFileService(FileSystemService):
                 != ref.process_model_id
             ):
                 raise ValidationException(
-                    f"Message model is already used to start process model {ref.process_model_id}"
+                    "Message model is already used to start process model"
+                    f" {ref.process_model_id}"
                )
     @staticmethod
@@ -353,8 +356,9 @@ class SpecFileService(FileSystemService):
             ).first()
             if message_model is None:
                 raise ValidationException(
-                    f"Could not find message model with identifier '{message_model_identifier}'"
-                    f"specified by correlation property: {cpre}"
+                    "Could not find message model with identifier"
+                    f" '{message_model_identifier}'specified by correlation"
+                    f" property: {cpre}"
                 )
             # fixme: I think we are currently ignoring the correction properties.
             message_correlation_property = (


@@ -133,7 +133,6 @@ class BaseTest:
     ) -> TestResponse:
         """Create_process_model."""
         if process_model_id is not None:
-
             # make sure we have a group
             process_group_id, _ = os.path.split(process_model_id)
             modified_process_group_id = process_group_id.replace("/", ":")
@@ -141,7 +140,6 @@ class BaseTest:
                 os.path.join(FileSystemService.root_path(), process_group_id)
             )
             if ProcessModelService.is_group(process_group_path):
-
                 if exception_notification_addresses is None:
                     exception_notification_addresses = []
@@ -171,7 +169,8 @@ class BaseTest:
                 raise Exception("You must create the group first")
         else:
             raise Exception(
-                "You must include the process_model_id, which must be a path to the model"
+                "You must include the process_model_id, which must be a path to the"
+                " model"
             )
     def get_test_data_file_contents(
def get_test_data_file_contents( def get_test_data_file_contents(


@@ -343,7 +343,8 @@ class TestProcessApi(BaseTest):
         assert data["error_code"] == "existing_instances"
         assert (
             data["message"]
-            == f"We cannot delete the model `{process_model_identifier}`, there are existing instances that depend on it."
+            == f"We cannot delete the model `{process_model_identifier}`, there are"
+            " existing instances that depend on it."
         )
     def test_process_model_update(
@@ -2058,7 +2059,6 @@ class TestProcessApi(BaseTest):
         mail = app.config["MAIL_APP"]
         with mail.record_messages() as outbox:
-
             response = client.post(
                 f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
                 headers=self.logged_in_headers(with_super_admin_user),
@@ -2961,7 +2961,9 @@ class TestProcessApi(BaseTest):
     ) -> None:
         """Test_can_get_process_instance_list_with_report_metadata."""
         process_model = load_test_spec(
-            process_model_id="save_process_instance_metadata/save_process_instance_metadata",
+            process_model_id=(
+                "save_process_instance_metadata/save_process_instance_metadata"
+            ),
             bpmn_file_name="save_process_instance_metadata.bpmn",
             process_model_source_directory="save_process_instance_metadata",
         )
@@ -3018,7 +3020,9 @@ class TestProcessApi(BaseTest):
     ) -> None:
         """Test_can_get_process_instance_list_with_report_metadata."""
         process_model = load_test_spec(
-            process_model_id="save_process_instance_metadata/save_process_instance_metadata",
+            process_model_id=(
+                "save_process_instance_metadata/save_process_instance_metadata"
+            ),
             bpmn_file_name="save_process_instance_metadata.bpmn",
             process_model_source_directory="save_process_instance_metadata",
         )


@@ -28,7 +28,9 @@ class TestSaveProcessInstanceMetadata(BaseTest):
             client, with_super_admin_user, "test_group", "test_group"
         )
         process_model = load_test_spec(
-            process_model_id="save_process_instance_metadata/save_process_instance_metadata",
+            process_model_id=(
+                "save_process_instance_metadata/save_process_instance_metadata"
+            ),
             bpmn_file_name="save_process_instance_metadata.bpmn",
             process_model_source_directory="save_process_instance_metadata",
         )


@@ -16,6 +16,7 @@ from spiffworkflow_backend.services.user_service import UserService
 # we think we can get the list of roles for a user.
 # spiff needs a way to determine what each role allows.
 # user role allows list and read of all process groups/models
+# super-admin role allows create, update, and delete of all process groups/models
 # * super-admins users maybe conventionally get the user role as well


@@ -52,7 +52,8 @@ class TestProcessInstanceProcessor(BaseTest):
         result = script_engine._evaluate("fact_service(type='norris')", {})
         assert (
             result
-            == "Chuck Norris doesnt read books. He stares them down until he gets the information he wants."
+            == "Chuck Norris doesnt read books. He stares them down until he gets the"
+            " information he wants."
         )
         app.config["THREAD_LOCAL_DATA"].process_model_identifier = None
         app.config["THREAD_LOCAL_DATA"].process_instance_id = None


@@ -880,7 +880,9 @@ class TestProcessInstanceReportService(BaseTest):
         process_instance_report = ProcessInstanceReportService.report_with_identifier(
             user=user_one,
-            report_identifier="system_report_completed_instances_with_tasks_completed_by_me",
+            report_identifier=(
+                "system_report_completed_instances_with_tasks_completed_by_me"
+            ),
         )
         report_filter = (
             ProcessInstanceReportService.filter_from_metadata_with_overrides(
@@ -983,7 +985,9 @@ class TestProcessInstanceReportService(BaseTest):
         process_instance_report = ProcessInstanceReportService.report_with_identifier(
             user=user_one,
-            report_identifier="system_report_completed_instances_with_tasks_completed_by_my_groups",
+            report_identifier=(
+                "system_report_completed_instances_with_tasks_completed_by_my_groups"
+            ),
         )
         report_filter = (
             ProcessInstanceReportService.filter_from_metadata_with_overrides(