get compatible with default ruff ruleset
parent 1a13f70f74
commit cf40546a09
@@ -170,6 +170,12 @@ implicit_reexport = true
 namespace_packages = true
 explicit_package_bases = false
 
+[tool.ruff]
+line-length = 130
+
+[tool.ruff.per-file-ignores]
+"migrations/versions/*.py" = ["E501"]
+
 [build-system]
 requires = ["poetry-core>=1.0.0"]
 build-backend = "poetry.core.masonry.api"
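To sanity-check the new settings, a minimal sketch (not part of the commit) that prints the values ruff will read from pyproject.toml, using only the standard library:

    import tomllib  # standard-library TOML parser, Python 3.11+

    with open("pyproject.toml", "rb") as f:
        config = tomllib.load(f)

    ruff_config = config["tool"]["ruff"]
    print(ruff_config["line-length"])       # 130
    print(ruff_config["per-file-ignores"])  # {'migrations/versions/*.py': ['E501']}

The per-file-ignores entry keeps E501 (line too long) from firing on Alembic-generated migration files, which routinely exceed any configured line length.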
@@ -7,6 +7,8 @@ some models need to be loaded before others for relationships and to
 avoid circular imports
 """
 
+# unused imports are needed for SQLAlchemy to load the models
+# ruff: noqa: F401
 
 from spiffworkflow_backend.models.db import add_listeners
 
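The directive added above is file-level, which differs from the per-line form. A minimal sketch of the contrast, using stdlib modules so the file actually imports (the real commit suppresses F401 for SQLAlchemy model imports instead):

    # ruff: noqa: F401  -- silences "unused import" for the entire file
    import json  # unused on purpose; covered by the file-level directive above
    import os    # also covered; no per-line marker needed

    # without the file-level directive, every import would need its own marker:
    # import json  # noqa: F401

A file-level exemption suits an __init__.py whose imports exist purely for their side effect of registering models with SQLAlchemy.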
@@ -31,7 +31,7 @@ class GetEncodedFileData(Script):
     ) -> Any:
         """Run."""
         # example input:
-        # "data:some/mimetype;name=testing.txt;base64,spifffiledatadigest+7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf"
+        # "data:some/mimetype;name=testing.txt;base64,spifffiledatadigest+7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf" # noqa: B950,E501
         digest_reference = args[0]
         digest = digest_reference[-64:]
         process_instance_id = script_attributes_context.process_instance_id
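The slice works because the reference ends with a hex-encoded SHA-256 digest, which is always 64 characters. A minimal sketch using the example input quoted in the comment above:

    digest_reference = (
        "data:some/mimetype;name=testing.txt;base64,"
        "spifffiledatadigest+7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf"
    )
    digest = digest_reference[-64:]
    assert len(digest) == 64  # length of a hex-encoded SHA-256 digest
    assert digest == "7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf"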
@@ -30,7 +30,7 @@ class GetMarkdownFileDownloadLink(Script):
     ) -> Any:
         """Run."""
         # example input:
-        # "data:some/mimetype;name=testing.txt;base64,spifffiledatadigest+7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf"
+        # "data:some/mimetype;name=testing.txt;base64,spifffiledatadigest+7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf" # noqa: B950,E501
         digest_reference = args[0]
         parts = digest_reference.split(";")
         digest = parts[2].split(",")[1][-64:]
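This script parses the same reference shape field by field instead of slicing from the end. A minimal sketch of the intermediate values, again using the example input from the comment:

    digest_reference = (
        "data:some/mimetype;name=testing.txt;base64,"
        "spifffiledatadigest+7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf"
    )
    parts = digest_reference.split(";")
    # parts[0] == "data:some/mimetype"
    # parts[1] == "name=testing.txt"
    # parts[2] == "base64,spifffiledatadigest+7a2051ff..." (payload after the metadata)
    digest = parts[2].split(",")[1][-64:]
    assert digest == "7a2051ffefd1eaf475dbef9fda019cb3d4a10eb8aea4c2c2a84a50a797a541bf"

Both approaches recover the same digest; the split-based form also exposes the name= field if the caller needs it.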
@@ -18,7 +18,7 @@ from flask.app import Flask
 # %(message)s—The log message.
 
 # full message list:
-# {'name': 'gunicorn.error', 'msg': 'GET /admin/token', 'args': (), 'levelname': 'DEBUG', 'levelno': 10, 'pathname': '~/.cache/pypoetry/virtualenvs/spiffworkflow-backend-R_hdWfN1-py3.10/lib/python3.10/site-packages/gunicorn/glogging.py', 'filename': 'glogging.py', 'module': 'glogging', 'exc_info': None, 'exc_text': None, 'stack_info': None, 'lineno': 267, 'funcName': 'debug', 'created': 1657307111.4513023, 'msecs': 451.30228996276855, 'relativeCreated': 1730.785846710205, 'thread': 139945864087360, 'threadName': 'MainThread', 'processName': 'MainProcess', 'process': 2109561, 'message': 'GET /admin/token', 'asctime': '2022-07-08T15:05:11.451Z'}
+# {'name': 'gunicorn.error', 'msg': 'GET /admin/token', 'args': (), 'levelname': 'DEBUG', 'levelno': 10, 'pathname': '~/.cache/pypoetry/virtualenvs/spiffworkflow-backend-R_hdWfN1-py3.10/lib/python3.10/site-packages/gunicorn/glogging.py', 'filename': 'glogging.py', 'module': 'glogging', 'exc_info': None, 'exc_text': None, 'stack_info': None, 'lineno': 267, 'funcName': 'debug', 'created': 1657307111.4513023, 'msecs': 451.30228996276855, 'relativeCreated': 1730.785846710205, 'thread': 139945864087360, 'threadName': 'MainThread', 'processName': 'MainProcess', 'process': 2109561, 'message': 'GET /admin/token', 'asctime': '2022-07-08T15:05:11.451Z'} # noqa: E501
 
 
 class InvalidLogLevelError(Exception):
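As a side note, the attributes documented in that comment are the standard logging.LogRecord fields and can be used directly in a format string. A minimal sketch (not from the commit):

    import logging

    logging.basicConfig(
        format="%(asctime)s %(levelname)s %(name)s: %(message)s",
        level=logging.DEBUG,
    )
    logging.getLogger("gunicorn.error").debug("GET /admin/token")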
@@ -1689,8 +1689,10 @@ class ProcessInstanceProcessor:
         )
         task_service.update_all_tasks_from_spiff_tasks(spiff_tasks, deleted_tasks, start_time)
 
-        # we may want to move this to task_service.update_all_tasks_from_spiff_tasks but not sure it's always good to it.
-        # for cancelled tasks, spiff only returns tasks that were cancelled, not the ones that were deleted so we have to find them
+        # we may want to move this to task_service.update_all_tasks_from_spiff_tasks,
+        # but not sure it's always good to do it.
+        # for cancelled tasks, spiff only returns tasks that were cancelled,
+        # not the ones that were deleted, so we have to find them
         spiff_task_guids = [str(st.id) for st in spiff_tasks]
         tasks_no_longer_in_spiff = TaskModel.query.filter(
             and_(
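The hunk cuts off before the filter conditions, but the idea is a set difference: collect the guids spiff still reports, then find persisted tasks outside that set. A minimal, self-contained sketch with plain sets standing in for the SQLAlchemy query (the guids are made up):

    spiff_task_guids = {"guid-1", "guid-2"}            # tasks spiff still reports
    task_guids_in_db = {"guid-1", "guid-2", "guid-3"}  # tasks persisted earlier
    tasks_no_longer_in_spiff = task_guids_in_db - spiff_task_guids
    assert tasks_no_longer_in_spiff == {"guid-3"}      # deleted on the spiff side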
@@ -18,7 +18,7 @@ class TestProcessInstanceService(BaseTest):
     """TestProcessInstanceService."""
 
     SAMPLE_FILE_DATA = "data:some/mimetype;name=testing.txt;base64,dGVzdGluZwo="
-    SAMPLE_DIGEST_REFERENCE = f"data:some/mimetype;name=testing.txt;base64,{ProcessInstanceService.FILE_DATA_DIGEST_PREFIX}12a61f4e173fb3a11c05d6471f74728f76231b4a5fcd9667cef3af87a3ae4dc2" # noqa: B950
+    SAMPLE_DIGEST_REFERENCE = f"data:some/mimetype;name=testing.txt;base64,{ProcessInstanceService.FILE_DATA_DIGEST_PREFIX}12a61f4e173fb3a11c05d6471f74728f76231b4a5fcd9667cef3af87a3ae4dc2" # noqa: B950,E501
 
     def _check_sample_file_data_model(
         self,
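The fixture's digest presumably comes from hashing the decoded base64 payload of SAMPLE_FILE_DATA. A minimal sketch of that assumption (not code from the commit):

    import base64
    import hashlib

    content = base64.b64decode("dGVzdGluZwo=")  # b"testing\n", from SAMPLE_FILE_DATA
    digest = hashlib.sha256(content).hexdigest()
    print(digest)  # expected to match the 12a61f4e... value in SAMPLE_DIGEST_REFERENCE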