avoid sending two errors to sentry w/ burnettk

parent 87d6b99644
commit d063d928ac

@@ -0,0 +1 @@
+"""__init.py__"""

@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+function error_handler() {
+  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+  exit "$2"
+}
+trap 'error_handler ${LINENO} $?' ERR
+set -o errtrace -o errexit -o nounset -o pipefail
+
+# HELP: runs backend and frontend in tmux. REQUIRES running in a current TMUX session.
+
+script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
+
+# https://stackoverflow.com/a/39523222/6090676
+# The syntax for a specific pane is tmux send-keys -t {session}:{window}.{pane}, so tmux send-keys -t Test:Test1.1 "TEST" C-m would send that to the first pane.
+pane_uid=$(tmux split-window -t backend.2 -c "${script_dir}/../spiffworkflow-backend" -P -F '#{pane_id}')
+tmux send-keys -t "$pane_uid" "./bin/run_server_locally" Enter
+
+pane_uid=$(tmux split-window -t frontend.2 -c "${script_dir}/../spiffworkflow-frontend" -P -F '#{pane_id}')
+tmux send-keys -t "$pane_uid" "npm start" Enter

@@ -88,7 +88,6 @@ class ApiError(Exception):
         # Assure that there is nothing in the json data that can't be serialized.
         instance.task_data = ApiError.remove_unserializeable_from_dict(task.data)
 
-        current_app.logger.error(message, exc_info=True)
         return instance
 
     @staticmethod

@@ -125,7 +124,6 @@ class ApiError(Exception):
        instance.task_name = task_spec.description or ""
        if task_spec._wf_spec:
            instance.file_name = task_spec._wf_spec.file
-       current_app.logger.error(message, exc_info=True)
        return instance
 
    @classmethod

@@ -182,9 +180,9 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response:
             f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
         )
 
-    # !!!NOTE!!!: do this after sentry stuff since calling logger.exception
-    # seems to break the sentry sdk context where we no longer get back
-    # an event id or send out tags like username
+    # # !!!NOTE!!!: do this after sentry stuff since calling logger.exception
+    # # seems to break the sentry sdk context where we no longer get back
+    # # an event id or send out tags like username
     current_app.logger.exception(exception)
 
     # set api_exception like this to avoid confusing mypy

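For context on the NOTE above, here is a minimal standalone sketch of why the ordering matters (not code from this commit; the DSN is a placeholder): with sentry-sdk's logging integration enabled, which is the default for sentry_sdk.init, log records at ERROR level become Sentry events of their own, so capturing an exception and then logging it can report the same error twice.

import logging

import sentry_sdk
from sentry_sdk.integrations.logging import LoggingIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[
        # event_level=ERROR means logger.error()/logger.exception()
        # records are promoted to Sentry events themselves
        LoggingIntegration(level=logging.INFO, event_level=logging.ERROR)
    ],
)


def handle(exception: Exception) -> None:
    event_id = sentry_sdk.capture_exception(exception)  # first event
    # without care, this log record becomes a second event for the same error
    logging.getLogger(__name__).exception(exception)
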
@@ -9,6 +9,20 @@ set -o errtrace -o errexit -o nounset -o pipefail
 
 export FLASK_SESSION_SECRET_KEY="this_is_recreate_db_secret_key"
 
+if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
+  script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
+
+  BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
+  if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+    BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
+    if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+      >&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $BPMN_SPEC_ABSOLUTE_DIR"
+      exit 1
+    fi
+  fi
+  export BPMN_SPEC_ABSOLUTE_DIR
+fi
+
 tasks=""
 if [[ "${1:-}" == "clean" ]]; then
   subcommand="${2:-}"

@@ -19,7 +19,16 @@ fi
 
 if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
   script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
-  export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models"
+
+  BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
+  if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+    BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
+    if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+      >&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $BPMN_SPEC_ABSOLUTE_DIR"
+      exit 1
+    fi
+  fi
+  export BPMN_SPEC_ABSOLUTE_DIR
 fi
 
 export FLASK_SESSION_SECRET_KEY=super_secret_key

@@ -36,21 +36,9 @@ from spiffworkflow_backend import create_app # noqa: E402
 def app() -> Flask:
     """App."""
     os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "testing"
 
-    # os.environ["FLASK_SESSION_SECRET_KEY"] = "this_is_testing_secret_key"
-    os.environ["FLASK_SESSION_SECRET_KEY"] = "super_secret_key"
     app = create_app()
 
-    # NOTE: set this here since nox shoves tests and src code to
-    # different places and this allows us to know exactly where we are at the start
-    app.config["BPMN_SPEC_ABSOLUTE_DIR"] = os.path.join(
-        os.path.dirname(__file__),
-        "tests",
-        "spiffworkflow_backend",
-        "files",
-        "bpmn_specs",
-    )
-
     return app
 
 

@@ -95,7 +95,7 @@ python-versions = ">=3.5"
 dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
 docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
 tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
-tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
+tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
 
 [[package]]
 name = "Babel"

@@ -268,7 +268,7 @@ optional = false
 python-versions = ">=3.6.0"
 
 [package.extras]
-unicode_backport = ["unicodedata2"]
+unicode-backport = ["unicodedata2"]
 
 [[package]]
 name = "classify-imports"

@@ -639,7 +639,7 @@ werkzeug = "*"
 type = "git"
 url = "https://github.com/sartography/flask-bpmn"
 reference = "main"
-resolved_reference = "a8b90f2ca09ef1cbb24a491c36f1cc9437477325"
+resolved_reference = "a901d7ffb2b79abfec17c332bbe77c43c1d28705"
 
 [[package]]
 name = "Flask-Cors"

@@ -1512,7 +1512,7 @@ urllib3 = ">=1.21.1,<1.27"
 
 [package.extras]
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
 
 [[package]]
 name = "requests-toolbelt"

@@ -1625,7 +1625,7 @@ falcon = ["falcon (>=1.4)"]
 fastapi = ["fastapi (>=0.79.0)"]
 flask = ["blinker (>=1.1)", "flask (>=0.11)"]
 httpx = ["httpx (>=0.16.0)"]
-pure_eval = ["asttokens", "executing", "pure-eval"]
+pure-eval = ["asttokens", "executing", "pure-eval"]
 pyspark = ["pyspark (>=2.4.4)"]
 quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
 rq = ["rq (>=0.6)"]

@@ -1873,7 +1873,7 @@ pytz = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "12f81480a5f9e848e64221b9287c6cfa6cb682b4"
+resolved_reference = "2d3bd00854ab483e823c4b386430abc9267f536b"
 
 [[package]]
 name = "SQLAlchemy"

@@ -1891,19 +1891,19 @@ aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
 aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
 asyncio = ["greenlet (!=0.4.17)"]
 asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
-mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
 mssql = ["pyodbc"]
-mssql_pymssql = ["pymssql"]
-mssql_pyodbc = ["pyodbc"]
+mssql-pymssql = ["pymssql"]
+mssql-pyodbc = ["pyodbc"]
 mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
 mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
-mysql_connector = ["mysql-connector-python"]
+mysql-connector = ["mysql-connector-python"]
 oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
 postgresql = ["psycopg2 (>=2.7)"]
-postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
-postgresql_psycopg2binary = ["psycopg2-binary"]
-postgresql_psycopg2cffi = ["psycopg2cffi"]
+postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
+postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
+postgresql-psycopg2binary = ["psycopg2-binary"]
+postgresql-psycopg2cffi = ["psycopg2cffi"]
 pymysql = ["pymysql", "pymysql (<1)"]
 sqlcipher = ["sqlcipher3_binary"]
 

@@ -3051,18 +3051,7 @@ py = [
     {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
 ]
 pyasn1 = [
-    {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
-    {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
-    {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
-    {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
     {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
-    {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
-    {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
-    {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
-    {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
-    {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
-    {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
-    {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
     {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
 ]
 pycodestyle = [

@@ -27,15 +27,11 @@ flask-marshmallow = "*"
 flask-migrate = "*"
 flask-restful = "*"
 werkzeug = "*"
-# go back to main once https://github.com/sartography/SpiffWorkflow/pull/241 is merged
 SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
-# SpiffWorkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"}
-# SpiffWorkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"}
 sentry-sdk = "^1.10"
 sphinx-autoapi = "^2.0"
-# flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"}
-# flask-bpmn = {develop = true, path = "/Users/kevin/projects/github/sartography/flask-bpmn"}
 flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
+# flask-bpmn = {develop = true, path = "../flask-bpmn"}
 mysql-connector-python = "^8.0.29"
 pytest-flask = "^1.2.0"
 pytest-flask-sqlalchemy = "^1.1.0"

@@ -148,6 +148,7 @@ def configure_sentry(app: flask.app.Flask) -> None:
     import sentry_sdk
     from sentry_sdk.integrations.flask import FlaskIntegration
 
+    # get rid of NotFound errors
     def before_send(event: Any, hint: Any) -> Any:
         """Before_send."""
         if "exc_info" in hint:

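The hunk above is truncated at the exc_info check. A complete filter along these lines might look like the following sketch (assumptions: NotFound here is werkzeug's 404 exception, and returning None from before_send drops the event; this is not necessarily the commit's exact body):

from typing import Any

from werkzeug.exceptions import NotFound


def before_send(event: Any, hint: Any) -> Any:
    """Drop NotFound events so plain 404s never reach Sentry."""
    if "exc_info" in hint:
        _exc_type, exc_value, _traceback = hint["exc_info"]
        if isinstance(exc_value, NotFound):
            return None  # discard the event entirely
    return event

# presumably wired up via sentry_sdk.init(..., before_send=before_send)
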
@@ -8,6 +8,10 @@ from werkzeug.utils import ImportStringError
 from spiffworkflow_backend.services.logging_service import setup_logger
 
 
+class ConfigurationError(Exception):
+    pass
+
+
 def setup_database_uri(app: Flask) -> None:
     """Setup_database_uri."""
     if os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:

@@ -85,5 +89,8 @@ def setup_config(app: Flask) -> None:
     # src/spiffworkflow_backend/config/secrets.py
     app.config.from_pyfile(os.path.join("config", "secrets.py"), silent=True)
 
+    if app.config["BPMN_SPEC_ABSOLUTE_DIR"] is None:
+        raise ConfigurationError("BPMN_SPEC_ABSOLUTE_DIR config must be set")
+
     thread_local_data = threading.local()
     app.config["THREAD_LOCAL_DATA"] = thread_local_data

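Together with the default.py change below (environ.get without a default yields None when BPMN_SPEC_ABSOLUTE_DIR is unset), the configuration now fails fast instead of carrying an empty string forward. A hypothetical standalone reproduction of the behavior, not code from the commit:

import os


class ConfigurationError(Exception):
    pass


# environ.get without a default returns None (not "") when the var is unset,
# so a missing setting is caught here rather than failing somewhere later
bpmn_spec_absolute_dir = os.environ.get("BPMN_SPEC_ABSOLUTE_DIR")
if bpmn_spec_absolute_dir is None:
    raise ConfigurationError("BPMN_SPEC_ABSOLUTE_DIR config must be set")
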
@@ -7,7 +7,7 @@ SELF_REGISTRATION = environ.get("SELF_REGISTRATION", default=False)
 
 DEVELOPMENT = False
 
-BPMN_SPEC_ABSOLUTE_DIR = environ.get("BPMN_SPEC_ABSOLUTE_DIR", default="")
+BPMN_SPEC_ABSOLUTE_DIR = environ.get("BPMN_SPEC_ABSOLUTE_DIR")
 CORS_DEFAULT = "*"
 CORS_ALLOW_ORIGINS = re.split(
     r",\s*", environ.get("CORS_ALLOW_ORIGINS", default=CORS_DEFAULT)

@@ -65,7 +65,7 @@ permissions:
     uri: /v1.0/process-models/finance/*
 
   read-all:
-    groups: [finance, admin, "Project Lead"]
+    groups: [admin, "Project Lead"]
     users: []
     allowed_permissions: [read]
     uri: /*

@@ -1,5 +1,8 @@
 """Testing.py."""
 from os import environ
+import os
+
+from spiffworkflow_backend.config.default import BPMN_SPEC_ABSOLUTE_DIR
 
 
 TESTING = True

@@ -15,3 +18,13 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
 SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
     "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
 )
+
+# NOTE: set this here since nox shoves tests and src code to
+# different places and this allows us to know exactly where we are at the start
+BPMN_SPEC_ABSOLUTE_DIR = os.path.join(
+    os.path.dirname(__file__),
+    "tests",
+    "spiffworkflow_backend",
+    "files",
+    "bpmn_specs",
+)

@@ -22,6 +22,7 @@ from spiffworkflow_backend.models.principal import PrincipalModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.models.user import UserNotFoundError
 from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
+from spiffworkflow_backend.services.group_service import GroupService
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )

@@ -138,25 +139,11 @@ class AuthorizationService:
         default_group = None
         if "default_group" in permission_configs:
             default_group_identifier = permission_configs["default_group"]
-            default_group = GroupModel.query.filter_by(
-                identifier=default_group_identifier
-            ).first()
-            if default_group is None:
-                default_group = GroupModel(identifier=default_group_identifier)
-                db.session.add(default_group)
-                db.session.commit()
-                UserService.create_principal(
-                    default_group.id, id_column_name="group_id"
-                )
+            default_group = GroupService.find_or_create_group(default_group_identifier)
 
         if "groups" in permission_configs:
             for group_identifier, group_config in permission_configs["groups"].items():
-                group = GroupModel.query.filter_by(identifier=group_identifier).first()
-                if group is None:
-                    group = GroupModel(identifier=group_identifier)
-                    db.session.add(group)
-                    db.session.commit()
-                    UserService.create_principal(group.id, id_column_name="group_id")
+                group = GroupService.find_or_create_group(group_identifier)
                 for username in group_config["users"]:
                     user = UserModel.query.filter_by(username=username).first()
                     if user is None:

@@ -186,13 +173,9 @@ class AuthorizationService:
             for allowed_permission in permission_config["allowed_permissions"]:
                 if "groups" in permission_config:
                     for group_identifier in permission_config["groups"]:
-                        principal = (
-                            PrincipalModel.query.join(GroupModel)
-                            .filter(GroupModel.identifier == group_identifier)
-                            .first()
-                        )
+                        group = GroupService.find_or_create_group(group_identifier)
                         cls.create_permission_for_principal(
-                            principal, permission_target, allowed_permission
+                            group.principal, permission_target, allowed_permission
                         )
                 if "users" in permission_config:
                     for username in permission_config["users"]:

@@ -24,26 +24,19 @@ class EmailService:
         """We will receive all data related to an email and send it."""
         mail = current_app.config["MAIL_APP"]
 
         # Send mail
-        try:
-            msg = Message(
-                subject,
-                sender=sender,
-                recipients=recipients,
-                body=content,
-                html=content_html,
-                cc=cc,
-                bcc=bcc,
-                reply_to=reply_to,
-            )
+        msg = Message(
+            subject,
+            sender=sender,
+            recipients=recipients,
+            body=content,
+            html=content_html,
+            cc=cc,
+            bcc=bcc,
+            reply_to=reply_to,
+        )
 
-            if attachment_files is not None:
-                for file in attachment_files:
-                    msg.attach(file["name"], file["type"], file["data"])
+        if attachment_files is not None:
+            for file in attachment_files:
+                msg.attach(file["name"], file["type"], file["data"])
 
-            mail.send(msg)
-
-        except Exception as e:
-            # app.logger.error('An exception happened in EmailService', exc_info=True)
-            # app.logger.error(str(e))
-            raise e
+        mail.send(msg)

@@ -0,0 +1,21 @@
+from spiffworkflow_backend.models.group import GroupModel
+from typing import Optional
+from flask_bpmn.models.db import db
+from spiffworkflow_backend.services.user_service import UserService
+
+
+class GroupService():
+
+    @classmethod
+    def find_or_create_group(cls, group_identifier: str) -> GroupModel:
+        group: Optional[GroupModel] = GroupModel.query.filter_by(
+            identifier=group_identifier
+        ).first()
+        if group is None:
+            group = GroupModel(identifier=group_identifier)
+            db.session.add(group)
+            db.session.commit()
+            UserService.create_principal(
+                group.id, id_column_name="group_id"
+            )
+        return group

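A hypothetical usage sketch of the new helper, mirroring the call sites updated in the authorization service hunks above (assumes a Flask app context with the database initialized):

# first call creates the group and its principal; later calls return the row
group = GroupService.find_or_create_group("finance")
same_group = GroupService.find_or_create_group("finance")
assert group.id == same_group.id
# the principal created alongside the group is what permissions attach to
print(group.principal)
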
@@ -186,8 +186,12 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
             methods.update(external_methods)
             super().execute(task, script, methods)
         except WorkflowException as e:
-            raise e
+            print("WORKFLOW")
+            print(f"e: {e}")
+            # raise e
+            raise Exception("NEW ERROR")
         except Exception as e:
+            print("EXCEPTIONWORK")
             raise WorkflowTaskExecException(task, f" {script}, {e}", e) from e
 
     def call_service(

@@ -167,8 +167,8 @@ class ProcessInstanceService:
                 f"task {spiff_task.task_spec.name}, it is not in the task event model, "
                 f"and it should be."
             )
-            current_app.logger.error(
-                "missing_form_data", missing_form_error, exc_info=True
+            current_app.logger.exception(
+                "missing_form_data", missing_form_error
             )
             return {}
         else: