avoid sending two errors to sentry w/ burnettk

jasquat 2022-10-27 15:33:59 -04:00
parent 87d6b99644
commit d063d928ac
18 changed files with 133 additions and 96 deletions

View File

@@ -0,0 +1 @@
+"""__init.py__"""

bin/run_servers_locally (Executable file, 20 additions)
View File

@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+function error_handler() {
+  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+  exit "$2"
+}
+trap 'error_handler ${LINENO} $?' ERR
+set -o errtrace -o errexit -o nounset -o pipefail
+
+# HELP: runs backend and frontend in tmux. REQUIRES running in a current TMUX session.
+
+script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
+
+# https://stackoverflow.com/a/39523222/6090676
+# The syntax for a specific pane is tmux send-keys -t {session}:{window}.{pane}, so tmux send-keys -t Test:Test1.1 "TEST" C-m would send that to the first pane.
+pane_uid=$(tmux split-window -t backend.2 -c "${script_dir}/../spiffworkflow-backend" -P -F '#{pane_id}')
+tmux send-keys -t "$pane_uid" "./bin/run_server_locally" Enter
+
+pane_uid=$(tmux split-window -t frontend.2 -c "${script_dir}/../spiffworkflow-frontend" -P -F '#{pane_id}')
+tmux send-keys -t "$pane_uid" "npm start" Enter

View File

@@ -88,7 +88,6 @@ class ApiError(Exception):
             # Assure that there is nothing in the json data that can't be serialized.
             instance.task_data = ApiError.remove_unserializeable_from_dict(task.data)
-        current_app.logger.error(message, exc_info=True)
         return instance

     @staticmethod
@@ -125,7 +124,6 @@ class ApiError(Exception):
         instance.task_name = task_spec.description or ""
         if task_spec._wf_spec:
             instance.file_name = task_spec._wf_spec.file
-        current_app.logger.error(message, exc_info=True)
         return instance

     @classmethod
@@ -182,9 +180,9 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response:
            f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
        )

-    # !!!NOTE!!!: do this after sentry stuff since calling logger.exception
-    # seems to break the sentry sdk context where we no longer get back
-    # an event id or send out tags like username
+    # # !!!NOTE!!!: do this after sentry stuff since calling logger.exception
+    # # seems to break the sentry sdk context where we no longer get back
+    # # an event id or send out tags like username
    current_app.logger.exception(exception)

    # set api_exception like this to avoid confusing mypy
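
The ordering the NOTE comment describes matters because sentry-sdk builds the event (its id, user tags) when the exception is captured, and logging first appears to clobber that context. A rough sketch of the intended order, not the file's actual code; the function name and the organization_slug/project_slug parameters are illustrative placeholders:

import sentry_sdk
from flask import current_app


def report_to_sentry_then_log(
    exception: Exception, organization_slug: str, project_slug: str
) -> str:
    # Capture in Sentry first, while the SDK context (event id, user tags) is intact.
    event_id = sentry_sdk.capture_exception(exception)
    sentry_link = (
        f"https://sentry.io/{organization_slug}/{project_slug}/events/{event_id}"
    )
    # Only log locally afterwards, per the NOTE comment in the hunk above.
    current_app.logger.exception(exception)
    return sentry_link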

View File

@@ -9,6 +9,20 @@ set -o errtrace -o errexit -o nounset -o pipefail

 export FLASK_SESSION_SECRET_KEY="this_is_recreate_db_secret_key"

+if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
+  script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
+  BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
+  if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+    BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
+    if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+      >&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $BPMN_SPEC_ABSOLUTE_DIR"
+      exit 1
+    fi
+  fi
+  export BPMN_SPEC_ABSOLUTE_DIR
+fi
+
 tasks=""
 if [[ "${1:-}" == "clean" ]]; then
   subcommand="${2:-}"

View File

@@ -19,7 +19,16 @@ fi

 if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
   script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
-  export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models"
+  BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
+  if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+    BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
+    if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+      >&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $BPMN_SPEC_ABSOLUTE_DIR"
+      exit 1
+    fi
+  fi
+  export BPMN_SPEC_ABSOLUTE_DIR
 fi

 export FLASK_SESSION_SECRET_KEY=super_secret_key

View File

@@ -36,21 +36,9 @@ from spiffworkflow_backend import create_app # noqa: E402
 def app() -> Flask:
     """App."""
     os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "testing"
-    # os.environ["FLASK_SESSION_SECRET_KEY"] = "this_is_testing_secret_key"
     os.environ["FLASK_SESSION_SECRET_KEY"] = "super_secret_key"
     app = create_app()
-    # NOTE: set this here since nox shoves tests and src code to
-    # different places and this allows us to know exactly where we are at the start
-    app.config["BPMN_SPEC_ABSOLUTE_DIR"] = os.path.join(
-        os.path.dirname(__file__),
-        "tests",
-        "spiffworkflow_backend",
-        "files",
-        "bpmn_specs",
-    )
     return app

View File

=3.5"">
@@ -95,7 +95,7 @@ python-versions = ">=3.5"
 dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
 docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
 tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
-tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
+tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]

 [[package]]
 name = "Babel"
@@ -268,7 +268,7 @@ optional = false
 python-versions = ">=3.6.0"

 [package.extras]
-unicode_backport = ["unicodedata2"]
+unicode-backport = ["unicodedata2"]

 [[package]]
 name = "classify-imports"
@@ -639,7 +639,7 @@ werkzeug = "*"
 type = "git"
 url = "https://github.com/sartography/flask-bpmn"
 reference = "main"
-resolved_reference = "a8b90f2ca09ef1cbb24a491c36f1cc9437477325"
+resolved_reference = "a901d7ffb2b79abfec17c332bbe77c43c1d28705"

 [[package]]
 name = "Flask-Cors"
@@ -1512,7 +1512,7 @@ urllib3 = ">=1.21.1,<1.27"

 [package.extras]
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

 [[package]]
 name = "requests-toolbelt"
@@ -1625,7 +1625,7 @@ falcon = ["falcon (>=1.4)"]
 fastapi = ["fastapi (>=0.79.0)"]
 flask = ["blinker (>=1.1)", "flask (>=0.11)"]
 httpx = ["httpx (>=0.16.0)"]
-pure_eval = ["asttokens", "executing", "pure-eval"]
+pure-eval = ["asttokens", "executing", "pure-eval"]
 pyspark = ["pyspark (>=2.4.4)"]
 quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
 rq = ["rq (>=0.6)"]
@@ -1873,7 +1873,7 @@ pytz = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "12f81480a5f9e848e64221b9287c6cfa6cb682b4"
+resolved_reference = "2d3bd00854ab483e823c4b386430abc9267f536b"

 [[package]]
 name = "SQLAlchemy"
@@ -1891,19 +1891,19 @@ aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
 aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
 asyncio = ["greenlet (!=0.4.17)"]
 asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
-mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
 mssql = ["pyodbc"]
-mssql_pymssql = ["pymssql"]
-mssql_pyodbc = ["pyodbc"]
+mssql-pymssql = ["pymssql"]
+mssql-pyodbc = ["pyodbc"]
 mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
 mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
-mysql_connector = ["mysql-connector-python"]
+mysql-connector = ["mysql-connector-python"]
 oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
 postgresql = ["psycopg2 (>=2.7)"]
-postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
-postgresql_psycopg2binary = ["psycopg2-binary"]
-postgresql_psycopg2cffi = ["psycopg2cffi"]
+postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
+postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
+postgresql-psycopg2binary = ["psycopg2-binary"]
+postgresql-psycopg2cffi = ["psycopg2cffi"]
 pymysql = ["pymysql", "pymysql (<1)"]
 sqlcipher = ["sqlcipher3_binary"]
@@ -3051,18 +3051,7 @@ py = [
     {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
 ]
 pyasn1 = [
-    {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
-    {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
-    {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
-    {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
     {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
-    {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
-    {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
-    {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
-    {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
-    {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
-    {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
-    {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
     {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
 ]
 pycodestyle = [

View File

@ -27,15 +27,11 @@ flask-marshmallow = "*"
flask-migrate = "*" flask-migrate = "*"
flask-restful = "*" flask-restful = "*"
werkzeug = "*" werkzeug = "*"
# go back to main once https://github.com/sartography/SpiffWorkflow/pull/241 is merged
SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
# SpiffWorkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"}
# SpiffWorkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"}
sentry-sdk = "^1.10" sentry-sdk = "^1.10"
sphinx-autoapi = "^2.0" sphinx-autoapi = "^2.0"
# flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"}
# flask-bpmn = {develop = true, path = "/Users/kevin/projects/github/sartography/flask-bpmn"}
flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"} flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
# flask-bpmn = {develop = true, path = "../flask-bpmn"}
mysql-connector-python = "^8.0.29" mysql-connector-python = "^8.0.29"
pytest-flask = "^1.2.0" pytest-flask = "^1.2.0"
pytest-flask-sqlalchemy = "^1.1.0" pytest-flask-sqlalchemy = "^1.1.0"

View File

@@ -148,6 +148,7 @@ def configure_sentry(app: flask.app.Flask) -> None:
     import sentry_sdk
     from sentry_sdk.integrations.flask import FlaskIntegration

+    # get rid of NotFound errors
     def before_send(event: Any, hint: Any) -> Any:
         """Before_send."""
         if "exc_info" in hint:

View File

@@ -8,6 +8,10 @@ from werkzeug.utils import ImportStringError
 from spiffworkflow_backend.services.logging_service import setup_logger


+class ConfigurationError(Exception):
+    pass
+
+
 def setup_database_uri(app: Flask) -> None:
     """Setup_database_uri."""
     if os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
@@ -85,5 +89,8 @@ def setup_config(app: Flask) -> None:
     #   src/spiffworkflow_backend/config/secrets.py
     app.config.from_pyfile(os.path.join("config", "secrets.py"), silent=True)

+    if app.config["BPMN_SPEC_ABSOLUTE_DIR"] is None:
+        raise ConfigurationError("BPMN_SPEC_ABSOLUTE_DIR config must be set")
+
     thread_local_data = threading.local()
     app.config["THREAD_LOCAL_DATA"] = thread_local_data

View File

@ -7,7 +7,7 @@ SELF_REGISTRATION = environ.get("SELF_REGISTRATION", default=False)
DEVELOPMENT = False DEVELOPMENT = False
BPMN_SPEC_ABSOLUTE_DIR = environ.get("BPMN_SPEC_ABSOLUTE_DIR", default="") BPMN_SPEC_ABSOLUTE_DIR = environ.get("BPMN_SPEC_ABSOLUTE_DIR")
CORS_DEFAULT = "*" CORS_DEFAULT = "*"
CORS_ALLOW_ORIGINS = re.split( CORS_ALLOW_ORIGINS = re.split(
r",\s*", environ.get("CORS_ALLOW_ORIGINS", default=CORS_DEFAULT) r",\s*", environ.get("CORS_ALLOW_ORIGINS", default=CORS_DEFAULT)

View File

@@ -65,7 +65,7 @@ permissions:
     uri: /v1.0/process-models/finance/*

   read-all:
-    groups: [finance, admin, "Project Lead"]
+    groups: [admin, "Project Lead"]
     users: []
     allowed_permissions: [read]
     uri: /*

View File

@@ -1,5 +1,8 @@
 """Testing.py."""
 from os import environ
+import os
+
+from spiffworkflow_backend.config.default import BPMN_SPEC_ABSOLUTE_DIR

 TESTING = True
@@ -15,3 +18,13 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
 SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
     "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
 )
+
+# NOTE: set this here since nox shoves tests and src code to
+# different places and this allows us to know exactly where we are at the start
+BPMN_SPEC_ABSOLUTE_DIR = os.path.join(
+    os.path.dirname(__file__),
+    "tests",
+    "spiffworkflow_backend",
+    "files",
+    "bpmn_specs",
+)

View File

@@ -22,6 +22,7 @@ from spiffworkflow_backend.models.principal import PrincipalModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.models.user import UserNotFoundError
 from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
+from spiffworkflow_backend.services.group_service import GroupService
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )
@@ -138,25 +139,11 @@ class AuthorizationService:
         default_group = None
         if "default_group" in permission_configs:
             default_group_identifier = permission_configs["default_group"]
-            default_group = GroupModel.query.filter_by(
-                identifier=default_group_identifier
-            ).first()
-            if default_group is None:
-                default_group = GroupModel(identifier=default_group_identifier)
-                db.session.add(default_group)
-                db.session.commit()
-                UserService.create_principal(
-                    default_group.id, id_column_name="group_id"
-                )
+            default_group = GroupService.find_or_create_group(default_group_identifier)

         if "groups" in permission_configs:
             for group_identifier, group_config in permission_configs["groups"].items():
-                group = GroupModel.query.filter_by(identifier=group_identifier).first()
-                if group is None:
-                    group = GroupModel(identifier=group_identifier)
-                    db.session.add(group)
-                    db.session.commit()
-                    UserService.create_principal(group.id, id_column_name="group_id")
+                group = GroupService.find_or_create_group(group_identifier)
                 for username in group_config["users"]:
                     user = UserModel.query.filter_by(username=username).first()
                     if user is None:
@@ -186,13 +173,9 @@ class AuthorizationService:
            for allowed_permission in permission_config["allowed_permissions"]:
                if "groups" in permission_config:
                    for group_identifier in permission_config["groups"]:
-                        principal = (
-                            PrincipalModel.query.join(GroupModel)
-                            .filter(GroupModel.identifier == group_identifier)
-                            .first()
-                        )
+                        group = GroupService.find_or_create_group(group_identifier)
                        cls.create_permission_for_principal(
-                            principal, permission_target, allowed_permission
+                            group.principal, permission_target, allowed_permission
                        )
                if "users" in permission_config:
                    for username in permission_config["users"]:

View File

@@ -24,26 +24,19 @@ class EmailService:
         """We will receive all data related to an email and send it."""
         mail = current_app.config["MAIL_APP"]

-        # Send mail
-        try:
-            msg = Message(
-                subject,
-                sender=sender,
-                recipients=recipients,
-                body=content,
-                html=content_html,
-                cc=cc,
-                bcc=bcc,
-                reply_to=reply_to,
-            )
+        msg = Message(
+            subject,
+            sender=sender,
+            recipients=recipients,
+            body=content,
+            html=content_html,
+            cc=cc,
+            bcc=bcc,
+            reply_to=reply_to,
+        )

-            if attachment_files is not None:
-                for file in attachment_files:
-                    msg.attach(file["name"], file["type"], file["data"])
+        if attachment_files is not None:
+            for file in attachment_files:
+                msg.attach(file["name"], file["type"], file["data"])

-            mail.send(msg)
-        except Exception as e:
-            # app.logger.error('An exception happened in EmailService', exc_info=True)
-            # app.logger.error(str(e))
-            raise e
+        mail.send(msg)

View File

@@ -0,0 +1,21 @@
+from spiffworkflow_backend.models.group import GroupModel
+from typing import Optional
+
+from flask_bpmn.models.db import db
+from spiffworkflow_backend.services.user_service import UserService
+
+
+class GroupService():
+    @classmethod
+    def find_or_create_group(cls, group_identifier: str) -> GroupModel:
+        group: Optional[GroupModel] = GroupModel.query.filter_by(
+            identifier=group_identifier
+        ).first()
+        if group is None:
+            group = GroupModel(identifier=group_identifier)
+            db.session.add(group)
+            db.session.commit()
+            UserService.create_principal(
+                group.id, id_column_name="group_id"
+            )
+        return group
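
For reference, a small usage sketch of the new helper; the "finance" identifier is illustrative, and it assumes an active Flask app context with the database session configured:

from spiffworkflow_backend.services.group_service import GroupService

# Look the group up, or create it plus its backing principal, in one call,
# replacing the query/create/commit blocks removed from AuthorizationService.
group = GroupService.find_or_create_group("finance")

# The group's principal can then be handed straight to permission creation,
# as the AuthorizationService hunk above now does:
#     cls.create_permission_for_principal(
#         group.principal, permission_target, allowed_permission
#     )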

View File

@@ -186,8 +186,12 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
             methods.update(external_methods)
             super().execute(task, script, methods)
         except WorkflowException as e:
-            raise e
+            print("WORKFLOW")
+            print(f"e: {e}")
+            # raise e
+            raise Exception("NEW ERROR")
         except Exception as e:
+            print("EXCEPTIONWORK")
             raise WorkflowTaskExecException(task, f" {script}, {e}", e) from e

     def call_service(

View File

@@ -167,8 +167,8 @@ class ProcessInstanceService:
                    f"task {spiff_task.task_spec.name}, it is not in the task event model, "
                    f"and it should be."
                )
-                current_app.logger.error(
-                    "missing_form_data", missing_form_error, exc_info=True
+                current_app.logger.exception(
+                    "missing_form_data", missing_form_error
                )
                return {}
            else: