Squashed 'spiffworkflow-backend/' changes from 03bf7a61..10c443a2

10c443a2 Merge pull request #130 from sartography/feature/data
71c803aa allow passing in the log level into the app w/ burnettk
daeb82d9 Merge pull request #126 from sartography/dependabot/pip/typing-extensions-4.4.0
14c8f52c Merge pull request #123 from sartography/dependabot/pip/dot-github/workflows/poetry-1.2.2
92d204e6 Merge remote-tracking branch 'origin/main' into feature/data
1cb77901 run the save all bpmn script on server boot w/ burnettk
16a6f476 Bump typing-extensions from 4.3.0 to 4.4.0
d8ac61fc Bump poetry from 1.2.1 to 1.2.2 in /.github/workflows
3be27786 Merge pull request #131 from sartography/feature/permissions2
1fd8fc78 Merge remote-tracking branch 'origin/main' into feature/permissions2
d29621ae data setup on app boot
0b21a5d4 refactor bin/save_all_bpmn.py into service code
02fb9d61 lint
c95db461 refactor scripts
98628fc2 This caused a problem with scopes when token timed out.
d8b2323b merged in main and resolved conflicts
d01b4fc7 updated sentry-sdk to resolve deprecation warnings
5851ddf5 update for mypy in python 3.9
508f9900 merged in main and resolved conflicts
68d69978 precommit w/ burnettk
85a4ee16 removed debug print statements w/ burnettk
93eb91f4 added keycloak configs and user perms for staging w/ burnettk
e4ded8fc added method to import permissions from yml file w/ burnettk
22ba89ae use percents instead of asterisks to better support db syntax w/ burnettk
0c116ae8 postgres does not use backticks w/ burnettk
621ad3ef attempting to see if sql like statement works in other dbs as well w/ burnettk

git-subtree-dir: spiffworkflow-backend
git-subtree-split: 10c443a2d82752e8ed9d1679afe6409d81029006
Jon Herron 2022-10-12 15:28:52 -04:00
parent 1aea5356de
commit 492681e5de
34 changed files with 983 additions and 259 deletions

View File

@ -1,5 +1,5 @@
pip==22.2.2
nox==2022.8.7
nox-poetry==1.0.1
poetry==1.2.1
poetry==1.2.2
virtualenv==20.16.5

View File

@ -38,6 +38,10 @@ if [[ "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" == "true" ]]; then
workers=1
fi
if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
fi
export IS_GUNICORN="true"
export PROCESS_WAITING_MESSAGES="true"

View File

@ -1,10 +1,9 @@
"""Grabs tickets from csv and makes process instances."""
import csv
import os
from flask_bpmn.models.db import db
from spiffworkflow_backend import create_app
from spiffworkflow_backend import get_hacked_up_app_for_script
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
@ -26,10 +25,7 @@ def print_process_instance_count(process_model_identifier_ticket: str) -> None:
def main():
"""Main."""
os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "development"
flask_env_key = "FLASK_SESSION_SECRET_KEY"
os.environ[flask_env_key] = "whatevs"
app = create_app()
app = get_hacked_up_app_for_script()
with app.app_context():
process_model_identifier_ticket = "ticket"

View File

@ -7,6 +7,12 @@ function error_handler() {
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
arg="${1:-}"
if [[ "$arg" == "acceptance" ]]; then
export SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=true
export SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=acceptance_tests.yml
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_ENV=development
fi
@ -25,5 +31,10 @@ else
if [[ -z "${PROCESS_WAITING_MESSAGES:-}" ]]; then
export PROCESS_WAITING_MESSAGES="true"
fi
export FLASK_DEBUG=1
if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
fi
FLASK_APP=src/spiffworkflow_backend poetry run flask run -p 7000
fi

View File

@ -1,97 +1,22 @@
"""Grabs tickets from csv and makes process instances."""
import os
from spiffworkflow_backend import create_app
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
# from lxml.etree import Element as EtreeElement
from spiffworkflow_backend import get_hacked_up_app_for_script
from spiffworkflow_backend.services.data_setup_service import DataSetupService
def main():
def main() -> None:
"""Main."""
os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "development"
flask_env_key = "FLASK_SESSION_SECRET_KEY"
os.environ[flask_env_key] = "whatevs"
if "BPMN_SPEC_ABSOLUTE_DIR" not in os.environ:
home = os.environ["HOME"]
full_process_model_path = (
f"{home}/projects/github/sartography/sample-process-models"
)
if os.path.isdir(full_process_model_path):
os.environ["BPMN_SPEC_ABSOLUTE_DIR"] = full_process_model_path
else:
raise Exception(f"Could not find {full_process_model_path}")
app = create_app()
app = get_hacked_up_app_for_script()
with app.app_context():
no_primary = []
failing_process_models = []
process_models = ProcessModelService().get_process_models()
for process_model in process_models:
if process_model.primary_file_name:
bpmn_xml_file_contents = SpecFileService.get_data(
process_model, process_model.primary_file_name
)
bad_files = [
"B.1.0.bpmn",
"C.1.0.bpmn",
"C.2.0.bpmn",
"C.6.0.bpmn",
"TC-5.1.bpmn",
]
if process_model.primary_file_name in bad_files:
continue
print(f"primary_file_name: {process_model.primary_file_name}")
try:
SpecFileService.update_file(
process_model,
process_model.primary_file_name,
bpmn_xml_file_contents,
)
except Exception as ex:
failing_process_models.append(
(process_model.primary_file_name, str(ex))
)
# files = SpecFileService.get_files(
# process_model, extension_filter="bpmn"
# )
# bpmn_etree_element: EtreeElement = (
# SpecFileService.get_etree_element_from_binary_data(
# bpmn_xml_file_contents, process_model.primary_file_name
# )
# )
# if len(files) == 1:
# try:
# new_bpmn_process_identifier = (
# SpecFileService.get_bpmn_process_identifier(
# bpmn_etree_element
# )
# )
# if (
# process_model.primary_process_id
# != new_bpmn_process_identifier
# ):
# print(
# "primary_process_id: ", process_model.primary_process_id
# )
# # attributes_to_update = {
# # "primary_process_id": new_bpmn_process_identifier
# # }
# # ProcessModelService().update_spec(
# # process_model, attributes_to_update
# # )
# # except Exception as exception:
# except Exception:
# print(f"BAD ONE: {process_model.id}")
# # raise exception
else:
no_primary.append(process_model)
# for bpmn in no_primary:
# print(bpmn)
failing_process_models = DataSetupService.save_all_process_models()
for bpmn_errors in failing_process_models:
print(bpmn_errors)
if len(failing_process_models) > 0:
if (
os.environ.get("SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS")
!= "false"
and len(failing_process_models) > 0
):
exit(1)
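
The reworked exit condition above means the script only returns a non-zero status when there are failing process models and SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS is not explicitly "false". A minimal sketch of that predicate (the should_fail helper is illustrative, not part of the change):

import os

def should_fail(failing_process_models: list) -> bool:
    # mirrors the condition in bin/save_all_bpmn.py above
    return (
        os.environ.get("SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS") != "false"
        and len(failing_process_models) > 0
    )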

View File

@ -470,6 +470,50 @@
"webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister": false,
"webAuthnPolicyPasswordlessAcceptableAaguids": [],
"users": [
{
"id": "4048e9a7-8afa-4e69-9904-389657221abe",
"createdTimestamp": 1665517741516,
"username": "alex",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "81a61a3b-228d-42b3-b39a-f62d8e7f57ca",
"type": "password",
"createdDate": 1665517748308,
"secretData": "{\"value\":\"13OdXlB1S1EqHL+3/0y4LYp/LGCn0UW8/Wh9ykgpUbRrwdX6dY3iiMlKePfTy5nXoH/ISmPlxNKOe5z7FWXsgg==\",\"salt\":\"pv0SEb7Ctk5tpu2y32L2kw==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "b4dc5a30-4bd7-44fc-88b5-839fbb8567ea",
"createdTimestamp": 1665518311550,
"username": "amir",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "e589f3ad-bf7b-4756-89f7-7894c03c2831",
"type": "password",
"createdDate": 1665518319210,
"secretData": "{\"value\":\"mamd7Hi6nV5suylSrUgwWon3Gw3WeOIvAJu9g39Mq1iYoXWj2rI870bGHiSITLaFBpdjLOEmlu9feKkULOXNpQ==\",\"salt\":\"wG7tkMQfPKRW9ymu4ekujQ==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "4c436296-8471-4105-b551-80eee96b43bb",
"createdTimestamp": 1657139858075,
@ -520,6 +564,248 @@
"notBefore": 0,
"groups": []
},
{
"id": "99e7e4ea-d4ae-4944-bd31-873dac7b004c",
"createdTimestamp": 1665517024483,
"username": "dan",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "d517c520-f500-4542-80e5-7144daef1e32",
"type": "password",
"createdDate": 1665517033429,
"secretData": "{\"value\":\"rgWPI1YobMfDaaT3di2+af3gHU8bkreRElAHgYFA+dXHw0skiGVd1t57kNLEP49M6zKYjZzlOKr0qvAxQF0oSg==\",\"salt\":\"usMZebZnPYXhD6ID95bizg==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "1834a79d-917f-4e4c-ab38-8ec376179fe9",
"createdTimestamp": 1665517805115,
"username": "daniel",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "f240495c-265b-42fc-99db-46928580d07d",
"type": "password",
"createdDate": 1665517812636,
"secretData": "{\"value\":\"sRCF3tFOZrUbEW220cVHhQ7e89iKqjgAMyO0BaYCPZZw1tEjZ+drGj+bfwRbuuK0Nps3t//YGVELsejRogWkcw==\",\"salt\":\"XQtLR9oZctkyRTi2Be+Z0g==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "72d32cba-e2e2-489d-9141-4d94e3bb2cda",
"createdTimestamp": 1665517787787,
"username": "elizabeth",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "ae951ec8-9fc9-4f1b-b340-bbbe463ae5c2",
"type": "password",
"createdDate": 1665517794484,
"secretData": "{\"value\":\"oudGUsbh8utUavZ8OmoUvggCYxr+RHCgwcqpub5AgbITsK4DgY01X0SlDGRTdNGOIqoHse8zGBNmcyBNPWjC0w==\",\"salt\":\"auHilaAS2Lo7oa0UaA7L6A==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "087bdc16-e362-4340-aa60-1ff71a45f844",
"createdTimestamp": 1665516884829,
"username": "harmeet",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "89c26090-9bd3-46ac-b038-883d02e3f125",
"type": "password",
"createdDate": 1665516905862,
"secretData": "{\"value\":\"vDzTFQhjg8l8XgQ/YFYZSMLxQovFc/wflVBiRtAk/UWRKhJwuz3XInFbQ64wbYppBlXDYSmYis3luKv6YyUWjQ==\",\"salt\":\"58OQLETS0sM9VpXWoNa6rQ==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "13f5481e-c6b5-450d-8aaf-e13c1c1f5914",
"createdTimestamp": 1665518332327,
"username": "jakub",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "ce141fa5-b8d5-4bbe-93e7-22e7119f97c2",
"type": "password",
"createdDate": 1665518338651,
"secretData": "{\"value\":\"+L4TmIGURzFtyRMFyKbPmQ8iYSC639K0GLNHXM+T/cLiMGxVr/wvWj5j435c1V9P+kwO2CnGtd09IsSN8cXuXg==\",\"salt\":\"a2eNeYyoci5fpkPJJy735g==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "3965a6c8-31df-474f-9a45-c268ed98e3fd",
"createdTimestamp": 1665518284693,
"username": "jarrad",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "113e0343-1069-476d-83f9-21d98edb9cfa",
"type": "password",
"createdDate": 1665518292234,
"secretData": "{\"value\":\"1CeBMYC3yiJ/cmIxHs/bSea3kxItLNnaIkPNRk2HefZiCdfUKcJ/QLI0O9QO108G2Lzg9McR33EB72zbFAfYUw==\",\"salt\":\"2kWgItvYvzJkgJU9ICWMAw==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "58bcce19-41ec-4ae7-b930-b37be7ad4ba3",
"createdTimestamp": 1665516949583,
"username": "jason",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "40abf32e-f0cc-4a17-8231-1a69a02c1b0b",
"type": "password",
"createdDate": 1665516957192,
"secretData": "{\"value\":\"nCnRYH5rLRMu1E7C260SowAdvJfQCSdf4LigcIzSkoPwT+qfLT5ut5m99zakNLeHLoCtGhO2lSVGUQWhdCUYJw==\",\"salt\":\"mW5QN/RSr55I04VI6FTERA==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "29c11638-3b32-4024-8594-91c8b09e713c",
"createdTimestamp": 1665518366585,
"username": "jon",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "8b520e01-5b9b-44ab-9ee8-505bd0831a45",
"type": "password",
"createdDate": 1665518373016,
"secretData": "{\"value\":\"lZBDnz49zW6EkT2t7JSQjOzBlYhjhkw3hHefcOC4tmet+h/dAuxSGRuLibJHBap2j6G9Z2SoRqtyS8bwGbR42g==\",\"salt\":\"MI90jmxbLAno0g5O4BCeHw==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "af15c167-d0e7-4a41-ac2c-109188dd7166",
"createdTimestamp": 1665516966482,
"username": "kb",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "2c0be363-038f-48f1-86d6-91fdd28657cf",
"type": "password",
"createdDate": 1665516982394,
"secretData": "{\"value\":\"yvliX8Mn+lgpxfMpkjfsV8CASgghEgPA2P1/DR1GP5LSFoGwGCEwj0SmeQAo+MQjBsn3nfvtL9asQvmIYdNZwQ==\",\"salt\":\"kFr1K94QCEx9eGD25rZR9g==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "6f5bfa09-7494-4a2f-b871-cf327048cac7",
"createdTimestamp": 1665517010600,
"username": "manuchehr",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "07dabf55-b5d3-4f98-abba-3334086ecf5e",
"type": "password",
"createdDate": 1665517017682,
"secretData": "{\"value\":\"1btDXHraz9l0Gp4g1xxdcuZffLsuKsW0tHwQGzoEtTlI/iZdrKPG9WFlCEFd84qtpdYPJD/tvzn6ZK6zU4/GlQ==\",\"salt\":\"jHtMiO+4jMv9GqLhC9wg4w==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "d1c46b47-67c4-4d07-9cf4-6b1ceac88fc1",
"createdTimestamp": 1665517760255,
"username": "mike",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "1ed375fb-0f1a-4c2a-9243-2477242cf7bd",
"type": "password",
"createdDate": 1665517768715,
"secretData": "{\"value\":\"S1cxZ3dgNB+A6yfMchDWEGP8OyZaaAOU/IUKn+QWFt255yoFqs28pfmwCsevdzuh0YfygO9GBgBv7qZQ2pknNQ==\",\"salt\":\"i+Q9zEHNxfi8TAHw17Dv6w==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "a15da457-7ebb-49d4-9dcc-6876cb71600d",
"createdTimestamp": 1657115919770,
@ -545,6 +831,28 @@
"notBefore": 0,
"groups": []
},
{
"id": "f3852a7d-8adf-494f-b39d-96ad4c899ee5",
"createdTimestamp": 1665516926300,
"username": "sasha",
"enabled": true,
"totp": false,
"emailVerified": false,
"credentials": [
{
"id": "4a170af4-6f0c-4e7b-b70c-e674edf619df",
"type": "password",
"createdDate": 1665516934662,
"secretData": "{\"value\":\"/cimS+PL6p+YnOCF9ZSA6UuwmmLZ7aVUZUthiFDqp/sn0c8GTpWmAdDIbJy2Ut+D4Rx605kRFQaekzRgSYPxcg==\",\"salt\":\"0dmUnLfqK745YHVSz6HOZg==\",\"additionalParameters\":{}}",
"credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
}
],
"disableableCredentialTypes": [],
"requiredActions": [],
"realmRoles": ["default-roles-spiffworkflow"],
"notBefore": 0,
"groups": []
},
{
"id": "487d3a85-89dd-4839-957a-c3f6d70551f6",
"createdTimestamp": 1657115173081,
@ -1056,14 +1364,6 @@
"allowRemoteResourceManagement": true,
"policyEnforcementMode": "ENFORCING",
"resources": [
{
"name": "Default Resource",
"type": "urn:spiffworkflow-backend:resources:default",
"ownerManagedAccess": false,
"attributes": {},
"_id": "8e00e4a3-3fff-4521-b7f0-95f66c2f79d2",
"uris": ["/*"]
},
{
"name": "everything",
"ownerManagedAccess": false,
@ -1085,6 +1385,14 @@
}
]
},
{
"name": "Default Resource",
"type": "urn:spiffworkflow-backend:resources:default",
"ownerManagedAccess": false,
"attributes": {},
"_id": "8e00e4a3-3fff-4521-b7f0-95f66c2f79d2",
"uris": ["/*"]
},
{
"name": "process-model-with-repeating-form-crud",
"type": "process-model",
@ -1994,14 +2302,14 @@
"subComponents": {},
"config": {
"allowed-protocol-mapper-types": [
"oidc-usermodel-attribute-mapper",
"oidc-address-mapper",
"oidc-full-name-mapper",
"oidc-sha256-pairwise-sub-mapper",
"oidc-usermodel-property-mapper",
"saml-role-list-mapper",
"oidc-usermodel-attribute-mapper",
"saml-user-property-mapper",
"saml-user-attribute-mapper"
"saml-role-list-mapper",
"oidc-full-name-mapper",
"saml-user-attribute-mapper",
"oidc-address-mapper",
"oidc-sha256-pairwise-sub-mapper"
]
}
},
@ -2023,14 +2331,14 @@
"subComponents": {},
"config": {
"allowed-protocol-mapper-types": [
"saml-user-property-mapper",
"saml-user-attribute-mapper",
"oidc-full-name-mapper",
"oidc-usermodel-attribute-mapper",
"oidc-sha256-pairwise-sub-mapper",
"oidc-usermodel-property-mapper",
"saml-role-list-mapper",
"oidc-address-mapper"
"oidc-sha256-pairwise-sub-mapper",
"oidc-address-mapper",
"saml-user-property-mapper",
"oidc-full-name-mapper",
"oidc-usermodel-property-mapper",
"oidc-usermodel-attribute-mapper"
]
}
},
@ -2144,7 +2452,7 @@
"supportedLocales": [],
"authenticationFlows": [
{
"id": "a2e35646-200f-4d14-98ba-c9b5150d8753",
"id": "24ffe820-51bc-402b-b165-7745b6363275",
"alias": "Account verification options",
"description": "Method with which to verity the existing account",
"providerId": "basic-flow",
@ -2170,7 +2478,7 @@
]
},
{
"id": "d85a3c40-8cc9-43a1-ba04-0c8ca2c072da",
"id": "a1e19975-9f44-4ddd-ab5a-2315afa028b1",
"alias": "Authentication Options",
"description": "Authentication options.",
"providerId": "basic-flow",
@ -2204,7 +2512,7 @@
]
},
{
"id": "e127feb1-c4d8-471a-9afc-c21df984462e",
"id": "88ee8214-27f8-4da3-ba54-cb69053bf593",
"alias": "Browser - Conditional OTP",
"description": "Flow to determine if the OTP is required for the authentication",
"providerId": "basic-flow",
@ -2230,7 +2538,7 @@
]
},
{
"id": "f8f6347b-7eb1-44ca-a912-a826a8f93b6d",
"id": "2a720f72-2f6f-4e64-906c-2be5e2fd95fb",
"alias": "Direct Grant - Conditional OTP",
"description": "Flow to determine if the OTP is required for the authentication",
"providerId": "basic-flow",
@ -2256,7 +2564,7 @@
]
},
{
"id": "d2bb8529-3fb8-4085-9153-b56a930829cd",
"id": "b6f70fef-da90-4033-9f0e-d1b7f8619e68",
"alias": "First broker login - Conditional OTP",
"description": "Flow to determine if the OTP is required for the authentication",
"providerId": "basic-flow",
@ -2282,7 +2590,7 @@
]
},
{
"id": "6ccd1a2e-0184-43d4-80e4-7400a008408f",
"id": "c3869d8d-dda3-4b13-a7f5-55f29195d03a",
"alias": "Handle Existing Account",
"description": "Handle what to do if there is existing account with same email/username like authenticated identity provider",
"providerId": "basic-flow",
@ -2308,7 +2616,7 @@
]
},
{
"id": "f13bd8b5-895a-44a0-82a6-067dffdcffa9",
"id": "e2855580-7582-4835-b2af-de34215532fe",
"alias": "Reset - Conditional OTP",
"description": "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
"providerId": "basic-flow",
@ -2334,7 +2642,7 @@
]
},
{
"id": "3ef752df-8070-4864-9f1e-2900317924b2",
"id": "4224394c-485e-42ee-a65a-2bdc6eb092fd",
"alias": "User creation or linking",
"description": "Flow for the existing/non-existing user alternatives",
"providerId": "basic-flow",
@ -2361,7 +2669,7 @@
]
},
{
"id": "9adb8fbe-b778-4ee1-9a1b-c01021aee03e",
"id": "fef8981c-e419-4564-ae91-755e489e6d60",
"alias": "Verify Existing Account by Re-authentication",
"description": "Reauthentication of existing account",
"providerId": "basic-flow",
@ -2387,7 +2695,7 @@
]
},
{
"id": "1958f0c6-aaa0-41df-bbe1-be12668286f5",
"id": "f214f005-ad6c-4314-86b9-8d973fbaa3d2",
"alias": "browser",
"description": "browser based authentication",
"providerId": "basic-flow",
@ -2429,7 +2737,7 @@
]
},
{
"id": "c4a0fb82-e755-465f-a0d1-c87846836397",
"id": "7a4f7246-66dd-44f6-9c57-917ba6e62197",
"alias": "clients",
"description": "Base authentication for clients",
"providerId": "client-flow",
@ -2471,7 +2779,7 @@
]
},
{
"id": "3d377bcf-c7b0-4356-bf2f-f83fb1e4aca9",
"id": "2ff421f8-d280-4d56-bd34-25b2a5c3148e",
"alias": "direct grant",
"description": "OpenID Connect Resource Owner Grant",
"providerId": "basic-flow",
@ -2505,7 +2813,7 @@
]
},
{
"id": "97d2ac80-b725-44f8-b171-655bc28cac2a",
"id": "ae42aaf0-f2a7-4e38-81be-c9fc06dea76e",
"alias": "docker auth",
"description": "Used by Docker clients to authenticate against the IDP",
"providerId": "basic-flow",
@ -2523,7 +2831,7 @@
]
},
{
"id": "0fcc3a08-ea77-42e4-a1fb-858abcf1759a",
"id": "e5aa743d-c889-422e-ba9f-90fee8c7f5d9",
"alias": "first broker login",
"description": "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
"providerId": "basic-flow",
@ -2550,7 +2858,7 @@
]
},
{
"id": "ac743fa7-98df-4933-898f-44b716ff55e2",
"id": "a54ebefa-6ef6-4e42-a016-2b56af3f8aaa",
"alias": "forms",
"description": "Username, password, otp and other auth forms.",
"providerId": "basic-flow",
@ -2576,7 +2884,7 @@
]
},
{
"id": "65451a14-aa9d-49da-807a-f934b10775cb",
"id": "b5d4595a-88b2-4ea9-aeea-d796b0b9085d",
"alias": "http challenge",
"description": "An authentication flow based on challenge-response HTTP Authentication Schemes",
"providerId": "basic-flow",
@ -2602,7 +2910,7 @@
]
},
{
"id": "733a256d-0ccb-4197-852c-91bf62f80e4b",
"id": "da2eba73-45d5-4f0f-bfe8-8812481cde93",
"alias": "registration",
"description": "registration flow",
"providerId": "basic-flow",
@ -2621,7 +2929,7 @@
]
},
{
"id": "d34e94db-5cfd-412b-9555-bfcf3ab7b21b",
"id": "6d49fc23-14db-49a2-89b5-58439022e649",
"alias": "registration form",
"description": "registration form",
"providerId": "form-flow",
@ -2663,7 +2971,7 @@
]
},
{
"id": "2c90ffbf-2de2-41df-bfb0-ddd089bf8c57",
"id": "a0615de2-cf4a-4812-a9ef-fbc4e38e3d10",
"alias": "reset credentials",
"description": "Reset credentials for a user if they forgot their password or something",
"providerId": "basic-flow",
@ -2705,7 +3013,7 @@
]
},
{
"id": "a779f34a-421c-4b7c-b94a-5b8736cf485b",
"id": "69f5f241-2b8a-4fe0-a38d-e4abee38add2",
"alias": "saml ecp",
"description": "SAML ECP Profile Authentication Flow",
"providerId": "basic-flow",
@ -2725,14 +3033,14 @@
],
"authenticatorConfig": [
{
"id": "d99b0848-0378-4a5d-9a72-6efd758e935f",
"id": "7257ea10-3ff4-4001-8171-edc7a7e5b751",
"alias": "create unique user config",
"config": {
"require.password.update.after.registration": "false"
}
},
{
"id": "ab775beb-09ca-4f94-b62b-16f0692269e9",
"id": "105a6011-5d34-4b70-aaf1-52833e8f62b6",
"alias": "review profile config",
"config": {
"update.profile.on.first.login": "missing"

View File

@ -1,8 +1,8 @@
"""empty message
Revision ID: 88e30afd19ac
Revision ID: 5f7d61fa371c
Revises:
Create Date: 2022-10-11 09:39:40.882490
Create Date: 2022-10-11 14:45:41.213890
"""
from alembic import op
@ -10,7 +10,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '88e30afd19ac'
revision = '5f7d61fa371c'
down_revision = None
branch_labels = None
depends_on = None
@ -226,8 +226,8 @@ def upgrade():
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('principal_id', sa.Integer(), nullable=False),
sa.Column('permission_target_id', sa.Integer(), nullable=False),
sa.Column('grant_type', sa.Enum('permit', 'deny', name='permitdeny'), nullable=True),
sa.Column('permission', sa.Enum('create', 'read', 'update', 'delete', 'list', 'instantiate', name='permission'), nullable=True),
sa.Column('grant_type', sa.String(length=50), nullable=True),
sa.Column('permission', sa.String(length=50), nullable=True),
sa.ForeignKeyConstraint(['permission_target_id'], ['permission_target.id'], ),
sa.ForeignKeyConstraint(['principal_id'], ['principal.id'], ),
sa.PrimaryKeyConstraint('id'),

View File

@ -1,32 +0,0 @@
group-admin:
type: Group
users: [jakub, kb, alex, dan, mike, jason]
group-finance:
type: Group
users: [harmeet, sasha]
group-hr:
type: Group
users: [manuchehr]
permission-admin:
type: Permission
groups: [group-admin]
users: []
allowed_permissions: [CREATE, READ, UPDATE, DELETE, LIST, INSTANTIATE]
uri: /*
permission-finance-admin:
type: Permission
groups: [group-a]
users: []
allowed_permissions: [CREATE, READ, UPDATE, DELETE]
uri: /v1.0/process-groups/finance/*
permission-read-all:
type: Permission
groups: [group-finance, group-hr, group-admin]
users: []
allowed_permissions: [READ]
uri: /*

poetry.lock generated
View File

@ -95,7 +95,7 @@ python-versions = ">=3.5"
dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
[[package]]
name = "Babel"
@ -268,7 +268,7 @@ optional = false
python-versions = ">=3.6.0"
[package.extras]
unicode_backport = ["unicodedata2"]
unicode-backport = ["unicodedata2"]
[[package]]
name = "classify-imports"
@ -1512,7 +1512,7 @@ urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-toolbelt"
@ -1625,7 +1625,7 @@ falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"]
httpx = ["httpx (>=0.16.0)"]
pure_eval = ["asttokens", "executing", "pure-eval"]
pure-eval = ["asttokens", "executing", "pure-eval"]
pyspark = ["pyspark (>=2.4.4)"]
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
rq = ["rq (>=0.6)"]
@ -1884,19 +1884,19 @@ aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"]
mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
mssql = ["pyodbc"]
mssql_pymssql = ["pymssql"]
mssql_pyodbc = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
mysql_connector = ["mysql-connector-python"]
mysql-connector = ["mysql-connector-python"]
oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
postgresql_psycopg2binary = ["psycopg2-binary"]
postgresql_psycopg2cffi = ["psycopg2cffi"]
postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
pymysql = ["pymysql", "pymysql (<1)"]
sqlcipher = ["sqlcipher3_binary"]
@ -1986,6 +1986,14 @@ category = "main"
optional = false
python-versions = "*"
[[package]]
name = "types-PyYAML"
version = "6.0.12"
description = "Typing stubs for PyYAML"
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "types-requests"
version = "2.28.11.1"
@ -2007,7 +2015,7 @@ python-versions = "*"
[[package]]
name = "typing-extensions"
version = "4.3.0"
version = "4.4.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
@ -2178,7 +2186,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
[metadata]
lock-version = "1.1"
python-versions = ">=3.9,<3.11"
content-hash = "ba476dd0748bb440b522d1bf24fb62eb30ce3cfbd48b9e3d8f7b5069ddc78ba9"
content-hash = "7b4eb35239359ebff4c5597052aedc14b47cc7d1880b5617632edbb957511908"
[metadata.files]
alabaster = [
@ -2957,18 +2965,7 @@ py = [
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pyasn1 = [
{file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
{file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
{file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
{file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
{file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
{file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
{file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
{file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
{file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
{file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
{file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pycodestyle = [
@ -3396,6 +3393,10 @@ types-pytz = [
{file = "types-pytz-2022.4.0.0.tar.gz", hash = "sha256:17d66e4b16e80ceae0787726f3a22288df7d3f9fdebeb091dc64b92c0e4ea09d"},
{file = "types_pytz-2022.4.0.0-py3-none-any.whl", hash = "sha256:950b0f3d64ed5b03a3e29c1e38fe2be8371c933c8e97922d0352345336eb8af4"},
]
types-PyYAML = [
{file = "types-PyYAML-6.0.12.tar.gz", hash = "sha256:f6f350418125872f3f0409d96a62a5a5ceb45231af5cc07ee0034ec48a3c82fa"},
{file = "types_PyYAML-6.0.12-py3-none-any.whl", hash = "sha256:29228db9f82df4f1b7febee06bbfb601677882e98a3da98132e31c6874163e15"},
]
types-requests = [
{file = "types-requests-2.28.11.1.tar.gz", hash = "sha256:02b1806c5b9904edcd87fa29236164aea0e6cdc4d93ea020cd615ef65cb43d65"},
{file = "types_requests-2.28.11.1-py3-none-any.whl", hash = "sha256:1ff2c1301f6fe58b5d1c66cdf631ca19734cb3b1a4bbadc878d75557d183291a"},
@ -3405,8 +3406,8 @@ types-urllib3 = [
{file = "types_urllib3-1.26.25-py3-none-any.whl", hash = "sha256:c1d78cef7bd581e162e46c20a57b2e1aa6ebecdcf01fd0713bb90978ff3e3427"},
]
typing-extensions = [
{file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
{file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
{file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
{file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
]
typing-inspect = [
{file = "typing_inspect-0.8.0-py3-none-any.whl", hash = "sha256:5fbf9c1e65d4fa01e701fe12a5bca6c6e08a4ffd5bc60bfac028253a447c5188"},

View File

@ -40,7 +40,7 @@ mysql-connector-python = "^8.0.29"
pytest-flask = "^1.2.0"
pytest-flask-sqlalchemy = "^1.1.0"
psycopg2 = "^2.9.3"
typing-extensions = "^4.3.0"
typing-extensions = "^4.4.0"
connexion = {extras = [ "swagger-ui",], version = "^2"}
lxml = "^4.9.1"
marshmallow-enum = "^1.5.1"
@ -55,6 +55,7 @@ Jinja2 = "^3.1.2"
RestrictedPython = "^5.2"
Flask-SQLAlchemy = "^3"
orjson = "^3.8.0"
types-PyYAML = "^6.0.12"
[tool.poetry.dev-dependencies]

View File

@ -116,6 +116,24 @@ def create_app() -> flask.app.Flask:
return app # type: ignore
def get_hacked_up_app_for_script() -> flask.app.Flask:
"""Get_hacked_up_app_for_script."""
os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "development"
flask_env_key = "FLASK_SESSION_SECRET_KEY"
os.environ[flask_env_key] = "whatevs"
if "BPMN_SPEC_ABSOLUTE_DIR" not in os.environ:
home = os.environ["HOME"]
full_process_model_path = (
f"{home}/projects/github/sartography/sample-process-models"
)
if os.path.isdir(full_process_model_path):
os.environ["BPMN_SPEC_ABSOLUTE_DIR"] = full_process_model_path
else:
raise Exception(f"Could not find {full_process_model_path}")
app = create_app()
return app
def configure_sentry(app: flask.app.Flask) -> None:
"""Configure_sentry."""
import sentry_sdk
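
The get_hacked_up_app_for_script helper added above is what the refactored bin scripts use to bootstrap an app outside a web server; a usage sketch mirroring bin/save_all_bpmn.py in this change:

from spiffworkflow_backend import get_hacked_up_app_for_script
from spiffworkflow_backend.services.data_setup_service import DataSetupService

app = get_hacked_up_app_for_script()
with app.app_context():
    # anything that needs Flask config or the database can run here
    failing_process_models = DataSetupService.save_all_process_models()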

View File

@ -8,9 +8,9 @@ servers:
- url: http://localhost:5000/v1.0
security:
- jwt: ["secret"]
- oAuth2AuthCode:
- read_email
- uid
# - oAuth2AuthCode:
# - read_email
# - uid
paths:
/login:

View File

@ -54,9 +54,6 @@ def setup_config(app: Flask) -> None:
else:
app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True)
setup_database_uri(app)
setup_logger(app)
env_config_module = "spiffworkflow_backend.config." + app.config["ENV_IDENTIFIER"]
try:
app.config.from_object(env_config_module)
@ -65,6 +62,18 @@ def setup_config(app: Flask) -> None:
f"Cannot find config module: {env_config_module}"
) from exception
setup_database_uri(app)
setup_logger(app)
app.config["PERMISSIONS_FILE_FULLPATH"] = None
if app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"]:
app.config["PERMISSIONS_FILE_FULLPATH"] = os.path.join(
app.root_path,
"config",
"permissions",
app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"],
)
# unversioned (see .gitignore) config that can override everything and include secrets.
# src/spiffworkflow_backend/config/secrets.py
app.config.from_pyfile(os.path.join("config", "secrets.py"), silent=True)
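
With this change, SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME is resolved relative to the app's config/permissions directory to produce PERMISSIONS_FILE_FULLPATH. A small illustration of the resulting path (the root path value here is an assumption for the example, not a real deployment value):

import os

root_path = "/app/src/spiffworkflow_backend"  # example app.root_path (assumption)
file_name = "acceptance_tests.yml"
print(os.path.join(root_path, "config", "permissions", file_name))
# /app/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml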

View File

@ -42,6 +42,14 @@ CONNECTOR_PROXY_URL = environ.get(
"CONNECTOR_PROXY_URL", default="http://localhost:7004"
)
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"
)
# Sentry Configuration
SENTRY_DSN = environ.get("SENTRY_DSN", default="")
SENTRY_SAMPLE_RATE = environ.get("SENTRY_SAMPLE_RATE", default="1.0")
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
"SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="info"
)

View File

@ -1 +1,10 @@
"""Development."""
from os import environ
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="staging.yml"
)
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
"SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
)

View File

@ -0,0 +1,13 @@
groups:
admin:
users: [ciadmin1]
common-user:
users: [ciuser1]
permissions:
admin:
groups: [admin, common-user]
users: []
allowed_permissions: [create, read, update, delete, list, instantiate]
uri: /*

View File

@ -0,0 +1,28 @@
groups:
admin:
users: [jakub, kb, alex, dan, mike, jason, amir, jarrad, elizabeth, jon]
finance:
users: [harmeet, sasha]
hr:
users: [manuchehr]
permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete, list, instantiate]
uri: /*
finance-admin:
groups: [finance]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/finance/*
read-all:
groups: [finance, hr, admin]
users: []
allowed_permissions: [read]
uri: /*

View File

@ -0,0 +1,28 @@
groups:
admin:
users: [testadmin1, testadmin2]
finance:
users: [testuser1, testuser2]
hr:
users: [testuser2, testuser3, testuser4]
permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete, list, instantiate]
uri: /*
read-all:
groups: [finance, hr, admin]
users: []
allowed_permissions: [read]
uri: /*
finance-admin:
groups: [finance]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/finance/*
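
Each of these permission files follows the same two-level schema: a groups mapping of group identifier to member usernames, and a permissions mapping of permission identifier to groups, users, allowed_permissions, and a uri where a trailing * acts as a wildcard. A minimal sketch of reading one of them with PyYAML (the file path is illustrative):

import yaml

with open("testing.yml") as file_handle:
    permission_configs = yaml.safe_load(file_handle)

for group_identifier, group_config in permission_configs.get("groups", {}).items():
    print(group_identifier, group_config["users"])
for permission_identifier, permission_config in permission_configs.get("permissions", {}).items():
    print(permission_identifier, permission_config["uri"], permission_config["allowed_permissions"])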

View File

@ -7,3 +7,7 @@ SECRET_KEY = "the_secret_key"
SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = (
environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="true") == "true"
)
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="testing.yml"
)

View File

@ -1,10 +1,11 @@
"""PermissionAssignment."""
import enum
from typing import Any
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import Enum
from sqlalchemy import ForeignKey
from sqlalchemy.orm import validates
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import PrincipalModel
@ -26,12 +27,12 @@ class Permission(enum.Enum):
# administer = 2
# view_instance = 3
create = 1
read = 2
update = 3
delete = 4
list = 5
instantiate = 6 # this is something you do to a process model
create = "create"
read = "read"
update = "update"
delete = "delete"
list = "list"
instantiate = "instantiate" # this is something you do to a process model
class PermissionAssignmentModel(SpiffworkflowBaseDBModel):
@ -51,5 +52,15 @@ class PermissionAssignmentModel(SpiffworkflowBaseDBModel):
permission_target_id = db.Column(
ForeignKey(PermissionTargetModel.id), nullable=False
)
grant_type = db.Column(Enum(PermitDeny))
permission = db.Column(Enum(Permission))
grant_type = db.Column(db.String(50))
permission = db.Column(db.String(50))
@validates("grant_type")
def validate_grant_type(self, key: str, value: str) -> Any:
"""Validate_grant_type."""
return self.validate_enum_field(key, value, PermitDeny)
@validates("permission")
def validate_permission(self, key: str, value: str) -> Any:
"""Validate_permission."""
return self.validate_enum_field(key, value, Permission)
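
The grant_type and permission columns switch from SQLAlchemy Enum columns to plain strings, with @validates hooks guarding the values against the Python enums. The validate_enum_field helper is inherited from the SpiffworkflowBaseDBModel base class imported from flask_bpmn above; a generic illustration of that kind of check (not the flask_bpmn implementation):

def validate_enum_field(key: str, value: str, enum_class: type) -> str:
    # accept None or any value defined on the enum, otherwise reject
    if value is not None and value not in [item.value for item in enum_class]:
        raise ValueError(f"{key} has an invalid value: {value}")
    return value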

View File

@ -1,26 +1,28 @@
"""PermissionTarget."""
import re
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy.orm import validates
# process groups and models are not in the db
# from sqlalchemy import ForeignKey # type: ignore
#
# from spiffworkflow_backend.models.process_group import ProcessGroupModel
# from spiffworkflow_backend.models.process_model import ProcessModel
class InvalidPermissionTargetUriError(Exception):
"""InvalidPermissionTargetUriError."""
class PermissionTargetModel(SpiffworkflowBaseDBModel):
"""PermissionTargetModel."""
__tablename__ = "permission_target"
# __table_args__ = (
# CheckConstraint(
# "NOT(process_group_id IS NULL AND process_model_identifier IS NULL AND process_instance_id IS NULL)"
# ),
# )
id = db.Column(db.Integer, primary_key=True)
uri = db.Column(db.String(255), unique=True, nullable=False)
# process_group_id = db.Column(ForeignKey(ProcessGroupModel.id), nullable=True) # type: ignore
# process_model_identifier = db.Column(ForeignKey(ProcessModel.id), nullable=True) # type: ignore
# process_instance_id = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=True) # type: ignore
@validates("uri")
def validate_uri(self, key: str, value: str) -> str:
"""Validate_uri."""
if re.search(r"%.", value):
raise InvalidPermissionTargetUriError(
f"Wildcard must appear at end: {value}"
)
return value
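
The re.search(r"%.", value) check flags any percent sign that still has characters after it, so only a trailing wildcard is accepted. For example:

import re

assert re.search(r"%.", "/test_group/%/model") is not None  # wildcard mid-URI: rejected by validate_uri
assert re.search(r"%.", "/test_group/%") is None            # trailing wildcard: allowed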

View File

@ -17,6 +17,10 @@ from spiffworkflow_backend.services.authentication_service import (
)
class UserNotFoundError(Exception):
"""UserNotFoundError."""
class UserModel(SpiffworkflowBaseDBModel):
"""UserModel."""

View File

@ -17,6 +17,7 @@ from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authentication_service import (
PublicAuthenticationService,
)
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.user_service import UserService
"""
@ -250,6 +251,14 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response
if user_model:
g.user = user_model.id
# this may eventually get too slow.
# when it does, be careful about backgrounding, because
# the user will immediately need permissions to use the site.
# we are also a little apprehensive about pre-creating users
# before the user signs in, because we won't know things like
# the external service user identifier.
AuthorizationService.import_permissions_from_yaml_file()
redirect_url = (
f"{state_redirect_url}?"
+ f"access_token={id_token_object['access_token']}&"

View File

@ -2,6 +2,7 @@
import json
import time
from flask import current_app
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
@ -9,8 +10,9 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
def load_fixtures() -> list[ProcessInstanceModel]:
def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:
"""Load_fixtures."""
current_app.logger.debug("load_acceptance_test_fixtures() start")
test_process_group_id = "acceptance-tests-group-one"
test_process_model_id = "acceptance-tests-model-1"
user = BaseTest.find_or_create_user()
@ -40,4 +42,5 @@ def load_fixtures() -> list[ProcessInstanceModel]:
process_instances.append(process_instance)
db.session.commit()
current_app.logger.debug("load_acceptance_test_fixtures() end")
return process_instances

View File

@ -1,15 +1,24 @@
"""Authorization_service."""
import re
from typing import Optional
from typing import Union
import jwt
import yaml
from flask import current_app
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from sqlalchemy import text
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import MissingPrincipalError
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
from spiffworkflow_backend.services.user_service import UserService
class AuthorizationService:
@ -21,20 +30,21 @@ class AuthorizationService:
) -> bool:
"""Has_permission."""
principal_ids = [p.id for p in principals]
permission_assignments = (
PermissionAssignmentModel.query.filter(
PermissionAssignmentModel.principal_id.in_(principal_ids)
)
.filter_by(permission=permission)
.join(PermissionTargetModel)
.filter_by(uri=target_uri)
.filter(text(f"'{target_uri}' LIKE permission_target.uri"))
.all()
)
for permission_assignment in permission_assignments:
if permission_assignment.grant_type.value == "permit":
if permission_assignment.grant_type == "permit":
return True
elif permission_assignment.grant_type.value == "deny":
elif permission_assignment.grant_type == "deny":
return False
else:
raise Exception("Unknown grant type")
@ -61,7 +71,105 @@ class AuthorizationService:
principals.append(group.principal)
return cls.has_permission(principals, permission, target_uri)
# return False
@classmethod
def import_permissions_from_yaml_file(
cls, raise_if_missing_user: bool = False
) -> None:
"""Import_permissions_from_yaml_file."""
permission_configs = None
with open(current_app.config["PERMISSIONS_FILE_FULLPATH"]) as file:
permission_configs = yaml.safe_load(file)
if "groups" in permission_configs:
for group_identifier, group_config in permission_configs["groups"].items():
group = GroupModel.query.filter_by(identifier=group_identifier).first()
if group is None:
group = GroupModel(identifier=group_identifier)
db.session.add(group)
db.session.commit()
UserService.create_principal(group.id, id_column_name="group_id")
for username in group_config["users"]:
user = UserModel.query.filter_by(username=username).first()
if user is None:
if raise_if_missing_user:
raise (
UserNotFoundError(
f"Could not find a user with name: {username}"
)
)
continue
user_group_assignemnt = UserGroupAssignmentModel.query.filter_by(
user_id=user.id, group_id=group.id
).first()
if user_group_assignemnt is None:
user_group_assignemnt = UserGroupAssignmentModel(
user_id=user.id, group_id=group.id
)
db.session.add(user_group_assignemnt)
db.session.commit()
if "permissions" in permission_configs:
for _permission_identifier, permission_config in permission_configs[
"permissions"
].items():
uri = permission_config["uri"]
uri_with_percent = re.sub(r"\*", "%", uri)
permission_target = PermissionTargetModel.query.filter_by(
uri=uri_with_percent
).first()
if permission_target is None:
permission_target = PermissionTargetModel(uri=uri_with_percent)
db.session.add(permission_target)
db.session.commit()
for allowed_permission in permission_config["allowed_permissions"]:
if "groups" in permission_config:
for group_identifier in permission_config["groups"]:
principal = (
PrincipalModel.query.join(GroupModel)
.filter(GroupModel.identifier == group_identifier)
.first()
)
cls.create_permission_for_principal(
principal, permission_target, allowed_permission
)
if "users" in permission_config:
for username in permission_config["users"]:
principal = (
PrincipalModel.query.join(UserModel)
.filter(UserModel.username == username)
.first()
)
cls.create_permission_for_principal(
principal, permission_target, allowed_permission
)
@classmethod
def create_permission_for_principal(
cls,
principal: PrincipalModel,
permission_target: PermissionTargetModel,
permission: str,
) -> PermissionAssignmentModel:
"""Create_permission_for_principal."""
permission_assignment: Optional[
PermissionAssignmentModel
] = PermissionAssignmentModel.query.filter_by(
principal_id=principal.id,
permission_target_id=permission_target.id,
permission=permission,
).first()
if permission_assignment is None:
permission_assignment = PermissionAssignmentModel(
principal_id=principal.id,
permission_target_id=permission_target.id,
permission=permission,
grant_type="permit",
)
db.session.add(permission_assignment)
db.session.commit()
return permission_assignment
# def refresh_token(self, token: str) -> str:
# """Refresh_token."""

View File

@ -0,0 +1,93 @@
"""Data_setup_service."""
from flask import current_app
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
class DataSetupService:
"""DataSetupService."""
@classmethod
def run_setup(cls) -> list:
"""Run_setup."""
return cls.save_all_process_models()
@classmethod
def save_all_process_models(cls) -> list:
"""Save_all."""
current_app.logger.debug("DataSetupService.save_all_process_models() start")
failing_process_models = []
process_models = ProcessModelService().get_process_models()
for process_model in process_models:
if process_model.primary_file_name:
bpmn_xml_file_contents = SpecFileService.get_data(
process_model, process_model.primary_file_name
)
bad_files = [
"B.1.0.bpmn",
"C.1.0.bpmn",
"C.2.0.bpmn",
"C.6.0.bpmn",
"TC-5.1.bpmn",
]
if process_model.primary_file_name in bad_files:
continue
current_app.logger.debug(
f"primary_file_name: {process_model.primary_file_name}"
)
try:
SpecFileService.update_file(
process_model,
process_model.primary_file_name,
bpmn_xml_file_contents,
)
except Exception as ex:
failing_process_models.append(
(
f"{process_model.process_group_id}/{process_model.id}/{process_model.primary_file_name}",
str(ex),
)
)
# files = SpecFileService.get_files(
# process_model, extension_filter="bpmn"
# )
# bpmn_etree_element: EtreeElement = (
# SpecFileService.get_etree_element_from_binary_data(
# bpmn_xml_file_contents, process_model.primary_file_name
# )
# )
# if len(files) == 1:
# try:
# new_bpmn_process_identifier = (
# SpecFileService.get_bpmn_process_identifier(
# bpmn_etree_element
# )
# )
# if (
# process_model.primary_process_id
# != new_bpmn_process_identifier
# ):
# print(
# "primary_process_id: ", process_model.primary_process_id
# )
# # attributes_to_update = {
# # "primary_process_id": new_bpmn_process_identifier
# # }
# # ProcessModelService().update_spec(
# # process_model, attributes_to_update
# # )
# # except Exception as exception:
# except Exception:
# print(f"BAD ONE: {process_model.id}")
# # raise exception
else:
failing_process_models.append(
(
f"{process_model.process_group_id}/{process_model.id}",
"primary_file_name not set",
)
)
current_app.logger.debug("DataSetupService.save_all_process_models() end")
return failing_process_models

View File

@ -23,7 +23,14 @@ from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
# full message list:
# {'name': 'gunicorn.error', 'msg': 'GET /admin/token', 'args': (), 'levelname': 'DEBUG', 'levelno': 10, 'pathname': '~/.cache/pypoetry/virtualenvs/spiffworkflow-backend-R_hdWfN1-py3.10/lib/python3.10/site-packages/gunicorn/glogging.py', 'filename': 'glogging.py', 'module': 'glogging', 'exc_info': None, 'exc_text': None, 'stack_info': None, 'lineno': 267, 'funcName': 'debug', 'created': 1657307111.4513023, 'msecs': 451.30228996276855, 'relativeCreated': 1730.785846710205, 'thread': 139945864087360, 'threadName': 'MainThread', 'processName': 'MainProcess', 'process': 2109561, 'message': 'GET /admin/token', 'asctime': '2022-07-08T15:05:11.451Z'}
class InvalidLogLevelError(Exception):
"""InvalidLogLevelError."""
# originally from https://stackoverflow.com/a/70223539/6090676
class JsonFormatter(logging.Formatter):
"""Formatter that outputs JSON strings after parsing the LogRecord.
@ -108,8 +115,16 @@ class SpiffFilter(logging.Filter):
def setup_logger(app: Flask) -> None:
"""Setup_logger."""
log_level = logging.DEBUG
spiff_log_level = logging.DEBUG
upper_log_level_string = app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"].upper()
log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
if upper_log_level_string not in log_levels:
raise InvalidLogLevelError(
f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}"
)
log_level = getattr(logging, upper_log_level_string)
spiff_log_level = getattr(logging, upper_log_level_string)
log_formatter = logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
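
The new SPIFFWORKFLOW_BACKEND_LOG_LEVEL setting is upper-cased, validated against the standard level names, and then resolved to the stdlib constant via getattr. In isolation:

import logging

upper_log_level_string = "info".upper()
log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
if upper_log_level_string not in log_levels:
    raise ValueError(f"Log level given is invalid: '{upper_log_level_string}'")
log_level = getattr(logging, upper_log_level_string)  # logging.INFO, i.e. 20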

View File

@ -270,13 +270,17 @@ class UserService:
)
@classmethod
def create_principal(cls, user_id: int) -> PrincipalModel:
def create_principal(
cls, child_id: int, id_column_name: str = "user_id"
) -> PrincipalModel:
"""Create_principal."""
principal: Optional[PrincipalModel] = PrincipalModel.query.filter_by(
user_id=user_id
column = PrincipalModel.__table__.columns[id_column_name]
principal: Optional[PrincipalModel] = PrincipalModel.query.filter(
column == child_id
).first()
if principal is None:
principal = PrincipalModel(user_id=user_id)
principal = PrincipalModel()
setattr(principal, id_column_name, child_id)
db.session.add(principal)
try:
db.session.commit()
@ -285,7 +289,7 @@ class UserService:
current_app.logger.error(f"Exception in create_principal: {e}")
raise ApiError(
error_code="add_principal_error",
message=f"Could not create principal {user_id}",
message=f"Could not create principal {child_id}",
) from e
return principal
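
The new id_column_name parameter lets the same helper create principals keyed either by user_id (the default) or by group_id, which the permission importer uses for groups. A usage sketch, assuming an app context and records that already exist in the database:

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.user_service import UserService

user = UserModel.query.filter_by(username="ciuser1").first()     # username from acceptance_tests.yml above
group = GroupModel.query.filter_by(identifier="admin").first()

UserService.create_principal(user.id)                              # principal keyed by user_id (default)
UserService.create_principal(group.id, id_column_name="group_id")  # principal keyed by group_id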

View File

@ -22,6 +22,7 @@ from spiffworkflow_backend.models.process_model import NotificationType
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.user_service import UserService
@ -262,3 +263,18 @@ class BaseTest:
)
with open(file_full_path, "rb") as file:
return file.read()
def assert_user_has_permission(
self,
user: UserModel,
permission: str,
target_uri: str,
expected_result: bool = True,
) -> None:
"""Assert_user_has_permission."""
has_permission = AuthorizationService.user_has_permission(
user=user,
permission=permission,
target_uri=target_uri,
)
assert has_permission is expected_result

View File

@ -1,12 +1,14 @@
"""Test_acceptance_test_fixtures."""
from flask.app import Flask
from spiffworkflow_backend.services.acceptance_test_fixtures import load_fixtures
from spiffworkflow_backend.services.acceptance_test_fixtures import (
load_acceptance_test_fixtures,
)
def test_start_dates_are_one_hour_apart(app: Flask) -> None:
"""Test_start_dates_are_one_hour_apart."""
process_instances = load_fixtures()
process_instances = load_acceptance_test_fixtures()
assert len(process_instances) > 2
assert process_instances[0].start_in_seconds is not None

View File

@ -0,0 +1,69 @@
"""Test_message_service."""
import pytest
from flask import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.services.authorization_service import AuthorizationService
class TestAuthorizationService(BaseTest):
"""TestAuthorizationService."""
def test_can_raise_if_missing_user(
self, app: Flask, with_db_and_bpmn_file_cleanup: None
) -> None:
"""Test_can_raise_if_missing_user."""
with pytest.raises(UserNotFoundError):
AuthorizationService.import_permissions_from_yaml_file(
raise_if_missing_user=True
)
def test_can_import_permissions_from_yaml(
self, app: Flask, with_db_and_bpmn_file_cleanup: None
) -> None:
"""Test_can_import_permissions_from_yaml."""
usernames = [
"testadmin1",
"testadmin2",
"testuser1",
"testuser2",
"testuser3",
"testuser4",
]
users = {}
for username in usernames:
user = self.find_or_create_user(username=username)
users[username] = user
AuthorizationService.import_permissions_from_yaml_file()
assert len(users["testadmin1"].groups) == 1
assert users["testadmin1"].groups[0].identifier == "admin"
assert len(users["testuser1"].groups) == 1
assert users["testuser1"].groups[0].identifier == "finance"
assert len(users["testuser2"].groups) == 2
self.assert_user_has_permission(
users["testuser1"], "update", "/v1.0/process-groups/finance/model1"
)
self.assert_user_has_permission(
users["testuser1"], "update", "/v1.0/process-groups/finance/"
)
self.assert_user_has_permission(
users["testuser1"], "update", "/v1.0/process-groups/", expected_result=False
)
self.assert_user_has_permission(
users["testuser4"], "update", "/v1.0/process-groups/finance/model1"
)
self.assert_user_has_permission(
users["testuser4"], "read", "/v1.0/process-groups/finance/model1"
)
self.assert_user_has_permission(
users["testuser2"], "update", "/v1.0/process-groups/finance/model1"
)
self.assert_user_has_permission(
users["testuser2"], "update", "/v1.0/process-groups/", expected_result=False
)
self.assert_user_has_permission(
users["testuser2"], "read", "/v1.0/process-groups/"
)

View File

@ -0,0 +1,32 @@
"""Process Model."""
import pytest
from flask.app import Flask
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.models.permission_target import (
InvalidPermissionTargetUriError,
)
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
class TestPermissionTarget(BaseTest):
"""TestPermissionTarget."""
def test_asterisk_must_go_at_the_end_of_uri(
self, app: Flask, with_db_and_bpmn_file_cleanup: None
) -> None:
"""Test_asterisk_must_go_at_the_end_of_uri."""
permission_target = PermissionTargetModel(uri="/test_group/%")
db.session.add(permission_target)
db.session.commit()
permission_target = PermissionTargetModel(uri="/test_group")
db.session.add(permission_target)
db.session.commit()
with pytest.raises(InvalidPermissionTargetUriError) as exception:
PermissionTargetModel(uri="/test_group/%/model")
assert (
str(exception.value) == "Wildcard must appear at end: /test_group/%/model"
)

View File

@ -8,7 +8,6 @@ from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.user_service import UserService
@ -74,23 +73,17 @@ class TestPermissions(BaseTest):
db.session.add(permission_assignment)
db.session.commit()
has_permission_to_a = AuthorizationService.user_has_permission(
user=group_a_admin,
permission="update",
target_uri=f"/{process_group_a_id}",
self.assert_user_has_permission(
group_a_admin, "update", f"/{process_group_a_id}"
)
assert has_permission_to_a is True
has_permission_to_b = AuthorizationService.user_has_permission(
user=group_a_admin,
permission="update",
target_uri=f"/{process_group_b_id}",
self.assert_user_has_permission(
group_a_admin, "update", f"/{process_group_b_id}", expected_result=False
)
assert has_permission_to_b is False
def test_user_can_be_granted_access_through_a_group(
self, app: Flask, with_db_and_bpmn_file_cleanup: None
) -> None:
"""Test_group_a_admin_needs_to_stay_away_from_group_b."""
"""Test_user_can_be_granted_access_through_a_group."""
process_group_ids = ["group-a", "group-b"]
process_group_a_id = process_group_ids[0]
process_group_ids[1]
@ -123,9 +116,38 @@ class TestPermissions(BaseTest):
db.session.add(permission_assignment)
db.session.commit()
has_permission_to_a = AuthorizationService.user_has_permission(
user=user,
self.assert_user_has_permission(user, "update", f"/{process_group_a_id}")
def test_user_can_be_read_models_with_global_permission(
self, app: Flask, with_db_and_bpmn_file_cleanup: None
) -> None:
"""Test_user_can_be_read_models_with_global_permission."""
process_group_ids = ["group-a", "group-b"]
process_group_a_id = process_group_ids[0]
process_group_b_id = process_group_ids[1]
for process_group_id in process_group_ids:
load_test_spec(
"timers_intermediate_catch_event",
process_group_id=process_group_id,
)
group_a_admin = self.find_or_create_user()
permission_target = PermissionTargetModel(uri="/%")
db.session.add(permission_target)
db.session.commit()
permission_assignment = PermissionAssignmentModel(
permission_target_id=permission_target.id,
principal_id=group_a_admin.principal.id,
permission="update",
target_uri=f"/{process_group_a_id}",
grant_type="permit",
)
db.session.add(permission_assignment)
db.session.commit()
self.assert_user_has_permission(
group_a_admin, "update", f"/{process_group_a_id}"
)
self.assert_user_has_permission(
group_a_admin, "update", f"/{process_group_b_id}"
)
assert has_permission_to_a is True

View File

@ -2,7 +2,10 @@
import os
from spiffworkflow_backend import create_app
from spiffworkflow_backend.services.acceptance_test_fixtures import load_fixtures
from spiffworkflow_backend.services.acceptance_test_fixtures import (
load_acceptance_test_fixtures,
)
from spiffworkflow_backend.services.data_setup_service import DataSetupService
app = create_app()
@ -10,4 +13,4 @@ app = create_app()
# it also loaded when we were running migrations, which resulted in a chicken/egg thing.
if os.environ.get("SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA") == "true":
with app.app_context():
load_fixtures()
load_acceptance_test_fixtures()