respect host from db uri env var when recreating db w/ burnettk
parent ed42c6c399
commit e2fe5ea660
@@ -23,6 +23,11 @@ if [[ -z "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
   export SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR
 fi
 
+database_host="localhost"
+if [[ -n "${SPIFFWORKFLOW_BACKEND_DATABASE_URI:-}" ]]; then
+  database_host=$(grep -oP "^[^:]+://.*@\K(.+?)[:/]" <<<"$SPIFFWORKFLOW_BACKEND_DATABASE_URI" | sed -E 's/[:\/]$//')
+fi
+
 tasks=""
 if [[ "${1:-}" == "clean" ]]; then
   subcommand="${2:-}"
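For context, the new grep/sed pipeline pulls the host out of a database URI of the form scheme://user:password@host:port/dbname. A minimal illustration of how it behaves (the sample URIs below are invented for demonstration and are not part of the commit; -P requires GNU grep):

    uri='mysql+mysqldb://root:secret@db.example.com:7003/spiffworkflow_backend_local_development'
    grep -oP "^[^:]+://.*@\K(.+?)[:/]" <<<"$uri" | sed -E 's/[:\/]$//'
    # prints: db.example.com

    uri='mysql+mysqldb://root@localhost/spiffworkflow_backend_local_development'
    grep -oP "^[^:]+://.*@\K(.+?)[:/]" <<<"$uri" | sed -E 's/[:\/]$//'
    # prints: localhost

The \K discards everything through the credentials, the lazy (.+?)[:/] stops at the first ':' or '/' after the host, and the sed strips that trailing delimiter.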
@@ -37,8 +42,8 @@ if [[ "${1:-}" == "clean" ]]; then
   if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" != "mysql" ]]; then
     rm -f ./src/instance/*.sqlite3
   else
-    mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_local_development"
-    mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_unit_testing"
+    mysql -h "$database_host" -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_local_development"
+    mysql -h "$database_host" -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_unit_testing"
   fi
 
   # TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
@@ -74,8 +79,8 @@ else
 fi
 
 if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "mysql" ]]; then
-  mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_local_development"
-  mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_unit_testing"
+  mysql -h "$database_host" -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_local_development"
+  mysql -h "$database_host" -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_unit_testing"
 fi
 
 for task in $tasks; do
@@ -85,7 +90,7 @@ done
 SPIFFWORKFLOW_BACKEND_ENV=unit_testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
 if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! grep -Eq '^(local_development|unit_testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then
   if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "mysql" ]]; then
-    mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
+    mysql -h "$database_host" -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
   fi
   FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
 fi
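Taken together, the script changes make every MySQL DROP/CREATE follow whatever host the URI points at, instead of the implicit local server. A hypothetical invocation sketch (the script path ./bin/recreate_db and the URI values are assumptions for illustration; the diff does not show the file name):

    # Point the recreate script at a remote MySQL server (values are made up).
    export SPIFFWORKFLOW_BACKEND_DATABASE_URI='mysql+mysqldb://root:secret@db.example.com:7003/spiffworkflow_backend_local_development'
    ./bin/recreate_db clean

    # With the variable unset, database_host stays "localhost" and the
    # previous behavior is preserved.
    unset SPIFFWORKFLOW_BACKEND_DATABASE_URI
    ./bin/recreate_db clean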
@@ -10,7 +10,7 @@ from spiffworkflow_backend.services.logging_service import setup_logger
 
 
 class ConfigurationError(Exception):
-    """ConfigurationError."""
+    pass
 
 
 def setup_database_configs(app: Flask) -> None:
@@ -17,18 +17,6 @@ from SpiffWorkflow.task import TaskState
 from spiffworkflow_backend.services.custom_parser import MyCustomParser
 
 
-# workflow json for test case
-# 1. default action is load xml from disk and use spiff like normal and get back workflow json
-# 2. do stuff from disk cache
-
-# find all process models
-# find all json test cases for each
-# for each test case, fire up something like spiff
-# for each task, if there is something special in the test case definition,
-#   do it (provide data for user task, mock service task, etc)
-# when the thing is complete, check workflow data against expected data
-
 class UnrunnableTestCaseError(Exception):
     pass
 
@@ -71,25 +59,36 @@ DEFAULT_NSMAP = {
 }
 
 
-# input:
-# BPMN_TASK_IDENTIIFER:
-#   can be either task bpmn identifier or in format:
-#   [BPMN_PROCESS_ID]:[TASK_BPMN_IDENTIFIER]
-#   example: 'BasicServiceTaskProcess:service_task_one'
-#   this allows for tasks to share bpmn identifiers across models
-#   which is useful for call activities
-#
-# json_file:
-#   {
-#     [TEST_CASE_NAME]: {
-#       "tasks": {
-#         [BPMN_TASK_IDENTIIFER]: {
-#           "data": [DATA]
-#         }
-#       },
-#       "expected_output_json": [DATA]
-#     }
-#   }
+"""
+JSON file name:
+  The name should be in format "test_BPMN_FILE_NAME_IT_TESTS.json".
+
+BPMN_TASK_IDENTIIFER:
+  can be either task bpmn identifier or in format:
+  [BPMN_PROCESS_ID]:[TASK_BPMN_IDENTIFIER]
+  example: 'BasicServiceTaskProcess:service_task_one'
+  this allows for tasks to share bpmn identifiers across models
+  which is useful for call activities
+
+DATA for tasks:
+  This is an array of task data. This allows for the task to
+  be called multiple times and given different data each time.
+  This is useful for testing loops where each iteration needs
+  different input. The test will fail if the task is called
+  multiple times without task data input for each call.
+
+JSON file format:
+  {
+    TEST_CASE_NAME: {
+      "tasks": {
+        BPMN_TASK_IDENTIIFER: {
+          "data": [DATA]
+        }
+      },
+      "expected_output_json": DATA
+    }
+  }
+"""
 class ProcessModelTestRunner:
     """Generic test runner code. May move into own library at some point.
 
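To make the documented format concrete, a hypothetical test file, say test_basic_service_task.json, could look like the following (the file name, test case name, and data values are invented for illustration; only the task identifier format comes from the docstring above):

    {
      "test_case_one": {
        "tasks": {
          "BasicServiceTaskProcess:service_task_one": {
            "data": [{"the_result": "result_from_service"}]
          }
        },
        "expected_output_json": {"the_result": "result_from_service"}
      }
    }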