respect host from db uri env var when recreating db w/ burnettk

jasquat 2023-05-23 12:12:32 -04:00
parent ed42c6c399
commit e2fe5ea660
3 changed files with 41 additions and 37 deletions

View File

@@ -23,6 +23,11 @@ if [[ -z "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
   export SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR
 fi
 
+database_host="localhost"
+if [[ -n "${SPIFFWORKFLOW_BACKEND_DATABASE_URI:-}" ]]; then
+  database_host=$(grep -oP "^[^:]+://.*@\K(.+?)[:/]" <<<"$SPIFFWORKFLOW_BACKEND_DATABASE_URI" | sed -E 's/[:\/]$//')
+fi
+
 tasks=""
 if [[ "${1:-}" == "clean" ]]; then
   subcommand="${2:-}"
@@ -37,8 +42,8 @@ if [[ "${1:-}" == "clean" ]]; then
   if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" != "mysql" ]]; then
     rm -f ./src/instance/*.sqlite3
   else
-    mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_local_development"
-    mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_unit_testing"
+    mysql -h "$database_host" -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_local_development"
+    mysql -h "$database_host" -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_unit_testing"
   fi
 
   # TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
@@ -74,8 +79,8 @@ else
 fi
 
 if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "mysql" ]]; then
-  mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_local_development"
-  mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_unit_testing"
+  mysql -h "$database_host" -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_local_development"
+  mysql -h "$database_host" -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_unit_testing"
 fi
 
 for task in $tasks; do
@@ -85,7 +90,7 @@ done
 SPIFFWORKFLOW_BACKEND_ENV=unit_testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
 if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! grep -Eq '^(local_development|unit_testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then
   if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "mysql" ]]; then
-    mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
+    mysql -h "$database_host" -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
   fi
   FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
 fi
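
The grep/sed pipeline added near the top of the script is the interesting piece: it pulls the bare hostname out of a SQLAlchemy-style connection URI so the mysql commands can target a non-local server. A minimal sketch of what it yields, run against a made-up URI (the host, password, and port below are placeholders, not values from this commit):

#!/usr/bin/env bash
# Illustrative only: extract the host the same way the script above now does.
# Requires GNU grep (-P). The \K discards everything matched before it, so grep
# prints just "host:" or "host/", and sed strips that trailing delimiter.
# A URI with no "user@" section would print nothing at all.
SPIFFWORKFLOW_BACKEND_DATABASE_URI="mysql+mysqlconnector://root:secret@db.example.com:7003/spiffworkflow_backend_local_development"
grep -oP "^[^:]+://.*@\K(.+?)[:/]" <<<"$SPIFFWORKFLOW_BACKEND_DATABASE_URI" | sed -E 's/[:\/]$//'
# prints: db.example.com

When SPIFFWORKFLOW_BACKEND_DATABASE_URI is unset, database_host keeps its "localhost" default, so the DROP/CREATE statements behave exactly as they did before this change.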

View File

@@ -10,7 +10,7 @@ from spiffworkflow_backend.services.logging_service import setup_logger
 
 
 class ConfigurationError(Exception):
-    """ConfigurationError."""
+    pass
 
 
 def setup_database_configs(app: Flask) -> None:

View File

@@ -17,18 +17,6 @@ from SpiffWorkflow.task import TaskState
 from spiffworkflow_backend.services.custom_parser import MyCustomParser
 
-# workflow json for test case
-# 1. default action is load xml from disk and use spiff like normal and get back workflow json
-# 2. do stuff from disk cache
-
-# find all process models
-# find all json test cases for each
-# for each test case, fire up something like spiff
-# for each task, if there is something special in the test case definition,
-#   do it (provide data for user task, mock service task, etc)
-# when the thing is complete, check workflow data against expected data
-
-
 
 class UnrunnableTestCaseError(Exception):
     pass
 
@@ -71,25 +59,36 @@ DEFAULT_NSMAP = {
 }
 
-# input:
-#   BPMN_TASK_IDENTIIFER:
-#     can be either task bpmn identifier or in format:
-#       [BPMN_PROCESS_ID]:[TASK_BPMN_IDENTIFIER]
-#     example: 'BasicServiceTaskProcess:service_task_one'
-#     this allows for tasks to share bpmn identifiers across models
-#       which is useful for call activities
-#
-#   json_file:
-#     {
-#       [TEST_CASE_NAME]: {
-#         "tasks": {
-#           [BPMN_TASK_IDENTIIFER]: {
-#             "data": [DATA]
-#           }
-#         },
-#         "expected_output_json": [DATA]
-#       }
-#     }
+"""
+JSON file name:
+    The name should be in format "test_BPMN_FILE_NAME_IT_TESTS.json".
+
+BPMN_TASK_IDENTIIFER:
+    can be either task bpmn identifier or in format:
+        [BPMN_PROCESS_ID]:[TASK_BPMN_IDENTIFIER]
+    example: 'BasicServiceTaskProcess:service_task_one'
+    this allows for tasks to share bpmn identifiers across models
+        which is useful for call activities
+
+DATA for tasks:
+    This is an array of task data. This allows for the task to
+    be called multiple times and given different data each time.
+    This is useful for testing loops where each iteration needs
+    different input. The test will fail if the task is called
+    multiple times without task data input for each call.
+
+JSON file format:
+    {
+        TEST_CASE_NAME: {
+            "tasks": {
+                BPMN_TASK_IDENTIIFER: {
+                    "data": [DATA]
+                }
+            },
+            "expected_output_json": DATA
+        }
+    }
+"""
 
 
 class ProcessModelTestRunner:
     """Generic test runner code. May move into own library at some point.