Merge commit 'e228ab4b80138b3e943b11774c1fd47957ea7562'

jasquat 2022-10-25 16:55:11 -04:00
commit c609604f56
15 changed files with 69 additions and 47 deletions

View File

@@ -48,7 +48,6 @@ if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
 fi
 
 export IS_GUNICORN="true"
-export PROCESS_WAITING_MESSAGES="true"
 
 # THIS MUST BE THE LAST COMMAND!
 exec poetry run gunicorn ${additional_args} --bind "0.0.0.0:$port" --workers="$workers" --limit-request-line 8192 --timeout 90 --capture-output --access-logfile '-' --log-level debug wsgi:app

View File

@@ -28,9 +28,6 @@ export APPLICATION_ROOT="/"
 if [[ -n "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" ]]; then
   ./bin/boot_server_in_docker
 else
-  if [[ -z "${PROCESS_WAITING_MESSAGES:-}" ]]; then
-    export PROCESS_WAITING_MESSAGES="true"
-  fi
   export FLASK_DEBUG=1
 
   if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then

View File

@@ -0,0 +1,22 @@
+"""Start the appscheduler in blocking mode."""
+import time
+
+from apscheduler.schedulers.background import BlockingScheduler  # type: ignore
+
+from spiffworkflow_backend import create_app
+from spiffworkflow_backend import start_scheduler
+from spiffworkflow_backend.helpers.db_helper import try_to_connect
+
+
+def main() -> None:
+    """Main."""
+    app = create_app()
+    start_time = time.time()
+    with app.app_context():
+        try_to_connect(start_time)
+
+    start_scheduler(app, BlockingScheduler)
+
+
+if __name__ == "__main__":
+    main()
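For context on the new script above: BlockingScheduler and BackgroundScheduler expose the same apscheduler API, but BlockingScheduler.start() keeps the process in the foreground, which is what lets this script run as a dedicated, long-lived scheduler process. A minimal sketch of the difference, with an illustrative job and interval that are not part of this commit:

# Illustrative contrast between the two scheduler classes involved in this commit;
# the tick() job and the 10-second interval are made up for the example.
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.schedulers.blocking import BlockingScheduler


def tick() -> None:
    print("tick")


background = BackgroundScheduler()
background.add_job(tick, "interval", seconds=10)
background.start()  # returns immediately; jobs run on a daemon thread

blocking = BlockingScheduler()
blocking.add_job(tick, "interval", seconds=10)
blocking.start()  # blocks here, keeping a dedicated scheduler process alive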

View File

@@ -1,27 +1,13 @@
-"""Grabs tickets from csv and makes process instances."""
+"""Wait for db to be ready."""
 import time
 
-import sqlalchemy
-from flask_bpmn.models.db import db
-
-from spiffworkflow_backend import get_hacked_up_app_for_script
-
-
-def try_to_connect(start_time: float) -> None:
-    """Try to connect."""
-    try:
-        db.first_or_404("select 1")
-    except sqlalchemy.exc.DatabaseError as exception:
-        if time.time() - start_time > 15:
-            raise exception
-        else:
-            time.sleep(1)
-            try_to_connect(start_time)
+from spiffworkflow_backend import create_app
+from spiffworkflow_backend.helpers.db_helper import try_to_connect
 
 
 def main() -> None:
     """Main."""
-    app = get_hacked_up_app_for_script()
+    app = create_app()
     start_time = time.time()
     with app.app_context():
         try_to_connect(start_time)

View File

@@ -62,7 +62,7 @@ services:
       - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@localhost:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development}
       - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
      - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-false}
-      - PROCESS_WAITING_MESSAGES=true
+      - RUN_BACKGROUND_SCHEDULER=true
     ports:
       - "7000:7000"
     network_mode: host

View File

@@ -7,6 +7,7 @@ import flask.app
 import flask.json
 import sqlalchemy
 from apscheduler.schedulers.background import BackgroundScheduler  # type: ignore
+from apscheduler.schedulers.base import BaseScheduler  # type: ignore
 from flask.json.provider import DefaultJSONProvider
 from flask_bpmn.api.api_error import api_error_blueprint
 from flask_bpmn.models.db import db
@@ -52,9 +53,11 @@ class MyJSONEncoder(DefaultJSONProvider):
         return super().dumps(obj, **kwargs)
 
 
-def start_scheduler(app: flask.app.Flask) -> None:
+def start_scheduler(
+    app: flask.app.Flask, scheduler_class: BaseScheduler = BackgroundScheduler
+) -> None:
     """Start_scheduler."""
-    scheduler = BackgroundScheduler()
+    scheduler = scheduler_class()
     scheduler.add_job(
         BackgroundProcessingService(app).process_message_instances_with_app_context,
         "interval",
@@ -111,7 +114,7 @@ def create_app() -> flask.app.Flask:
     app.json = MyJSONEncoder(app)
 
-    if app.config["PROCESS_WAITING_MESSAGES"]:
+    if app.config["RUN_BACKGROUND_SCHEDULER"]:
         start_scheduler(app)
 
     configure_sentry(app)
@@ -137,8 +140,6 @@ def get_hacked_up_app_for_script() -> flask.app.Flask:
     else:
         raise Exception(f"Could not find {full_process_model_path}")
 
     app = create_app()
-    setup_config(app)
-    configure_sentry(app)
     return app
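Taken together with the config changes below, the apparent intent is that web workers leave RUN_BACKGROUND_SCHEDULER unset (or set it to false) so create_app() skips start_scheduler(), while a separate process runs the scheduler in blocking mode. A hypothetical sketch of the two call sites, using only names that appear in this diff:

# Hypothetical deployment split inferred from this diff; the imports are the ones
# shown in the changed files, the comments describe the apparent intent only.
from apscheduler.schedulers.background import BlockingScheduler

from spiffworkflow_backend import create_app
from spiffworkflow_backend import start_scheduler

# Gunicorn worker: RUN_BACKGROUND_SCHEDULER is false, so create_app() returns
# without calling start_scheduler(app) and the worker only serves HTTP traffic.
app = create_app()

# Dedicated scheduler process: pass BlockingScheduler explicitly so the default
# BackgroundScheduler (which runs jobs on a daemon thread) is not used and the
# process stays alive running the interval jobs.
start_scheduler(app, BlockingScheduler)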

View File

@@ -13,8 +13,8 @@ CORS_ALLOW_ORIGINS = re.split(
     r",\s*", environ.get("CORS_ALLOW_ORIGINS", default=CORS_DEFAULT)
 )
-PROCESS_WAITING_MESSAGES = (
-    environ.get("PROCESS_WAITING_MESSAGES", default="false") == "true"
+RUN_BACKGROUND_SCHEDULER = (
+    environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
 )
 
 SPIFFWORKFLOW_FRONTEND_URL = environ.get(
     "SPIFFWORKFLOW_FRONTEND_URL", default="http://localhost:7001"

View File

@@ -7,3 +7,7 @@ GIT_COMMIT_EMAIL = "demo@example.com"
 SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
     "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="demo.yml"
 )
+
+RUN_BACKGROUND_SCHEDULER = (
+    environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
+)

View File

@@ -8,3 +8,7 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
 SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
     "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
 )
+
+RUN_BACKGROUND_SCHEDULER = (
+    environ.get("RUN_BACKGROUND_SCHEDULER", default="true") == "true"
+)
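Note that the defaults differ per config file: the base config and the demo config above default RUN_BACKGROUND_SCHEDULER to "false", while this one defaults it to "true". The check is a strict string comparison, so only the exact lowercase value "true" enables the scheduler; a small sketch of that behavior (the capitalized value is just an illustrative mistake):

# Sketch of the string comparison used in these config files; the variable name
# comes from this diff, the assigned value is made up for the example.
from os import environ

environ["RUN_BACKGROUND_SCHEDULER"] = "True"  # note the capital T
RUN_BACKGROUND_SCHEDULER = (
    environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
)
print(RUN_BACKGROUND_SCHEDULER)  # False: only the exact string "true" enables it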

View File

@@ -0,0 +1,16 @@
+"""Db_helper."""
+import sqlalchemy
+from flask_bpmn.models.db import db
+import time
+
+
+def try_to_connect(start_time: float) -> None:
+    """Try to connect."""
+    try:
+        db.first_or_404("select 1")  # type: ignore
+    except sqlalchemy.exc.DatabaseError as exception:
+        if time.time() - start_time > 15:
+            raise exception
+        else:
+            time.sleep(1)
+            try_to_connect(start_time)
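The helper above retries the "select 1" probe roughly once per second (by recursing) and re-raises the DatabaseError once more than 15 seconds have elapsed since start_time. A usage sketch assembled from the callers shown elsewhere in this commit; nothing here is new API:

import time

from spiffworkflow_backend import create_app
from spiffworkflow_backend.helpers.db_helper import try_to_connect

app = create_app()
start_time = time.time()
with app.app_context():
    # raises sqlalchemy.exc.DatabaseError if the database is still unreachable
    # after roughly 15 seconds; otherwise returns once "select 1" succeeds
    try_to_connect(start_time)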

View File

@@ -1 +0,0 @@
-"""Fixture_data."""

View File

@@ -426,7 +426,7 @@ def process_instance_run(
         processor.save()
         ProcessInstanceService.update_task_assignments(processor)
 
-    if not current_app.config["PROCESS_WAITING_MESSAGES"]:
+    if not current_app.config["RUN_BACKGROUND_SCHEDULER"]:
         MessageService.process_message_instances()
 
     process_instance_api = ProcessInstanceService.processor_to_process_instance_api(

View File

@@ -73,8 +73,10 @@ class ProcessInstanceService:
                 process_instance.status = ProcessInstanceStatus.erroring.value
                 db.session.add(process_instance)
                 db.session.commit()
-                error_message = f"Error running waiting task for process_instance {process_instance.id}" + \
-                    f"({process_instance.process_model_identifier}). {str(e)}"
+                error_message = (
+                    f"Error running waiting task for process_instance {process_instance.id}"
+                    + f"({process_instance.process_model_identifier}). {str(e)}"
+                )
                 current_app.logger.error(error_message)
 
     @staticmethod

View File

@@ -37,9 +37,7 @@ class SecretService:
     ) -> SecretModel:
         """Add_secret."""
         # encrypted_key = self.encrypt_key(key)
-        secret_model = SecretModel(
-            key=key, value=value, user_id=user_id
-        )
+        secret_model = SecretModel(key=key, value=value, user_id=user_id)
         db.session.add(secret_model)
         try:
             db.session.commit()
@@ -81,9 +79,7 @@
                 db.session.rollback()
                 raise e
         elif create_if_not_exists:
-            SecretService.add_secret(
-                key=key, value=value, user_id=user_id
-            )
+            SecretService.add_secret(key=key, value=value, user_id=user_id)
         else:
             raise ApiError(
                 error_code="update_secret_error",

View File

@@ -64,12 +64,8 @@ class ServiceTaskDelegate:
             return proxied_response.text
 
         secret_key = parsed_response["auth"]
-        refreshed_token_set = json.dumps(
-            parsed_response["refreshed_token_set"]
-        )
-        SecretService().update_secret(
-            secret_key, refreshed_token_set, g.user.id
-        )
+        refreshed_token_set = json.dumps(parsed_response["refreshed_token_set"])
+        SecretService().update_secret(secret_key, refreshed_token_set, g.user.id)
 
         return json.dumps(parsed_response["api_response"])