From 84369a1b207aff4f1850ae982030cf8f3e293488 Mon Sep 17 00:00:00 2001
From: jasquat <2487833+jasquat@users.noreply.github.com>
Date: Tue, 25 Jul 2023 13:27:53 -0400
Subject: [PATCH] =?UTF-8?q?return=20the=20process=20instance=20early=20fro?=
 =?UTF-8?q?m=20the=20interstitial=20if=20it=20is=20susp=E2=80=A6=20(#407)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* return the process instance early from the interstitial if it is suspended or terminated

* added a test to make sure the interstitial page returns the process instance if suspended or terminated w/ burnettk

* randomize tests and cleaned up the pyproject file a little bit w/ burnettk

---------

Co-authored-by: jasquat
---
 spiffworkflow-backend/bin/tests-par          |  2 +-
 spiffworkflow-backend/poetry.lock            | 16 +++++-
 spiffworkflow-backend/pyproject.toml         | 20 ++++----
 .../routes/tasks_controller.py               |  5 ++
 .../integration/test_tasks_controller.py     | 51 +++++++++++++++++++
 5 files changed, 83 insertions(+), 11 deletions(-)

diff --git a/spiffworkflow-backend/bin/tests-par b/spiffworkflow-backend/bin/tests-par
index f8b4de27..de53221a 100755
--- a/spiffworkflow-backend/bin/tests-par
+++ b/spiffworkflow-backend/bin/tests-par
@@ -18,4 +18,4 @@ if ! python -c "import xdist" &>/dev/null; then
   exit 1
 fi
 
-SPIFFWORKFLOW_BACKEND_DATABASE_TYPE=sqlite poet test -n auto -x --ff
+SPIFFWORKFLOW_BACKEND_DATABASE_TYPE=sqlite poet test -n auto -x --ff --random-order
diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock
index 17655a10..13d4aeb4 100644
--- a/spiffworkflow-backend/poetry.lock
+++ b/spiffworkflow-backend/poetry.lock
@@ -1736,6 +1736,20 @@ pytest = ">=5.0"
 [package.extras]
 dev = ["pre-commit", "pytest-asyncio", "tox"]
 
+[[package]]
+name = "pytest-random-order"
+version = "1.1.0"
+description = "Randomise the order in which pytest tests are run with some control over the randomness"
+optional = false
+python-versions = ">=3.5.0"
+files = [
+    {file = "pytest-random-order-1.1.0.tar.gz", hash = "sha256:dbe6debb9353a7af984cc9eddbeb3577dd4dbbcc1529a79e3d21f68ed9b45605"},
+    {file = "pytest_random_order-1.1.0-py3-none-any.whl", hash = "sha256:6cb1e59ab0f798bb0c3488c11ae0c70d7d3340306a466d28b28ccd8ef8c20b7e"},
+]
+
+[package.dependencies]
+pytest = ">=3.0.0"
+
 [[package]]
 name = "pytest-xdist"
 version = "3.3.1"
@@ -2700,4 +2714,4 @@ tests-strict = ["codecov (==2.0.15)", "pytest (==4.6.0)", "pytest (==4.6.0)", "p
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10,<3.12"
-content-hash = "755b30cee83d139e40dcb609fc3417f26fc2a9eab24a41fe15174d4e24d739c2"
+content-hash = "54f8f812dac1e3ce391b5e0a5b9505862cd55507eb017cfb5c58dab48614ffe5"
diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml
index a48186ab..8ac9d6f0 100644
--- a/spiffworkflow-backend/pyproject.toml
+++ b/spiffworkflow-backend/pyproject.toml
@@ -2,18 +2,18 @@
 name = "spiffworkflow-backend"
 version = "0.0.0"
 description = "Spiffworkflow Backend"
-authors = ["Jason Lantz "]
-license = "MIT"
+authors = ["Sartography "]
+license = "LGPL-2.1"
 readme = "README.rst"
-homepage = "https://github.com/sartography/spiffworkflow-backend"
-repository = "https://github.com/sartography/spiffworkflow-backend"
-documentation = "https://spiffworkflow-backend.readthedocs.io"
+homepage = "https://spiffworkflow.org"
+repository = "https://github.com/sartography/spiff-arena"
+documentation = "https://spiff-arena.readthedocs.io"
 classifiers = [
     "Development Status :: 1 - Planning",
 ]
 
 [tool.poetry.urls]
-Changelog = "https://github.com/sartography/spiffworkflow-backend/releases" +Changelog = "https://github.com/orgs/sartography/packages?repo_name=spiff-arena" [tool.poetry.dependencies] python = ">=3.10,<3.12" @@ -34,8 +34,6 @@ SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "ma # SpiffWorkflow = {develop = true, path = "../../SpiffWorkflow/" } sentry-sdk = "^1.10" # sphinx-autoapi = "^2.0" -pytest-flask = "^1.2.0" -pytest-flask-sqlalchemy = "^1.1.0" psycopg2 = "^2.9.3" typing-extensions = "^4.4.0" @@ -82,7 +80,6 @@ prometheus-flask-exporter = "^0.22.3" sqlalchemy = "^2.0.7" marshmallow-sqlalchemy = "^0.29.0" spiff-element-units = "^0.3.0" -pytest-xdist = "^3.3.1" # mysqlclient lib is deemed better than the mysql-connector-python lib by sqlalchemy # https://docs.sqlalchemy.org/en/20/dialects/mysql.html#module-sqlalchemy.dialects.mysql.mysqlconnector @@ -99,6 +96,11 @@ pre-commit = "^2.20.0" black = ">=21.10b0" ruff = "^0.0.270" +pytest-random-order = "^1.1.0" +pytest-flask = "^1.2.0" +pytest-flask-sqlalchemy = "^1.1.0" +pytest-xdist = "^3.3.1" + # 1.7.3 broke us. https://github.com/PyCQA/bandit/issues/841 bandit = "1.7.2" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index b8c105cb..936a031c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -426,6 +426,11 @@ def _interstitial_stream( return "" return JinjaService.render_instructions_for_end_user(task_model) + # do not attempt to get task instructions if process instance is suspended or was terminated + if process_instance.status in ["suspended", "terminated"]: + yield _render_data("unrunnable_instance", process_instance) + return + processor = ProcessInstanceProcessor(process_instance) reported_ids = [] # A list of all the ids reported by this endpoint so far. 
     tasks = get_reportable_tasks()
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_tasks_controller.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_tasks_controller.py
index 13744b10..ae15bbfe 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_tasks_controller.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_tasks_controller.py
@@ -6,6 +6,7 @@ from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.group import GroupModel
 from spiffworkflow_backend.models.human_task import HumanTaskModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.tasks_controller import _dequeued_interstitial_stream
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
@@ -69,6 +70,56 @@ class TestTasksController(BaseTest):
             "veryImportantFieldButOnlySometimes": {"ui:widget": "hidden"},
         }
 
+    def test_interstitial_returns_process_instance_if_suspended_or_terminated(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        process_group_id = "my_process_group"
+        process_model_id = "dynamic_enum_select_fields"
+        bpmn_file_location = "dynamic_enum_select_fields"
+        process_model = self.create_group_and_model_with_bpmn(
+            client,
+            with_super_admin_user,
+            process_group_id=process_group_id,
+            process_model_id=process_model_id,
+            # bpmn_file_name=bpmn_file_name,
+            bpmn_file_location=bpmn_file_location,
+        )
+
+        headers = self.logged_in_headers(with_super_admin_user)
+        response = self.create_process_instance_from_process_model_id_with_api(client, process_model.id, headers)
+        assert response.json is not None
+        process_instance_id = response.json["id"]
+        assert process_instance_id
+
+        process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
+        assert process_instance is not None
+
+        process_instance.status = ProcessInstanceStatus.suspended.value
+        db.session.add(process_instance)
+        db.session.commit()
+        stream_results = _dequeued_interstitial_stream(process_instance.id)
+        results = list(stream_results)
+        json_results = [json.loads(x[5:]) for x in results]  # type: ignore
+        assert len(json_results) == 1
+        assert json_results[0]["type"] == "unrunnable_instance"
+        assert json_results[0]["unrunnable_instance"]["id"] == process_instance.id
+        assert json_results[0]["unrunnable_instance"]["status"] == "suspended"
+
+        process_instance.status = ProcessInstanceStatus.terminated.value
+        db.session.add(process_instance)
+        db.session.commit()
+        stream_results = _dequeued_interstitial_stream(process_instance.id)
+        results = list(stream_results)
+        json_results = [json.loads(x[5:]) for x in results]  # type: ignore
+        assert len(json_results) == 1
+        assert json_results[0]["type"] == "unrunnable_instance"
+        assert json_results[0]["unrunnable_instance"]["id"] == process_instance.id
+        assert json_results[0]["unrunnable_instance"]["status"] == "terminated"
+
     def test_interstitial_page(
         self,
         app: Flask,
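
Note on the stream framing the new guard relies on: _render_data is not shown in this diff, but the test's json.loads(x[5:]) slice implies that each yielded chunk is a server-sent-events style "data:" line whose JSON payload carries the event type plus the serialized entity under that same key. The sketch below is a minimal illustration of that assumed framing; the helper name and payload shape are inferred from the test's assertions, not confirmed by this patch.

import json
from typing import Any


def render_data(return_type: str, entity: dict[str, Any]) -> str:
    # Hypothetical stand-in for the backend's _render_data helper.
    # Assumed framing: an SSE-style "data:" line whose JSON payload is
    # {"type": <return_type>, <return_type>: <entity>}.
    payload = {"type": return_type, return_type: entity}
    return f"data: {json.dumps(payload)}\n\n"


# With the new guard, a suspended instance streams exactly one such event,
# which is why the test slices off the 5-character "data:" prefix before parsing.
chunk = render_data("unrunnable_instance", {"id": 42, "status": "suspended"})
assert json.loads(chunk[5:]) == {
    "type": "unrunnable_instance",
    "unrunnable_instance": {"id": 42, "status": "suspended"},
}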
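
For consumers of the interstitial stream (the _dequeued_interstitial_stream helper used in the test, or the HTTP endpoint backed by it), the practical effect of this change is that the stream can now end after a single unrunnable_instance event instead of a series of task events. The handler below is a hypothetical illustration of that branching, not code from this patch; only the event name and the "data:" prefix handling come from the diff.

import json
from typing import Iterable


def consume_interstitial(stream: Iterable[str]) -> str:
    # Walk the SSE-style chunks and stop early when the backend reports that
    # the instance cannot run (suspended or terminated).
    for chunk in stream:
        event = json.loads(chunk[5:])  # strip the "data: " prefix, as the test does
        if event["type"] == "unrunnable_instance":
            instance = event["unrunnable_instance"]
            return f"process instance {instance['id']} is {instance['status']}; nothing to render"
        # ... otherwise keep rendering task instructions as before ...
    return "stream finished normally"


# Example with the single event a suspended instance would produce:
fake_stream = ['data: {"type": "unrunnable_instance", "unrunnable_instance": {"id": 7, "status": "suspended"}}\n\n']
print(consume_interstitial(fake_stream))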