Fixing a bug that was causing failing tests.

Adding id and spec_version to the workflow metadata (an illustrative sketch of the resulting payload follows the commit details below).
Refactoring the processing of the master_spec so that it doesn't pollute the workflow database.
Adding tests to assure that the status and counts are updated on the workflow model as users make progress.
Dan Funk 2020-03-30 14:01:57 -04:00
parent 34b6ec92bf
commit 17796193de
6 changed files with 106 additions and 30 deletions
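
For context, here is a hypothetical sketch of what one workflow entry serialized by WorkflowMetadataSchema might look like once id and spec_version are included. This is illustrative only; the field values are invented, and spec_version remains None until a WorkflowProcessor has instantiated the workflow (see the tests below).

    # Illustrative sketch only (not code from this commit): roughly the shape of one
    # entry produced by WorkflowMetadataSchema after "id" and "spec_version" are added.
    # All values here are invented for the example.
    example_workflow_meta = {
        "id": 42,                      # new: primary key of the WorkflowModel row
        "name": "data_security_plan",  # hypothetical workflow spec name
        "display_name": "Data Security Plan",
        "description": "Collects the study's data security plan.",
        "spec_version": None,          # new: stays None until the workflow is instantiated
        "category_id": 1,
        "state": "optional",
        "status": "not_started",
        "total_tasks": 0,              # kept up to date as the processor runs (see workflow_processor changes)
        "completed_tasks": 0,
    }

    print(example_workflow_meta)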


@@ -838,6 +838,7 @@ components:
         example: "27b-6-42"
       hsr_number:
         type: string
+        x-nullable: true
         example: "27b-6-1212"
       categories:
         type: array


@@ -43,11 +43,13 @@ class StudyModel(db.Model):
 class WorkflowMetadata(object):
-    def __init__(self, name, display_name, description, category_id, state: WorkflowState, status: WorkflowStatus,
+    def __init__(self, id, name, display_name, description, spec_version, category_id, state: WorkflowState, status: WorkflowStatus,
                  total_tasks, completed_tasks):
+        self.id = id
         self.name = name
         self.display_name = display_name
         self.description = description
+        self.spec_version = spec_version
         self.category_id = category_id
         self.state = state
         self.status = status
@@ -58,9 +60,11 @@ class WorkflowMetadata(object):
     @classmethod
     def from_workflow(cls, workflow: WorkflowModel):
         instance = cls(
+            id=workflow.id,
             name=workflow.workflow_spec.name,
             display_name=workflow.workflow_spec.display_name,
             description=workflow.workflow_spec.description,
+            spec_version=workflow.spec_version,
             category_id=workflow.workflow_spec.category_id,
             state=WorkflowState.optional,
             status=workflow.status,
@@ -74,7 +78,7 @@ class WorkflowMetadataSchema(ma.Schema):
     status = EnumField(WorkflowStatus)

     class Meta:
         model = WorkflowMetadata
-        additional = ["name", "display_name", "description",
+        additional = ["id", "name", "display_name", "description",
                       "total_tasks", "completed_tasks"]
         unknown = INCLUDE


@@ -109,7 +109,11 @@ class StudyService(object):
     @staticmethod
     def __get_workflow_metas(study_id):
         # Add in the Workflows for each category
-        workflow_models = db.session.query(WorkflowModel).filter_by(study_id=study_id).all()
+        workflow_models = db.session.query(WorkflowModel).\
+            join(WorkflowSpecModel).\
+            filter(WorkflowSpecModel.is_master_spec == False).\
+            filter(WorkflowModel.study_id == study_id).\
+            all()
         workflow_metas = []
         for workflow in workflow_models:
             workflow_metas.append(WorkflowMetadata.from_workflow(workflow))
@@ -127,15 +131,7 @@ class StudyService(object):
             raise ApiError("multiple_master_specs",
                            "There is more than one master specification, and I don't know what to do.")
-        master_spec = master_specs[0]
-        master_workflow = StudyService._create_workflow_model(study_model, master_spec)
-        processor = WorkflowProcessor(master_workflow)
-        processor.do_engine_steps()
-        if not processor.bpmn_workflow.is_completed():
-            raise ApiError("master_spec_not_automatic",
-                           "The master spec should only contain fully automated tasks, it failed to complete.")
-        return processor.bpmn_workflow.last_task.data
+        return WorkflowProcessor.run_master_spec(master_specs[0], study_model)

     @staticmethod
     def _add_all_workflow_specs_to_study(study):


@@ -120,6 +120,21 @@ class WorkflowProcessor(object):
         self.workflow_spec_id = workflow_model.workflow_spec_id
         try:
             self.bpmn_workflow = self.__get_bpmn_workflow(workflow_model, spec)
+            self.bpmn_workflow.script_engine = self._script_engine
+
+            workflow_model.total_tasks = len(self.get_all_user_tasks())
+            workflow_model.completed_tasks = len(self.get_all_completed_tasks())
+            workflow_model.status = self.get_status()
+
+            session.add(workflow_model)
+            session.commit()
+            # Need to commit twice, first to get a unique id for the workflow model, and
+            # a second time to store the serialization so we can maintain this link within
+            # the spiff-workflow process.
+            self.bpmn_workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] = workflow_model.id
+            workflow_model.bpmn_workflow_json = WorkflowProcessor._serializer.serialize_workflow(self.bpmn_workflow)
+            session.add(workflow_model)
         except KeyError as ke:
             if soft_reset:
                 # Undo the soft-reset.
@@ -144,20 +159,23 @@ class WorkflowProcessor(object):
             bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = workflow_model.study_id
             bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = False
             bpmn_workflow.do_engine_steps()
-            session.add(workflow_model)
-            session.commit()
-            # Need to commit twice, first to get a unique id for the workflow model, and
-            # a second time to store the serialization so we can maintain this link within
-            # the spiff-workflow process.
-            bpmn_workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] = workflow_model.id
-            workflow_model.bpmn_workflow_json = WorkflowProcessor._serializer.serialize_workflow(bpmn_workflow)
-            session.add(workflow_model)
-        # Assure the correct script engine is in use.
-        bpmn_workflow.script_engine = self._script_engine
         return bpmn_workflow

+    @staticmethod
+    def run_master_spec(spec_model, study):
+        """Executes a BPMN specification for the given study, without recording any information to the database
+        Useful for running the master specification, which should not persist. """
+        spec = WorkflowProcessor.get_spec(spec_model.id)
+        bpmn_workflow = BpmnWorkflow(spec, script_engine=WorkflowProcessor._script_engine)
+        bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = study.id
+        bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = False
+        bpmn_workflow.do_engine_steps()
+        if not bpmn_workflow.is_completed():
+            raise ApiError("master_spec_not_automatic",
+                           "The master spec should only contain fully automated tasks, it failed to complete.")
+        return bpmn_workflow.last_task.data
+
     @staticmethod
     def get_parser():
         parser = MyCustomParser()
@@ -368,6 +386,10 @@ class WorkflowProcessor(object):
     def complete_task(self, task):
         self.bpmn_workflow.complete_task_from_id(task.id)
+        self.workflow_model.total_tasks = len(self.get_all_user_tasks())
+        self.workflow_model.completed_tasks = len(self.get_all_completed_tasks())
+        self.workflow_model.status = self.get_status()
+        session.add(self.workflow_model)

     def get_data(self):
         return self.bpmn_workflow.data
@@ -385,6 +407,11 @@ class WorkflowProcessor(object):
         all_tasks = self.bpmn_workflow.get_tasks(SpiffTask.ANY_MASK)
         return [t for t in all_tasks if not self.bpmn_workflow._is_engine_task(t.task_spec)]

+    def get_all_completed_tasks(self):
+        all_tasks = self.bpmn_workflow.get_tasks(SpiffTask.ANY_MASK)
+        return [t for t in all_tasks
+                if not self.bpmn_workflow._is_engine_task(t.task_spec) and t.state in [t.COMPLETED, t.CANCELLED]]
+
     @staticmethod
     def get_process_id(et_root: ElementTree.Element):
         process_elements = []


@@ -207,9 +207,9 @@ class BaseTest(unittest.TestCase):
         study = session.query(StudyModel).first()
         spec = self.load_test_spec(workflow_name, category_id=category_id)
         workflow_model = StudyService._create_workflow_model(study, spec)
-        processor = WorkflowProcessor(workflow_model)
-        workflow = session.query(WorkflowModel).filter_by(study_id=study.id, workflow_spec_id=workflow_name).first()
-        return workflow
+        #processor = WorkflowProcessor(workflow_model)
+        #workflow = session.query(WorkflowModel).filter_by(study_id=study.id, workflow_spec_id=workflow_name).first()
+        return workflow_model

     def create_reference_document(self):
         file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'reference', 'irb_documents.xlsx')


@@ -2,18 +2,66 @@ import json
 from datetime import datetime, timezone
 from unittest.mock import patch

-from crc import session
+from crc import session, db
 from crc.models.api_models import WorkflowApiSchema
 from crc.models.protocol_builder import ProtocolBuilderStatus, ProtocolBuilderStudyDetailsSchema, \
     ProtocolBuilderStudySchema
 from crc.models.study import StudyModel, StudySchema
+from crc.models.user import UserModel
 from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowStatus, \
     WorkflowSpecCategoryModel
+from crc.services.study_service import StudyService
+from crc.services.workflow_processor import WorkflowProcessor
 from tests.base_test import BaseTest


 class TestStudyService(BaseTest):
+    """Largely tested via the test_study_api, and time is tight, but adding new tests here."""

-    def test_total_tasks_updated(self):
+    @patch('crc.services.protocol_builder.ProtocolBuilderService.get_study_details')  # mock_details
+    @patch('crc.services.protocol_builder.ProtocolBuilderService.get_studies')  # mock_studies
+    def test_total_tasks_updated(self, mock_studies, mock_details):
         """Assure that as a user makes progress"""
+        self.load_example_data()
+
+        # Mock Protocol Builder responses
+        studies_response = self.protocol_builder_response('user_studies.json')
+        mock_studies.return_value = ProtocolBuilderStudySchema(many=True).loads(studies_response)
+        details_response = self.protocol_builder_response('study_details.json')
+        mock_details.return_value = ProtocolBuilderStudyDetailsSchema().loads(details_response)
+
+        # The load example data script should set us up a user and at least one study, one category, and one workflow.
+        user = db.session.query(UserModel).first()
+        studies = StudyService.get_studies_for_user(user)
+        self.assertTrue(len(studies) > 1)
+        self.assertTrue(len(studies[0].categories) > 1)
+        self.assertTrue(len(studies[0].categories[0].workflows) > 1)
+
+        workflow = next(iter(studies[0].categories[0].workflows))  # Workflows is a set.
+
+        # workflow should not be started, and it should have 0 completed tasks, and 0 total tasks.
+        self.assertEqual(WorkflowStatus.not_started, workflow.status)
+        self.assertEqual(None, workflow.spec_version)
+        self.assertEqual(0, workflow.total_tasks)
+        self.assertEqual(0, workflow.completed_tasks)
+
+        # Initialize the Workflow with the workflow processor.
+        workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.id == workflow.id).first()
+        processor = WorkflowProcessor(workflow_model)
+
+        # Assure the workflow is now started, and knows the total and completed tasks.
+        studies = StudyService.get_studies_for_user(user)
+        workflow = next(iter(studies[0].categories[0].workflows))  # Workflows is a set.
+        # self.assertEqual(WorkflowStatus.user_input_required, workflow.status)
+        self.assertTrue(workflow.total_tasks > 0)
+        self.assertEqual(0, workflow.completed_tasks)
+        self.assertIsNotNone(workflow.spec_version)
+
+        # Complete a task
+        task = processor.next_task()
+        processor.complete_task(task)
+
+        # Assure the workflow has moved on to the next task.
+        studies = StudyService.get_studies_for_user(user)
+        workflow = next(iter(studies[0].categories[0].workflows))  # Workflows is a set.
+        self.assertEqual(1, workflow.completed_tasks)