Merge pull request #30 from sartography/feature_personnel_multi_instance

Feature multi instance
Dan Funk 2020-04-20 11:41:20 -04:00 committed by GitHub
commit d7e53c5e7f
11 changed files with 260 additions and 27 deletions

Pipfile.lock (generated)

@@ -717,11 +717,11 @@
         },
         "sphinx": {
             "hashes": [
-                "sha256:50972d83b78990fd61d0d3fe8620814cae53db29443e92c13661bc43dff46ec8",
-                "sha256:8411878f4768ec2a8896b844d68070204f9354a831b37937989c2e559d29dffc"
+                "sha256:3145d87d0962366d4c5264c39094eae3f5788d01d4b1a12294051bfe4271d91b",
+                "sha256:d7c6e72c6aa229caf96af82f60a0d286a1521d42496c226fe37f5a75dcfe2941"
             ],
             "index": "pypi",
-            "version": "==3.0.1"
+            "version": "==3.0.2"
         },
         "sphinxcontrib-applehelp": {
             "hashes": [
@@ -768,7 +768,7 @@
         "spiffworkflow": {
             "editable": true,
             "git": "https://github.com/sartography/SpiffWorkflow.git",
-            "ref": "d5f385f74ca2f755589aab2588333aa007d20852"
+            "ref": "69cbb9d67d87895f8bcad7e6017802ba38f76895"
         },
         "sqlalchemy": {
             "hashes": [
@@ -805,10 +805,10 @@
         },
         "urllib3": {
             "hashes": [
-                "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc",
-                "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"
+                "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527",
+                "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"
             ],
-            "version": "==1.25.8"
+            "version": "==1.25.9"
         },
         "vine": {
             "hashes": [


@@ -584,7 +584,7 @@ paths:
           format: int32
     get:
       operationId: crc.api.workflow.get_workflow
-      summary: Detailed information for a specific workflow instance
+      summary: Returns a workflow, can also be used to do a soft or hard reset on the workflow.
       parameters:
         - name: soft_reset
           in: query


@@ -12,7 +12,7 @@ class Task(object):
     EMUM_OPTIONS_VALUE_COL_PROP = "enum.options.value.column"
     EMUM_OPTIONS_LABEL_COL_PROP = "enum.options.label.column"

-    def __init__(self, id, name, title, type, state, form, documentation, data):
+    def __init__(self, id, name, title, type, state, form, documentation, data, is_multi_instance, mi_count, mi_index):
         self.id = id
         self.name = name
         self.title = title
@@ -21,7 +21,9 @@ class Task(object):
         self.form = form
         self.documentation = documentation
         self.data = data
+        self.is_multi_instance = is_multi_instance
+        self.mi_count = mi_count
+        self.mi_index = mi_index


 class OptionSchema(ma.Schema):
     class Meta:
@@ -57,7 +59,8 @@ class FormSchema(ma.Schema):

 class TaskSchema(ma.Schema):
     class Meta:
-        fields = ["id", "name", "title", "type", "state", "form", "documentation", "data"]
+        fields = ["id", "name", "title", "type", "state", "form", "documentation", "data", "is_multi_instance",
+                  "mi_count", "mi_index"]

     documentation = marshmallow.fields.String(required=False, allow_none=True)
     form = marshmallow.fields.Nested(FormSchema, required=False, allow_none=True)
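For orientation, a minimal sketch of what the expanded Task model and schema expose to API clients (assumes the crc.models.api_models module shown above; the field values are illustrative only):

    from crc.models.api_models import Task, TaskSchema

    # Construct a task the way spiff_task_to_api_task now does, with the three new
    # multi-instance fields appended to the constructor arguments.
    task = Task(id="abc123", name="MutiInstanceTask", title="Gather more information",
                type="UserTask", state="READY", form=None, documentation="",
                data={}, is_multi_instance=True, mi_count=3, mi_index=1)

    # The serialized payload now carries "is_multi_instance", "mi_count" and "mi_index".
    print(TaskSchema().dump(task))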


@@ -36,7 +36,9 @@ Takes two arguments:
         file_name = args[0]
         irb_doc_code = args[1]
-        FileService.add_task_file(study_id=study_id, workflow_id=workflow_id, task_id=task.id,
+        FileService.add_task_file(study_id=study_id,
+                                  workflow_id=workflow_id,
+                                  task_id=task.id,
                                   name=file_name,
                                   content_type=CONTENT_TYPES['docx'],
                                   binary_data=final_document_stream.read(),


@@ -58,11 +58,12 @@ class StudyInfo(Script):
         if cmd == 'info':
             study = session.query(StudyModel).filter_by(id=study_id).first()
             schema = StudySchema()
-            study_info["info"] = schema.dump(study)
+            self.add_data_to_task(task, {cmd: schema.dump(study)})
         if cmd == 'investigators':
-            study_info["investigators"] = self.pb.get_investigators(study_id)
+            pb_response = self.pb.get_investigators(study_id)
+            self.add_data_to_task(task, {cmd: self.organize_investigators_by_type(pb_response)})
         if cmd == 'details':
-            study_info["details"] = self.pb.get_study_details(study_id)
-        task.data["study"] = study_info
+            self.add_data_to_task(task, {cmd: self.pb.get_study_details(study_id)})
@@ -71,3 +72,11 @@ class StudyInfo(Script):
             raise ApiError(code="missing_argument",
                            message="The StudyInfo script requires a single argument which must be "
                                    "one of %s" % ",".join(StudyInfo.type_options))
+
+    def organize_investigators_by_type(self, pb_investigators):
+        """Convert array of investigators from protocol builder into a dictionary keyed on the type"""
+        output = {}
+        for i in pb_investigators:
+            output[i["INVESTIGATORTYPE"]] = {"user_id": i["NETBADGEID"], "type_full": i["INVESTIGATORTYPEFULL"]}
+        return output
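For reference, a small sketch of the reshaping the new organize_investigators_by_type helper performs (the input keys follow the Protocol Builder fields used above; the sample values match the mocked investigators.json exercised by the tests later in this PR):

    # Protocol Builder returns a list of investigator records:
    pb_response = [
        {"INVESTIGATORTYPE": "PI", "INVESTIGATORTYPEFULL": "Primary Investigator", "NETBADGEID": "dhf8r"},
        {"INVESTIGATORTYPE": "DC", "INVESTIGATORTYPEFULL": "Department Contact", "NETBADGEID": "asd3v"},
    ]

    # organize_investigators_by_type(pb_response) keys the records by type:
    # {"PI": {"user_id": "dhf8r", "type_full": "Primary Investigator"},
    #  "DC": {"user_id": "asd3v", "type_full": "Department Contact"}}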


@@ -199,7 +199,7 @@ class FileService(object):
     @staticmethod
     def get_workflow_file_data(workflow, file_name):
         """Given a SPIFF Workflow Model, tracks down a file with the given name in the datbase and returns it's data"""
-        workflow_spec_model = FileService.__find_spec_model_in_db(workflow)
+        workflow_spec_model = FileService.find_spec_model_in_db(workflow)
         study_id = workflow.data[WorkflowProcessor.STUDY_ID_KEY]

         if workflow_spec_model is None:
@@ -219,7 +219,7 @@ class FileService(object):
         return file_data_model

     @staticmethod
-    def __find_spec_model_in_db(workflow):
+    def find_spec_model_in_db(workflow):
         """ Search for the workflow """
         # When the workflow spec model is created, we record the primary process id,
         # then we can look it up. As there is the potential for sub-workflows, we
@@ -228,7 +228,7 @@ class FileService(object):
         workflow_model = session.query(WorkflowSpecModel).join(FileModel). \
             filter(FileModel.primary_process_id == spec.name).first()
         if workflow_model is None and workflow != workflow.outer_workflow:
-            return FileService.__find_spec_model_in_db(workflow.outer_workflow)
+            return FileService.find_spec_model_in_db(workflow.outer_workflow)
         return workflow_model


@@ -1,4 +1,11 @@
+from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask
+from SpiffWorkflow.bpmn.specs.MultiInstanceTask import MultiInstanceTask
+from SpiffWorkflow.bpmn.specs.NoneTask import NoneTask
+from SpiffWorkflow.bpmn.specs.ScriptTask import ScriptTask
+from SpiffWorkflow.bpmn.specs.UserTask import UserTask
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
+from SpiffWorkflow.dmn.specs.BuisnessRuleTask import BusinessRuleTask
+from SpiffWorkflow.specs import CancelTask, StartTask
 from pandas import ExcelFile

 from crc.api.common import ApiError
@@ -45,14 +52,44 @@ class WorkflowService(object):

     @staticmethod
     def spiff_task_to_api_task(spiff_task):
+        task_type = spiff_task.task_spec.__class__.__name__
+
+        if isinstance(spiff_task.task_spec, UserTask):
+            task_type = "UserTask"
+        elif isinstance(spiff_task.task_spec, ManualTask):
+            task_type = "ManualTask"
+        elif isinstance(spiff_task.task_spec, BusinessRuleTask):
+            task_type = "BusinessRuleTask"
+        elif isinstance(spiff_task.task_spec, CancelTask):
+            task_type = "CancelTask"
+        elif isinstance(spiff_task.task_spec, ScriptTask):
+            task_type = "ScriptTask"
+        elif isinstance(spiff_task.task_spec, StartTask):
+            task_type = "StartTask"
+        else:
+            task_type = "NoneTask"
+
+        multi_instance = isinstance(spiff_task.task_spec, MultiInstanceTask)
+        mi_count = 0
+        mi_index = 0
+        if multi_instance:
+            mi_count = spiff_task.task_spec._get_count(spiff_task)
+            mi_index = int(spiff_task._get_internal_data('runtimes', 1))
+
         task = Task(spiff_task.id,
                     spiff_task.task_spec.name,
                     spiff_task.task_spec.description,
-                    spiff_task.task_spec.__class__.__name__,
+                    task_type,
                     spiff_task.get_state_name(),
                     None,
                     "",
-                    spiff_task.data)
+                    spiff_task.data,
+                    multi_instance,
+                    mi_count,
+                    mi_index)

         # Only process the form and documentation if this is something that is ready or completed.
         if not (spiff_task._is_predicted()):
@@ -100,8 +137,8 @@ class WorkflowService(object):
                 raise ApiError.from_task("invalid_emum",
                                          "For emumerations based on an xls file, you must include 3 properties: %s, "
                                          "%s, and %s, you supplied %s" % (Task.ENUM_OPTIONS_FILE_PROP,
                                                                           Task.EMUM_OPTIONS_VALUE_COL_PROP,
                                                                           Task.EMUM_OPTIONS_LABEL_COL_PROP),
                                          task=spiff_task)

         # Get the file data from the File Service
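A brief sketch of how a caller can use the new multi-instance fields on the API task (mirrors the processor test added later in this PR; the spiff_task is assumed to come from processor.next_user_tasks()):

    api_task = WorkflowService.spiff_task_to_api_task(spiff_task)
    if api_task.is_multi_instance:
        # e.g. render "Gather more information (1 of 3)" in a client
        print("%s (%s of %s)" % (api_task.title, api_task.mi_index, api_task.mi_count))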


@@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_17fwemw" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<bpmn:process id="MultiInstance" isExecutable="true">
<bpmn:startEvent id="StartEvent_1" name="StartEvent_1">
<bpmn:outgoing>Flow_0t6p1sb</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0t6p1sb" sourceRef="StartEvent_1" targetRef="Task_1v0e2zu" />
<bpmn:endEvent id="Event_End" name="Event_End">
<bpmn:incoming>Flow_0ugjw69</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0ugjw69" sourceRef="MutiInstanceTask" targetRef="Event_End" />
<bpmn:userTask id="MutiInstanceTask" name="Gather more information" camunda:formKey="GetEmail">
<bpmn:documentation># Please provide addtional information about:
## Investigator ID: {{investigator.NETBADGEID}}
## Role: {{investigator.INVESTIGATORTYPEFULL}}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="email" label="Email Address:" type="string" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_1p568pp</bpmn:incoming>
<bpmn:outgoing>Flow_0ugjw69</bpmn:outgoing>
<bpmn:multiInstanceLoopCharacteristics isSequential="true" camunda:collection="StudyInfo.investigators" camunda:elementVariable="investigator" />
</bpmn:userTask>
<bpmn:sequenceFlow id="SequenceFlow_1p568pp" sourceRef="Task_1v0e2zu" targetRef="MutiInstanceTask" />
<bpmn:scriptTask id="Task_1v0e2zu" name="Load Personnel">
<bpmn:incoming>Flow_0t6p1sb</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1p568pp</bpmn:outgoing>
<bpmn:script>StudyInfo investigators</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="MultiInstance">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="142" y="99" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="129" y="142" width="64" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0t6p1sb_di" bpmnElement="Flow_0t6p1sb">
<di:waypoint x="178" y="117" />
<di:waypoint x="250" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Event_1g0pmib_di" bpmnElement="Event_End">
<dc:Bounds x="592" y="99" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="585" y="142" width="54" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0ugjw69_di" bpmnElement="Flow_0ugjw69">
<di:waypoint x="530" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_1iyilui_di" bpmnElement="MutiInstanceTask">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1p568pp_di" bpmnElement="SequenceFlow_1p568pp">
<di:waypoint x="350" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ScriptTask_0cbbirp_di" bpmnElement="Task_1v0e2zu">
<dc:Bounds x="250" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>
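A note on how this diagram drives the new multi-instance fields (a sketch based on the tests below, not part of the BPMN file): the Load Personnel script task runs StudyInfo investigators, and the sequential multiInstanceLoopCharacteristics then repeats the user task once per entry in StudyInfo.investigators, exposing the current entry as investigator.

    # Expected task data after the "Load Personnel" script task (values taken from the
    # mocked investigators.json used by the tests):
    # task.data["StudyInfo"]["investigators"] == {
    #     "PI":   {"user_id": "dhf8r",  "type_full": "Primary Investigator"},
    #     "DC":   {"user_id": "asd3v",  "type_full": "Department Contact"},
    #     "IRBC": {"user_id": "asdf32", "type_full": "IRB Coordinator"},
    # }
    # With three entries, the "Gather more information" task reports mi_count == 3, mi_index
    # runs 1..3 as iterations complete, and task.data["investigator"] holds the current entry.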


@@ -1,5 +1,6 @@
 import json
 import os
+from unittest.mock import patch

 from crc import session, app
 from crc.models.api_models import WorkflowApiSchema
@@ -257,3 +258,21 @@ class TestTasksApi(BaseTest):
         self.assertEquals('ManualTask', workflow_api.next_task['type'])
         self.assertTrue('Markdown' in workflow_api.next_task['documentation'])
         self.assertTrue('Dan' in workflow_api.next_task['documentation'])
+
+    @patch('crc.services.protocol_builder.requests.get')
+    def test_multi_instance_task(self, mock_get):
+
+        # This depends on getting a list of investigators back from the protocol builder.
+        mock_get.return_value.ok = True
+        mock_get.return_value.text = self.protocol_builder_response('investigators.json')
+
+        self.load_example_data()
+        workflow = self.create_workflow('multi_instance')
+
+        # get the first form in the two form workflow.
+        tasks = self.get_workflow_api(workflow).user_tasks
+        self.assertEquals(1, len(tasks))
+        self.assertEquals("UserTask", tasks[0].type)
+        self.assertTrue(tasks[0].is_multi_instance)
+        self.assertEquals(3, tasks[0].mi_count)
+
+        workflow_api = self.complete_form(workflow, tasks[0], {"name": "Dan"})


@@ -238,11 +238,11 @@ class TestWorkflowProcessor(BaseTest):
         processor.do_engine_steps()
         task = processor.bpmn_workflow.last_task
         self.assertIsNotNone(task.data)
-        self.assertIn("study", task.data)
-        self.assertIn("info", task.data["study"])
-        self.assertIn("title", task.data["study"]["info"])
-        self.assertIn("last_updated", task.data["study"]["info"])
-        self.assertIn("sponsor", task.data["study"]["info"])
+        self.assertIn("StudyInfo", task.data)
+        self.assertIn("info", task.data["StudyInfo"])
+        self.assertIn("title", task.data["StudyInfo"]["info"])
+        self.assertIn("last_updated", task.data["StudyInfo"]["info"])
+        self.assertIn("sponsor", task.data["StudyInfo"]["info"])

     def test_spec_versioning(self):
         self.load_example_data()


@@ -0,0 +1,97 @@
import logging
import os
import string
import random
from unittest.mock import patch

from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent

from crc import session, db, app
from crc.api.common import ApiError
from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
from crc.models.study import StudyModel
from crc.models.workflow import WorkflowSpecModel, WorkflowStatus, WorkflowModel
from crc.services.file_service import FileService
from crc.services.study_service import StudyService
from crc.services.workflow_service import WorkflowService
from tests.base_test import BaseTest
from crc.services.workflow_processor import WorkflowProcessor


class TestWorkflowProcessorMultiInstance(BaseTest):
    """Tests the Workflow Processor as it deals with a Multi-Instance task"""

    def _populate_form_with_random_data(self, task):
        WorkflowProcessor.populate_form_with_random_data(task)

    def get_processor(self, study_model, spec_model):
        workflow_model = StudyService._create_workflow_model(study_model, spec_model)
        return WorkflowProcessor(workflow_model)

    @patch('crc.services.protocol_builder.requests.get')
    def test_create_and_complete_workflow(self, mock_get):
        # This depends on getting a list of investigators back from the protocol builder.
        mock_get.return_value.ok = True
        mock_get.return_value.text = self.protocol_builder_response('investigators.json')

        self.load_example_data()
        workflow_spec_model = self.load_test_spec("multi_instance")
        study = session.query(StudyModel).first()
        processor = self.get_processor(study, workflow_spec_model)
        self.assertEqual(study.id, processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY])
        self.assertIsNotNone(processor)
        self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
        processor.bpmn_workflow.do_engine_steps()

        next_user_tasks = processor.next_user_tasks()
        self.assertEqual(1, len(next_user_tasks))
        task = next_user_tasks[0]
        self.assertEquals(
            {
                'DC': {'user_id': 'asd3v', 'type_full': 'Department Contact'},
                'IRBC': {'user_id': 'asdf32', 'type_full': 'IRB Coordinator'},
                'PI': {'user_id': 'dhf8r', 'type_full': 'Primary Investigator'}
            },
            task.data['StudyInfo']['investigators'])

        self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
        self.assertEquals("asd3v", task.data["investigator"]["user_id"])
        self.assertEqual("MutiInstanceTask", task.get_name())
        api_task = WorkflowService.spiff_task_to_api_task(task)
        self.assertEquals(3, api_task.mi_count)
        self.assertEquals(1, api_task.mi_index)
        task.update_data({"email": "asd3v@virginia.edu"})
        processor.complete_task(task)
        processor.do_engine_steps()

        task = next_user_tasks[0]
        api_task = WorkflowService.spiff_task_to_api_task(task)
        self.assertEqual("MutiInstanceTask", api_task.name)
        task.update_data({"email": "asdf32@virginia.edu"})
        self.assertEquals(3, api_task.mi_count)
        self.assertEquals(2, api_task.mi_index)
        processor.complete_task(task)
        processor.do_engine_steps()

        task = next_user_tasks[0]
        api_task = WorkflowService.spiff_task_to_api_task(task)
        self.assertEqual("MutiInstanceTask", task.get_name())
        task.update_data({"email": "dhf8r@virginia.edu"})
        self.assertEquals(3, api_task.mi_count)
        self.assertEquals(3, api_task.mi_index)
        processor.complete_task(task)
        processor.do_engine_steps()

        self.assertEquals(
            {
                'DC': {'user_id': 'asd3v', 'type_full': 'Department Contact', 'email': 'asd3v@virginia.edu'},
                'IRBC': {'user_id': 'asdf32', 'type_full': 'IRB Coordinator', "email": "asdf32@virginia.edu"},
                'PI': {'user_id': 'dhf8r', 'type_full': 'Primary Investigator', "email": "dhf8r@virginia.edu"}
            },
            task.data['StudyInfo']['investigators'])

        self.assertEqual(WorkflowStatus.complete, processor.get_status())