Merge branch 'master' into feature/protocol_status

Aaron Louie 2020-05-01 22:55:59 -04:00
commit 28d5727851
5 changed files with 88 additions and 22 deletions

View File

@@ -608,7 +608,7 @@ paths:
               schema:
                 $ref: "#/components/schemas/Workflow"
     delete:
-      operationId: crc.api.workflow.delete
+      operationId: crc.api.workflow.delete_workflow
       summary: Removes an existing workflow
       tags:
        - Workflows and Tasks

View File

@@ -9,6 +9,7 @@ from crc.models.stats import WorkflowStatsModel, TaskEventModel
 from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, WorkflowSpecModel, WorkflowSpecCategoryModel, \
     WorkflowSpecCategoryModelSchema
 from crc.services.file_service import FileService
+from crc.services.study_service import StudyService
 from crc.services.workflow_processor import WorkflowProcessor
 from crc.services.workflow_service import WorkflowService
@@ -78,8 +79,8 @@ def delete_workflow_specification(spec_id):
         FileService.delete_file(file.id)
     # Delete all stats and workflow models related to this specification
     session.query(WorkflowStatsModel).filter_by(workflow_spec_id=spec_id).delete()
-    session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id).delete()
+    for workflow in session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id):
+        StudyService.delete_workflow(workflow)
     session.query(WorkflowSpecModel).filter_by(id=spec_id).delete()
     session.commit()
@@ -115,11 +116,8 @@ def get_workflow(workflow_id, soft_reset=False, hard_reset=False):
     return WorkflowApiSchema().dump(workflow_api_model)


-def delete(workflow_id):
-    session.query(TaskEventModel).filter_by(workflow_id=workflow_id).delete()
-    session.query(WorkflowStatsModel).filter_by(workflow_id=workflow_id).delete()
-    session.query(WorkflowModel).filter_by(id=workflow_id).delete()
-    session.commit()
+def delete_workflow(workflow_id):
+    StudyService.delete_workflow(workflow_id)


 def set_current_task(workflow_id, task_id):
     workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()

View File

@@ -52,12 +52,21 @@ class StudyService(object):
     @staticmethod
     def delete_study(study_id):
-        session.query(WorkflowStatsModel).filter_by(study_id=study_id).delete()
         session.query(TaskEventModel).filter_by(study_id=study_id).delete()
-        session.query(WorkflowModel).filter_by(study_id=study_id).delete()
+        session.query(WorkflowStatsModel).filter_by(study_id=study_id).delete()
+        for workflow in session.query(WorkflowModel).filter_by(study_id=study_id):
+            StudyService.delete_workflow(workflow.id)
         session.query(StudyModel).filter_by(id=study_id).delete()
         session.commit()

+    @staticmethod
+    def delete_workflow(workflow_id):
+        for file in session.query(FileModel).filter_by(workflow_id=workflow_id).all():
+            FileService.delete_file(file.id)
+        session.query(TaskEventModel).filter_by(workflow_id=workflow_id).delete()
+        session.query(WorkflowStatsModel).filter_by(workflow_id=workflow_id).delete()
+        session.query(WorkflowModel).filter_by(id=workflow_id).delete()
+
     @staticmethod
     def get_categories():
         """Returns a list of category objects, in the correct order."""

View File

@@ -164,6 +164,11 @@ class TestTasksApi(BaseTest):
         json_data = json.loads(rv.get_data(as_text=True))
         files = FileModelSchema(many=True).load(json_data, session=session)
         self.assertTrue(len(files) == 1)

+        # Assure we can still delete the study even when there is a file attached to a workflow.
+        rv = self.app.delete('/v1.0/study/%i' % workflow.study_id, headers=self.logged_in_headers())
+        self.assert_success(rv)
+
     def test_get_documentation_populated_in_end(self):
         self.load_example_data()

View File

@@ -1,22 +1,13 @@
 import logging
-import os
-import string
-import random
 from unittest.mock import patch
-from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
-from crc import session, db, app
-from crc.api.common import ApiError
+from crc import session
 from crc.models.api_models import MultiInstanceType
-from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
 from crc.models.study import StudyModel
-from crc.models.workflow import WorkflowSpecModel, WorkflowStatus, WorkflowModel
-from crc.services.file_service import FileService
+from crc.models.workflow import WorkflowStatus
+from crc.services.study_service import StudyService
 from crc.services.workflow_processor import WorkflowProcessor
 from crc.services.workflow_service import WorkflowService
 from tests.base_test import BaseTest
-from crc.services.workflow_processor import WorkflowProcessor


 class TestWorkflowProcessorMultiInstance(BaseTest):
@@ -96,3 +87,66 @@ class TestWorkflowProcessorMultiInstance(BaseTest):
             task.data['StudyInfo']['investigators'])
         self.assertEqual(WorkflowStatus.complete, processor.get_status())

+    @patch('crc.services.protocol_builder.requests.get')
+    def test_create_and_complete_workflow_parallel(self, mock_get):
+        """Unlike the test above, the parallel task allows us to complete the items in any order."""
+        mock_get.return_value.ok = True
+        mock_get.return_value.text = self.protocol_builder_response('investigators.json')
+
+        self.load_example_data()
+        workflow_spec_model = self.load_test_spec("multi_instance_parallel")
+        study = session.query(StudyModel).first()
+        processor = self.get_processor(study, workflow_spec_model)
+        processor.bpmn_workflow.do_engine_steps()
+
+        # In the Parallel instance, there should be three tasks, all of them in the ready state.
+        next_user_tasks = processor.next_user_tasks()
+        self.assertEqual(3, len(next_user_tasks))
+
+        # We can complete the tasks out of order.
+        task = next_user_tasks[2]
+        self.assertEquals(
+            {
+                'DC': {'user_id': 'asd3v', 'type_full': 'Department Contact'},
+                'IRBC': {'user_id': 'asdf32', 'type_full': 'IRB Coordinator'},
+                'PI': {'user_id': 'dhf8r', 'type_full': 'Primary Investigator'}
+            },
+            task.data['StudyInfo']['investigators'])
+
+        self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
+        self.assertEquals("dhf8r", task.data["investigator"]["user_id"])  # The last of the tasks
+
+        api_task = WorkflowService.spiff_task_to_api_task(task)
+        self.assertEquals(MultiInstanceType.parallel, api_task.mi_type)
+        task.update_data({"email":"dhf8r@virginia.edu"})
+        processor.complete_task(task)
+        processor.do_engine_steps()
+
+        task = next_user_tasks[0]
+        api_task = WorkflowService.spiff_task_to_api_task(task)
+        self.assertEqual("MutiInstanceTask", api_task.name)
+        task.update_data({"email":"asd3v@virginia.edu"})
+        processor.complete_task(task)
+        processor.do_engine_steps()
+
+        task = next_user_tasks[1]
+        api_task = WorkflowService.spiff_task_to_api_task(task)
+        self.assertEqual("MutiInstanceTask", task.get_name())
+        task.update_data({"email":"asdf32@virginia.edu"})
+        processor.complete_task(task)
+        processor.do_engine_steps()
+
+        # Completing the tasks out of order, still provides the correct information.
+        self.assertEquals(
+            {
+                'DC': {'user_id': 'asd3v', 'type_full': 'Department Contact', 'email': 'asd3v@virginia.edu'},
+                'IRBC': {'user_id': 'asdf32', 'type_full': 'IRB Coordinator', "email": "asdf32@virginia.edu"},
+                'PI': {'user_id': 'dhf8r', 'type_full': 'Primary Investigator', "email": "dhf8r@virginia.edu"}
+            },
+            task.data['StudyInfo']['investigators'])
+
+        self.assertEqual(WorkflowStatus.complete, processor.get_status())