merging in from dev.

This commit is contained in:
Dan 2022-03-18 16:22:33 -04:00
commit e44ea6e13c
7 changed files with 56 additions and 46 deletions

View File

@@ -66,6 +66,8 @@ PB_STUDY_DETAILS_URL = environ.get('PB_STUDY_DETAILS_URL', default=PB_BASE_URL +
PB_SPONSORS_URL = environ.get('PB_SPONSORS_URL', default=PB_BASE_URL + "sponsors?studyid=%i")
PB_IRB_INFO_URL = environ.get('PB_IRB_INFO_URL', default=PB_BASE_URL + "current_irb_info/%i")
PB_CHECK_STUDY_URL = environ.get('PB_CHECK_STUDY_URL', default=PB_BASE_URL + "check_study/%i")
# The earliest date a study was last modified in PB that we will import.
PB_MIN_DATE = environ.get('PB_MIN_DATE', default="2020-01-01T00:00:00.000Z")
# LDAP Configuration
LDAP_URL = environ.get('LDAP_URL', default="ldap.virginia.edu").strip('/') # No trailing slash or http://
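PB_MIN_DATE is stored as an ISO-8601 string and parsed once where it is consumed (see StudyService.PB_MIN_DATE below). A minimal sketch of that read-and-parse pattern, assuming only os.environ and python-dateutil:

# Minimal sketch: read the ISO-8601 cutoff from the environment and parse it once.
from os import environ
from dateutil import parser

PB_MIN_DATE = environ.get('PB_MIN_DATE', default="2020-01-01T00:00:00.000Z")
pb_min_date = parser.parse(PB_MIN_DATE)   # timezone-aware datetime (UTC, from the trailing Z)
print(pb_min_date.date())                 # 2020-01-01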

View File

@@ -48,7 +48,8 @@ def add_study(body):
session.commit()
master_workflow_results = __run_master_spec(study_model, spec_service.master_spec)
study = StudyService().get_study(study_model.id, categories, master_workflow_results=master_workflow_results)
study = StudyService().get_study(study_model.id, categories, master_workflow_results=master_workflow_results,
process_categories=True)
study_data = StudySchema().dump(study)
study_data["errors"] = ApiErrorSchema(many=True).dump(errors)
return study_data

View File

@@ -252,6 +252,8 @@ def set_current_task(workflow_id, task_id):
processor = WorkflowProcessor(workflow_model)
task_id = uuid.UUID(task_id)
spiff_task = processor.bpmn_workflow.get_task(task_id)
cancel_notify = (spiff_task.state == spiff_task.COMPLETED and
spiff_task.task_spec.__class__.__name__ != 'EndEvent')
if not spiff_task:
# An invalid task_id was requested.
raise ApiError("invalid_task", "The Task you requested no longer exists as a part of this workflow.")
@@ -262,7 +264,7 @@ def set_current_task(workflow_id, task_id):
"currently set to COMPLETE or READY.")
# Only reset the token if the task doesn't already have it.
if spiff_task.state == spiff_task.COMPLETED:
if cancel_notify:
processor.cancel_notify()
spiff_task.reset_token({}, reset_data=True) # Don't try to copy the existing data back into this task.
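The new cancel_notify flag is computed before the token reset, so the decision reflects the task's pre-reset state: notifications fire only for a COMPLETED task that is not an EndEvent. (Note that the flag appears to be computed before the `if not spiff_task` guard, so a stale task_id would likely raise an AttributeError before the intended ApiError.) A minimal sketch of the guard, with processor and spiff_task as stand-ins for the WorkflowProcessor and SpiffWorkflow task used here:

# Minimal sketch of the cancel-notify guard; processor and spiff_task stand in
# for the objects used in set_current_task.
def reset_with_notify(processor, spiff_task):
    # Decide *before* resetting the token whether cancel notifications should
    # fire: only for tasks that actually completed, and never for EndEvents.
    cancel_notify = (spiff_task.state == spiff_task.COMPLETED and
                     spiff_task.task_spec.__class__.__name__ != 'EndEvent')
    if cancel_notify:
        processor.cancel_notify()
    spiff_task.reset_token({}, reset_data=True)  # don't copy the old data back in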

View File

@@ -1,5 +1,6 @@
from copy import copy
from datetime import datetime
from dateutil import parser
from typing import List
import requests
@@ -40,6 +41,7 @@ class StudyService(object):
# `Full Committee`, `Expedited`, `Non-UVA IRB Full Board`, and `Non-UVA IRB Expedited`
# These are considered to be the valid review types that can be shown to users.
VALID_REVIEW_TYPES = [2, 3, 23, 24]
PB_MIN_DATE = parser.parse(app.config['PB_MIN_DATE'])
def get_studies_for_user(self, user, categories, include_invalid=False):
"""Returns a list of all studies for the given user."""
@@ -51,7 +53,8 @@ class StudyService(object):
studies = []
for study_model in db_studies:
if include_invalid or study_model.review_type in self.VALID_REVIEW_TYPES:
studies.append(StudyService.get_study(study_model.id, categories, study_model=study_model, process_categories=False))
studies.append(StudyService.get_study(study_model.id, categories, study_model=study_model,
process_categories=False))
return studies
@staticmethod
@@ -69,7 +72,7 @@ class StudyService(object):
@staticmethod
@timeit
def get_study(study_id, categories: List[WorkflowSpecCategory], study_model: StudyModel = None,
master_workflow_results=None, process_categories=False):
master_workflow_results=None, process_categories=True):
"""Returns a study model that contains all the workflows organized by category.
Pass in the results of the master workflow spec, and the status of other workflows will be updated."""
last_time = firsttime()
@@ -95,7 +98,7 @@ class StudyService(object):
DocumentService.get_dictionary()) for model in files)
study.files = list(files)
last_time = sincetime("files", last_time)
if process_categories:
if process_categories and master_workflow_results:
if study.status != StudyStatus.abandoned:
for category in study.categories:
workflow_metas = StudyService._get_workflow_metas(study_id, category)
@@ -113,21 +116,19 @@ class StudyService(object):
for associate in associates:
if associate.role == "Primary Investigator":
study.primary_investigator = associate.ldap_info.display_name
# Calculate study progress and return it as an integer out of one hundred
last_time = sincetime("PI", last_time)
completed_wfs = 0
total_wfs = 0
for category in study.categories:
for workflow in category.workflows:
total_wfs +=1
if workflow.status == WorkflowStatus.complete:
completed_wfs += 1
if total_wfs > 0:
study.progress = int((completed_wfs/total_wfs)*100)
else:
study.progress = 0
all_workflows = db.session.query(WorkflowModel).\
filter(WorkflowModel.study_id == study.id).\
count()
complete_workflows = db.session.query(WorkflowModel).\
filter(WorkflowModel.study_id == study.id).\
filter(WorkflowModel.status == WorkflowStatus.complete).\
count()
if all_workflows > 0:
study.progress = int((complete_workflows/all_workflows)*100)
return study
last_time = sincetime("progress", last_time)
@staticmethod
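The progress figure previously required loading every category's workflows and counting them in a Python loop; it is now derived from two COUNT queries against WorkflowModel, which also works when process_categories is False. One difference worth noting: the removed loop set progress to 0 when a study had no workflows, while the new code leaves it unset in that case. A condensed sketch of the query-based calculation, assuming the repo's db.session, WorkflowModel, and WorkflowStatus:

# Condensed sketch of the new progress calculation (names from this repo).
all_workflows = db.session.query(WorkflowModel).\
    filter(WorkflowModel.study_id == study.id).\
    count()
complete_workflows = db.session.query(WorkflowModel).\
    filter(WorkflowModel.study_id == study.id).\
    filter(WorkflowModel.status == WorkflowStatus.complete).\
    count()
# Guarding the division; the diff above only sets progress when workflows exist.
study.progress = int((complete_workflows / all_workflows) * 100) if all_workflows > 0 else 0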
@@ -405,6 +406,7 @@ class StudyService(object):
return {}
@staticmethod
@timeit
def synch_with_protocol_builder_if_enabled(user, specs):
"""Assures that the studies we have locally for the given user are
in sync with the studies available in protocol builder. """
@@ -424,6 +426,16 @@ class StudyService(object):
# Further assures that every active study (that does exist in the protocol builder)
# has a reference to every available workflow (though some may not have started yet)
for pb_study in pb_studies:
try:
if pb_study.DATELASTMODIFIED:
last_modified = parser.parse(pb_study.DATELASTMODIFIED)
else:
last_modified = parser.parse(pb_study.DATECREATED)
if last_modified.date() < StudyService.PB_MIN_DATE.date():
continue
except Exception as e:
# Last modified is null or undefined. Don't import it.
continue
new_status = None
new_progress_status = None
db_study = session.query(StudyModel).filter(StudyModel.id == pb_study.STUDYID).first()
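The new import filter prefers DATELASTMODIFIED and falls back to DATECREATED, skipping any study whose effective date predates PB_MIN_DATE; a missing or unparseable date also means skip. A standalone sketch of that decision, where pb_study is a hypothetical record exposing the two date attributes as ISO-8601 strings:

# Standalone sketch of the PB_MIN_DATE filter; pb_study is a hypothetical record.
from dateutil import parser

PB_MIN_DATE = parser.parse("2020-01-01T00:00:00.000Z")

def should_import(pb_study):
    try:
        raw = pb_study.DATELASTMODIFIED or pb_study.DATECREATED
        return parser.parse(raw).date() >= PB_MIN_DATE.date()
    except Exception:
        return False  # both dates missing or unparseable: don't import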

View File

@@ -1,4 +1,11 @@
[
{
"DATECREATED": "2002-02-19T14:26:49.127756",
"DATELASTMODIFIED": "2012-02-19T14:26:49.127756",
"STUDYID": 11111,
"TITLE": "An old study from the good old days that should not show up.",
"REVIEW_TYPE": 2
},
{
"DATECREATED": "2020-02-19T14:26:49.127756",
"DATELASTMODIFIED": "2020-02-19T14:26:49.127756",

View File

@@ -45,7 +45,7 @@ class TestStudyApi(BaseTest):
def test_get_study(self):
"""Generic test, but pretty detailed, in that the study should return a categorized list of workflows
This starts with out loading the example data, to show that all the bases are covered from ground 0."""
This starts without loading the example data, to show that all the bases are covered from ground 0."""
"""NOTE: The protocol builder is not enabled or mocked out. As the master workflow (which is empty),
and the test workflow do not need it, and it is disabled in the configuration."""
@@ -152,6 +152,7 @@ class TestStudyApi(BaseTest):
# Enable the protocol builder for these tests, as the master_workflow and other workflows
# depend on using the PB for data.
app.config['PB_ENABLED'] = True
app.config['PB_MIN_DATE'] = "2020-01-01T00:00:00.000Z"
self.add_studies()
with session.no_autoflush:
s = StudyModel(
@@ -193,7 +194,8 @@ class TestStudyApi(BaseTest):
if study['id'] == 65432:
# This study has `null` for DATELASTMODIFIED, so we should use the value in DATECREATED
self.assertEqual('2020-02-19T14:24:55.101695+00:00', study['last_updated'])
if study['id'] == 11111:
self.assertTrue(False, "Study 11111 is too old to be processed and imported; it should be ignored.")
db_studies_after = session.query(StudyModel).all()
num_db_studies_after = len(db_studies_after)
self.assertGreater(num_db_studies_after, num_db_studies_before)
@@ -213,6 +215,7 @@ class TestStudyApi(BaseTest):
abandoned_events = session.query(StudyEvent).filter_by(status=StudyStatus.abandoned)
self.assertEqual(abandoned_events.count(), 1) # 1 study has been abandoned
# We don't currently set any studies to Open for Enrollment automatically
# Leaving the test here because we will need it again
# when we implement a new way to set Open for Enrollment

View File

@@ -38,7 +38,7 @@ class TestStudyService(BaseTest):
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_study_details') # mock_details
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_required_docs') # mock_docs
def test_total_tasks_updated(self, mock_docs, mock_details):
def test_study_progress(self, mock_docs, mock_details):
"""Assure that as a users progress is available when getting a list of studies for that user."""
app.config['PB_ENABLED'] = True
docs_response = self.protocol_builder_response('required_docs.json')
@@ -52,43 +52,26 @@ class TestStudyService(BaseTest):
spec_service = WorkflowSpecService()
categories = spec_service.get_categories()
studies = StudyService().get_studies_for_user(user, categories)
self.assertTrue(len(studies) == 1)
self.assertTrue(len(studies[0].categories) == 1)
study_id = studies[0].id
study = StudyService().get_study(study_id, categories, process_categories=True)
workflow = study.categories[0].workflows[0]
# workflow should not be started, and it should have 0 completed tasks, and 0 total tasks.
self.assertEqual(WorkflowStatus.not_started, workflow.status)
self.assertEqual(0, workflow.total_tasks)
self.assertEqual(0, workflow.completed_tasks)
self.assertTrue(len(studies) == 1)
self.assertEqual(0, studies[0].progress)
# Initialize the Workflow with the workflow processor.
workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.id == workflow.id).first()
workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.study_id == studies[0].id).first()
processor = WorkflowProcessor(workflow_model)
processor.do_engine_steps()
# Assure the workflow is now started, and knows the total and completed tasks.
spec_service = WorkflowSpecService()
categories = spec_service.get_categories()
study = StudyService().get_study(study_id, categories, process_categories=True)
workflow = study.categories[0].workflows[0]
# self.assertEqual(WorkflowStatus.user_input_required, workflow.status)
self.assertTrue(workflow.total_tasks > 0)
self.assertEqual(0, workflow.completed_tasks)
# Complete a task
task = processor.next_task()
task.data = {"type":"norris"}
processor.complete_task(task)
processor.do_engine_steps()
processor.save()
# Assure the workflow has moved on to the next task.
study = StudyService().get_study(study_id, categories, process_categories=True)
workflow = study.categories[0].workflows[0]
self.assertEqual(1, workflow.completed_tasks)
# Assure the progress is now updated
studies = StudyService().get_studies_for_user(user, categories)
self.assertGreater(studies[0].progress, 0)
# Get approvals
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_study_details') # mock_details
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_required_docs') # mock_docs