2020-05-14 21:13:47 +00:00
|
|
|
import glob
|
2020-01-23 20:32:53 +00:00
|
|
|
import glob
|
2019-12-27 18:50:03 +00:00
|
|
|
import os
|
2019-12-18 19:02:17 +00:00
|
|
|
|
2020-01-14 16:45:12 +00:00
|
|
|
from crc import app, db, session
|
2020-05-14 21:13:47 +00:00
|
|
|
from crc.models.file import CONTENT_TYPES
|
2020-12-01 16:47:59 +00:00
|
|
|
from crc.models.ldap import LdapModel
|
2020-12-01 16:17:07 +00:00
|
|
|
from crc.models.user import UserModel
|
2020-03-27 19:32:07 +00:00
|
|
|
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecCategoryModel
|
2021-07-06 17:10:20 +00:00
|
|
|
from crc.services.document_service import DocumentService
|
2020-03-04 18:40:25 +00:00
|
|
|
from crc.services.file_service import FileService
|
2021-07-06 17:10:20 +00:00
|
|
|
from crc.services.study_service import StudyService
|
2019-12-18 19:02:17 +00:00
|
|
|
|
2020-02-27 15:30:16 +00:00
|
|
|
|
2019-12-18 19:02:17 +00:00
|
|
|
class ExampleDataLoader:
    """Seeds the database with workflow categories, specs and reference files."""

    @staticmethod
    def clean_db():
        """Delete all rows from every table without dropping the schema.

        Tables are cleared in reverse dependency order so foreign-key
        constraints are not violated.
        """
        session.flush()  # Clear out any transactions before deleting it all to avoid spurious errors.
        engine = session.bind.engine
        # BUG FIX: the original opened a connection and never closed it; the
        # context manager releases it back to the pool even if a delete fails.
        with engine.connect() as connection:
            for table in reversed(db.metadata.sorted_tables):
                if engine.dialect.has_table(connection, table):
                    session.execute(table.delete())
        session.commit()
        session.flush()
|
2020-03-04 18:40:25 +00:00
|
|
|
|
|
|
|
def load_all(self):
|
2020-03-20 11:41:21 +00:00
|
|
|
|
|
|
|
self.load_reference_documents()
|
2020-03-27 19:32:07 +00:00
|
|
|
categories = [
|
|
|
|
WorkflowSpecCategoryModel(
|
|
|
|
id=0,
|
|
|
|
name='irb_review',
|
2020-04-28 02:54:05 +00:00
|
|
|
display_name='From PB',
|
2020-03-27 19:32:07 +00:00
|
|
|
display_order=0
|
|
|
|
),
|
|
|
|
WorkflowSpecCategoryModel(
|
|
|
|
id=1,
|
|
|
|
name='core_info',
|
2020-04-06 17:12:34 +00:00
|
|
|
display_name='Core Info',
|
2020-03-27 19:32:07 +00:00
|
|
|
display_order=1
|
|
|
|
),
|
|
|
|
WorkflowSpecCategoryModel(
|
|
|
|
id=2,
|
|
|
|
name='approvals',
|
2020-04-06 17:12:34 +00:00
|
|
|
display_name='Approvals',
|
2020-03-27 19:32:07 +00:00
|
|
|
display_order=2
|
|
|
|
),
|
|
|
|
WorkflowSpecCategoryModel(
|
|
|
|
id=3,
|
|
|
|
name='data_security_plan',
|
2020-04-06 17:12:34 +00:00
|
|
|
display_name='Data Security Plan',
|
2020-03-27 19:32:07 +00:00
|
|
|
display_order=3
|
|
|
|
),
|
|
|
|
WorkflowSpecCategoryModel(
|
|
|
|
id=4,
|
|
|
|
name='finance',
|
2020-04-06 17:12:34 +00:00
|
|
|
display_name='Finance',
|
2020-03-27 19:32:07 +00:00
|
|
|
display_order=4
|
|
|
|
),
|
|
|
|
WorkflowSpecCategoryModel(
|
|
|
|
id=5,
|
|
|
|
name='notifications',
|
2020-04-06 17:12:34 +00:00
|
|
|
display_name='Notifications',
|
2020-03-27 19:32:07 +00:00
|
|
|
display_order=5
|
|
|
|
),
|
2020-04-28 02:54:05 +00:00
|
|
|
WorkflowSpecCategoryModel(
|
|
|
|
id=6,
|
|
|
|
name='status',
|
|
|
|
display_name='Status',
|
|
|
|
display_order=6
|
|
|
|
),
|
2020-03-27 19:32:07 +00:00
|
|
|
]
|
2020-12-10 15:06:21 +00:00
|
|
|
db.session.execute("select setval('workflow_spec_category_id_seq',7);")
|
2020-03-27 19:32:07 +00:00
|
|
|
db.session.add_all(categories)
|
|
|
|
db.session.commit()
|
2020-04-06 17:12:34 +00:00
|
|
|
|
|
|
|
# Pass IRB Review
|
2020-03-25 15:13:52 +00:00
|
|
|
self.create_spec(id="irb_api_personnel",
|
|
|
|
name="irb_api_personnel",
|
2020-04-28 02:54:05 +00:00
|
|
|
display_name="Personnel",
|
2020-03-27 19:32:07 +00:00
|
|
|
description="TBD",
|
2020-04-10 15:13:43 +00:00
|
|
|
category_id=0,
|
|
|
|
display_order=0)
|
2020-04-06 17:12:34 +00:00
|
|
|
self.create_spec(id="irb_api_details",
|
|
|
|
name="irb_api_details",
|
|
|
|
display_name="Protocol Builder Data",
|
|
|
|
description="TBD",
|
2020-04-10 15:13:43 +00:00
|
|
|
category_id=0,
|
|
|
|
display_order=1)
|
2020-04-23 23:25:01 +00:00
|
|
|
self.create_spec(id="documents_approvals",
|
|
|
|
name="documents_approvals",
|
|
|
|
display_name="Documents & Approvals",
|
2020-04-28 02:54:05 +00:00
|
|
|
description="Status of all approvals and documents required from Protocol Builder",
|
2020-04-23 23:25:01 +00:00
|
|
|
category_id=0,
|
|
|
|
display_order=2)
|
2020-04-28 02:54:05 +00:00
|
|
|
self.create_spec(id="ide_supplement",
|
|
|
|
name="ide_supplement",
|
|
|
|
display_name="IDE Supplement Info",
|
|
|
|
description="Supplemental information for the IDE number entered in Protocol Builder",
|
|
|
|
category_id=0,
|
|
|
|
display_order=3)
|
2020-07-17 15:51:21 +00:00
|
|
|
self.create_spec(id="ind_update",
|
|
|
|
name="ind_update",
|
2020-04-28 02:54:05 +00:00
|
|
|
display_name="IND Supplement Info",
|
|
|
|
description="Supplement information for the Investigational New Drug(s) specified in Protocol Builder",
|
|
|
|
category_id=0,
|
|
|
|
display_order=4)
|
2020-04-06 17:12:34 +00:00
|
|
|
|
|
|
|
# Core Info
|
2020-04-28 02:54:05 +00:00
|
|
|
self.create_spec(id="protocol",
|
|
|
|
name="protocol",
|
|
|
|
display_name="Protocol",
|
|
|
|
description="Upload the Study Protocol here.",
|
|
|
|
category_id=1,
|
|
|
|
display_order=0)
|
2020-08-27 17:55:27 +00:00
|
|
|
self.create_spec(id="non_uva_approval",
|
|
|
|
name="non_uva",
|
|
|
|
display_name="Non-UVA Institutional Approval",
|
|
|
|
description="TBD",
|
|
|
|
category_id=1,
|
|
|
|
display_order=1)
|
2020-03-27 19:32:07 +00:00
|
|
|
self.create_spec(id="core_info",
|
|
|
|
name="core_info",
|
2020-04-06 17:12:34 +00:00
|
|
|
display_name="Core Info",
|
2020-03-27 19:32:07 +00:00
|
|
|
description="TBD",
|
2020-04-10 15:13:43 +00:00
|
|
|
category_id=1,
|
2020-08-27 17:55:27 +00:00
|
|
|
display_order=2)
|
2020-04-06 17:12:34 +00:00
|
|
|
|
|
|
|
# Approvals
|
|
|
|
self.create_spec(id="ids_full_submission",
|
|
|
|
name="ids_full_submission",
|
2020-04-28 02:54:05 +00:00
|
|
|
display_name="Investigational Drug Service (IDS) Full Submission",
|
2020-04-06 17:12:34 +00:00
|
|
|
description="TBD",
|
2020-04-10 15:13:43 +00:00
|
|
|
category_id=2,
|
|
|
|
display_order=0)
|
2020-04-06 17:12:34 +00:00
|
|
|
self.create_spec(id="ids_waiver",
|
|
|
|
name="ids_waiver",
|
|
|
|
display_name="Investigational Drug Service (IDS) Waiver",
|
2020-03-27 19:32:07 +00:00
|
|
|
description="TBD",
|
2020-04-10 15:13:43 +00:00
|
|
|
category_id=2,
|
|
|
|
display_order=1)
|
2020-04-28 02:54:05 +00:00
|
|
|
self.create_spec(id="rsc_hire_submission",
|
|
|
|
name="rsc_hire_submission",
|
|
|
|
display_name="RSC/HIRE Submission",
|
|
|
|
description="TBD",
|
|
|
|
category_id=2,
|
|
|
|
display_order=2)
|
|
|
|
self.create_spec(id="rsc_hire_committee",
|
|
|
|
name="rsc_hire_committee",
|
|
|
|
display_name="RSC/HIRE Committee",
|
|
|
|
description="TBD",
|
|
|
|
category_id=2,
|
|
|
|
display_order=3)
|
2020-08-27 17:55:27 +00:00
|
|
|
self.create_spec(id="department_chair_approval",
|
|
|
|
name="department_chair_approval",
|
|
|
|
display_name="Department Chair Approval",
|
|
|
|
description="TBD",
|
|
|
|
category_id=2,
|
|
|
|
display_order=4)
|
2020-04-06 17:12:34 +00:00
|
|
|
|
|
|
|
# Data Security Plan
|
2020-04-28 02:54:05 +00:00
|
|
|
self.create_spec(id="data_security_plan",
|
|
|
|
name="data_security_plan",
|
|
|
|
display_name="Data Security Plan",
|
|
|
|
description="Create and generate Data Security Plan",
|
2020-04-10 15:13:43 +00:00
|
|
|
category_id=3,
|
|
|
|
display_order=0)
|
2020-04-06 17:12:34 +00:00
|
|
|
|
|
|
|
# Finance
|
2020-03-25 15:13:52 +00:00
|
|
|
self.create_spec(id="sponsor_funding_source",
|
|
|
|
name="sponsor_funding_source",
|
2020-03-27 19:32:07 +00:00
|
|
|
display_name="Sponsor Funding Source",
|
|
|
|
description="TBD",
|
2020-04-10 15:13:43 +00:00
|
|
|
category_id=4,
|
|
|
|
display_order=0)
|
2020-03-27 19:32:07 +00:00
|
|
|
self.create_spec(id="finance",
|
|
|
|
name="finance",
|
2020-04-06 17:12:34 +00:00
|
|
|
display_name="Finance Data",
|
2020-03-27 19:32:07 +00:00
|
|
|
description="TBD",
|
2020-04-10 15:13:43 +00:00
|
|
|
category_id=4,
|
|
|
|
display_order=1)
|
2019-12-18 19:02:17 +00:00
|
|
|
|
2020-04-06 17:12:34 +00:00
|
|
|
# Notifications
|
|
|
|
self.create_spec(id="notifications",
|
|
|
|
name="notifications",
|
|
|
|
display_name="Notifications",
|
|
|
|
description="TBD",
|
2020-04-10 15:13:43 +00:00
|
|
|
category_id=5,
|
|
|
|
display_order=0)
|
2020-04-06 17:12:34 +00:00
|
|
|
|
2020-04-28 02:54:05 +00:00
|
|
|
# Status
|
|
|
|
self.create_spec(id="enrollment_date",
|
|
|
|
name="enrollment_date",
|
|
|
|
display_name="Enrollment Date",
|
|
|
|
description="Study enrollment date",
|
|
|
|
category_id=6,
|
|
|
|
display_order=0)
|
|
|
|
self.create_spec(id="abandoned",
|
|
|
|
name="abandoned",
|
|
|
|
display_name="Abandoned",
|
|
|
|
description="Place study into Abandoned status",
|
|
|
|
category_id=6,
|
|
|
|
display_order=1)
|
|
|
|
|
2020-04-15 14:58:13 +00:00
|
|
|
# Top Level (Master Status) Workflow
|
|
|
|
self.create_spec(id="top_level_workflow",
|
|
|
|
name="top_level_workflow",
|
|
|
|
display_name="Top Level Workflow",
|
|
|
|
description="Determines the status of other workflows in a study",
|
|
|
|
category_id=None,
|
|
|
|
master_spec=True)
|
|
|
|
|
2020-05-25 16:29:05 +00:00
|
|
|
def load_rrt(self):
|
|
|
|
file_path = os.path.join(app.root_path, 'static', 'reference', 'rrt_documents.xlsx')
|
|
|
|
file = open(file_path, "rb")
|
|
|
|
FileService.add_reference_file(FileService.DOCUMENT_LIST,
|
|
|
|
binary_data=file.read(),
|
|
|
|
content_type=CONTENT_TYPES['xls'])
|
|
|
|
file.close()
|
2019-12-27 18:50:03 +00:00
|
|
|
|
2020-05-25 16:29:05 +00:00
|
|
|
category = WorkflowSpecCategoryModel(
|
|
|
|
id=0,
|
|
|
|
name='research_rampup_category',
|
|
|
|
display_name='Research Ramp-up Category',
|
|
|
|
display_order=0
|
|
|
|
)
|
|
|
|
db.session.add(category)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-05-28 14:43:03 +00:00
|
|
|
self.create_spec(id="rrt_top_level_workflow",
|
|
|
|
name="rrt_top_level_workflow",
|
2020-05-25 16:29:05 +00:00
|
|
|
display_name="Top Level Workflow",
|
|
|
|
description="Does nothing, we don't use the master workflow here.",
|
|
|
|
category_id=None,
|
2020-05-28 14:43:03 +00:00
|
|
|
master_spec=True)
|
2020-05-25 16:29:05 +00:00
|
|
|
|
2020-05-28 14:43:03 +00:00
|
|
|
self.create_spec(id="research_rampup",
|
|
|
|
name="research_rampup",
|
2020-05-25 16:29:05 +00:00
|
|
|
display_name="Research Ramp-up Toolkit",
|
2020-05-28 14:43:03 +00:00
|
|
|
description="Process for creating a new research ramp-up request.",
|
2020-05-25 16:29:05 +00:00
|
|
|
category_id=0,
|
|
|
|
master_spec=False)
|
|
|
|
|
|
|
|
def load_test_data(self):
|
|
|
|
self.load_reference_documents()
|
|
|
|
|
|
|
|
category = WorkflowSpecCategoryModel(
|
|
|
|
id=0,
|
|
|
|
name='test_category',
|
|
|
|
display_name='Test Category',
|
|
|
|
display_order=0
|
|
|
|
)
|
|
|
|
db.session.add(category)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
self.create_spec(id="empty_workflow",
|
|
|
|
name="empty_workflow",
|
|
|
|
display_name="Top Level Workflow",
|
|
|
|
description="Does nothing, we don't use the master workflow here.",
|
|
|
|
category_id=None,
|
|
|
|
master_spec=True,
|
|
|
|
from_tests = True)
|
|
|
|
|
|
|
|
self.create_spec(id="random_fact",
|
|
|
|
name="random_fact",
|
|
|
|
display_name="Random Fact",
|
|
|
|
description="The workflow for a Random Fact.",
|
|
|
|
category_id=0,
|
2021-08-27 17:04:11 +00:00
|
|
|
display_order=0,
|
2020-05-25 16:29:05 +00:00
|
|
|
master_spec=False,
|
|
|
|
from_tests=True)
|
|
|
|
|
|
|
|
def create_spec(self, id, name, display_name="", description="", filepath=None, master_spec=False,
|
2021-08-03 14:02:22 +00:00
|
|
|
category_id=None, display_order=None, from_tests=False, standalone=False, library=False):
|
2020-01-23 20:32:53 +00:00
|
|
|
"""Assumes that a directory exists in static/bpmn with the same name as the given id.
|
|
|
|
further assumes that the [id].bpmn is the primary file for the workflow.
|
2019-12-31 21:32:47 +00:00
|
|
|
returns an array of data models to be added to the database."""
|
2020-03-05 21:55:46 +00:00
|
|
|
global file
|
2020-03-04 18:40:25 +00:00
|
|
|
file_service = FileService()
|
2019-12-31 21:32:47 +00:00
|
|
|
spec = WorkflowSpecModel(id=id,
|
2020-01-28 18:25:54 +00:00
|
|
|
name=name,
|
2019-12-31 21:32:47 +00:00
|
|
|
display_name=display_name,
|
2020-03-15 19:54:13 +00:00
|
|
|
description=description,
|
2020-03-27 19:32:07 +00:00
|
|
|
is_master_spec=master_spec,
|
2020-04-10 15:13:43 +00:00
|
|
|
category_id=category_id,
|
2021-04-26 12:52:12 +00:00
|
|
|
display_order=display_order,
|
2021-08-03 14:02:22 +00:00
|
|
|
standalone=standalone,
|
|
|
|
library=library)
|
2020-03-04 18:40:25 +00:00
|
|
|
db.session.add(spec)
|
|
|
|
db.session.commit()
|
2020-05-25 16:29:05 +00:00
|
|
|
if not filepath and not from_tests:
|
2020-10-05 21:35:35 +00:00
|
|
|
filepath = os.path.join(app.root_path, 'static', 'bpmn', id, "*.*")
|
2020-05-25 16:29:05 +00:00
|
|
|
if not filepath and from_tests:
|
2020-10-05 21:35:35 +00:00
|
|
|
filepath = os.path.join(app.root_path, '..', 'tests', 'data', id, "*.*")
|
2020-05-25 16:29:05 +00:00
|
|
|
|
2020-01-23 20:32:53 +00:00
|
|
|
files = glob.glob(filepath)
|
|
|
|
for file_path in files:
|
2020-10-05 21:35:35 +00:00
|
|
|
if os.path.isdir(file_path):
|
|
|
|
continue # Don't try to process sub directories
|
|
|
|
|
2020-01-23 20:32:53 +00:00
|
|
|
noise, file_extension = os.path.splitext(file_path)
|
|
|
|
filename = os.path.basename(file_path)
|
2020-03-13 18:57:28 +00:00
|
|
|
|
2020-03-15 19:54:13 +00:00
|
|
|
is_status = filename.lower() == 'status.bpmn'
|
2020-03-13 18:57:28 +00:00
|
|
|
is_primary = filename.lower() == id + '.bpmn'
|
2020-05-05 20:15:38 +00:00
|
|
|
file = None
|
2020-01-23 20:32:53 +00:00
|
|
|
try:
|
2020-03-13 18:57:28 +00:00
|
|
|
file = open(file_path, 'rb')
|
2020-02-10 21:19:23 +00:00
|
|
|
data = file.read()
|
2020-03-04 18:40:25 +00:00
|
|
|
content_type = CONTENT_TYPES[file_extension[1:]]
|
|
|
|
file_service.add_workflow_spec_file(workflow_spec=spec, name=filename, content_type=content_type,
|
2020-03-13 18:57:28 +00:00
|
|
|
binary_data=data, primary=is_primary, is_status=is_status)
|
2020-03-05 16:18:20 +00:00
|
|
|
except IsADirectoryError as de:
|
|
|
|
# Ignore sub directories
|
|
|
|
pass
|
2020-01-23 20:32:53 +00:00
|
|
|
finally:
|
2020-03-05 21:55:46 +00:00
|
|
|
if file:
|
|
|
|
file.close()
|
2020-03-04 18:40:25 +00:00
|
|
|
return spec
|
2020-03-20 11:41:21 +00:00
|
|
|
|
|
|
|
def load_reference_documents(self):
|
|
|
|
file_path = os.path.join(app.root_path, 'static', 'reference', 'irb_documents.xlsx')
|
|
|
|
file = open(file_path, "rb")
|
2021-07-06 17:10:20 +00:00
|
|
|
FileService.add_reference_file(DocumentService.DOCUMENT_LIST,
|
2020-05-07 17:57:24 +00:00
|
|
|
binary_data=file.read(),
|
|
|
|
content_type=CONTENT_TYPES['xls'])
|
|
|
|
file.close()
|
|
|
|
|
|
|
|
file_path = os.path.join(app.root_path, 'static', 'reference', 'investigators.xlsx')
|
|
|
|
file = open(file_path, "rb")
|
2021-07-06 17:10:20 +00:00
|
|
|
FileService.add_reference_file(StudyService.INVESTIGATOR_LIST,
|
2020-03-20 11:41:21 +00:00
|
|
|
binary_data=file.read(),
|
|
|
|
content_type=CONTENT_TYPES['xls'])
|
|
|
|
file.close()
|
2020-08-27 17:55:27 +00:00
|
|
|
|
2020-12-01 16:17:07 +00:00
|
|
|
def load_default_user(self):
|
2020-12-01 16:47:59 +00:00
|
|
|
user = UserModel(uid="dhf8r", email_address="dhf8r@virginia.edu", display_name="Development User")
|
|
|
|
ldap_info = LdapModel(uid="dhf8r", email_address="dhf8r@virginia.edu", display_name="Development User")
|
2020-12-01 16:17:07 +00:00
|
|
|
db.session.add(user)
|
2020-12-01 16:47:59 +00:00
|
|
|
db.session.add(ldap_info)
|
2020-12-01 16:17:07 +00:00
|
|
|
db.session.commit()
|
|
|
|
|
2020-08-27 17:55:27 +00:00
|
|
|
def ldap():
    """Stand-in for the BPMN `ldap` script task; returns placeholder data."""
    return "x"


def study_info(i):
    """Stand-in for the BPMN `study_info` script task; returns placeholder data."""
    return {"x": "Y"}


me = ldap()
investigators = study_info('investigators')

# Principal Investigator: note whether one exists and whether it is me.
pi = investigators.get('PI', None)
is_me_pi = False
if pi is not None:
    hasPI = True
    if pi['uid'] == me['uid']:
        is_me_pi = True
else:
    hasPI = False

# Department chair, if any.
dc = investigators.get('DEPT_CH', None)

# Study coordinators (SC_I, SC_II, IRBC) other than me.
pcs = {}
is_me_pc = False
is_me_pc_role = None  # BUG FIX: was unbound whenever I am not a coordinator
for k in investigators.keys():
    if k in ['SC_I', 'SC_II', 'IRBC']:
        investigator = investigators.get(k)
        if investigator['uid'] != me['uid']:
            pcs[k] = investigator
        else:
            is_me_pc = True
            is_me_pc_role = investigator['label']
        # BUG FIX: the original ran del(investigator) once after the loop,
        # raising NameError when no coordinator key was present; delete the
        # name only where it is guaranteed to be bound.
        del investigator
cnt_pcs = len(pcs.keys())

# Additional coordinator (AS_C) other than me.
acs = {}
is_me_ac = False
is_me_ac_role = None  # BUG FIX: was unbound whenever I am not the AS_C
for k in investigators.keys():
    if k == 'AS_C':
        investigator = investigators.get(k)
        if investigator['uid'] != me['uid']:
            acs[k] = investigator
        else:
            is_me_ac = True
            is_me_ac_role = investigator['label']
        del investigator
cnt_acs = len(acs.keys())

# Sub-investigators (keys starting with 'SI') other than me.
subs = {}
is_me_subs = False
for k in investigators.keys():
    if k[:2] == 'SI':
        investigator = investigators.get(k)
        if investigator['uid'] != me['uid']:
            subs[k] = investigator
        else:
            is_me_subs = True
        del investigator
cnt_subs = len(subs.keys())

del investigators
|