2019-12-18 19:02:17 +00:00
|
|
|
import datetime
|
2020-01-23 20:32:53 +00:00
|
|
|
import glob
|
2019-12-27 18:50:03 +00:00
|
|
|
import os
|
2020-02-27 15:30:16 +00:00
|
|
|
import xml.etree.ElementTree as ElementTree
|
2019-12-18 19:02:17 +00:00
|
|
|
|
2020-01-14 16:45:12 +00:00
|
|
|
from crc import app, db, session
|
2020-01-24 16:52:52 +00:00
|
|
|
from crc.models.file import FileType, FileModel, FileDataModel
|
|
|
|
from crc.models.study import StudyModel
|
2020-02-27 15:30:16 +00:00
|
|
|
from crc.models.user import UserModel
|
2020-01-24 16:52:52 +00:00
|
|
|
from crc.models.workflow import WorkflowSpecModel
|
2020-02-10 21:19:23 +00:00
|
|
|
from crc.services.workflow_processor import WorkflowProcessor
|
2019-12-18 19:02:17 +00:00
|
|
|
|
2020-02-27 15:30:16 +00:00
|
|
|
|
2019-12-18 19:02:17 +00:00
|
|
|
class ExampleDataLoader:
    """Loads a small, fixed set of example users, studies, and workflow
    specifications for development and testing.

    `make_data` builds the model instances; `load_all` persists them;
    `clean_db` wipes every table first.
    """

    def make_data(self):
        """Build (but do not persist) the example model instances.

        Returns a list of SQLAlchemy model objects: one user, two studies,
        and all models produced by the example workflow specifications
        (specs, their files, and the file data).
        """
        users = [
            UserModel(
                uid='dhf8r',
                email_address='dhf8r@virginia.EDU',
                display_name='Daniel Harold Funk',
                affiliation='staff@virginia.edu;member@virginia.edu',
                eppn='dhf8r@virginia.edu',
                first_name='Daniel',
                last_name='Funk',
                title='SOFTWARE ENGINEER V'
            )
        ]

        studies = [
            StudyModel(
                id=1,
                title='The impact of fried pickles on beer consumption in bipedal software developers.',
                last_updated=datetime.datetime.now(),
                protocol_builder_status='in_process',
                primary_investigator_id='dhf8r',
                sponsor='Sartography Pharmaceuticals',
                ind_number='1234',
                user_uid='dhf8r'
            ),
            StudyModel(
                id=2,
                title='Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
                last_updated=datetime.datetime.now(),
                protocol_builder_status='in_process',
                primary_investigator_id='dhf8r',
                sponsor='Makerspace & Co.',
                ind_number='5678',
                user_uid='dhf8r'
            ),
        ]

        workflow_specifications = \
            self.create_spec(id="crc2_training_session_enter_core_info",
                             name="crc2_training_session_enter_core_info",
                             display_name="CR Connect2 - Training Session - Core Info",
                             description='Part of Milestone 3 Deliverable')
        workflow_specifications += \
            self.create_spec(id="crc2_training_session_data_security_plan",
                             name="crc2_training_session_data_security_plan",
                             display_name="CR Connect2 - Training Session - Data Security Plan",
                             description='Part of Milestone 3 Deliverable')
        workflow_specifications += \
            self.create_spec(id="sponsor_funding_source",
                             name="sponsor_funding_source",
                             display_name="Sponsor and/or Funding Source ",
                             description='TBD')

        all_data = users + studies + workflow_specifications
        return all_data

    def create_spec(self, id, name, display_name="", description="", filepath=None):
        """Assumes that a directory exists in static/bpmn with the same name as the given id.

        Further assumes that the [id].bpmn is the primary file for the workflow.
        Returns an array of data models to be added to the database.

        :param id: workflow spec id; also the directory name under static/bpmn
            (kept as `id` despite shadowing the builtin — it is part of the
            public keyword interface of this method).
        :param name: machine name of the spec.
        :param display_name: human-readable name shown in the UI.
        :param description: free-text description of the spec.
        :param filepath: optional glob pattern for the spec's files; defaults
            to every file in static/bpmn/<id>/.
        :raises Exception: if a file with an unsupported extension is found.
        """
        # Extension -> FileType lookup replaces the original if/elif chain.
        extension_types = {
            '.bpmn': FileType.bpmn,
            '.dmn': FileType.dmn,
            '.svg': FileType.svg,
            '.docx': FileType.docx,
        }

        models = []
        spec = WorkflowSpecModel(id=id,
                                 name=name,
                                 display_name=display_name,
                                 description=description)
        models.append(spec)

        if not filepath:
            filepath = os.path.join(app.root_path, 'static', 'bpmn', id, "*")
        files = glob.glob(filepath)
        for file_path in files:
            noise, file_extension = os.path.splitext(file_path)
            filename = os.path.basename(file_path)

            # Renamed from `type` to avoid shadowing the builtin.  The
            # original raised and then had an unreachable `continue`; the
            # dead statement is removed here.
            file_type = extension_types.get(file_extension.lower())
            if file_type is None:
                raise Exception("Unsupported file type:" + file_path)

            is_primary = filename.lower() == id + ".bpmn"
            file_model = FileModel(name=filename, type=file_type, content_type='text/xml', version="1",
                                   last_updated=datetime.datetime.now(), primary=is_primary,
                                   workflow_spec_id=id)
            models.append(file_model)

            # `with` guarantees the handle is closed.  The original opened
            # the file as the first statement inside a try/finally, so a
            # failed open() made the finally clause raise NameError and
            # mask the real error.
            with open(file_path, "rb") as file:
                data = file.read()
            if is_primary:
                # The primary BPMN file determines the spec's process id.
                bpmn: ElementTree.Element = ElementTree.fromstring(data)
                spec.primary_process_id = WorkflowProcessor.get_process_id(bpmn)
                print("Locating Process Id for " + filename + " " + spec.primary_process_id)
            models.append(FileDataModel(data=data, file_model=file_model))
        return models

    @staticmethod
    def clean_db():
        """Delete every row from every table, in reverse dependency order,
        so foreign-key constraints are not violated."""
        session.flush()  # Clear out any transactions before deleting it all to avoid spurious errors.
        for table in reversed(db.metadata.sorted_tables):
            session.execute(table.delete())
        session.flush()

    def load_all(self):
        """Persist everything produced by make_data() to the database."""
        for data in self.make_data():
            session.add(data)
            session.commit()
        session.flush()