2019-12-18 19:02:17 +00:00
|
|
|
# Set environment variable to testing before loading.
|
|
|
|
# IMPORTANT - Environment must be loaded before app, models, etc....
|
|
|
|
import json
|
|
|
|
import os
|
2020-01-24 14:35:14 +00:00
|
|
|
import unittest
|
2020-02-27 15:30:16 +00:00
|
|
|
import urllib.parse
|
|
|
|
|
2020-03-19 21:13:30 +00:00
|
|
|
from crc.models.study import StudyModel
|
2020-03-05 18:25:28 +00:00
|
|
|
from crc.services.file_service import FileService
|
2020-03-19 21:13:30 +00:00
|
|
|
from crc.services.workflow_processor import WorkflowProcessor
|
2020-03-05 18:25:28 +00:00
|
|
|
|
2020-02-27 15:30:16 +00:00
|
|
|
os.environ["TESTING"] = "true"
|
2020-01-24 14:35:14 +00:00
|
|
|
|
2020-03-05 18:25:28 +00:00
|
|
|
from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
|
2020-03-19 21:13:30 +00:00
|
|
|
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel
|
2020-02-27 15:30:16 +00:00
|
|
|
from crc.models.user import UserModel
|
2019-12-18 19:02:17 +00:00
|
|
|
|
2020-01-14 16:45:12 +00:00
|
|
|
from crc import app, db, session
|
2019-12-30 18:03:57 +00:00
|
|
|
from example_data import ExampleDataLoader
|
2019-12-18 19:02:17 +00:00
|
|
|
|
2019-12-27 18:50:03 +00:00
|
|
|
# UNCOMMENT THIS FOR DEBUGGING SQL ALCHEMY QUERIES
|
|
|
|
# import logging
|
|
|
|
# logging.basicConfig()
|
|
|
|
# logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
|
|
|
|
|
|
|
|
|
2020-01-24 14:35:14 +00:00
|
|
|
class BaseTest(unittest.TestCase):
    """Great class to inherit from, as it sets up and tears down classes
    efficiently when we have a database in place.

    Provides helpers for authenticating test requests, loading example/spec
    data into the test database, and asserting on Flask test-client responses.
    """

    # Cached per-test auth material; reset in tearDown().
    auths = {}
    # Default UVA computing id used by logged_in_headers() when no user is given.
    test_uid = "dhf8r"

    @classmethod
    def setUpClass(cls):
        """Configure the app for testing and create a fresh schema once per class."""
        app.config.from_object('config.testing')
        cls.ctx = app.test_request_context()
        cls.app = app.test_client()
        db.create_all()

    @classmethod
    def tearDownClass(cls):
        """Drop the schema and release the scoped session after the class runs."""
        db.drop_all()
        session.remove()

    def setUp(self):
        # Push a request context so code under test can use flask.g / request.
        self.ctx.push()

    def tearDown(self):
        ExampleDataLoader.clean_db()  # This does not seem to work, some collision of sessions.
        self.ctx.pop()
        self.auths = {}

    def logged_in_headers(self, user=None, redirect_url='http://some/frontend/url'):
        """Log a user in through the sso_backdoor endpoint and return an
        Authorization header dict suitable for authenticated test requests.

        :param user: optional UserModel; defaults to the built-in test user.
        :param redirect_url: URL the backdoor is expected to redirect to.
        :return: dict with a 'Authorization: Bearer <token>' entry.
        """
        if user is None:
            uid = self.test_uid
            user_info = {'uid': self.test_uid, 'first_name': 'Daniel', 'last_name': 'Funk',
                         'email_address': 'dhf8r@virginia.edu'}
        else:
            uid = user.uid
            user_info = {'uid': user.uid, 'first_name': user.first_name, 'last_name': user.last_name,
                         'email_address': user.email_address}

        query_string = self.user_info_to_query_string(user_info, redirect_url)
        rv = self.app.get("/v1.0/sso_backdoor%s" % query_string, follow_redirects=False)
        # The backdoor must answer with a redirect back to the frontend.
        self.assertEqual(302, rv.status_code)
        self.assertTrue(rv.location.startswith(redirect_url))

        user_model = session.query(UserModel).filter_by(uid=uid).first()
        self.assertIsNotNone(user_model.display_name)
        return dict(Authorization='Bearer ' + user_model.encode_auth_token().decode())

    def load_example_data(self):
        """Reset the database and load the full example data set, then sanity
        check that every workflow spec has at least one file with data."""
        from example_data import ExampleDataLoader
        ExampleDataLoader.clean_db()
        ExampleDataLoader().load_all()

        specs = session.query(WorkflowSpecModel).all()
        self.assertIsNotNone(specs)

        for spec in specs:
            files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all()
            self.assertIsNotNone(files)
            self.assertGreater(len(files), 0)

            for file in files:
                file_data = session.query(FileDataModel).filter_by(file_model_id=file.id).all()
                self.assertIsNotNone(file_data)
                self.assertGreater(len(file_data), 0)

    @staticmethod
    def load_test_spec(dir_name, master_spec=False):
        """Loads a spec into the database based on a directory in /tests/data.

        Idempotent: returns None without reloading if the spec already exists.
        """
        if session.query(WorkflowSpecModel).filter_by(id=dir_name).count() > 0:
            return
        filepath = os.path.join(app.root_path, '..', 'tests', 'data', dir_name, "*")
        return ExampleDataLoader().create_spec(id=dir_name, name=dir_name, filepath=filepath, master_spec=master_spec)

    @staticmethod
    def protocol_builder_response(file_name):
        """Return the canned Protocol Builder response stored under
        tests/data/pb_responses/<file_name> as a string."""
        filepath = os.path.join(app.root_path, '..', 'tests', 'data', 'pb_responses', file_name)
        with open(filepath, 'r') as myfile:
            data = myfile.read()
        return data

    def assert_success(self, rv, msg=""):
        """Assert the response has a 2xx status, including the JSON body in
        the failure message when the body is parseable."""
        try:
            data = json.loads(rv.get_data(as_text=True))
        except ValueError:
            # Body is not JSON (covers json.JSONDecodeError); fall back to a
            # status-only failure message rather than masking the real error.
            self.assertTrue(200 <= rv.status_code < 300,
                            "BAD Response: %i." % rv.status_code + ". " + msg)
        else:
            self.assertTrue(200 <= rv.status_code < 300,
                            "BAD Response: %i. \n %s" %
                            (rv.status_code, json.dumps(data)) + ". " + msg)

    def assert_failure(self, rv, status_code=0, error_code=""):
        """Assert the response is NOT 2xx; optionally pin the exact status
        code and the API error 'code' field in the JSON body."""
        self.assertFalse(200 <= rv.status_code < 300,
                         "Incorrect Valid Response:" + rv.status + ".")
        if status_code != 0:
            self.assertEqual(status_code, rv.status_code)
        if error_code != "":
            data = json.loads(rv.get_data(as_text=True))
            self.assertEqual(error_code, data["code"])

    @staticmethod
    def user_info_to_query_string(user_info, redirect_url):
        """Build the sso_backdoor query string ('?k=v&...&redirect_url=...')
        from a user-info dict, percent-encoding each value.

        Note: redirect_url is deliberately NOT quoted, matching what the
        backdoor endpoint expects.
        """
        query_string_list = []
        for key, value in user_info.items():
            query_string_list.append('%s=%s' % (key, urllib.parse.quote(value)))

        query_string_list.append('redirect_url=%s' % redirect_url)

        return '?%s' % '&'.join(query_string_list)

    def replace_file(self, name, file_path):
        """Replaces a stored file with the given name with the contents of the file at the given path."""
        file_service = FileService()
        # Use a context manager so the file handle is always closed
        # (the previous version leaked it).
        with open(file_path, "rb") as fh:
            data = fh.read()

        file_model = db.session.query(FileModel).filter(FileModel.name == name).first()
        _, file_extension = os.path.splitext(file_path)
        # CONTENT_TYPES is keyed by extension without the leading dot.
        content_type = CONTENT_TYPES[file_extension[1:]]
        file_service.update_file(file_model, data, content_type)

    def create_workflow(self, workflow_name):
        """Load the named test spec, attach it to the first study via the API,
        and return the resulting WorkflowModel."""
        study = session.query(StudyModel).first()
        spec = self.load_test_spec(workflow_name)
        # Creating the processor registers the workflow for the study;
        # the returned processor itself is not needed here.
        WorkflowProcessor.create(study.id, spec.id)
        rv = self.app.post(
            '/v1.0/study/%i/workflows' % study.id,
            headers=self.logged_in_headers(),
            content_type="application/json",
            data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
        self.assert_success(rv)
        workflow = session.query(WorkflowModel).filter_by(study_id=study.id, workflow_spec_id=workflow_name).first()
        return workflow

    def create_reference_document(self):
        """Load the IRB document-categories reference spreadsheet into the
        FileService as the reference file tests rely on."""
        file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'reference', 'irb_documents.xlsx')
        # Context manager guarantees the handle is closed even if the
        # service call raises.
        with open(file_path, "rb") as fh:
            FileService.add_reference_file(FileService.IRB_PRO_CATEGORIES_FILE,
                                           binary_data=fh.read(),
                                           content_type=CONTENT_TYPES['xls'])