Merge branch 'rrt/dev' into dev

This commit is contained in: dev
Aaron Louie committed 2020-06-05 09:24:34 -04:00
commit b51b431534
21 changed files with 247 additions and 97 deletions

View File

@@ -468,7 +468,7 @@ paths:
             $ref: "#/components/schemas/File"
   delete:
     operationId: crc.api.file.delete_file
-    summary: Removes an existing file
+    summary: Removes an existing file. In the event the file can not be deleted, it is marked as "archived" in the database and is no longer returned unless specifically requested by id.
     tags:
       - Files
     responses:
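
The archive-on-failure behavior described by this summary is implemented in crc/services/file_service.py further down. A minimal sketch of what a caller observes, borrowing the test-client helpers from tests/test_files_api.py later in this commit (assumes a file that other records still reference):

    # Inside a test case: the DELETE reports success, but the row is archived rather than removed.
    rv = self.app.delete('/v1.0/file/%i' % file.id, headers=self.logged_in_headers())
    self.assert_success(rv)
    session.refresh(file)
    self.assertTrue(file.archived)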

View File

@@ -3,28 +3,21 @@ import pickle
 from base64 import b64decode
 from datetime import datetime

-from SpiffWorkflow import Workflow
-from SpiffWorkflow.serializer.dict import DictionarySerializer
-from SpiffWorkflow.serializer.json import JSONSerializer
 from flask import g

-from crc import app, db, session
-from crc.api.common import ApiError
+from crc import db, session
+from crc.api.common import ApiError, ApiErrorSchema
 from crc.models.approval import Approval, ApprovalModel, ApprovalSchema, ApprovalStatus
-from crc.models.ldap import LdapSchema
-from crc.models.study import Study
 from crc.models.workflow import WorkflowModel
 from crc.services.approval_service import ApprovalService
 from crc.services.ldap_service import LdapService
-from crc.services.workflow_processor import WorkflowProcessor


 def get_approvals(everything=False):
     if everything:
-        approvals = ApprovalService.get_all_approvals()
+        approvals = ApprovalService.get_all_approvals(include_cancelled=True)
     else:
-        approvals = ApprovalService.get_approvals_per_user(g.user.uid)
+        approvals = ApprovalService.get_approvals_per_user(g.user.uid, include_cancelled=False)
     results = ApprovalSchema(many=True).dump(approvals)
     return results
@@ -38,12 +31,11 @@ def get_approvals_for_study(study_id=None):
 # ----- Being decent into madness ---- #
 def get_csv():
-    """A huge bit of a one-off for RRT, but 3 weeks of midnight work can convince a
+    """A damn lie, it's a json file. A huge bit of a one-off for RRT, but 3 weeks of midnight work can convince a
     man to do just about anything"""
-    approvals = ApprovalService.get_all_approvals(ignore_cancelled=True)
+    approvals = ApprovalService.get_all_approvals(include_cancelled=False)
     output = []
     errors = []
-    ldapService = LdapService()
     for approval in approvals:
         try:
             if approval.status != ApprovalStatus.APPROVED.value:
@@ -56,24 +48,33 @@ def get_csv():
             last_task = find_task(data['last_task']['__uuid__'], data['task_tree'])
             personnel = extract_value(last_task, 'personnel')
             training_val = extract_value(last_task, 'RequiredTraining')
+            pi_supervisor = extract_value(last_task, 'PISupervisor')['value']
             review_complete = 'AllRequiredTraining' in training_val
             pi_uid = workflow.study.primary_investigator_id
-            pi_details = ldapService.user_info(pi_uid)
+            pi_details = LdapService.user_info(pi_uid)
             details = []
             details.append(pi_details)
             for person in personnel:
                 uid = person['PersonnelComputingID']['value']
-                details.append(ldapService.user_info(uid))
+                details.append(LdapService.user_info(uid))
             for person in details:
-                output.append({
+                record = {
                     "study_id": approval.study_id,
                     "pi_uid": pi_details.uid,
                     "pi": pi_details.display_name,
                     "name": person.display_name,
+                    "uid": person.uid,
                     "email": person.email_address,
+                    "supervisor": "",
                     "review_complete": review_complete,
-                })
+                }
+                # We only know the PI's supervisor.
+                if person.uid == pi_details.uid:
+                    record["supervisor"] = pi_supervisor
+                output.append(record)
         except Exception as e:
             errors.append("Error pulling data for workflow #%i: %s" % (approval.workflow_id, str(e)))
     return {"results": output, "errors": errors }

View File

@@ -122,7 +122,7 @@ def get_file_info(file_id):
 def update_file_info(file_id, body):
     if file_id is None:
-        raise ApiError('unknown_file', 'Please provide a valid File ID.')
+        raise ApiError('no_such_file', 'Please provide a valid File ID.')
     file_model = session.query(FileModel).filter_by(id=file_id).first()

View File

@@ -50,7 +50,7 @@ def update_study(study_id, body):
 def get_study(study_id):
     study = StudyService.get_study(study_id)
     if (study is None):
-        raise ApiError("Study not found", status_code=404)
+        raise ApiError("unknown_study", 'The study "' + study_id + '" is not recognized.', status_code=404)
     return StudySchema().dump(study)

View File

@@ -20,9 +20,9 @@ def render_markdown(data, template):
         data = json.loads(data)
         return template.render(**data)
     except UndefinedError as ue:
-        raise ApiError(code="undefined field", message=ue.message)
+        raise ApiError(code="undefined_field", message=ue.message)
     except Exception as e:
-        raise ApiError(code="invalid", message=str(e))
+        raise ApiError(code="invalid_render", message=str(e))


 def render_docx():
@@ -42,9 +42,9 @@ def render_docx():
             cache_timeout=-1  # Don't cache these files on the browser.
         )
     except ValueError as e:
-        raise ApiError(code="invalid", message=str(e))
+        raise ApiError(code="undefined_field", message=str(e))
     except Exception as e:
-        raise ApiError(code="invalid", message=str(e))
+        raise ApiError(code="invalid_render", message=str(e))


 def list_scripts():

View File

@@ -59,10 +59,7 @@ def sso_login():
     app.logger.info("SSO_LOGIN: Full URL: " + request.url)
     app.logger.info("SSO_LOGIN: User Id: " + uid)
     app.logger.info("SSO_LOGIN: Will try to redirect to : " + str(redirect))
-    ldap_service = LdapService()
-    info = ldap_service.user_info(uid)
+    info = LdapService.user_info(uid)
     return _handle_login(info, redirect)


 @app.route('/sso')
@@ -151,7 +148,7 @@ def backdoor(
     """
     if not 'PRODUCTION' in app.config or not app.config['PRODUCTION']:
-        ldap_info = LdapService().user_info(uid)
+        ldap_info = LdapService.user_info(uid)
         return _handle_login(ldap_info, redirect)
     else:
         raise ApiError('404', 'unknown')

View File

@@ -71,15 +71,13 @@ class Approval(object):
         if model.study:
             instance.title = model.study.title

-        ldap_service = LdapService()
         try:
-            instance.approver = ldap_service.user_info(model.approver_uid)
-            instance.primary_investigator = ldap_service.user_info(model.study.primary_investigator_id)
+            instance.approver = LdapService.user_info(model.approver_uid)
+            instance.primary_investigator = LdapService.user_info(model.study.primary_investigator_id)
         except ApiError as ae:
             app.logger.error("Ldap lookup failed for approval record %i" % model.id)

-        doc_dictionary = FileService.get_reference_data(FileService.DOCUMENT_LIST, 'code', ['id'])
+        doc_dictionary = FileService.get_doc_dictionary()
         instance.associated_files = []
         for approval_file in model.approval_files:
             try:

View File

@@ -82,7 +82,10 @@ class FileModel(db.Model):
     workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'), nullable=True)
     workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=True)
     irb_doc_code = db.Column(db.String, nullable=True)  # Code reference to the irb_documents.xlsx reference file.
+    # A request was made to delete the file, but we can't because there are
+    # active approvals or running workflows that depend on it. So we archive
+    # it instead, hide it in the interface.
+    archived = db.Column(db.Boolean, default=False, nullable=False)


 class File(object):
     @classmethod
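
The new column defaults to False, and the queries touched later in this commit (see crc/services/file_service.py below) exclude archived rows. In sketch form:

    # Sketch of the filter pattern the service layer now applies:
    active_files = session.query(FileModel).filter(FileModel.archived == False).all()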

View File

@@ -9,13 +9,14 @@ from crc.models.approval import ApprovalModel, ApprovalStatus, ApprovalFile, App
 from crc.models.study import StudyModel
 from crc.models.workflow import WorkflowModel
 from crc.services.file_service import FileService
+from crc.services.ldap_service import LdapService


 class ApprovalService(object):
     """Provides common tools for working with an Approval"""

     @staticmethod
-    def __one_approval_from_study(study, approver_uid = None, ignore_cancelled=False):
+    def __one_approval_from_study(study, approver_uid = None, include_cancelled=True):
         """Returns one approval, with all additional approvals as 'related_approvals',
         the main approval can be pinned to an approver with an optional argument.
         Will return null if no approvals exist on the study."""
@@ -23,7 +24,7 @@ class ApprovalService(object):
         related_approvals = []
         query = db.session.query(ApprovalModel).\
             filter(ApprovalModel.study_id == study.id)
-        if ignore_cancelled:
+        if not include_cancelled:
             query=query.filter(ApprovalModel.status != ApprovalStatus.CANCELED.value)
         approvals = query.all()
@@ -40,35 +41,38 @@ class ApprovalService(object):
         return main_approval

     @staticmethod
-    def get_approvals_per_user(approver_uid):
+    def get_approvals_per_user(approver_uid, include_cancelled=False):
         """Returns a list of approval objects (not db models) for the given
         approver. """
         studies = db.session.query(StudyModel).join(ApprovalModel).\
             filter(ApprovalModel.approver_uid == approver_uid).all()
         approvals = []
         for study in studies:
-            approval = ApprovalService.__one_approval_from_study(study, approver_uid)
+            approval = ApprovalService.__one_approval_from_study(study, approver_uid, include_cancelled)
             if approval:
                 approvals.append(approval)
         return approvals

     @staticmethod
-    def get_all_approvals(ignore_cancelled=False):
+    def get_all_approvals(include_cancelled=True):
         """Returns a list of all approval objects (not db models), one record
         per study, with any associated approvals grouped under the first approval."""
         studies = db.session.query(StudyModel).all()
         approvals = []
         for study in studies:
-            approval = ApprovalService.__one_approval_from_study(study, ignore_cancelled=ignore_cancelled)
+            approval = ApprovalService.__one_approval_from_study(study, include_cancelled=include_cancelled)
             if approval:
                 approvals.append(approval)
         return approvals

     @staticmethod
-    def get_approvals_for_study(study_id):
+    def get_approvals_for_study(study_id, include_cancelled=True):
         """Returns an array of Approval objects for the study, it does not
         compute the related approvals."""
-        db_approvals = session.query(ApprovalModel).filter_by(study_id=study_id).all()
+        query = session.query(ApprovalModel).filter_by(study_id=study_id)
+        if not include_cancelled:
+            query = query.filter(ApprovalModel.status != ApprovalStatus.CANCELED.value)
+        db_approvals = query.all()
         return [Approval.from_model(approval_model) for approval_model in db_approvals]
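
Note the polarity change: ignore_cancelled becomes include_cancelled, with defaults chosen per caller. A usage sketch, assuming an application context and seeded data (the uid is the one used in the test suite):

    everything = ApprovalService.get_all_approvals()                      # include_cancelled defaults to True
    active = ApprovalService.get_all_approvals(include_cancelled=False)   # what get_csv() now requests
    mine = ApprovalService.get_approvals_per_user('dhf8r')                # cancelled approvals excluded by default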

View File

@@ -45,10 +45,8 @@ class FileService(object):
     @staticmethod
     def is_allowed_document(code):
-        data_model = FileService.get_reference_file_data(FileService.DOCUMENT_LIST)
-        xls = ExcelFile(data_model.data)
-        df = xls.parse(xls.sheet_names[0])
-        return code in df['code'].values
+        doc_dict = FileService.get_doc_dictionary()
+        return code in doc_dict

     @staticmethod
     def add_workflow_file(workflow_id, irb_doc_code, name, content_type, binary_data):
@@ -96,6 +94,7 @@ class FileService(object):
     def get_workflow_files(workflow_id):
         """Returns all the file models associated with a running workflow."""
         return session.query(FileModel).filter(FileModel.workflow_id == workflow_id).\
+            filter(FileModel.archived == False).\
             order_by(FileModel.id).all()

     @staticmethod
@@ -127,7 +126,11 @@ class FileService(object):
         md5_checksum = UUID(hashlib.md5(binary_data).hexdigest())
         if (latest_data_model is not None) and (md5_checksum == latest_data_model.md5_hash):
-            # This file does not need to be updated, it's the same file.
+            # This file does not need to be updated, it's the same file. If it is arhived,
+            # then de-arvhive it.
+            file_model.archived = False
+            session.add(file_model)
+            session.commit()
             return file_model

         # Verify the extension
@@ -139,6 +142,7 @@ class FileService(object):
         else:
             file_model.type = FileType[file_extension]
             file_model.content_type = content_type
+            file_model.archived = False  # Unarchive the file if it is archived.

         if latest_data_model is None:
             version = 1
@@ -188,7 +192,8 @@ class FileService(object):
     def get_files_for_study(study_id, irb_doc_code=None):
         query = session.query(FileModel).\
                 join(WorkflowModel).\
-                filter(WorkflowModel.study_id == study_id)
+                filter(WorkflowModel.study_id == study_id).\
+                filter(FileModel.archived == False)
         if irb_doc_code:
             query = query.filter(FileModel.irb_doc_code == irb_doc_code)
         return query.all()
@@ -208,6 +213,9 @@ class FileService(object):
         if name:
             query = query.filter_by(name=name)
+        query = query.filter(FileModel.archived == False)
         query = query.order_by(FileModel.id)
         results = query.all()
@@ -270,11 +278,12 @@ class FileService(object):
     @staticmethod
     def get_workflow_file_data(workflow, file_name):
-        """Given a SPIFF Workflow Model, tracks down a file with the given name in the database and returns its data"""
+        """This method should be deleted, find where it is used, and remove this method.
+        Given a SPIFF Workflow Model, tracks down a file with the given name in the database and returns its data"""
         workflow_spec_model = FileService.find_spec_model_in_db(workflow)
         if workflow_spec_model is None:
-            raise ApiError(code="workflow_model_error",
+            raise ApiError(code="unknown_workflow",
                            message="Something is wrong. I can't find the workflow you are using.")
         file_data_model = session.query(FileDataModel) \
@@ -316,6 +325,10 @@ class FileService(object):
             session.query(FileModel).filter_by(id=file_id).delete()
             session.commit()
         except IntegrityError as ie:
-            app.logger.error("Failed to delete file: %i, due to %s" % (file_id, str(ie)))
-            raise ApiError('file_integrity_error', "You are attempting to delete a file that is "
-                                                   "required by other records in the system.")
+            # We can't delete the file or file data, because it is referenced elsewhere,
+            # but we can at least mark it as deleted on the table.
+            session.rollback()
+            file_model = session.query(FileModel).filter_by(id=file_id).first()
+            file_model.archived = True
+            session.commit()
+            app.logger.info("Failed to delete file, so archiving it instead. %i, due to %s" % (file_id, str(ie)))

View File

@@ -4,7 +4,7 @@ from attr import asdict
 from ldap3.core.exceptions import LDAPExceptionError

 from crc import app, db
-from ldap3 import Connection, Server, MOCK_SYNC
+from ldap3 import Connection, Server, MOCK_SYNC, RESTARTABLE

 from crc.api.common import ApiError
 from crc.models.ldap import LdapModel, LdapSchema
@@ -19,37 +19,42 @@ class LdapService(object):
     cn_single_search = '(&(objectclass=person)(cn=%s*))'
     cn_double_search = '(&(objectclass=person)(&(cn=%s*)(cn=*%s*)))'

-    def __init__(self):
-        if app.config['TESTING']:
-            server = Server('my_fake_server')
-            self.conn = Connection(server, client_strategy=MOCK_SYNC)
-            file_path = os.path.abspath(os.path.join(app.root_path, '..', 'tests', 'data', 'ldap_response.json'))
-            self.conn.strategy.entries_from_json(file_path)
-            self.conn.bind()
-        else:
-            server = Server(app.config['LDAP_URL'], connect_timeout=app.config['LDAP_TIMEOUT_SEC'])
-            self.conn = Connection(server,
-                                   auto_bind=True,
-                                   receive_timeout=app.config['LDAP_TIMEOUT_SEC'],
-                                   )
-
-    def __del__(self):
-        if self.conn:
-            self.conn.unbind()
+    conn = None
+
+    @staticmethod
+    def __get_conn():
+        if not LdapService.conn:
+            if app.config['TESTING']:
+                server = Server('my_fake_server')
+                conn = Connection(server, client_strategy=MOCK_SYNC)
+                file_path = os.path.abspath(os.path.join(app.root_path, '..', 'tests', 'data', 'ldap_response.json'))
+                conn.strategy.entries_from_json(file_path)
+                conn.bind()
+            else:
+                server = Server(app.config['LDAP_URL'], connect_timeout=app.config['LDAP_TIMEOUT_SEC'])
+                conn = Connection(server, auto_bind=True,
+                                  receive_timeout=app.config['LDAP_TIMEOUT_SEC'],
+                                  client_strategy=RESTARTABLE)
+            LdapService.conn = conn
+        return LdapService.conn

-    def user_info(self, uva_uid):
+    @staticmethod
+    def user_info(uva_uid):
         user_info = db.session.query(LdapModel).filter(LdapModel.uid == uva_uid).first()
         if not user_info:
             search_string = LdapService.uid_search_string % uva_uid
-            self.conn.search(LdapService.search_base, search_string, attributes=LdapService.attributes)
-            if len(self.conn.entries) < 1:
+            conn = LdapService.__get_conn()
+            conn.search(LdapService.search_base, search_string, attributes=LdapService.attributes)
+            if len(conn.entries) < 1:
                 raise ApiError("missing_ldap_record", "Unable to locate a user with id %s in LDAP" % uva_uid)
-            entry = self.conn.entries[0]
+            entry = conn.entries[0]
             user_info = LdapModel.from_entry(entry)
             db.session.add(user_info)
         return user_info

-    def search_users(self, query, limit):
+    @staticmethod
+    def search_users(query, limit):
         if len(query.strip()) < 3:
             return []
         elif query.endswith(' '):
@@ -66,12 +71,13 @@ class LdapService(object):
         results = []
         print(search_string)
         try:
-            self.conn.search(LdapService.search_base, search_string, attributes=LdapService.attributes)
+            conn = LdapService.__get_conn()
+            conn.search(LdapService.search_base, search_string, attributes=LdapService.attributes)
             # Entries are returned as a generator, accessing entries
             # can make subsequent calls to the ldap service, so limit
             # those here.
             count = 0
-            for entry in self.conn.entries:
+            for entry in conn.entries:
                 if count > limit:
                     break
                 results.append(LdapSchema().dump(LdapModel.from_entry(entry)))
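
With this change LdapService is used purely through static methods; __get_conn() builds the connection lazily (a MOCK_SYNC connection under TESTING, a RESTARTABLE one otherwise) and caches it on the class. A usage sketch, with the uid taken from the test fixtures:

    user = LdapService.user_info('lb3dp')    # checks the LdapModel cache in the DB first, then LDAP
    print(user.display_name, user.email_address)
    matches = LdapService.search_users('barnes', limit=5)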

View File

@@ -103,7 +103,7 @@ class LookupService(object):
             workflow_id=workflow_model.id,
             name=file_name)
         if len(latest_files) < 1:
-            raise ApiError("missing_file", "Unable to locate the lookup data file '%s'" % file_name)
+            raise ApiError("invalid_enum", "Unable to locate the lookup data file '%s'" % file_name)
         else:
             data_model = latest_files[0]
@@ -189,7 +189,7 @@ class LookupService(object):
     @staticmethod
     def _run_ldap_query(query, limit):
-        users = LdapService().search_users(query, limit)
+        users = LdapService.search_users(query, limit)

         """Converts the user models into something akin to the
         LookupModel in models/file.py, so this can be returned in the same way

View File

@@ -25,7 +25,7 @@ class ProtocolBuilderService(object):
     def get_studies(user_id) -> {}:
         ProtocolBuilderService.__enabled_or_raise()
         if not isinstance(user_id, str):
-            raise ApiError("invalid_user_id", "This user id is invalid: " + str(user_id))
+            raise ApiError("protocol_builder_error", "This user id is invalid: " + str(user_id))
         response = requests.get(ProtocolBuilderService.STUDY_URL % user_id)
         if response.ok and response.text:
             pb_studies = ProtocolBuilderStudySchema(many=True).loads(response.text)

View File

@@ -206,8 +206,7 @@ class StudyService(object):
     @staticmethod
     def get_ldap_dict_if_available(user_id):
         try:
-            ldap_service = LdapService()
-            return LdapSchema().dump(ldap_service.user_info(user_id))
+            return LdapSchema().dump(LdapService().user_info(user_id))
         except ApiError as ae:
             app.logger.info(str(ae))
             return {"error": str(ae)}
@@ -319,9 +318,9 @@ class StudyService(object):
         try:
             StudyService._create_workflow_model(study_model, workflow_spec)
         except WorkflowTaskExecException as wtee:
-            errors.append(ApiError.from_task("workflow_execution_exception", str(wtee), wtee.task))
+            errors.append(ApiError.from_task("workflow_startup_exception", str(wtee), wtee.task))
         except WorkflowException as we:
-            errors.append(ApiError.from_task_spec("workflow_execution_exception", str(we), we.sender))
+            errors.append(ApiError.from_task_spec("workflow_startup_exception", str(we), we.sender))
         return errors
@staticmethod @staticmethod

View File

@@ -82,7 +82,7 @@ class WorkflowService(object):
             processor = WorkflowProcessor(workflow_model, validate_only=True)
         except WorkflowException as we:
             WorkflowService.delete_test_data()
-            raise ApiError.from_workflow_exception("workflow_execution_exception", str(we), we)
+            raise ApiError.from_workflow_exception("workflow_validation_exception", str(we), we)

         while not processor.bpmn_workflow.is_completed():
             try:
@@ -96,7 +96,7 @@ class WorkflowService(object):
                     task.complete()
             except WorkflowException as we:
                 WorkflowService.delete_test_data()
-                raise ApiError.from_workflow_exception("workflow_execution_exception", str(we), we)
+                raise ApiError.from_workflow_exception("workflow_validation_exception", str(we), we)
         WorkflowService.delete_test_data()
         return processor.bpmn_workflow.last_task.data
@@ -162,7 +162,7 @@ class WorkflowService(object):
                     options.append({"id": d.value, "label": d.label})
                 return random.choice(options)
             else:
-                raise ApiError.from_task("invalid_autocomplete", "The settings for this auto complete field "
+                raise ApiError.from_task("unknown_lookup_option", "The settings for this auto complete field "
                                          "are incorrect: %s " % field.id, task)
         elif field.type == "long":
             return random.randint(1, 1000)

View File

@@ -0,0 +1,28 @@
+"""empty message
+
+Revision ID: 17597692d0b0
+Revises: 13424d5a6de8
+Create Date: 2020-06-03 17:33:56.454339
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = '17597692d0b0'
+down_revision = '13424d5a6de8'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('file', sa.Column('archived', sa.Boolean(), nullable=True, default=False))
+    op.execute("UPDATE file SET archived = false")
+    op.alter_column('file', 'archived', nullable=False)
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('file', 'archived')
+    # ### end Alembic commands ###
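
The upgrade adds the column as nullable, backfills existing rows, and only then tightens it to NOT NULL, which keeps the migration safe on a table that already contains data. A single-step alternative using a server-side default would look roughly like this (sketch for comparison only, not what this revision does):

    # op.add_column('file', sa.Column('archived', sa.Boolean(), nullable=False,
    #                                 server_default=sa.false()))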

View File

@@ -1,8 +1,9 @@
 from tests.base_test import BaseTest

+from crc import db
 from crc.services.file_service import FileService
 from crc.services.workflow_processor import WorkflowProcessor


 class TestFileService(BaseTest):
     """Largely tested via the test_file_api, and time is tight, but adding new tests here."""
@@ -46,12 +47,42 @@ class TestFileService(BaseTest):
                                       name="anything.png", content_type="text",
                                       binary_data=b'5678')

+    def test_replace_archive_file_unarchives_the_file_and_updates(self):
+        self.load_example_data()
+        self.create_reference_document()
+        workflow = self.create_workflow('file_upload_form')
+        processor = WorkflowProcessor(workflow)
+        task = processor.next_task()
+        irb_code = "UVACompl_PRCAppr"  # The first file referenced in pb required docs.
+        FileService.add_workflow_file(workflow_id=workflow.id,
+                                      irb_doc_code=irb_code,
+                                      name="anything.png", content_type="text",
+                                      binary_data=b'1234')
+
+        # Archive the file
+        file_models = FileService.get_workflow_files(workflow_id=workflow.id)
+        self.assertEquals(1, len(file_models))
+        file_model = file_models[0]
+        file_model.archived = True
+        db.session.add(file_model)
+
+        # Assure that the file no longer comes back.
+        file_models = FileService.get_workflow_files(workflow_id=workflow.id)
+        self.assertEquals(0, len(file_models))
+
+        # Add the file again with different data
+        FileService.add_workflow_file(workflow_id=workflow.id,
+                                      irb_doc_code=irb_code,
+                                      name="anything.png", content_type="text",
+                                      binary_data=b'5678')
+
         file_models = FileService.get_workflow_files(workflow_id=workflow.id)
         self.assertEquals(1, len(file_models))

         file_data = FileService.get_workflow_data_files(workflow_id=workflow.id)
         self.assertEquals(1, len(file_data))
         self.assertEquals(2, file_data[0].version)
+        self.assertEquals(b'5678', file_data[0].data)

     def test_add_file_from_form_allows_multiple_files_with_different_names(self):
         self.load_example_data()

View File

@@ -3,12 +3,14 @@ import json
 from tests.base_test import BaseTest

-from crc import session
+from crc import session, db
 from crc.models.file import FileModel, FileType, FileSchema, FileModelSchema
 from crc.models.workflow import WorkflowSpecModel
 from crc.services.file_service import FileService
 from crc.services.workflow_processor import WorkflowProcessor
 from example_data import ExampleDataLoader
+from crc.services.approval_service import ApprovalService
+from crc.models.approval import ApprovalModel, ApprovalStatus


 class TestFilesApi(BaseTest):
@@ -46,6 +48,7 @@ class TestFilesApi(BaseTest):
         json_data = json.loads(rv.get_data(as_text=True))
         self.assertEqual(2, len(json_data))

+
     def test_create_file(self):
         self.load_example_data()
         spec = session.query(WorkflowSpecModel).first()
@@ -89,6 +92,39 @@ class TestFilesApi(BaseTest):
         self.assert_success(rv)

+    def test_archive_file_no_longer_shows_up(self):
+        self.load_example_data()
+        self.create_reference_document()
+        workflow = self.create_workflow('file_upload_form')
+        processor = WorkflowProcessor(workflow)
+        task = processor.next_task()
+        data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
+        correct_name = task.task_spec.form.fields[0].id
+
+        data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
+        rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_id=%i&form_field_key=%s' %
+                           (workflow.study_id, workflow.id, task.id, correct_name), data=data, follow_redirects=True,
+                           content_type='multipart/form-data', headers=self.logged_in_headers())
+
+        self.assert_success(rv)
+        rv = self.app.get('/v1.0/file?workflow_id=%s' % workflow.id, headers=self.logged_in_headers())
+        self.assert_success(rv)
+        self.assertEquals(1, len(json.loads(rv.get_data(as_text=True))))
+
+        file_model = db.session.query(FileModel).filter(FileModel.workflow_id == workflow.id).all()
+        self.assertEquals(1, len(file_model))
+        file_model[0].archived = True
+        db.session.commit()
+
+        rv = self.app.get('/v1.0/file?workflow_id=%s' % workflow.id, headers=self.logged_in_headers())
+        self.assert_success(rv)
+        self.assertEquals(0, len(json.loads(rv.get_data(as_text=True))))
+
     def test_set_reference_file(self):
         file_name = "irb_document_types.xls"
         data = {'file': (io.BytesIO(b"abcdef"), "does_not_matter.xls")}
@@ -218,6 +254,41 @@ class TestFilesApi(BaseTest):
         rv = self.app.get('/v1.0/file/%i' % file.id, headers=self.logged_in_headers())
         self.assertEqual(404, rv.status_code)

+    def test_delete_file_after_approval(self):
+        self.create_reference_document()
+        workflow = self.create_workflow("empty_workflow")
+        FileService.add_workflow_file(workflow_id=workflow.id,
+                                      name="anything.png", content_type="text",
+                                      binary_data=b'5678', irb_doc_code="UVACompl_PRCAppr")
+        FileService.add_workflow_file(workflow_id=workflow.id,
+                                      name="anotother_anything.png", content_type="text",
+                                      binary_data=b'1234', irb_doc_code="Study_App_Doc")
+        ApprovalService.add_approval(study_id=workflow.study_id, workflow_id=workflow.id, approver_uid="dhf8r")
+
+        file = session.query(FileModel).\
+            filter(FileModel.workflow_id == workflow.id).\
+            filter(FileModel.name == "anything.png").first()
+        self.assertFalse(file.archived)
+        rv = self.app.get('/v1.0/file/%i' % file.id, headers=self.logged_in_headers())
+        self.assert_success(rv)
+
+        rv = self.app.delete('/v1.0/file/%i' % file.id, headers=self.logged_in_headers())
+        self.assert_success(rv)
+
+        session.refresh(file)
+        self.assertTrue(file.archived)
+
+        ApprovalService.add_approval(study_id=workflow.study_id, workflow_id=workflow.id, approver_uid="dhf8r")
+
+        approvals = session.query(ApprovalModel)\
+            .filter(ApprovalModel.status == ApprovalStatus.PENDING.value)\
+            .filter(ApprovalModel.study_id == workflow.study_id).all()
+
+        self.assertEquals(1, len(approvals))
+        self.assertEquals(1, len(approvals[0].approval_files))
+
     def test_change_primary_bpmn(self):
         self.load_example_data()
         spec = session.query(WorkflowSpecModel).first()

View File

@@ -7,13 +7,13 @@ from crc.services.ldap_service import LdapService
 class TestLdapService(BaseTest):

     def setUp(self):
-        self.ldap_service = LdapService()
+        pass

     def tearDown(self):
         pass

     def test_get_single_user(self):
-        user_info = self.ldap_service.user_info("lb3dp")
+        user_info = LdapService.user_info("lb3dp")
         self.assertIsNotNone(user_info)
         self.assertEqual("lb3dp", user_info.uid)
         self.assertEqual("Laura Barnes", user_info.display_name)
@@ -27,7 +27,7 @@ class TestLdapService(BaseTest):
     def test_find_missing_user(self):
         try:
-            user_info = self.ldap_service.user_info("nosuch")
+            user_info = LdapService.user_info("nosuch")
             self.assertFalse(True, "An API error should be raised.")
         except ApiError as ae:
             self.assertEquals("missing_ldap_record", ae.code)

View File

@@ -208,7 +208,6 @@ class TestTasksApi(BaseTest):
         self.assert_success(rv)

-
     def test_get_documentation_populated_in_end(self):
         self.load_example_data()
         workflow = self.create_workflow('random_fact')

View File

@@ -71,7 +71,7 @@ class TestWorkflowSpecValidation(BaseTest):
         self.load_example_data()
         errors = self.validate_workflow("invalid_expression")
         self.assertEqual(2, len(errors))
-        self.assertEqual("workflow_execution_exception", errors[0]['code'])
+        self.assertEqual("workflow_validation_exception", errors[0]['code'])
         self.assertEqual("ExclusiveGateway_003amsm", errors[0]['task_id'])
         self.assertEqual("Has Bananas Gateway", errors[0]['task_name'])
         self.assertEqual("invalid_expression.bpmn", errors[0]['file_name'])
@@ -92,7 +92,7 @@ class TestWorkflowSpecValidation(BaseTest):
         self.load_example_data()
         errors = self.validate_workflow("invalid_script")
         self.assertEqual(2, len(errors))
-        self.assertEqual("workflow_execution_exception", errors[0]['code'])
+        self.assertEqual("workflow_validation_exception", errors[0]['code'])
         self.assertTrue("NoSuchScript" in errors[0]['message'])
         self.assertEqual("Invalid_Script_Task", errors[0]['task_id'])
         self.assertEqual("An Invalid Script Reference", errors[0]['task_name'])