Merge pull request #147 from sartography/dev

Dev to Test
Dan Funk 2020-07-10 09:12:46 -04:00 committed by GitHub
commit 8976ad70ed
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
18 changed files with 235 additions and 151 deletions

Pipfile.lock (generated)

@@ -197,40 +197,43 @@
},
"coverage": {
"hashes": [
"sha256:00f1d23f4336efc3b311ed0d807feb45098fc86dee1ca13b3d6768cdab187c8a",
"sha256:01333e1bd22c59713ba8a79f088b3955946e293114479bbfc2e37d522be03355",
"sha256:0cb4be7e784dcdc050fc58ef05b71aa8e89b7e6636b99967fadbdba694cf2b65",
"sha256:0e61d9803d5851849c24f78227939c701ced6704f337cad0a91e0972c51c1ee7",
"sha256:1601e480b9b99697a570cea7ef749e88123c04b92d84cedaa01e117436b4a0a9",
"sha256:2742c7515b9eb368718cd091bad1a1b44135cc72468c731302b3d641895b83d1",
"sha256:2d27a3f742c98e5c6b461ee6ef7287400a1956c11421eb574d843d9ec1f772f0",
"sha256:402e1744733df483b93abbf209283898e9f0d67470707e3c7516d84f48524f55",
"sha256:5c542d1e62eece33c306d66fe0a5c4f7f7b3c08fecc46ead86d7916684b36d6c",
"sha256:5f2294dbf7875b991c381e3d5af2bcc3494d836affa52b809c91697449d0eda6",
"sha256:6402bd2fdedabbdb63a316308142597534ea8e1895f4e7d8bf7476c5e8751fef",
"sha256:66460ab1599d3cf894bb6baee8c684788819b71a5dc1e8fa2ecc152e5d752019",
"sha256:782caea581a6e9ff75eccda79287daefd1d2631cc09d642b6ee2d6da21fc0a4e",
"sha256:79a3cfd6346ce6c13145731d39db47b7a7b859c0272f02cdb89a3bdcbae233a0",
"sha256:7a5bdad4edec57b5fb8dae7d3ee58622d626fd3a0be0dfceda162a7035885ecf",
"sha256:8fa0cbc7ecad630e5b0f4f35b0f6ad419246b02bc750de7ac66db92667996d24",
"sha256:a027ef0492ede1e03a8054e3c37b8def89a1e3c471482e9f046906ba4f2aafd2",
"sha256:a3f3654d5734a3ece152636aad89f58afc9213c6520062db3978239db122f03c",
"sha256:a82b92b04a23d3c8a581fc049228bafde988abacba397d57ce95fe95e0338ab4",
"sha256:acf3763ed01af8410fc36afea23707d4ea58ba7e86a8ee915dfb9ceff9ef69d0",
"sha256:adeb4c5b608574a3d647011af36f7586811a2c1197c861aedb548dd2453b41cd",
"sha256:b83835506dfc185a319031cf853fa4bb1b3974b1f913f5bb1a0f3d98bdcded04",
"sha256:bb28a7245de68bf29f6fb199545d072d1036a1917dca17a1e75bbb919e14ee8e",
"sha256:bf9cb9a9fd8891e7efd2d44deb24b86d647394b9705b744ff6f8261e6f29a730",
"sha256:c317eaf5ff46a34305b202e73404f55f7389ef834b8dbf4da09b9b9b37f76dd2",
"sha256:dbe8c6ae7534b5b024296464f387d57c13caa942f6d8e6e0346f27e509f0f768",
"sha256:de807ae933cfb7f0c7d9d981a053772452217df2bf38e7e6267c9cbf9545a796",
"sha256:dead2ddede4c7ba6cb3a721870f5141c97dc7d85a079edb4bd8d88c3ad5b20c7",
"sha256:dec5202bfe6f672d4511086e125db035a52b00f1648d6407cc8e526912c0353a",
"sha256:e1ea316102ea1e1770724db01998d1603ed921c54a86a2efcb03428d5417e489",
"sha256:f90bfc4ad18450c80b024036eaf91e4a246ae287701aaa88eaebebf150868052"
"sha256:0fc4e0d91350d6f43ef6a61f64a48e917637e1dcfcba4b4b7d543c628ef82c2d",
"sha256:10f2a618a6e75adf64329f828a6a5b40244c1c50f5ef4ce4109e904e69c71bd2",
"sha256:12eaccd86d9a373aea59869bc9cfa0ab6ba8b1477752110cb4c10d165474f703",
"sha256:1874bdc943654ba46d28f179c1846f5710eda3aeb265ff029e0ac2b52daae404",
"sha256:1dcebae667b73fd4aa69237e6afb39abc2f27520f2358590c1b13dd90e32abe7",
"sha256:1e58fca3d9ec1a423f1b7f2aa34af4f733cbfa9020c8fe39ca451b6071237405",
"sha256:214eb2110217f2636a9329bc766507ab71a3a06a8ea30cdeebb47c24dce5972d",
"sha256:25fe74b5b2f1b4abb11e103bb7984daca8f8292683957d0738cd692f6a7cc64c",
"sha256:32ecee61a43be509b91a526819717d5e5650e009a8d5eda8631a59c721d5f3b6",
"sha256:3740b796015b889e46c260ff18b84683fa2e30f0f75a171fb10d2bf9fb91fc70",
"sha256:3b2c34690f613525672697910894b60d15800ac7e779fbd0fccf532486c1ba40",
"sha256:41d88736c42f4a22c494c32cc48a05828236e37c991bd9760f8923415e3169e4",
"sha256:42fa45a29f1059eda4d3c7b509589cc0343cd6bbf083d6118216830cd1a51613",
"sha256:4bb385a747e6ae8a65290b3df60d6c8a692a5599dc66c9fa3520e667886f2e10",
"sha256:509294f3e76d3f26b35083973fbc952e01e1727656d979b11182f273f08aa80b",
"sha256:5c74c5b6045969b07c9fb36b665c9cac84d6c174a809fc1b21bdc06c7836d9a0",
"sha256:60a3d36297b65c7f78329b80120f72947140f45b5c7a017ea730f9112b40f2ec",
"sha256:6f91b4492c5cde83bfe462f5b2b997cdf96a138f7c58b1140f05de5751623cf1",
"sha256:7403675df5e27745571aba1c957c7da2dacb537c21e14007ec3a417bf31f7f3d",
"sha256:87bdc8135b8ee739840eee19b184804e5d57f518578ffc797f5afa2c3c297913",
"sha256:8a3decd12e7934d0254939e2bf434bf04a5890c5bf91a982685021786a08087e",
"sha256:9702e2cb1c6dec01fb8e1a64c015817c0800a6eca287552c47a5ee0ebddccf62",
"sha256:a4d511012beb967a39580ba7d2549edf1e6865a33e5fe51e4dce550522b3ac0e",
"sha256:bbb387811f7a18bdc61a2ea3d102be0c7e239b0db9c83be7bfa50f095db5b92a",
"sha256:bfcc811883699ed49afc58b1ed9f80428a18eb9166422bce3c31a53dba00fd1d",
"sha256:c32aa13cc3fe86b0f744dfe35a7f879ee33ac0a560684fef0f3e1580352b818f",
"sha256:ca63dae130a2e788f2b249200f01d7fa240f24da0596501d387a50e57aa7075e",
"sha256:d54d7ea74cc00482a2410d63bf10aa34ebe1c49ac50779652106c867f9986d6b",
"sha256:d67599521dff98ec8c34cd9652cbcfe16ed076a2209625fca9dc7419b6370e5c",
"sha256:d82db1b9a92cb5c67661ca6616bdca6ff931deceebb98eecbd328812dab52032",
"sha256:d9ad0a988ae20face62520785ec3595a5e64f35a21762a57d115dae0b8fb894a",
"sha256:ebf2431b2d457ae5217f3a1179533c456f3272ded16f8ed0b32961a6d90e38ee",
"sha256:ed9a21502e9223f563e071759f769c3d6a2e1ba5328c31e86830368e8d78bc9c",
"sha256:f50632ef2d749f541ca8e6c07c9928a37f87505ce3a9f20c8446ad310f1aa87b"
],
"index": "pypi",
"version": "==5.1"
"version": "==5.2"
},
"docutils": {
"hashes": [
@@ -565,7 +568,8 @@
},
"openpyxl": {
"hashes": [
"sha256:6e62f058d19b09b95d20ebfbfb04857ad08d0833190516c1660675f699c6186f"
"sha256:6e62f058d19b09b95d20ebfbfb04857ad08d0833190516c1660675f699c6186f",
"sha256:d88dd1480668019684c66cfff3e52a5de4ed41e9df5dd52e008cbf27af0dbf87"
],
"index": "pypi",
"version": "==3.0.4"
@@ -827,11 +831,11 @@
},
"sphinx": {
"hashes": [
"sha256:74fbead182a611ce1444f50218a1c5fc70b6cc547f64948f5182fb30a2a20258",
"sha256:97c9e3bcce2f61d9f5edf131299ee9d1219630598d9f9a8791459a4d9e815be5"
"sha256:97dbf2e31fc5684bb805104b8ad34434ed70e6c588f6896991b2fdfd2bef8c00",
"sha256:b9daeb9b39aa1ffefc2809b43604109825300300b987a24f45976c001ba1a8fd"
],
"index": "pypi",
"version": "==3.1.1"
"version": "==3.1.2"
},
"sphinxcontrib-applehelp": {
"hashes": [
@@ -878,7 +882,7 @@
"spiffworkflow": {
"editable": true,
"git": "https://github.com/sartography/SpiffWorkflow.git",
"ref": "4d16fe9727bf2033d6f651ed0dece20693d54025"
"ref": "e47dbce4147f2475f50ef705eab32a1426540613"
},
"sqlalchemy": {
"hashes": [
@@ -1007,40 +1011,43 @@
},
"coverage": {
"hashes": [
"sha256:00f1d23f4336efc3b311ed0d807feb45098fc86dee1ca13b3d6768cdab187c8a",
"sha256:01333e1bd22c59713ba8a79f088b3955946e293114479bbfc2e37d522be03355",
"sha256:0cb4be7e784dcdc050fc58ef05b71aa8e89b7e6636b99967fadbdba694cf2b65",
"sha256:0e61d9803d5851849c24f78227939c701ced6704f337cad0a91e0972c51c1ee7",
"sha256:1601e480b9b99697a570cea7ef749e88123c04b92d84cedaa01e117436b4a0a9",
"sha256:2742c7515b9eb368718cd091bad1a1b44135cc72468c731302b3d641895b83d1",
"sha256:2d27a3f742c98e5c6b461ee6ef7287400a1956c11421eb574d843d9ec1f772f0",
"sha256:402e1744733df483b93abbf209283898e9f0d67470707e3c7516d84f48524f55",
"sha256:5c542d1e62eece33c306d66fe0a5c4f7f7b3c08fecc46ead86d7916684b36d6c",
"sha256:5f2294dbf7875b991c381e3d5af2bcc3494d836affa52b809c91697449d0eda6",
"sha256:6402bd2fdedabbdb63a316308142597534ea8e1895f4e7d8bf7476c5e8751fef",
"sha256:66460ab1599d3cf894bb6baee8c684788819b71a5dc1e8fa2ecc152e5d752019",
"sha256:782caea581a6e9ff75eccda79287daefd1d2631cc09d642b6ee2d6da21fc0a4e",
"sha256:79a3cfd6346ce6c13145731d39db47b7a7b859c0272f02cdb89a3bdcbae233a0",
"sha256:7a5bdad4edec57b5fb8dae7d3ee58622d626fd3a0be0dfceda162a7035885ecf",
"sha256:8fa0cbc7ecad630e5b0f4f35b0f6ad419246b02bc750de7ac66db92667996d24",
"sha256:a027ef0492ede1e03a8054e3c37b8def89a1e3c471482e9f046906ba4f2aafd2",
"sha256:a3f3654d5734a3ece152636aad89f58afc9213c6520062db3978239db122f03c",
"sha256:a82b92b04a23d3c8a581fc049228bafde988abacba397d57ce95fe95e0338ab4",
"sha256:acf3763ed01af8410fc36afea23707d4ea58ba7e86a8ee915dfb9ceff9ef69d0",
"sha256:adeb4c5b608574a3d647011af36f7586811a2c1197c861aedb548dd2453b41cd",
"sha256:b83835506dfc185a319031cf853fa4bb1b3974b1f913f5bb1a0f3d98bdcded04",
"sha256:bb28a7245de68bf29f6fb199545d072d1036a1917dca17a1e75bbb919e14ee8e",
"sha256:bf9cb9a9fd8891e7efd2d44deb24b86d647394b9705b744ff6f8261e6f29a730",
"sha256:c317eaf5ff46a34305b202e73404f55f7389ef834b8dbf4da09b9b9b37f76dd2",
"sha256:dbe8c6ae7534b5b024296464f387d57c13caa942f6d8e6e0346f27e509f0f768",
"sha256:de807ae933cfb7f0c7d9d981a053772452217df2bf38e7e6267c9cbf9545a796",
"sha256:dead2ddede4c7ba6cb3a721870f5141c97dc7d85a079edb4bd8d88c3ad5b20c7",
"sha256:dec5202bfe6f672d4511086e125db035a52b00f1648d6407cc8e526912c0353a",
"sha256:e1ea316102ea1e1770724db01998d1603ed921c54a86a2efcb03428d5417e489",
"sha256:f90bfc4ad18450c80b024036eaf91e4a246ae287701aaa88eaebebf150868052"
"sha256:0fc4e0d91350d6f43ef6a61f64a48e917637e1dcfcba4b4b7d543c628ef82c2d",
"sha256:10f2a618a6e75adf64329f828a6a5b40244c1c50f5ef4ce4109e904e69c71bd2",
"sha256:12eaccd86d9a373aea59869bc9cfa0ab6ba8b1477752110cb4c10d165474f703",
"sha256:1874bdc943654ba46d28f179c1846f5710eda3aeb265ff029e0ac2b52daae404",
"sha256:1dcebae667b73fd4aa69237e6afb39abc2f27520f2358590c1b13dd90e32abe7",
"sha256:1e58fca3d9ec1a423f1b7f2aa34af4f733cbfa9020c8fe39ca451b6071237405",
"sha256:214eb2110217f2636a9329bc766507ab71a3a06a8ea30cdeebb47c24dce5972d",
"sha256:25fe74b5b2f1b4abb11e103bb7984daca8f8292683957d0738cd692f6a7cc64c",
"sha256:32ecee61a43be509b91a526819717d5e5650e009a8d5eda8631a59c721d5f3b6",
"sha256:3740b796015b889e46c260ff18b84683fa2e30f0f75a171fb10d2bf9fb91fc70",
"sha256:3b2c34690f613525672697910894b60d15800ac7e779fbd0fccf532486c1ba40",
"sha256:41d88736c42f4a22c494c32cc48a05828236e37c991bd9760f8923415e3169e4",
"sha256:42fa45a29f1059eda4d3c7b509589cc0343cd6bbf083d6118216830cd1a51613",
"sha256:4bb385a747e6ae8a65290b3df60d6c8a692a5599dc66c9fa3520e667886f2e10",
"sha256:509294f3e76d3f26b35083973fbc952e01e1727656d979b11182f273f08aa80b",
"sha256:5c74c5b6045969b07c9fb36b665c9cac84d6c174a809fc1b21bdc06c7836d9a0",
"sha256:60a3d36297b65c7f78329b80120f72947140f45b5c7a017ea730f9112b40f2ec",
"sha256:6f91b4492c5cde83bfe462f5b2b997cdf96a138f7c58b1140f05de5751623cf1",
"sha256:7403675df5e27745571aba1c957c7da2dacb537c21e14007ec3a417bf31f7f3d",
"sha256:87bdc8135b8ee739840eee19b184804e5d57f518578ffc797f5afa2c3c297913",
"sha256:8a3decd12e7934d0254939e2bf434bf04a5890c5bf91a982685021786a08087e",
"sha256:9702e2cb1c6dec01fb8e1a64c015817c0800a6eca287552c47a5ee0ebddccf62",
"sha256:a4d511012beb967a39580ba7d2549edf1e6865a33e5fe51e4dce550522b3ac0e",
"sha256:bbb387811f7a18bdc61a2ea3d102be0c7e239b0db9c83be7bfa50f095db5b92a",
"sha256:bfcc811883699ed49afc58b1ed9f80428a18eb9166422bce3c31a53dba00fd1d",
"sha256:c32aa13cc3fe86b0f744dfe35a7f879ee33ac0a560684fef0f3e1580352b818f",
"sha256:ca63dae130a2e788f2b249200f01d7fa240f24da0596501d387a50e57aa7075e",
"sha256:d54d7ea74cc00482a2410d63bf10aa34ebe1c49ac50779652106c867f9986d6b",
"sha256:d67599521dff98ec8c34cd9652cbcfe16ed076a2209625fca9dc7419b6370e5c",
"sha256:d82db1b9a92cb5c67661ca6616bdca6ff931deceebb98eecbd328812dab52032",
"sha256:d9ad0a988ae20face62520785ec3595a5e64f35a21762a57d115dae0b8fb894a",
"sha256:ebf2431b2d457ae5217f3a1179533c456f3272ded16f8ed0b32961a6d90e38ee",
"sha256:ed9a21502e9223f563e071759f769c3d6a2e1ba5328c31e86830368e8d78bc9c",
"sha256:f50632ef2d749f541ca8e6c07c9928a37f87505ce3a9f20c8446ad310f1aa87b"
],
"index": "pypi",
"version": "==5.1"
"version": "==5.2"
},
"importlib-metadata": {
"hashes": [


@@ -57,23 +57,11 @@ class Approval(object):
@classmethod
def from_model(cls, model: ApprovalModel):
# TODO: Reduce the code by iterating over model's dict keys
instance = cls()
instance.id = model.id
instance.study_id = model.study_id
instance.workflow_id = model.workflow_id
instance.version = model.version
instance.approver_uid = model.approver_uid
instance.status = model.status
instance.message = model.message
instance.date_created = model.date_created
instance.date_approved = model.date_approved
instance.version = model.version
instance.title = ''
args = dict((k, v) for k, v in model.__dict__.items() if not k.startswith('_'))
instance = cls(**args)
instance.related_approvals = []
instance.title = model.study.title if model.study else ''
if model.study:
instance.title = model.study.title
try:
instance.approver = LdapService.user_info(model.approver_uid)
instance.primary_investigator = LdapService.user_info(model.study.primary_investigator_id)
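
A note on the refactor above: instead of copying each column by hand, from_model now builds keyword arguments from the model's __dict__ and skips SQLAlchemy's underscore-prefixed bookkeeping attributes. A minimal, self-contained sketch of that pattern (FakeModel and this Approval constructor are illustrative stand-ins, not the project's real classes):

class FakeModel:
    """Stand-in for a SQLAlchemy model instance."""
    def __init__(self):
        self.id = 1
        self.study_id = 42
        self.status = "PENDING"
        self._sa_instance_state = object()  # SQLAlchemy's internal bookkeeping attribute

class Approval:
    def __init__(self, **kwargs):
        # Accept whatever column values the model carries.
        self.__dict__.update(kwargs)

model = FakeModel()
# Keep only public attributes; anything starting with "_" (the SQLAlchemy state) is dropped.
args = dict((k, v) for k, v in model.__dict__.items() if not k.startswith('_'))
instance = Approval(**args)
print(instance.study_id)  # 42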


@@ -287,7 +287,7 @@ class ApprovalService(object):
)
if mail_result:
app.logger.error(mail_result, exc_info=True)
# TODO: Log update action by approver_uid - maybe ?
return db_approval
@staticmethod
@@ -299,11 +299,12 @@ class ApprovalService(object):
pending approvals and create a new approval for the latest version
of the workflow."""
# Find any existing approvals for this workflow and approver.
latest_approval_request = db.session.query(ApprovalModel). \
# Find any existing approvals for this workflow.
latest_approval_requests = db.session.query(ApprovalModel). \
filter(ApprovalModel.workflow_id == workflow_id). \
filter(ApprovalModel.approver_uid == approver_uid). \
order_by(desc(ApprovalModel.version)).first()
order_by(desc(ApprovalModel.version))
latest_approver_request = latest_approval_requests.filter(ApprovalModel.approver_uid == approver_uid).first()
# Construct as hash of the latest files to see if things have changed since
# the last approval.
@@ -318,16 +319,20 @@ class ApprovalService(object):
# If an existing approval request exists and no changes were made, do nothing.
# If there is an existing approval request for a previous version of the workflow
# then add a new request, and cancel any waiting/pending requests.
if latest_approval_request:
request_file_ids = list(file.file_data_id for file in latest_approval_request.approval_files)
if latest_approver_request:
request_file_ids = list(file.file_data_id for file in latest_approver_request.approval_files)
current_data_file_ids.sort()
request_file_ids.sort()
other_approver = latest_approval_requests.filter(ApprovalModel.approver_uid != approver_uid).first()
if current_data_file_ids == request_file_ids:
return # This approval already exists.
return # This approval already exists or we're updating other approver.
else:
latest_approval_request.status = ApprovalStatus.CANCELED.value
db.session.add(latest_approval_request)
version = latest_approval_request.version + 1
for approval_request in latest_approval_requests:
if (approval_request.version == latest_approver_request.version and
approval_request.status != ApprovalStatus.CANCELED.value):
approval_request.status = ApprovalStatus.CANCELED.value
db.session.add(approval_request)
version = latest_approver_request.version + 1
else:
version = 1
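
The hunk above splits the old single query into a reusable base query for the workflow plus a per-approver lookup, and cancels every not-yet-canceled request at the superseded version before issuing a new one. A simplified, database-free sketch of that control flow, with plain dicts standing in for ApprovalModel rows (all names and fields here are illustrative):

CANCELED = "CANCELED"

def add_approval(approvals, workflow_id, approver_uid, current_file_ids):
    # All existing requests for this workflow, newest version first.
    requests = sorted((a for a in approvals if a["workflow_id"] == workflow_id),
                      key=lambda a: a["version"], reverse=True)
    # The latest request made of this particular approver, if any.
    mine = next((a for a in requests if a["approver_uid"] == approver_uid), None)
    if mine:
        if sorted(mine["file_ids"]) == sorted(current_file_ids):
            return  # Nothing changed; keep the existing request.
        # The files changed: cancel every request (any approver) still open at the old version.
        for req in requests:
            if req["version"] == mine["version"] and req["status"] != CANCELED:
                req["status"] = CANCELED
        version = mine["version"] + 1
    else:
        version = 1
    approvals.append({"workflow_id": workflow_id, "approver_uid": approver_uid,
                      "version": version, "status": "PENDING",
                      "file_ids": list(current_file_ids)})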


@@ -13,7 +13,7 @@ class EmailService(object):
"""Provides common tools for working with an Email"""
@staticmethod
def add_email(subject, sender, recipients, content, content_html, study_id):
def add_email(subject, sender, recipients, content, content_html, study_id=None):
"""We will receive all data related to an email and store it"""
# Find corresponding study - if any
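
With study_id now defaulting to None, callers that have no study context can simply omit it. A hedged usage sketch (the module path and argument values are assumptions for illustration; only the signature comes from the change above):

from crc.services.email_service import EmailService  # module path assumed from the project layout

# No associated study: study_id defaults to None.
EmailService.add_email(subject="Approval request",
                       sender="noreply@example.edu",
                       recipients=["approver@example.edu"],
                       content="Plain-text body",
                       content_html="<p>HTML body</p>")

# Tied to a study, as before.
EmailService.add_email(subject="Approval request",
                       sender="noreply@example.edu",
                       recipients=["approver@example.edu"],
                       content="Plain-text body",
                       content_html="<p>HTML body</p>",
                       study_id=42)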


@@ -1,3 +1,4 @@
from copy import copy
from datetime import datetime
import json
from typing import List
@@ -64,13 +65,15 @@ class StudyService(object):
study.files = list(files)
# Calling this line repeatedly is very very slow. It creates the
# master spec and runs it.
status = StudyService.__get_study_status(study_model)
study.warnings = StudyService.__update_status_of_workflow_meta(workflow_metas, status)
# master spec and runs it. Don't execute this for Abandoned studies, as
# we don't have the information to process them.
if study.protocol_builder_status != ProtocolBuilderStatus.ABANDONED:
status = StudyService.__get_study_status(study_model)
study.warnings = StudyService.__update_status_of_workflow_meta(workflow_metas, status)
# Group the workflows into their categories.
for category in study.categories:
category.workflows = {w for w in workflow_metas if w.category_id == category.id}
# Group the workflows into their categories.
for category in study.categories:
category.workflows = {w for w in workflow_metas if w.category_id == category.id}
return study
@@ -183,6 +186,7 @@ class StudyService(object):
@staticmethod
def get_investigators(study_id, all=False):
"""Convert array of investigators from protocol builder into a dictionary keyed on the type. """
# Loop through all known investigator types as set in the reference file
inv_dictionary = FileService.get_reference_data(FileService.INVESTIGATOR_LIST, 'code')
@@ -190,18 +194,26 @@
# Get PB required docs
pb_investigators = ProtocolBuilderService.get_investigators(study_id=study_id)
"""Convert array of investigators from protocol builder into a dictionary keyed on the type"""
# It is possible for the same type to show up more than once in some circumstances, in those events
# append a counter to the name.
investigators = {}
for i_type in inv_dictionary:
pb_data = next((item for item in pb_investigators if item['INVESTIGATORTYPE'] == i_type), None)
if pb_data:
inv_dictionary[i_type]['user_id'] = pb_data["NETBADGEID"]
inv_dictionary[i_type].update(StudyService.get_ldap_dict_if_available(pb_data["NETBADGEID"]))
else:
inv_dictionary[i_type]['user_id'] = None
pb_data_entries = list(item for item in pb_investigators if item['INVESTIGATORTYPE'] == i_type)
entry_count = 0
investigators[i_type] = copy(inv_dictionary[i_type])
investigators[i_type]['user_id'] = None
for pb_data in pb_data_entries:
entry_count += 1
if entry_count == 1:
t = i_type
else:
t = i_type + "_" + str(entry_count)
investigators[t] = copy(inv_dictionary[i_type])
investigators[t]['user_id'] = pb_data["NETBADGEID"]
investigators[t].update(StudyService.get_ldap_dict_if_available(pb_data["NETBADGEID"]))
if not all:
inv_dictionary = dict(filter(lambda elem: elem[1]['user_id'] is not None, inv_dictionary.items()))
return inv_dictionary
investigators = dict(filter(lambda elem: elem[1]['user_id'] is not None, investigators.items()))
return investigators
@staticmethod
def get_ldap_dict_if_available(user_id):

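The get_investigators() change above handles Protocol Builder returning the same investigator type more than once by suffixing later entries with a counter (SI, SI_2, ...), which is what the updated tests and the extra SI rows in investigators.json exercise. A stand-alone sketch of just that keying logic (sample data is illustrative; the LDAP enrichment step is omitted):

from copy import copy

inv_dictionary = {"PI": {"label": "Primary Investigator"},
                  "SI": {"label": "Sub Investigator"}}
pb_investigators = [
    {"INVESTIGATORTYPE": "PI", "NETBADGEID": "dhf8r"},
    {"INVESTIGATORTYPE": "SI", "NETBADGEID": "ajl2j"},
    {"INVESTIGATORTYPE": "SI", "NETBADGEID": "cah3us"},
]

investigators = {}
for i_type, template in inv_dictionary.items():
    matches = [item for item in pb_investigators if item["INVESTIGATORTYPE"] == i_type]
    investigators[i_type] = copy(template)
    investigators[i_type]["user_id"] = None  # kept when all=True, filtered out otherwise
    for count, pb_data in enumerate(matches, start=1):
        key = i_type if count == 1 else "%s_%i" % (i_type, count)
        investigators[key] = copy(template)
        investigators[key]["user_id"] = pb_data["NETBADGEID"]

print(sorted(investigators))  # ['PI', 'SI', 'SI_2']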

@@ -1,4 +1,6 @@
import re
from SpiffWorkflow.serializer.exceptions import MissingSpecError
from lxml import etree
import shlex
from datetime import datetime
@@ -138,7 +140,7 @@ class WorkflowProcessor(object):
workflow_model.bpmn_workflow_json = WorkflowProcessor._serializer.serialize_workflow(self.bpmn_workflow)
self.save()
except KeyError as ke:
except MissingSpecError as ke:
raise ApiError(code="unexpected_workflow_structure",
message="Failed to deserialize workflow"
" '%s' version %s, due to a mis-placed or missing task '%s'" %

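The change above narrows the exception handler from a generic KeyError to SpiffWorkflow's MissingSpecError before re-raising it as an ApiError. A hedged sketch of that pattern (the restore_workflow helper and the ApiError import path are assumptions; the exception class and the ApiError fields come from the diff itself):

from SpiffWorkflow.serializer.exceptions import MissingSpecError
from crc.api.common import ApiError  # import path assumed

def restore_workflow(deserialize, workflow_json, name, version):
    """deserialize: a callable that rebuilds a workflow from its serialized JSON state."""
    try:
        return deserialize(workflow_json)
    except MissingSpecError as ke:
        # A structural mismatch between the stored state and the current spec
        # becomes a descriptive API-level error instead of a bare KeyError.
        raise ApiError(code="unexpected_workflow_structure",
                       message="Failed to deserialize workflow"
                               " '%s' version %s, due to a mis-placed or missing task '%s'" %
                               (name, version, str(ke)))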

@@ -384,7 +384,8 @@ class WorkflowService(object):
except TypeError as te:
raise ApiError.from_task(code="template_error", message="Error processing template for task %s: %s" %
(spiff_task.task_spec.name, str(te)), task=spiff_task)
# TODO: Catch additional errors and report back.
except Exception as e:
app.logger.error(str(e), exc_info=True)
@staticmethod
def process_options(spiff_task, field):

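The added handler above logs any unexpected templating failure (with a traceback) instead of letting it abort task processing, while known TypeErrors still surface as task-level errors. A small illustrative sketch of that split (safe_render and its arguments are not the project's real API):

import logging
logger = logging.getLogger(__name__)

def safe_render(render, task_name):
    """render: a zero-argument callable that produces the task's documentation text."""
    try:
        return render()
    except TypeError as te:
        # Known failure mode: report it back to the caller, tied to the task.
        raise ValueError("Error processing template for task %s: %s" % (task_name, str(te)))
    except Exception as e:
        # Anything else is logged with a traceback and swallowed so one bad
        # template does not take down the rest of the workflow processing.
        logger.error(str(e), exc_info=True)
        return None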

@@ -217,27 +217,6 @@ class TestApprovals(BaseTest):
total_counts = sum(counts[status] for status in statuses)
self.assertEqual(total_counts, len(approvals), 'Total approval counts for user should match number of approvals for user')
def _create_study_workflow_approvals(self, user_uid, title, primary_investigator_id, approver_uids, statuses,
workflow_spec_name="random_fact"):
study = self.create_study(uid=user_uid, title=title, primary_investigator_id=primary_investigator_id)
workflow = self.create_workflow(workflow_name=workflow_spec_name, study=study)
approvals = []
for i in range(len(approver_uids)):
approvals.append(self.create_approval(
study=study,
workflow=workflow,
approver_uid=approver_uids[i],
status=statuses[i],
version=1
))
return {
'study': study,
'workflow': workflow,
'approvals': approvals,
}
def _add_lots_of_random_approvals(self, n=100, workflow_spec_name="random_fact"):
num_studies_before = db.session.query(StudyModel).count()
statuses = [name for name, value in ApprovalStatus.__members__.items()]


@@ -1,7 +1,7 @@
from tests.base_test import BaseTest
from crc import db
from crc.models.approval import ApprovalModel
from crc.services.approval_service import ApprovalService
from crc.services.approval_service import ApprovalService, ApprovalStatus
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor
@@ -83,6 +83,34 @@ class TestApprovalsService(BaseTest):
self.assertEqual(len(records), 2)
def test_new_approval_cancels_all_previous_approvals(self):
self.create_reference_document()
workflow = self.create_workflow("empty_workflow")
FileService.add_workflow_file(workflow_id=workflow.id,
name="anything.png", content_type="text",
binary_data=b'5678', irb_doc_code="UVACompl_PRCAppr" )
ApprovalService.add_approval(study_id=workflow.study_id, workflow_id=workflow.id, approver_uid="dhf8r")
ApprovalService.add_approval(study_id=workflow.study_id, workflow_id=workflow.id, approver_uid="lb3dp")
current_count = ApprovalModel.query.count()
self.assertEqual(current_count, 2)
FileService.add_workflow_file(workflow_id=workflow.id,
name="borderline.png", content_type="text",
binary_data=b'906090', irb_doc_code="AD_CoCAppr" )
ApprovalService.add_approval(study_id=workflow.study_id, workflow_id=workflow.id, approver_uid="dhf8r")
current_count = ApprovalModel.query.count()
canceled_count = ApprovalModel.query.filter(ApprovalModel.status == ApprovalStatus.CANCELED.value).count()
self.assertEqual(canceled_count, 2)
self.assertEqual(current_count, 3)
ApprovalService.add_approval(study_id=workflow.study_id, workflow_id=workflow.id, approver_uid="lb3dp")
current_count = ApprovalModel.query.count()
self.assertEqual(current_count, 4)
def test_new_approval_sends_proper_emails(self):
self.assertEqual(1, 1)


@@ -240,6 +240,29 @@ class BaseTest(unittest.TestCase):
db.session.commit()
return study
def _create_study_workflow_approvals(self, user_uid, title, primary_investigator_id, approver_uids, statuses,
workflow_spec_name="random_fact"):
study = self.create_study(uid=user_uid, title=title, primary_investigator_id=primary_investigator_id)
workflow = self.create_workflow(workflow_name=workflow_spec_name, study=study)
approvals = []
for i in range(len(approver_uids)):
approvals.append(self.create_approval(
study=study,
workflow=workflow,
approver_uid=approver_uids[i],
status=statuses[i],
version=1
))
full_study = {
'study': study,
'workflow': workflow,
'approvals': approvals,
}
return full_study
def create_workflow(self, workflow_name, study=None, category_id=None):
db.session.flush()
spec = db.session.query(WorkflowSpecModel).filter(WorkflowSpecModel.name == workflow_name).first()


@@ -13,5 +13,15 @@
"INVESTIGATORTYPE": "PI",
"INVESTIGATORTYPEFULL": "Primary Investigator",
"NETBADGEID": "dhf8r"
},
{
"INVESTIGATORTYPE": "SI",
"INVESTIGATORTYPEFULL": "Sub Investigator",
"NETBADGEID": "ajl2j"
},
{
"INVESTIGATORTYPE": "SI",
"INVESTIGATORTYPEFULL": "Sub Investigator",
"NETBADGEID": "cah3us"
}
]


@@ -31,4 +31,15 @@ class TestEmailService(BaseTest):
self.assertEqual(email_model.content_html, content_html)
self.assertEqual(email_model.study, study)
# TODO: Create email model without study
subject = 'Email Subject - Empty study'
EmailService.add_email(subject=subject, sender=sender, recipients=recipients,
content=content, content_html=content_html)
email_model = EmailModel.query.order_by(EmailModel.id.desc()).first()
self.assertEqual(email_model.subject, subject)
self.assertEqual(email_model.sender, sender)
self.assertEqual(email_model.recipients, str(recipients))
self.assertEqual(email_model.content, content)
self.assertEqual(email_model.content_html, content_html)
self.assertEqual(email_model.study, None)


@@ -7,6 +7,7 @@ from unittest.mock import patch
from crc import session, app
from crc.models.protocol_builder import ProtocolBuilderStatus, \
ProtocolBuilderStudySchema
from crc.models.approval import ApprovalStatus
from crc.models.stats import TaskEventModel
from crc.models.study import StudyModel, StudySchema
from crc.models.workflow import WorkflowSpecModel, WorkflowModel
@@ -95,8 +96,21 @@ class TestStudyApi(BaseTest):
# TODO: WRITE A TEST FOR STUDY FILES
def test_get_study_has_details_about_approvals(self):
# TODO: WRITE A TEST FOR STUDY APPROVALS
pass
self.load_example_data()
full_study = self._create_study_workflow_approvals(
user_uid="dhf8r", title="first study", primary_investigator_id="lb3dp",
approver_uids=["lb3dp", "dhf8r"], statuses=[ApprovalStatus.PENDING.value, ApprovalStatus.PENDING.value]
)
api_response = self.app.get('/v1.0/study/%i' % full_study['study'].id,
headers=self.logged_in_headers(), content_type="application/json")
self.assert_success(api_response)
study = StudySchema().loads(api_response.get_data(as_text=True))
self.assertEqual(len(study.approvals), 2)
for approval in study.approvals:
self.assertEqual(full_study['study'].title, approval['title'])
def test_add_study(self):
self.load_example_data()


@@ -193,7 +193,7 @@ class TestStudyService(BaseTest):
workflow = self.create_workflow('docx') # The workflow really doesn't matter in this case.
investigators = StudyService().get_investigators(workflow.study_id, all=True)
self.assertEqual(9, len(investigators))
self.assertEqual(10, len(investigators))
# dhf8r is in the ldap mock data.
self.assertEqual("dhf8r", investigators['PI']['user_id'])
@@ -219,10 +219,14 @@ class TestStudyService(BaseTest):
workflow = self.create_workflow('docx') # The workflow really doesn't matter in this case.
investigators = StudyService().get_investigators(workflow.study_id, all=False)
self.assertEqual(3, len(investigators))
self.assertEqual(5, len(investigators))
# dhf8r is in the ldap mock data.
self.assertEqual("dhf8r", investigators['PI']['user_id'])
self.assertEqual("Dan Funk", investigators['PI']['display_name']) # Data from ldap
self.assertEqual("Primary Investigator", investigators['PI']['label']) # Data from xls file.
self.assertEqual("Always", investigators['PI']['display']) # Data from xls file.
# Both Alex and Aaron are SI, and both should be returned.
self.assertEqual("ajl2j", investigators['SI']['user_id'])
self.assertEqual("cah3us", investigators['SI_2']['user_id'])


@@ -24,7 +24,7 @@ class TestProtocolBuilder(BaseTest):
mock_get.return_value.text = self.protocol_builder_response('investigators.json')
response = ProtocolBuilderService.get_investigators(self.test_study_id)
self.assertIsNotNone(response)
self.assertEqual(3, len(response))
self.assertEqual(5, len(response))
self.assertEqual("DC", response[0]["INVESTIGATORTYPE"])
self.assertEqual("Department Contact", response[0]["INVESTIGATORTYPEFULL"])
self.assertEqual("asd3v", response[0]["NETBADGEID"])


@@ -322,7 +322,7 @@ class TestTasksApi(BaseTest):
self.assertEqual(4, len(navigation)) # Start task, form_task, multi_task, end task
self.assertEqual("UserTask", workflow.next_task.type)
self.assertEqual(MultiInstanceType.sequential.value, workflow.next_task.multi_instance_type)
self.assertEqual(3, workflow.next_task.multi_instance_count)
self.assertEqual(5, workflow.next_task.multi_instance_count)
# Assure that the names for each task are properly updated, so they aren't all the same.
self.assertEqual("Primary Investigator", workflow.next_task.properties['display_name'])
@@ -480,15 +480,15 @@
workflow = self.create_workflow('multi_instance_parallel')
workflow_api = self.get_workflow_api(workflow)
self.assertEqual(6, len(workflow_api.navigation))
self.assertEqual(8, len(workflow_api.navigation))
ready_items = [nav for nav in workflow_api.navigation if nav['state'] == "READY"]
self.assertEqual(3, len(ready_items))
self.assertEqual(5, len(ready_items))
self.assertEqual("UserTask", workflow_api.next_task.type)
self.assertEqual("MultiInstanceTask",workflow_api.next_task.name)
self.assertEqual("Primary Investigator", workflow_api.next_task.title)
for i in random.sample(range(3), 3):
for i in random.sample(range(5), 5):
task = TaskSchema().load(ready_items[i]['task'])
rv = self.app.put('/v1.0/workflow/%i/task/%s/set_token' % (workflow.id, task.id),
headers=self.logged_in_headers(),


@@ -187,7 +187,7 @@ class TestWorkflowProcessor(BaseTest):
file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'two_forms', 'mods', 'two_forms_struc_mod.bpmn')
self.replace_file("two_forms.bpmn", file_path)
# Attemping a soft update on a structural change should raise a sensible error.
# Attempting a soft update on a structural change should raise a sensible error.
with self.assertRaises(ApiError) as context:
processor3 = WorkflowProcessor(processor.workflow_model, soft_reset=True)
self.assertEqual("unexpected_workflow_structure", context.exception.code)


@@ -59,7 +59,7 @@ class TestWorkflowProcessorMultiInstance(BaseTest):
api_task = workflow_api.next_task
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
self.assertEqual("dhf8r", api_task.data["investigator"]["user_id"])
self.assertEqual("MultiInstanceTask", api_task.name)
self.assertTrue(api_task.name.startswith("MultiInstanceTask"))
self.assertEqual(3, api_task.multi_instance_count)
self.assertEqual(1, api_task.multi_instance_index)
@@ -74,7 +74,7 @@ class TestWorkflowProcessorMultiInstance(BaseTest):
api_task = workflow_api.next_task
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
self.assertEqual(None, api_task.data["investigator"]["user_id"])
self.assertEqual("MultiInstanceTask", api_task.name)
self.assertTrue(api_task.name.startswith("MultiInstanceTask"))
self.assertEqual(3, api_task.multi_instance_count)
self.assertEqual(2, api_task.multi_instance_index)
@@ -89,7 +89,7 @@ class TestWorkflowProcessorMultiInstance(BaseTest):
api_task = workflow_api.next_task
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
self.assertEqual("asd3v", api_task.data["investigator"]["user_id"])
self.assertEqual("MultiInstanceTask", api_task.name)
self.assertTrue(api_task.name.startswith("MultiInstanceTask"))
self.assertEqual(3, api_task.multi_instance_count)
self.assertEqual(3, api_task.multi_instance_index)