Merge branch 'dev' of https://github.com/sartography/cr-connect-workflow into partial_testing

This commit is contained in:
NWalker4483 2021-06-09 14:32:01 -04:00
commit 338fe30984
36 changed files with 596 additions and 305 deletions

2
Pipfile.lock generated
View File

@ -957,7 +957,7 @@
},
"spiffworkflow": {
"git": "https://github.com/sartography/SpiffWorkflow.git",
"ref": "834ca6fdfa4284262f6df5fe00c7b518353511ce"
"ref": "ce939de158246e9d10e7e154c92230669354bc64"
},
"sqlalchemy": {
"hashes": [

View File

@ -17,8 +17,8 @@ API_TOKEN = environ.get('API_TOKEN', default = 'af95596f327c9ecc007b60414fc84b61
NAME = "CR Connect Workflow"
DEFAULT_PORT = "5000"
FLASK_PORT = environ.get('PORT0') or environ.get('FLASK_PORT', default=DEFAULT_PORT)
FRONTEND = "localhost:4200"
BPMN = "localhost:5002"
FRONTEND = environ.get('FRONTEND', default="localhost:4200")
BPMN = environ.get('BPMN', default="localhost:5002")
CORS_DEFAULT = f'{FRONTEND}, {BPMN}'
CORS_ALLOW_ORIGINS = re.split(r',\s*', environ.get('CORS_ALLOW_ORIGINS', default=CORS_DEFAULT))
TESTING = environ.get('TESTING', default="false") == "true"

View File

@ -83,8 +83,6 @@ paths:
type : integer
get:
operationId: crc.api.file.get_document_directory
security:
- auth_admin: ['secret']
summary: Returns a directory of all files for study in a nested structure
tags:
- Document Categories
@ -512,6 +510,18 @@ paths:
description: The unique id of an existing workflow specification to validate.
schema:
type: string
- name: validate_study_id
in: query
required: false
description: Optional id of study to test under different scenarios
schema:
type: string
- name: test_until
in: query
required: false
description: Optional name of task to stop validating at
schema:
type: string
get:
operationId: crc.api.workflow.validate_workflow_specification
summary: Loads and attempts to execute a Workflow Specification, returning a list of errors encountered
@ -1372,6 +1382,27 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/DataStore"
/datastore/file/{file_id}:
parameters:
- name: file_id
in: path
required: true
description: The file id we are concerned with
schema:
type: string
format: string
get:
operationId: crc.api.data_store.file_multi_get
summary: Gets all datastore items by file_id
tags:
- DataStore
responses:
'200':
description: Get all values from the data store for a file_id
content:
application/json:
schema:
$ref: "#/components/schemas/DataStore"
components:
securitySchemes:
jwt:

View File

@ -4,7 +4,7 @@ from datetime import datetime
from crc import session
from crc.api.common import ApiError
from crc.models.data_store import DataStoreModel, DataStoreSchema
from crc.scripts.data_store_base import DataStoreBase
from crc.services.data_store_service import DataStoreBase
def study_multi_get(study_id):
@ -30,6 +30,16 @@ def user_multi_get(user_id):
return results
def file_multi_get(file_id):
"""Get all data values in the data store for a file_id"""
if file_id is None:
raise ApiError(code='unknown_file', message='Please provide a valid file id.')
dsb = DataStoreBase()
retval = dsb.get_multi_common(None, None, file_id=file_id)
results = DataStoreSchema(many=True).dump(retval)
return results
def datastore_del(id):
"""Delete a data store item for a key"""
session.query(DataStoreModel).filter_by(id=id).delete()

View File

@ -57,7 +57,7 @@ def get_document_directory(study_id, workflow_id=None):
if file.irb_doc_code in doc_dict:
doc_code = doc_dict[file.irb_doc_code]
else:
doc_code = {'category1': "Unknown", 'category2': None, 'category3': None}
doc_code = {'category1': "Unknown", 'category2': '', 'category3': ''}
if workflow_id:
expand = file.workflow_id == int(workflow_id)
else:

View File

@ -46,16 +46,16 @@ def get_workflow_specification(spec_id):
return WorkflowSpecModelSchema().dump(spec)
def validate_workflow_specification(spec_id):
def validate_workflow_specification(spec_id, validate_study_id=None, test_until=None):
errors = {}
try:
WorkflowService.test_spec(spec_id)
WorkflowService.test_spec(spec_id, validate_study_id, test_until)
except ApiError as ae:
ae.message = "When populating all fields ... \n" + ae.message
errors['all'] = ae
try:
# Run the validation twice, the second time, just populate the required fields.
WorkflowService.test_spec(spec_id, required_only=True)
WorkflowService.test_spec(spec_id, validate_study_id, test_until, required_only=True)
except ApiError as ae:
ae.message = "When populating only required fields ... \n" + ae.message
errors['required'] = ae
@ -206,6 +206,7 @@ def update_task(workflow_id, task_id, body, terminate_loop=None, update_all=Fals
processor = WorkflowProcessor(workflow_model)
task_id = uuid.UUID(task_id)
spiff_task = processor.bpmn_workflow.get_task(task_id)
spiff_task.workflow.script_engine = processor.bpmn_workflow.script_engine
_verify_user_and_role(processor, spiff_task)
user = UserService.current_user(allow_admin_impersonate=False) # Always log as the real user.
@ -215,7 +216,7 @@ def update_task(workflow_id, task_id, body, terminate_loop=None, update_all=Fals
raise ApiError("invalid_state", "You may not update a task unless it is in the READY state. "
"Consider calling a token reset to make this task Ready.")
if terminate_loop:
if terminate_loop and spiff_task.is_looping():
spiff_task.terminate_loop()
# Extract the details specific to the form submitted
@ -244,6 +245,7 @@ def __update_task(processor, task, data, user):
here because we need to do it multiple times when completing all tasks in
a multi-instance task"""
task.update_data(data)
WorkflowService.post_process_form(task) # some properties may update the data store.
processor.complete_task(task)
processor.do_engine_steps()
processor.save()

View File

@ -26,7 +26,8 @@ class Task(object):
PROP_EXTENSIONS_TITLE = "display_name"
# Autocomplete field
# Field Types
FIELD_TYPE_FILE = "file"
FIELD_TYPE_AUTO_COMPLETE = "autocomplete"
FIELD_PROP_AUTO_COMPLETE_MAX = "autocomplete_num" # Not used directly, passed in from the front end.
@ -59,6 +60,10 @@ class Task(object):
FIELD_PROP_REPLEAT_TITLE = "repeat_title"
FIELD_PROP_REPLEAT_BUTTON = "repeat_button_label"
# File specific field properties
FIELD_PROP_DOC_CODE = "doc_code" # to associate a file upload field with a doc code
FIELD_PROP_FILE_DATA = "file_data" # to associate a bit of data with a specific file upload file.
# Additional properties
FIELD_PROP_ENUM_TYPE = "enum_type"
FIELD_PROP_TEXT_AREA_ROWS = "rows"

View File

@ -25,4 +25,5 @@ class DataStoreSchema(SQLAlchemyAutoSchema):
class Meta:
model = DataStoreModel
load_instance = True
include_fk = True
sqla_session = db.session

View File

@ -90,7 +90,7 @@ class FileModel(db.Model):
# it instead, hide it in the interface.
is_review = db.Column(db.Boolean, default=False, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
tags = relationship("DataStoreModel", cascade="all,delete", backref="file")
data_stores = relationship("DataStoreModel", cascade="all,delete", backref="file")
class File(object):
@classmethod
@ -123,6 +123,11 @@ class File(object):
else:
instance.last_modified = None
instance.latest_version = None
instance.data_store = {}
for ds in model.data_stores:
instance.data_store[ds.key] = ds.value
return instance
@ -142,7 +147,7 @@ class FileSchema(Schema):
fields = ["id", "name", "is_status", "is_reference", "content_type",
"primary", "primary_process_id", "workflow_spec_id", "workflow_id",
"irb_doc_code", "last_modified", "latest_version", "type", "categories",
"description", "category", "description", "download_name", "size"]
"description", "category", "download_name", "size", "data_store"]
unknown = INCLUDE
type = EnumField(FileType)

View File

@ -1,7 +1,7 @@
from flask import g
from crc.api.common import ApiError
from crc.scripts.data_store_base import DataStoreBase
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script

View File

@ -1,8 +1,9 @@
from flask import g
from crc.api.common import ApiError
from crc.scripts.data_store_base import DataStoreBase
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script
from crc.services.file_service import FileService
class FileDataSet(Script, DataStoreBase):
@ -34,8 +35,19 @@ class FileDataSet(Script, DataStoreBase):
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
if self.validate_kw_args(**kwargs):
myargs = [kwargs['key'],kwargs['value']]
fileid = kwargs['file_id']
try:
fileid = int(kwargs['file_id'])
except:
raise ApiError("invalid_file_id",
"Attempting to update DataStore for an invalid fileid '%s'" % kwargs['file_id'])
del(kwargs['file_id'])
if kwargs['key'] == 'irb_code':
irb_doc_code = kwargs['value']
FileService.update_irb_code(fileid,irb_doc_code)
return self.set_data_common(task.id,
None,
None,

View File

@ -16,16 +16,14 @@ example : get_study_associate('sbp3ey') => {'uid':'sbp3ey','role':'Unicorn Herde
"""
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
if len(args)<1:
return False
return True
if len(args) < 1:
raise ApiError('no_user_id_specified', 'A uva uid is the sole argument to this function')
return {'uid': 'sbp3ey', 'role': 'Unicorn Herder', 'send_email': False, 'access': True}
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
if len(args)<1:
if len(args) < 1:
raise ApiError('no_user_id_specified', 'A uva uid is the sole argument to this function')
if not isinstance(args[0],type('')):
if not isinstance(args[0], str):
raise ApiError('argument_should_be_string', 'A uva uid is always a string, please check type')
return StudyService.get_study_associate(study_id=study_id,uid=args[0])
return StudyService.get_study_associate(study_id=study_id, uid=args[0])

View File

@ -1,4 +1,4 @@
from crc.scripts.data_store_base import DataStoreBase
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script

View File

@ -1,4 +1,4 @@
from crc.scripts.data_store_base import DataStoreBase
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script

View File

@ -4,9 +4,10 @@ from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from crc import session
from crc.api.common import ApiError
from crc.api.workflow import get_workflow
from crc.models.protocol_builder import ProtocolBuilderInvestigatorType
from crc.models.study import StudyModel, StudySchema
from crc.models.workflow import WorkflowStatus
from crc.api import workflow as workflow_api
from crc.scripts.script import Script
from crc.services.cache_service import timeit
from crc.services.file_service import FileService
@ -118,7 +119,7 @@ class StudyInfo(Script):
def get_description(self):
return """
StudyInfo [TYPE], where TYPE is one of 'info', 'investigators', 'details', 'documents' or 'protocol'.
StudyInfo [TYPE], where TYPE is one of 'info', 'investigators', 'details', or 'documents'.
Adds details about the current study to the Task Data. The type of information required should be
provided as an argument. The following arguments are available:
@ -157,9 +158,6 @@ Please note this is just a few examples, ALL known document types are returned i
{documents_example}
```
### Protocol ###
Returns information specific to the protocol.
""".format(info_example=self.example_to_string("info"),
investigators_example=self.example_to_string("investigators"),
@ -172,197 +170,10 @@ Returns information specific to the protocol.
# Assure the reference file exists (a bit hacky, but we want to raise this error early, and cleanly.)
FileService.get_reference_file_data(FileService.DOCUMENT_LIST)
FileService.get_reference_file_data(FileService.INVESTIGATOR_LIST)
data = {
"study": {
"info": {
"id": 12,
"title": "test",
"short_title": "tst",
"primary_investigator_id": 21,
"user_uid": "dif84",
"sponsor": "sponsor",
"ind_number": "1234",
"inactive": False
},
"sponsors": [
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 2453,
"SP_NAME": "Abbott Ltd",
"SP_TYPE": "Private",
"SP_TYPE_GROUP_NAME": None,
"SS_STUDY": 2
},
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 2387,
"SP_NAME": "Abbott-Price",
"SP_TYPE": "Incoming Sub Award",
"SP_TYPE_GROUP_NAME": "Government",
"SS_STUDY": 2
},
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 1996,
"SP_NAME": "Abernathy-Heidenreich",
"SP_TYPE": "Foundation/Not for Profit",
"SP_TYPE_GROUP_NAME": "Other External Funding",
"SS_STUDY": 2
}
],
"investigators": {
"PI": {
"label": ProtocolBuilderInvestigatorType.PI.value,
"display": "Always",
"unique": "Yes",
"user_id": "dhf8r",
"title": "",
"display_name": "Daniel Harold Funk",
"sponsor_type": "Contractor",
"telephone_number": "0000000000",
"department": "",
"email_address": "dhf8r@virginia.edu",
"given_name": "Daniel",
"uid": "dhf8r",
"affiliation": "",
"date_cached": "2020-08-04T19:32:08.006128+00:00"
},
"SC_I": {
"label": ProtocolBuilderInvestigatorType.SC_I.value,
"display": "Always",
"unique": "Yes",
"user_id": "ajl2j",
"title": "",
"display_name": "Aaron Louie",
"sponsor_type": "Contractor",
"telephone_number": "0000000000",
"department": "",
"email_address": "ajl2j@virginia.edu",
"given_name": "Aaron",
"uid": "ajl2j",
"affiliation": "sponsored",
"date_cached": "2020-08-04T19:32:10.699666+00:00"
},
"SC_II": {
"label": ProtocolBuilderInvestigatorType.SC_II.value,
"display": "Optional",
"unique": "Yes",
"user_id": "cah3us",
"title": "",
"display_name": "Alex Herron",
"sponsor_type": "Contractor",
"telephone_number": "0000000000",
"department": "",
"email_address": "cah3us@virginia.edu",
"given_name": "Alex",
"uid": "cah3us",
"affiliation": "sponsored",
"date_cached": "2020-08-04T19:32:10.075852+00:00"
},
},
"pi": {
"PI": {
"label": ProtocolBuilderInvestigatorType.PI.value,
"display": "Always",
"unique": "Yes",
"user_id": "dhf8r",
"title": "",
"display_name": "Daniel Harold Funk",
"sponsor_type": "Contractor",
"telephone_number": "0000000000",
"department": "",
"email_address": "dhf8r@virginia.edu",
"given_name": "Daniel",
"uid": "dhf8r",
"affiliation": "",
"date_cached": "2020-08-04T19:32:08.006128+00:00"
}
},
"roles":
{
"INVESTIGATORTYPE": "PI",
"INVESTIGATORTYPEFULL": ProtocolBuilderInvestigatorType.PI.value,
"NETBADGEID": "dhf8r"
},
"details":
{
"DSMB": None,
"DSMB_FREQUENCY": None,
"GCRC_NUMBER": None,
"IBC_NUMBER": None,
"IDE": None,
"IND_1": 1234,
"IND_2": None,
"IND_3": None,
"IRBREVIEWERADMIN": None,
"IS_ADULT_PARTICIPANT": None,
"IS_APPROVED_DEVICE": None,
"IS_AUX": None,
"IS_BIOMEDICAL": None,
"IS_CANCER_PATIENT": None,
"IS_CENTRAL_REG_DB": None,
"IS_CHART_REVIEW": None,
"IS_COMMITTEE_CONFLICT": None,
"IS_CONSENT_WAIVER": None,
"IS_DB": None,
"IS_ELDERLY_POP": None,
"IS_ENGAGED_RESEARCH": None,
"IS_FETUS_POP": None,
"IS_FINANCIAL_CONFLICT": None,
"IS_FOR_CANCER_CENTER": None,
"IS_FUNDING_SOURCE": None,
"IS_GCRC": None,
"IS_GENE_TRANSFER": None,
"IS_GRANT": None,
"IS_HGT": None,
"IS_IBC": None,
"IS_IDE": None,
"IS_IND": 1,
"IS_MENTAL_IMPAIRMENT_POP": None,
"IS_MINOR": None,
"IS_MINOR_PARTICIPANT": None,
"IS_MULTI_SITE": None,
"IS_NOT_CONSENT_WAIVER": None,
"IS_NOT_PRC_WAIVER": None,
"IS_OTHER_VULNERABLE_POP": None,
"IS_OUTSIDE_CONTRACT": None,
"IS_PI_INITIATED": None,
"IS_PI_SCHOOL": None,
"IS_PRC": None,
"IS_PRC_DSMP": None,
"IS_PREGNANT_POP": None,
"IS_PRISONERS_POP": None,
"IS_QUALITATIVE": None,
"IS_RADIATION": None,
"IS_REVIEW_BY_CENTRAL_IRB": None,
"IS_SPONSOR": None,
"IS_SPONSOR_MONITORING": None,
"IS_SURROGATE_CONSENT": None,
"IS_TISSUE_BANKING": None,
"IS_UVA_DB": None,
"IS_UVA_IDE": None,
"IS_UVA_IND": None,
"IS_UVA_LOCATION": None,
"IS_UVA_PI_MULTI": None,
"MULTI_SITE_LOCATIONS": None,
"NON_UVA_LOCATION": None,
"OTHER_VULNERABLE_DESC": None,
"PRC_NUMBER": None,
"SPONSORS_PROTOCOL_REVISION_DATE": None,
"UPLOAD_COMPLETE": None
},
'protocol': {
'id': 0,
}
}
}
if args[0] == 'documents':
return self.box_it(StudyService().get_documents_status(study_id))
return self.box_it(data['study'][args[0]])
# self.add_data_to_task(task=task, data=data["study"])
# self.add_data_to_task(task, {"documents": StudyService().get_documents_status(study_id)})
# we call the real do_task so we can
# seed workflow validations with settings from studies in PB Mock
# in order to test multiple paths thru the workflow
return self.do_task(task, study_id, workflow_id, args[0])
@timeit
def do_task(self, task, study_id, workflow_id, *args, **kwargs):

View File

@ -4,13 +4,11 @@ from crc.services.study_service import StudyService
class UpdateStudyAssociates(Script):
argument_error_message = "You must supply at least one argument to the " \
"update_study_associates task, an array of objects in the form " \
"{'uid':'someid', 'role': 'text', 'send_email: 'boolean', " \
"'access':'boolean'} "
def get_description(self):
return """
Allows you to associate other users with a study - only 'uid' is required in the
@ -26,20 +24,26 @@ associations already in place.
example : update_study_associates([{'uid':'sbp3ey','role':'Unicorn Herder', 'send_email': False, 'access':True}])
"""
def validate_arg(self,arg):
if not isinstance(arg,list):
def validate_arg(self, arg):
if not isinstance(arg, list):
raise ApiError("invalid parameter", "This function is expecting a list of dictionaries")
if not len(arg) > 0 and not isinstance(arg[0],dict):
raise ApiError("invalid paramemter","This function is expecting a list of dictionaries")
if len(arg[0]) > 0:
if not len(arg) > 0 and not isinstance(arg[0], dict):
raise ApiError("invalid paramemter", "This function is expecting a list of dictionaries")
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
items = args[0]
self.validate_arg(items)
return all([x.get('uid',False) for x in items])
if len(args) == 0:
items = []
else:
items = args[0]
self.validate_arg(items)
return all([x.get('uid', False) for x in items])
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
access_list = args[0]
self.validate_arg(access_list)
return StudyService.update_study_associates(study_id,access_list)
if len(args) == 0:
access_list = []
else:
access_list = args[0]
self.validate_arg(access_list)
return StudyService.update_study_associates(study_id, access_list)

View File

@ -1,6 +1,6 @@
from flask import g
from crc.scripts.data_store_base import DataStoreBase
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script

View File

@ -1,6 +1,6 @@
from flask import g
from crc.scripts.data_store_base import DataStoreBase
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script

View File

@ -1,6 +1,3 @@
import importlib
import os
import pkgutil
from crc import session
from crc.api.common import ApiError
from crc.models.data_store import DataStoreModel

View File

@ -97,9 +97,8 @@ class FileService(object):
review = any([f.is_review for f in files])
return review
@staticmethod
def add_workflow_file(workflow_id, irb_doc_code, name, content_type, binary_data):
def update_irb_code(file_id, irb_doc_code):
"""Create a new file and associate it with the workflow
Please note that the irb_doc_code MUST be a known file in the irb_documents.xslx reference document."""
if not FileService.is_allowed_document(irb_doc_code):
@ -107,9 +106,21 @@ class FileService(object):
"When uploading files, the form field id must match a known document in the "
"irb_docunents.xslx reference file. This code is not found in that file '%s'" % irb_doc_code)
"""Assure this is unique to the workflow, task, and document code AND the Name
Because we will allow users to upload multiple files for the same form field
in some cases """
""" """
file_model = session.query(FileModel)\
.filter(FileModel.id == file_id).first()
if file_model is None:
raise ApiError("invalid_file_id",
"When updating the irb_doc_code for a file, that file_id must already exist "
"This file_id is not found in the database '%d'" % file_id)
file_model.irb_doc_code = irb_doc_code
session.commit()
return True
@staticmethod
def add_workflow_file(workflow_id, irb_doc_code, name, content_type, binary_data):
file_model = session.query(FileModel)\
.filter(FileModel.workflow_id == workflow_id)\
.filter(FileModel.name == name)\

View File

@ -22,6 +22,7 @@ from jinja2 import Template
from crc import db, app
from crc.api.common import ApiError
from crc.models.api_models import Task, MultiInstanceType, WorkflowApi
from crc.models.data_store import DataStoreModel
from crc.models.file import LookupDataModel, FileModel
from crc.models.study import StudyModel
from crc.models.task_event import TaskEventModel
@ -52,17 +53,27 @@ class WorkflowService(object):
handles the testing of a workflow specification by completing it with
random selections, attempting to mimic a front end as much as possible. """
from crc.services.user_service import UserService
@staticmethod
def make_test_workflow(spec_id):
user = db.session.query(UserModel).filter_by(uid="test").first()
def make_test_workflow(spec_id, validate_study_id=None):
try:
user = UserService.current_user()
except ApiError as e:
user = None
if not user:
user = db.session.query(UserModel).filter_by(uid="test").first()
if not user:
db.session.add(UserModel(uid="test"))
db.session.commit()
study = db.session.query(StudyModel).filter_by(user_uid="test").first()
user = db.session.query(UserModel).filter_by(uid="test").first()
if validate_study_id:
study = db.session.query(StudyModel).filter_by(id=validate_study_id).first()
else:
study = db.session.query(StudyModel).filter_by(user_uid=user.uid).first()
if not study:
db.session.add(StudyModel(user_uid="test", title="test"))
db.session.add(StudyModel(user_uid=user.uid, title="test"))
db.session.commit()
study = db.session.query(StudyModel).filter_by(user_uid="test").first()
study = db.session.query(StudyModel).filter_by(user_uid=user.uid).first()
workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
workflow_spec_id=spec_id,
last_updated=datetime.utcnow(),
@ -80,18 +91,18 @@ class WorkflowService(object):
db.session.delete(user)
@staticmethod
def test_spec(spec_id, required_only=False, test_until = ""):
def test_spec(spec_id, validate_study_id=None, test_until=None, required_only=False):
"""Runs a spec through it's paces to see if it results in any errors.
Not fool-proof, but a good sanity check. Returns the final data
output form the last task if successful.
test_until
required_only can be set to true, in which case this will run the
spec, only completing the required fields, rather than everything.
testing_depth
"""
workflow_model = WorkflowService.make_test_workflow(spec_id)
workflow_model = WorkflowService.make_test_workflow(spec_id, validate_study_id)
try:
processor = WorkflowProcessor(workflow_model, validate_only=True)
@ -100,12 +111,18 @@ class WorkflowService(object):
raise ApiError.from_workflow_exception("workflow_validation_exception", str(we), we)
count = 0
escaped = False
while not processor.bpmn_workflow.is_completed():
while not processor.bpmn_workflow.is_completed() and not escaped:
if count < 100: # check for infinite loop
try:
processor.bpmn_workflow.get_deep_nav_list() # Assure no errors with navigation.
processor.bpmn_workflow.do_engine_steps()
exit_task = processor.bpmn_workflow.do_engine_steps(exit_at=test_until)
if (exit_task != None):
WorkflowService.delete_test_data()
raise ApiError.from_task("validation_break",
f"This task is in a lane called '{task.task_spec.lane}' "
, exit_task.parent)
tasks = processor.bpmn_workflow.get_tasks(SpiffTask.READY)
for task in tasks:
if task.task_spec.lane is not None and task.task_spec.lane not in task.data:
@ -124,11 +141,11 @@ class WorkflowService(object):
task_name=task.get_name())
WorkflowService.populate_form_with_random_data(task, task_api, required_only)
processor.complete_task(task)
a = task.get_data()
if test_until == task.name:
test_data = processor.bpmn_workflow.last_task.data
WorkflowService.delete_test_data()
return test_data
if test_until == task.task_spec.name:
escaped = WorkflowService.delete_test_data()
raise ApiError.from_task("validation_break",
f"This task is in a lane called '{task.task_spec.name}' and was run using "
, task.parent)
count += 1
except WorkflowException as we:
WorkflowService.delete_test_data()
@ -250,6 +267,26 @@ class WorkflowService(object):
f'The field {field.id} contains an unsupported '
f'property: {name}', task=task)
@staticmethod
def post_process_form(task):
"""Looks through the fields in a submitted form, acting on any properties."""
if not hasattr(task.task_spec, 'form'): return
for field in task.task_spec.form.fields:
if field.has_property(Task.FIELD_PROP_DOC_CODE) and \
field.type == Task.FIELD_TYPE_FILE:
file_id = task.data[field.id]
file = db.session.query(FileModel).filter(FileModel.id == file_id).first()
doc_code = WorkflowService.evaluate_property(Task.FIELD_PROP_DOC_CODE, field, task)
file.irb_doc_code = doc_code
db.session.commit()
# Set the doc code on the file.
if field.has_property(Task.FIELD_PROP_FILE_DATA) and \
field.get_property(Task.FIELD_PROP_FILE_DATA) in task.data:
file_id = task.data[field.get_property(Task.FIELD_PROP_FILE_DATA)]
data_store = DataStoreModel(file_id=file_id, key=field.id, value=task.data[field.id])
db.session.add(data_store)
@staticmethod
def evaluate_property(property_name, field, task):
expression = field.get_property(property_name)

View File

@ -1,13 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1wrlvk8" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1wrlvk8" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="Finance" isExecutable="true">
<bpmn:startEvent id="StartEvent_1p6s47e">
<bpmn:outgoing>SequenceFlow_0ea9hvd</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:endEvent id="EndEvent_14p904o">
<bpmn:incoming>SequenceFlow_1h0d349</bpmn:incoming>
<bpmn:incoming>Flow_0t55959</bpmn:incoming>
</bpmn:endEvent>
<bpmn:userTask id="Task_112migv" name="Upload Executed Non-Funded" camunda:formKey="FormKey_ExecutedNonFunded">
<bpmn:sequenceFlow id="SequenceFlow_0ea9hvd" sourceRef="StartEvent_1p6s47e" targetRef="Activity_0neioh9" />
<bpmn:userTask id="Activity_0neioh9" name="Upload Executed Non-Funded" camunda:formKey="FormKey_ExecutedNonFunded">
<bpmn:documentation>#### Non-Funded Executed Agreement
@ -15,40 +16,55 @@
OGC will upload the Non-Funded Executed Agreement after it has been negotiated by OSP contract negotiator.</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="UVACompl_PRCAppr" label="Non-Funded Executed Agreement" type="file">
<camunda:formField id="file_type" type="enum" defaultValue="FileType1">
<camunda:value id="FileType1" name="My First file type" />
<camunda:value id="FileType2" name="My second file type" />
</camunda:formField>
<camunda:formField id="Some_File" label="Upload File" type="file">
<camunda:properties>
<camunda:property id="group" value="upload" />
<camunda:property id="repeat" value="upload" />
<camunda:property id="group" value="PCRApproval" />
<camunda:property id="doc_code" value="file_type" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="Language" label="Language" type="string" defaultValue="Engish">
<camunda:properties>
<camunda:property id="group" value="PCRApproval" />
<camunda:property id="file_data" value="Some_File" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="Date" label="Version Date" type="date">
<camunda:properties>
<camunda:property id="group" value="PCRApproval" />
<camunda:property id="file_data" value="Some_File" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0ea9hvd</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1h0d349</bpmn:outgoing>
<bpmn:outgoing>Flow_0t55959</bpmn:outgoing>
<bpmn:standardLoopCharacteristics />
</bpmn:userTask>
<bpmn:sequenceFlow id="SequenceFlow_0ea9hvd" sourceRef="StartEvent_1p6s47e" targetRef="Task_112migv" />
<bpmn:sequenceFlow id="SequenceFlow_1h0d349" sourceRef="Task_112migv" targetRef="EndEvent_14p904o" />
<bpmn:sequenceFlow id="Flow_0t55959" sourceRef="Activity_0neioh9" targetRef="EndEvent_14p904o" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Finance">
<bpmndi:BPMNEdge id="SequenceFlow_0ea9hvd_di" bpmnElement="SequenceFlow_0ea9hvd">
<di:waypoint x="148" y="117" />
<di:waypoint x="210" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0t55959_di" bpmnElement="Flow_0t55959">
<di:waypoint x="310" y="117" />
<di:waypoint x="392" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="StartEvent_1p6s47e_di" bpmnElement="StartEvent_1p6s47e">
<dc:Bounds x="112" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_14p904o_di" bpmnElement="EndEvent_14p904o">
<dc:Bounds x="682" y="99" width="36" height="36" />
<dc:Bounds x="392" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="UserTask_1peopdt_di" bpmnElement="Task_112migv">
<dc:Bounds x="350" y="77" width="100" height="80" />
<bpmndi:BPMNShape id="Activity_0neioh9_di" bpmnElement="Activity_0neioh9">
<dc:Bounds x="210" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0ea9hvd_di" bpmnElement="SequenceFlow_0ea9hvd">
<di:waypoint x="148" y="117" />
<di:waypoint x="350" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1h0d349_di" bpmnElement="SequenceFlow_1h0d349">
<di:waypoint x="450" y="117" />
<di:waypoint x="682" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,62 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_6e97803" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_d8c1062" name="GetValidation" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1aycav1</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1aycav1" sourceRef="StartEvent_1" targetRef="Activity_RunScript" />
<bpmn:scriptTask id="Activity_RunScript" name="Run Script">
<bpmn:incoming>Flow_1aycav1</bpmn:incoming>
<bpmn:outgoing>Flow_0wkyatv</bpmn:outgoing>
<bpmn:script>pi = study_info('investigators').get('PI', False)
if pi:
try:
pi_assc = get_study_associate(pi.user_id)
except:
pi_assc_chk = False
else:
if pi_assc['role'] == "Primary Investigator":
pi_assc_chk = True
else:
pi_assc_chk = False</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0wkyatv" sourceRef="Activity_RunScript" targetRef="Activity_DisplayValue" />
<bpmn:manualTask id="Activity_DisplayValue" name="Display Value">
<bpmn:documentation>pi_assc_chk is {{pi_assc_chk}}</bpmn:documentation>
<bpmn:incoming>Flow_0wkyatv</bpmn:incoming>
<bpmn:outgoing>Flow_0784fc6</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:endEvent id="Event_0hdelnp">
<bpmn:incoming>Flow_0784fc6</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0784fc6" sourceRef="Activity_DisplayValue" targetRef="Event_0hdelnp" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_d8c1062">
<bpmndi:BPMNEdge id="Flow_0784fc6_di" bpmnElement="Flow_0784fc6">
<di:waypoint x="530" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0wkyatv_di" bpmnElement="Flow_0wkyatv">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1aycav1_di" bpmnElement="Flow_1aycav1">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1ah3917_di" bpmnElement="Activity_RunScript">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1sysjzs_di" bpmnElement="Activity_DisplayValue">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0hdelnp_di" bpmnElement="Event_0hdelnp">
<dc:Bounds x="592" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,79 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_0kmksnn" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:process id="Process_0exnnpv" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1nfe5m9</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1nfe5m9" sourceRef="StartEvent_1" targetRef="Task_Script_Load_Study_Sponsors" />
<bpmn:scriptTask id="Task_Script_Load_Study_Sponsors" name="Load Study Sponsors">
<bpmn:incoming>SequenceFlow_1nfe5m9</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1bqiin0</bpmn:outgoing>
<bpmn:script>sponsors = study_info('sponsors')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_1bqiin0" sourceRef="Task_Script_Load_Study_Sponsors" targetRef="Activity_0cm6tn2" />
<bpmn:sequenceFlow id="Flow_09cika8" sourceRef="Activity_0cm6tn2" targetRef="Activity_1on1n5v" />
<bpmn:scriptTask id="Activity_0cm6tn2" name="setval">
<bpmn:incoming>SequenceFlow_1bqiin0</bpmn:incoming>
<bpmn:outgoing>Flow_09cika8</bpmn:outgoing>
<bpmn:script>update_study_associate(uid='lb3dp',role='SuperGal',send_email=False,access=True)
update_study_associate(uid='lje5u',role='SuperGal2',send_email=False,access=False)</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="Event_0c8gcuh">
<bpmn:incoming>Flow_0axwrzg</bpmn:incoming>
</bpmn:endEvent>
<bpmn:task id="Activity_0s3e9zu" name="Meaningless User Task">
<bpmn:documentation>This should just leave us a task to complete after the update_study_associate script</bpmn:documentation>
<bpmn:incoming>Flow_1xi8k3i</bpmn:incoming>
<bpmn:outgoing>Flow_0axwrzg</bpmn:outgoing>
</bpmn:task>
<bpmn:sequenceFlow id="Flow_0axwrzg" sourceRef="Activity_0s3e9zu" targetRef="Event_0c8gcuh" />
<bpmn:sequenceFlow id="Flow_1xi8k3i" sourceRef="Activity_1on1n5v" targetRef="Activity_0s3e9zu" />
<bpmn:scriptTask id="Activity_1on1n5v" name="Zap Associates">
<bpmn:incoming>Flow_09cika8</bpmn:incoming>
<bpmn:outgoing>Flow_1xi8k3i</bpmn:outgoing>
<bpmn:script>update_study_associates()</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0exnnpv">
<bpmndi:BPMNEdge id="SequenceFlow_1bqiin0_di" bpmnElement="SequenceFlow_1bqiin0">
<di:waypoint x="370" y="117" />
<di:waypoint x="440" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1nfe5m9_di" bpmnElement="SequenceFlow_1nfe5m9">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ScriptTask_1mp6xid_di" bpmnElement="Task_Script_Load_Study_Sponsors">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_09cika8_di" bpmnElement="Flow_09cika8">
<di:waypoint x="540" y="117" />
<di:waypoint x="600" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0wnwluq_di" bpmnElement="Activity_0cm6tn2">
<dc:Bounds x="440" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0c8gcuh_di" bpmnElement="Event_0c8gcuh">
<dc:Bounds x="1042" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0s3e9zu_di" bpmnElement="Activity_0s3e9zu">
<dc:Bounds x="790" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0axwrzg_di" bpmnElement="Flow_0axwrzg">
<di:waypoint x="890" y="117" />
<di:waypoint x="1042" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1xi8k3i_di" bpmnElement="Flow_1xi8k3i">
<di:waypoint x="700" y="117" />
<di:waypoint x="790" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_13lcouw_di" bpmnElement="Activity_1on1n5v">
<dc:Bounds x="600" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -8,6 +8,7 @@ from crc.models.file import FileModel, FileType, FileSchema, FileModelSchema
from crc.models.workflow import WorkflowSpecModel
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor
from crc.models.data_store import DataStoreModel
from example_data import ExampleDataLoader
@ -73,11 +74,6 @@ class TestFilesApi(BaseTest):
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
correct_name = task.task_spec.form.fields[0].id
rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_id=%i&form_field_key=%s' %
(workflow.study_id, workflow.id, task.id, "not_a_known_file"), data=data, follow_redirects=True,
content_type='multipart/form-data', headers=self.logged_in_headers())
self.assert_failure(rv, error_code="invalid_form_field_key")
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_id=%i&form_field_key=%s' %
(workflow.study_id, workflow.id, task.id, correct_name), data=data, follow_redirects=True,
@ -232,6 +228,17 @@ class TestFilesApi(BaseTest):
self.assertEqual("text/xml; charset=utf-8", rv.content_type)
self.assertTrue(rv.content_length > 1)
def test_get_file_contains_data_store_elements(self):
    """The file endpoint should include the file's data-store key/value pairs.

    Attaches a DataStoreModel row to a spec file, then asserts the GET
    response's 'data_store' mapping carries that key/value.
    """
    self.load_example_data()
    spec = session.query(WorkflowSpecModel).first()
    file = session.query(FileModel).filter_by(workflow_spec_id=spec.id).first()
    ds = DataStoreModel(key="my_key", value="my_value", file_id=file.id)  # stray ';' removed
    db.session.add(ds)
    rv = self.app.get('/v1.0/file/%i' % file.id, headers=self.logged_in_headers())
    self.assert_success(rv)
    json_data = json.loads(rv.get_data(as_text=True))
    self.assertEqual("my_value", json_data['data_store']['my_key'])
def test_get_files_for_form_field_returns_only_those_files(self):
self.create_reference_document()
workflow = self.create_workflow('file_upload_form')

View File

View File

@ -0,0 +1,19 @@
import json
from unittest.mock import patch
from tests.base_test import BaseTest
from crc import app
class TestGetStudyAssociateValidation(BaseTest):
    """Validating the get_study_associate spec should produce no errors."""

    @patch('crc.services.protocol_builder.ProtocolBuilderService.get_investigators')
    def test_get_study_associate_validation(self, mock_investigators):
        # Serve canned investigator data so the validation can resolve the PI.
        mock_investigators.return_value = json.loads(
            self.protocol_builder_response('investigators.json'))
        app.config['PB_ENABLED'] = True
        self.load_example_data()
        workflow = self.create_workflow('get_study_associate')
        rv = self.app.get('/v1.0/workflow-specification/%s/validate' % workflow.workflow_spec_id,
                          headers=self.logged_in_headers())
        # An empty error list means the spec validated cleanly.
        self.assertEqual(0, len(rv.json))

View File

@ -19,7 +19,11 @@ class TestSudySponsorsScript(BaseTest):
test_study_id = 1
def test_study_sponsors_script_validation(self):
@patch('crc.services.protocol_builder.requests.get')
def test_study_sponsors_script_validation(self, mock_get):
app.config['PB_ENABLED'] = True
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('sponsors.json')
flask.g.user = UserModel(uid='dhf8r')
self.load_example_data() # study_info script complains if irb_documents.xls is not loaded
# during the validate phase I'm going to assume that we will never
@ -161,3 +165,30 @@ class TestSudySponsorsScript(BaseTest):
app.config['PB_ENABLED'] = False
output = user_studies()
self.assertEqual(len(output),0)
@patch('crc.services.protocol_builder.requests.get')
def test_study_sponsors_script_ensure_delete(self, mock_get):
    """Running the associates-delete spec wipes study associates.

    After update_study_associates() runs with no arguments, a user whose
    association was removed should no longer see the study in user_studies().
    (The duplicated verification block at the end of the original test was
    removed — it repeated the same token/flag setup and assertion verbatim.)
    """
    mock_get.return_value.ok = True
    mock_get.return_value.text = self.protocol_builder_response('sponsors.json')
    flask.g.user = UserModel(uid='dhf8r')
    app.config['PB_ENABLED'] = True
    self.load_example_data()
    self.create_reference_document()
    study = session.query(StudyModel).first()
    workflow_spec_model = self.load_test_spec("study_sponsors_associates_delete")
    workflow_model = StudyService._create_workflow_model(study, workflow_spec_model)
    WorkflowService.test_spec("study_sponsors_associates_delete")
    processor = WorkflowProcessor(workflow_model)
    processor.do_engine_steps()
    # Switch to a user whose association was zapped; with PB disabled,
    # user_studies() returns only associated studies, so none remain.
    flask.g.user = UserModel(uid='lb3dp')
    flask.g.token = 'my spiffy token'
    app.config['PB_ENABLED'] = False
    output = user_studies()
    self.assertEqual(len(output), 0)

View File

@ -16,7 +16,11 @@ class TestSudySponsorsScript(BaseTest):
test_study_id = 1
def test_study_sponsors_script_validation(self):
@patch('crc.services.protocol_builder.requests.get')
def test_study_sponsors_script_validation(self, mock_get):
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('sponsors.json')
app.config['PB_ENABLED'] = True
flask.g.user = UserModel(uid='dhf8r')
self.load_example_data() # study_info script complains if irb_documents.xls is not loaded
# during the validate phase I'm going to assume that we will never

View File

@ -5,7 +5,7 @@ from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from tests.base_test import BaseTest
from unittest.mock import patch
from crc import db, session
from crc import app, session
from crc.api.common import ApiError
from crc.models.file import FileDataModel, FileModel
from crc.models.protocol_builder import ProtocolBuilderRequiredDocumentSchema, ProtocolBuilderStudySchema
@ -29,8 +29,23 @@ class TestStudyDetailsScript(BaseTest):
self.processor = WorkflowProcessor(self.workflow_model)
self.task = self.processor.next_task()
def test_study_info_returns_a_box_object_for_all_validations(self):
@patch('crc.services.protocol_builder.requests.get')
def test_study_info_returns_a_box_object_for_all_validations(self, mock_get):
app.config['PB_ENABLED'] = True
mock_get.return_value.ok = True
for option in StudyInfo.type_options:
if option == 'info':
mock_get.return_value.text = self.protocol_builder_response('irb_info.json')
elif option == 'investigators':
mock_get.return_value.text = self.protocol_builder_response('investigators.json')
elif option == 'roles':
mock_get.return_value.text = self.protocol_builder_response('investigators.json')
elif option == 'details':
mock_get.return_value.text = self.protocol_builder_response('study_details.json')
elif option == 'documents':
mock_get.return_value.text = self.protocol_builder_response('required_docs.json')
elif option == 'sponsors':
mock_get.return_value.text = self.protocol_builder_response('sponsors.json')
data = StudyInfo().do_task_validate_only(self.task, self.study.id, self.workflow_model.id, option)
if isinstance(data, list):
for x in data:

View File

@ -113,4 +113,45 @@ class TestStudyDetailsDocumentsScript(BaseTest):
docs = StudyInfo().do_task(task, study.id, workflow_model.id, "documents")
self.assertTrue(isinstance(docs, Box))
self.assertEquals(1, len(docs.UVACompl_PRCAppr.files))
self.assertEquals("doodle", docs.UVACompl_PRCAppr.files[0].data_store.ginger)
self.assertEquals("doodle", docs.UVACompl_PRCAppr.files[0].data_store.ginger)
@patch('crc.services.protocol_builder.requests.get')
def test_file_data_set_changes_irb_code(self, mock_get):
    """Setting 'irb_code' in a file's data store re-files the document.

    After file_data_set changes the code, the study's document listing
    should show the file under the new IRB code only.
    """
    mock_get.return_value.ok = True
    mock_get.return_value.text = self.protocol_builder_response('required_docs.json')
    self.load_example_data()
    self.create_reference_document()
    study = session.query(StudyModel).first()
    workflow_spec_model = self.load_test_spec("two_forms")
    workflow_model = StudyService._create_workflow_model(study, workflow_spec_model)
    irb_code = "UVACompl_PRCAppr"  # The first file referenced in pb required docs.
    file = FileService.add_workflow_file(workflow_id=workflow_model.id,
                                         name="anything.png", content_type="text",
                                         binary_data=b'1234', irb_doc_code=irb_code)
    processor = WorkflowProcessor(workflow_model)
    task = processor.next_task()
    FileDataSet().do_task(task, study.id, workflow_model.id,
                          key="irb_code", value="Study_App_Doc", file_id=file.id)
    docs = StudyInfo().do_task(task, study.id, workflow_model.id, "documents")
    self.assertTrue(isinstance(docs, Box))
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(1, len(docs.Study_App_Doc.files))
    self.assertEqual("Study_App_Doc", docs.Study_App_Doc.files[0].data_store.irb_code)
@patch('crc.services.protocol_builder.requests.get')
def test_file_data_set_invalid_irb_code_fails(self, mock_get):
    """Passing an unknown IRB document code to file_data_set raises ApiError."""
    mock_get.return_value.ok = True
    mock_get.return_value.text = self.protocol_builder_response('required_docs.json')
    self.load_example_data()
    self.create_reference_document()
    study = session.query(StudyModel).first()
    spec = self.load_test_spec("two_forms")
    workflow_model = StudyService._create_workflow_model(study, spec)
    doc_code = "UVACompl_PRCAppr"  # The first file referenced in pb required docs.
    uploaded_file = FileService.add_workflow_file(workflow_id=workflow_model.id,
                                                 name="anything.png", content_type="text",
                                                 binary_data=b'1234', irb_doc_code=doc_code)
    task = WorkflowProcessor(workflow_model).next_task()
    # "My_Pretty_Pony" is not a known IRB document code, so the script errors.
    with self.assertRaises(ApiError):
        FileDataSet().do_task(task, study.id, workflow_model.id,
                              key="irb_code", value="My_Pretty_Pony",
                              file_id=uploaded_file.id)

View File

@ -13,7 +13,12 @@ class TestSudySponsorsScript(BaseTest):
test_study_id = 1
def test_study_sponsors_script_validation(self):
@patch('crc.services.protocol_builder.requests.get')
def test_study_sponsors_script_validation(self, mock_get):
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('sponsors.json')
app.config['PB_ENABLED'] = True
self.load_example_data() # study_info script complains if irb_documents.xls is not loaded
# during the validate phase I'm going to assume that we will never
# have a case where irb_documents.xls is not loaded ??

View File

@ -1,14 +1,10 @@
import json
from profile import Profile
from tests.base_test import BaseTest
from datetime import datetime, timezone
from unittest.mock import patch
from crc.models.data_store import DataStoreModel, DataStoreSchema
from crc import session, app
from crc.models.file import FileModel
from crc import session
import json
class DataStoreTest(BaseTest):
@ -20,6 +16,13 @@ class DataStoreTest(BaseTest):
"spec_id": "My Spec Name",
"value": "Some Value"
}
# Template payload for file-scoped datastore POSTs; add_test_file_data
# overrides 'file_id' and 'value' per call before submitting.
TEST_FILE_ITEM = {
"key": "MyKey",
"workflow_id": 12,
"task_id": "MyTask",
"spec_id": "My Spec Name",
"value": "Some Value"
}
def add_test_study_data(self):
study_data = DataStoreSchema().dump(self.TEST_STUDY_ITEM)
@ -42,7 +45,16 @@ class DataStoreTest(BaseTest):
self.assert_success(rv)
return json.loads(rv.get_data(as_text=True))
def add_test_file_data(self, file_id, value):
    """POST a datastore entry attached to *file_id* and return the response JSON."""
    payload = DataStoreSchema().dump(self.TEST_FILE_ITEM)
    # Override the template with this call's target file and value.
    payload.update(file_id=file_id, value=value)
    rv = self.app.post('/v1.0/datastore',
                       content_type="application/json",
                       headers=self.logged_in_headers(),
                       data=json.dumps(payload))
    self.assert_success(rv)
    return json.loads(rv.get_data(as_text=True))
def test_get_study_data(self):
"""Generic test, but pretty detailed, in that the study should return a categorized list of workflows
@ -112,3 +124,60 @@ class DataStoreTest(BaseTest):
self.assert_success(api_response)
d = json.loads(api_response.get_data(as_text=True))
self.assertEqual(d[0]['value'],'Some Value')
def test_datastore_file(self):
    """A file starts with an empty datastore; a POSTed entry shows up on GET."""
    self.load_example_data()
    test_file = session.query(FileModel).first()

    # No datastore entries should exist for the file yet.
    rv = self.app.get(f'/v1.0/datastore/file/{test_file.id}',
                      headers=self.logged_in_headers(),
                      content_type="application/json")
    self.assert_success(rv)
    self.assertEqual(0, len(json.loads(rv.get_data(as_text=True))))

    # Add one entry, then fetch it back through the same endpoint.
    self.add_test_file_data(test_file.id, 'Some File Data Value')
    rv = self.app.get(f'/v1.0/datastore/file/{test_file.id}',
                      headers=self.logged_in_headers(),
                      content_type="application/json")
    self.assert_success(rv)
    records = json.loads(rv.get_data(as_text=True))
    self.assertEqual(1, len(records))
    self.assertEqual('MyKey', records[0]['key'])
    self.assertEqual('Some File Data Value', records[0]['value'])
def test_datastore_files(self):
    """Multiple datastore entries for one file are all returned, in order."""
    self.load_example_data()
    test_file = session.query(FileModel).first()

    def fetch_entries():
        # Helper: GET the file's datastore and return the parsed list.
        response = self.app.get(f'/v1.0/datastore/file/{test_file.id}',
                                headers=self.logged_in_headers(),
                                content_type="application/json")
        self.assert_success(response)
        return json.loads(response.get_data(as_text=True))

    first_value = 'Some File Data Value 1'
    self.add_test_file_data(test_file.id, first_value)
    records = fetch_entries()
    self.assertEqual(1, len(records))
    self.assertEqual('MyKey', records[0]['key'])
    self.assertEqual(first_value, records[0]['value'])

    second_value = 'Some File Data Value 2'
    self.add_test_file_data(test_file.id, second_value)
    records = fetch_entries()
    self.assertEqual(2, len(records))
    self.assertEqual(first_value, records[0]['value'])
    self.assertEqual(second_value, records[1]['value'])

View File

@ -269,3 +269,12 @@ class TestTasksApi(BaseTest):
self.assertEqual(0, len(self.get_assignment_task_events(submitter.uid)))
self.assertEqual(0, len(self.get_assignment_task_events(supervisor.uid)))
def test_no_error_when_calling_end_loop_on_non_looping_task(self):
    """Completing a non-looping task with terminate_loop=True must not raise."""
    workflow = self.create_workflow('hello_world')
    workflow_api = self.get_workflow_api(workflow)
    task_data = workflow_api.next_task.data
    task_data['name'] = "john"
    # The call itself is the assertion: no exception means success.
    self.complete_form(workflow, workflow_api.next_task, task_data, terminate_loop=True)

View File

@ -1,12 +1,17 @@
from tests.base_test import BaseTest
from crc import app
from crc.services.workflow_service import WorkflowService
from crc.api.common import ApiError
from jinja2.exceptions import TemplateSyntaxError
from unittest.mock import patch
class TestValidateEndEvent(BaseTest):
def test_validate_end_event(self):
@patch('crc.services.protocol_builder.requests.get')
def test_validate_end_event(self, mock_get):
app.config['PB_ENABLED'] = True
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('study_details.json')
error_string = """Error processing template for task EndEvent_1qvyxg7: expected token 'end of statement block', got '='"""

View File

@ -1,11 +1,16 @@
from tests.base_test import BaseTest
from crc import app
import json
from unittest.mock import patch
class TestWorkflowInfiniteLoop(BaseTest):
def test_workflow_infinite_loop(self):
@patch('crc.services.protocol_builder.requests.get')
def test_workflow_infinite_loop(self, mock_get):
app.config['PB_ENABLED'] = True
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('investigators.json')
self.load_example_data()
spec_model = self.load_test_spec('infinite_loop')
rv = self.app.get('/v1.0/workflow-specification/%s/validate' % spec_model.id, headers=self.logged_in_headers())