Merge pull request #25 from sartography/feature/status_refactor

Feature/status refactor
This commit is contained in:
Dan Funk 2020-03-31 09:31:53 -04:00 committed by GitHub
commit af5ab6a2c2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
43 changed files with 1934 additions and 1013 deletions

28
Pipfile.lock generated
View File

@ -179,10 +179,10 @@
},
"configparser": {
"hashes": [
"sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c",
"sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df"
"sha256:2ca44140ee259b5e3d8aaf47c79c36a7ab0d5e94d70bd4105c03ede7a20ea5a1",
"sha256:cffc044844040c7ce04e9acd1838b5f2e5fa3170182f6fda4d2ea8b0099dbadd"
],
"version": "==4.0.2"
"version": "==5.0.0"
},
"connexion": {
"extras": [
@ -322,10 +322,10 @@
},
"httpretty": {
"hashes": [
"sha256:66216f26b9d2c52e81808f3e674a6fb65d4bf719721394a1a9be926177e55fbe"
"sha256:24a6fd2fe1c76e94801b74db8f52c0fb42718dc4a199a861b305b1a492b9d868"
],
"index": "pypi",
"version": "==0.9.7"
"version": "==1.0.2"
},
"idna": {
"hashes": [
@ -343,11 +343,11 @@
},
"importlib-metadata": {
"hashes": [
"sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302",
"sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b"
"sha256:298a914c82144c6b3b06c568a8973b89ad2176685f43cd1ea9ba968307300fa9",
"sha256:dfc83688553a91a786c6c91eeb5f3b1d31f24d71877bbd94ecbf5484e57690a2"
],
"markers": "python_version < '3.8'",
"version": "==1.5.0"
"version": "==1.5.2"
},
"inflection": {
"hashes": [
@ -630,9 +630,9 @@
},
"pyrsistent": {
"hashes": [
"sha256:cdc7b5e3ed77bed61270a47d35434a30617b9becdf2478af76ad2c6ade307280"
"sha256:28669905fe725965daa16184933676547c5bb40a5153055a8dee2a4bd7933ad3"
],
"version": "==0.15.7"
"version": "==0.16.0"
},
"python-dateutil": {
"hashes": [
@ -769,7 +769,7 @@
"spiffworkflow": {
"editable": true,
"git": "https://github.com/sartography/SpiffWorkflow.git",
"ref": "eff4e3562775239bda8de9b20b722104d6b7e345"
"ref": "4e8f4d7ab9da27e7191997019634eb968e0a11e4"
},
"sqlalchemy": {
"hashes": [
@ -863,11 +863,11 @@
},
"importlib-metadata": {
"hashes": [
"sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302",
"sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b"
"sha256:298a914c82144c6b3b06c568a8973b89ad2176685f43cd1ea9ba968307300fa9",
"sha256:dfc83688553a91a786c6c91eeb5f3b1d31f24d71877bbd94ecbf5484e57690a2"
],
"markers": "python_version < '3.8'",
"version": "==1.5.0"
"version": "==1.5.2"
},
"more-itertools": {
"hashes": [

View File

@ -175,56 +175,7 @@ paths:
description: Study is currently up to date and does not need to be reloaded from Protocol Builder
'202':
description: Request accepted, will perform an update. Study state set to "updating"
/study/{study_id}/workflows:
get:
operationId: crc.api.study.get_study_workflows
summary: Provides a list of workflows to be completed for the given study.
tags:
- Studies
parameters:
- name: study_id
in: path
required: true
description: The id of the study for which workflows should be returned.
schema:
type: integer
format: int32
responses:
'200':
description: An array of workflows
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Workflow"
post:
operationId: crc.api.study.add_workflow_to_study
summary: Starts a new workflow for the given study using the provided spec. This is atypical, and should be left to the protocol builder.
tags:
- Studies
parameters:
- name: study_id
in: path
required: true
description: The id of the study for which a workflow should start
schema:
type: integer
format: int32
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/WorkflowSpec'
responses:
'200':
description: An array of workflows
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Workflow"
/workflow-specification:
get:
operationId: crc.api.workflow.all_specifications
@ -302,12 +253,34 @@ paths:
responses:
'204':
description: The workflow specification has been removed.
/workflow-specification/{spec_id}/validate:
parameters:
- name: spec_id
in: path
required: false
description: The unique id of an existing workflow specification to validate.
schema:
type: string
get:
operationId: crc.api.workflow.validate_workflow_specification
summary: Loads and attempts to execute a Workflow Specification, returning a list of errors encountered
tags:
- Workflow Specifications
responses:
'200':
description: Workflow specification.
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Error"
/workflow-specification-category:
get:
operationId: crc.api.workflow.list_workflow_spec_categories
summary: Provides a list of categories that can be added to a workflow spec.
tags:
- Workflow Specifications
- Workflow Specification Category
responses:
'200':
description: An array of workflow specification categories
@ -321,7 +294,7 @@ paths:
operationId: crc.api.workflow.add_workflow_spec_category
summary: Creates a new workflow spec category with the given parameters.
tags:
- Workflow Specifications
- Workflow Specification Category
requestBody:
content:
application/json:
@ -346,7 +319,7 @@ paths:
operationId: crc.api.workflow.get_workflow_spec_category
summary: Returns a single workflow spec category
tags:
- Workflow Specifications
- Workflow Specification Category
responses:
'200':
description: Workflow spec category.
@ -358,7 +331,7 @@ paths:
operationId: crc.api.workflow.update_workflow_spec_category
summary: Modifies an existing workflow spec category with the given parameters.
tags:
- Workflow Specifications
- Workflow Specification Category
requestBody:
content:
application/json:
@ -375,7 +348,7 @@ paths:
operationId: crc.api.workflow.delete_workflow_spec_category
summary: Removes an existing workflow spec category
tags:
- Workflow Specifications
- Workflow Specification Category
responses:
'204':
description: The workflow spec category has been removed.
@ -533,13 +506,11 @@ paths:
- file
responses:
'200':
description: Returns the actual file
description: Returns the updated file model with the new version information.
content:
application/octet-stream:
application/json:
schema:
type: string
format: binary
example: '<?xml version="1.0" encoding="UTF-8"?><bpmn:definitions></bpmn:definitions>'
$ref: "#/components/schemas/File"
# /v1.0/workflow/0
/reference_file:
get:
@ -849,22 +820,30 @@ components:
type: string
format: date_time
example: "2019-12-25T09:12:33.001Z"
primary_investigator_id:
type: string
example: dhf8r
user_uid:
type: string
example: dhf8r
protocol_builder_status:
type: string
enum: [DRAFT, IN_PROCESS, IN_REVIEW, REVIEW_COMPLETE, INACTIVE]
example: done
user_uid:
type: string
example: dhf8r
primary_investigator_id:
type: string
example: dhf8r
sponsor:
type: string
example: "Sartography Pharmaceuticals"
ind_number:
type: string
example: "27b-6-42"
hsr_number:
type: string
x-nullable: true
example: "27b-6-1212"
categories:
type: array
items:
$ref: "#/components/schemas/WorkflowSpecCategory"
WorkflowSpec:
properties:
id:
@ -878,7 +857,7 @@ components:
primary_process_id:
type: string
nullable: true
workflow_spec_category_id:
category_id:
type: integer
nullable: true
workflow_spec_category:
@ -896,6 +875,10 @@ components:
type: string
display_order:
type: integer
workflows:
type: array
items:
$ref: "#/components/schemas/Workflow"
File:
properties:
id:

View File

@ -2,16 +2,40 @@ from crc import ma, app
class ApiError(Exception):
def __init__(self, code, message, status_code=400):
def __init__(self, code, message, status_code=400,
file_name="", task_id="", task_name="", tag=""):
self.status_code = status_code
self.code = code
self.message = message
self.code = code # a short consistent string describing the error.
self.message = message # A detailed message that provides more information.
self.task_id = task_id or "" # OPTIONAL: The id of the task in the BPMN Diagram.
self.task_name = task_name or "" # OPTIONAL: The name of the task in the BPMN Diagram.
self.file_name = file_name or "" # OPTIONAL: The file that caused the error.
self.tag = tag or "" # OPTIONAL: The XML Tag that caused the issue.
Exception.__init__(self, self.message)
@classmethod
def from_task(cls, code, message, task, status_code=400):
"""Constructs an API Error with details pulled from the current task."""
instance = cls(code, message, status_code=status_code)
instance.task_id = task.task_spec.name or ""
instance.task_name = task.task_spec.description or ""
instance.file_name = task.workflow.spec.file or ""
return instance
@classmethod
def from_task_spec(cls, code, message, task_spec, status_code=400):
"""Constructs an API Error with details pulled from the current task."""
instance = cls(code, message, status_code=status_code)
instance.task_id = task_spec.name or ""
instance.task_name = task_spec.description or ""
if task_spec._wf_spec:
instance.file_name = task_spec._wf_spec.file
return instance
class ApiErrorSchema(ma.Schema):
class Meta:
fields = ("code", "message")
fields = ("code", "message", "workflow_name", "file_name", "task_name", "task_id")
@app.errorhandler(ApiError)

View File

@ -1,5 +1,4 @@
import logging
from typing import List, Optional, Union, Tuple, Dict
from typing import List
from connexion import NoContent
from flask import g
@ -10,131 +9,63 @@ from crc.api.common import ApiError, ApiErrorSchema
from crc.api.workflow import __get_workflow_api_model
from crc.models.api_models import WorkflowApiSchema
from crc.models.protocol_builder import ProtocolBuilderStatus, ProtocolBuilderStudy
from crc.models.study import StudyModelSchema, StudyModel
from crc.models.study import StudySchema, StudyModel, Study
from crc.models.workflow import WorkflowModel, WorkflowSpecModel
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.protocol_builder import ProtocolBuilderService
def all_studies():
return update_from_protocol_builder()
from crc.services.study_service import StudyService
from crc.services.workflow_processor import WorkflowProcessor
def add_study(body):
study: StudyModel = StudyModelSchema().load(body, session=session)
status_spec = __get_status_spec(study.status_spec_id)
# Get latest status spec version
if status_spec is not None:
study.status_spec_id = status_spec.id
study.status_spec_version = WorkflowProcessor.get_latest_version_string(status_spec.id)
session.add(study)
"""This should never get called, and is subject to deprication. Studies
should be added through the protocol builder only."""
study: Study = StudySchema().load(body)
study_model = StudyModel(**study.model_args())
session.add(study_model)
errors = StudyService._add_all_workflow_specs_to_study(study)
session.commit()
__add_study_workflows_from_status(study.id, status_spec)
return StudyModelSchema().dump(study)
def __get_status_spec(status_spec_id):
if status_spec_id is None:
return session.query(WorkflowSpecModel).filter_by(is_status=True).first()
else:
return session.query(WorkflowSpecModel).filter_by(id=status_spec_id).first()
def __add_study_workflows_from_status(study_id, status_spec):
all_specs = session.query(WorkflowSpecModel).all()
if status_spec is not None:
# Run status spec to get list of workflow specs applicable to this study
status_processor = WorkflowProcessor.create(study_id, status_spec)
status_processor.do_engine_steps()
status_data = status_processor.next_task().data
# Only add workflow specs listed in status spec
for spec in all_specs:
if spec.id in status_data and status_data[spec.id]:
WorkflowProcessor.create(study_id, spec.id)
else:
# No status spec. Just add all workflows.
for spec in all_specs:
WorkflowProcessor.create(study_id, spec.id)
study_data = StudySchema().dump(study)
study_data["errors"] = ApiErrorSchema(many=True).dump(errors)
return study_data
def update_study(study_id, body):
if study_id is None:
raise ApiError('unknown_study', 'Please provide a valid Study ID.')
study = session.query(StudyModel).filter_by(id=study_id).first()
if study is None:
study_model = session.query(StudyModel).filter_by(id=study_id).first()
if study_model is None:
raise ApiError('unknown_study', 'The study "' + study_id + '" is not recognized.')
schema = StudyModelSchema()
study = schema.load(body, session=session, instance=study, partial=True)
session.add(study)
study: Study = StudySchema().load(body)
study.update_model(study_model)
session.add(study_model)
session.commit()
return schema.dump(study)
return StudySchema().dump(study)
def get_study(study_id):
study = session.query(StudyModel).filter_by(id=study_id).first()
schema = StudyModelSchema()
if study is None:
return NoContent, 404
study_service = StudyService()
study = study_service.get_study(study_id)
schema = StudySchema()
return schema.dump(study)
def delete_study(study_id):
try:
session.query(StudyModel).filter_by(id=study_id).delete()
StudyService.delete_study(study_id)
except IntegrityError as ie:
session.rollback()
app.logger.error("Failed to delete Study #%i due to an Integrity Error: %s" % (study_id, str(ie)))
raise ApiError(code="study_integrity_error", message="This study contains running workflows that is "
"preventing deletion. Please delete the workflows " +
"before proceeding.")
message = "Failed to delete Study #%i due to an Integrity Error: %s" % (study_id, str(ie))
raise ApiError(code="study_integrity_error", message=message)
def update_from_protocol_builder():
"""Updates the list of known studies for a given user based on data received from
the protocol builder."""
user = g.user
""":type: crc.models.user.UserModel"""
# Get studies matching this user from Protocol Builder
pb_studies: List[ProtocolBuilderStudy] = get_user_pb_studies()
# Get studies from the database
db_studies = session.query(StudyModel).filter_by(user_uid=user.uid).all()
db_study_ids = list(map(lambda s: s.id, db_studies))
pb_study_ids = list(map(lambda s: s['STUDYID'], pb_studies))
for pb_study in pb_studies:
# Update studies with latest data from Protocol Builder
if pb_study['STUDYID'] in db_study_ids:
update_study(pb_study['STUDYID'], map_pb_study_to_study(pb_study))
# Add studies from Protocol Builder that aren't in the database yet
else:
new_study = map_pb_study_to_study(pb_study)
add_study(new_study)
# Mark studies as inactive that are no longer in Protocol Builder
for study_id in db_study_ids:
if study_id not in pb_study_ids:
update_study(
study_id=study_id,
body={
'inactive': True,
'protocol_builder_status': ProtocolBuilderStatus.INACTIVE.name
}
)
# Return updated studies
updated_studies = session.query(StudyModel).filter_by(user_uid=user.uid).all()
results = StudyModelSchema(many=True).dump(updated_studies)
def all_studies():
"""Returns all the studies associated with the current user. Assures we are
in sync with values read in from the protocol builder. """
StudyService.synch_all_studies_with_protocol_builder(g.user)
studies = StudyService.get_studies_for_user(g.user)
results = StudySchema(many=True).dump(studies)
return results
@ -142,115 +73,18 @@ def post_update_study_from_protocol_builder(study_id):
"""Update a single study based on data received from
the protocol builder."""
pb_studies: List[ProtocolBuilderStudy] = get_user_pb_studies()
for pb_study in pb_studies:
if pb_study['STUDYID'] == study_id:
return update_study(study_id, map_pb_study_to_study(pb_study))
db_study = session.query(StudyModel).filter_by(study_id=study_id).all()
pb_studies: List[ProtocolBuilderStudy] = ProtocolBuilderService.get_studies(g.user.uid)
pb_study = next((pbs for pbs in pb_studies if pbs.STUDYID == study_id), None)
if pb_study:
db_study.update_from_protocol_builder(pb_study)
else:
db_study.inactive = True
db_study.protocol_builder_status = ProtocolBuilderStatus.INACTIVE
return NoContent, 304
def get_study_workflows(study_id):
# Get study
study: StudyModel = session.query(StudyModel).filter_by(id=study_id).first()
# Get study status spec
status_spec: WorkflowSpecModel = session.query(WorkflowSpecModel)\
.filter_by(is_status=True)\
.filter_by(id=study.status_spec_id)\
.first()
status_data = None
if status_spec is not None:
# Run status spec
status_workflow_model: WorkflowModel = session.query(WorkflowModel)\
.filter_by(study_id=study.id)\
.filter_by(workflow_spec_id=status_spec.id)\
.first()
status_processor = WorkflowProcessor(status_workflow_model)
# Get list of active workflow specs for study
status_processor.do_engine_steps()
status_data = status_processor.bpmn_workflow.last_task.data
# Get study workflows
workflow_models = session.query(WorkflowModel)\
.filter_by(study_id=study_id)\
.filter(WorkflowModel.workflow_spec_id != status_spec.id)\
.all()
else:
# Get study workflows
workflow_models = session.query(WorkflowModel)\
.filter_by(study_id=study_id)\
.all()
api_models = []
for workflow_model in workflow_models:
processor = WorkflowProcessor(workflow_model,
workflow_model.bpmn_workflow_json)
api_models.append(__get_workflow_api_model(processor, status_data))
schema = WorkflowApiSchema(many=True)
return schema.dump(api_models)
def add_workflow_to_study(study_id, body):
workflow_spec_model: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=body["id"]).first()
if workflow_spec_model is None:
raise ApiError('unknown_spec', 'The specification "' + body['id'] + '" is not recognized.')
processor = WorkflowProcessor.create(study_id, workflow_spec_model.id)
# If workflow spec is a status spec, update the study status spec
if workflow_spec_model.is_status:
study = session.query(StudyModel).filter_by(id=study_id).first()
study.status_spec_id = workflow_spec_model.id
study.status_spec_version = processor.get_spec_version()
session.add(study)
session.commit()
return WorkflowApiSchema().dump(__get_workflow_api_model(processor))
def get_user_pb_studies() -> List[ProtocolBuilderStudy]:
"""Get studies from Protocol Builder matching the given user"""
user = g.user
""":type: crc.models.user.UserModel"""
return ProtocolBuilderService.get_studies(user.uid)
def map_pb_study_to_study(pb_study):
"""Translates the given dict of ProtocolBuilderStudy properties to dict of StudyModel attributes"""
prop_map = {
'STUDYID': 'id',
'HSRNUMBER': 'hsr_number',
'TITLE': 'title',
'NETBADGEID': 'user_uid',
'DATE_MODIFIED': 'last_updated',
}
study_info = {}
# Translate Protocol Builder property names to Study attributes
for k, v in pb_study.items():
if k in prop_map:
study_info[prop_map[k]] = v
# Translate Protocol Builder states to enum values
status = ProtocolBuilderStatus.DRAFT
pb_details = ProtocolBuilderService.get_study_details(pb_study['STUDYID'])
if 'Q_COMPLETE' in pb_study and pb_study['Q_COMPLETE']:
if 'UPLOAD_COMPLETE' in pb_details and pb_details['UPLOAD_COMPLETE']:
if 'HSRNUMBER' in pb_study and pb_study['HSRNUMBER']:
status = ProtocolBuilderStatus.REVIEW_COMPLETE
else:
status = ProtocolBuilderStatus.IN_REVIEW
else:
status = ProtocolBuilderStatus.IN_PROCESS
study_info['protocol_builder_status'] = status.name
study_info['inactive'] = False
return study_info

View File

@ -36,6 +36,17 @@ def get_workflow_specification(spec_id):
return WorkflowSpecModelSchema().dump(spec)
def validate_workflow_specification(spec_id):
errors = []
try:
WorkflowProcessor.test_spec(spec_id)
except ApiError as ae:
errors.append(ae)
return ApiErrorSchema(many=True).dump(errors)
def update_workflow_specification(spec_id, body):
if spec_id is None:
raise ApiError('unknown_spec', 'Please provide a valid Workflow Spec ID.')
@ -89,7 +100,6 @@ def __get_workflow_api_model(processor: WorkflowProcessor, status_data=None):
workflow_spec_id=processor.workflow_spec_id,
spec_version=processor.get_spec_version(),
is_latest_spec=processor.get_spec_version() == processor.get_latest_version_string(processor.workflow_spec_id),
is_active=is_active
)
if processor.next_task():
workflow_api.next_task = Task.from_spiff(processor.next_task())
@ -104,6 +114,7 @@ def get_workflow(workflow_id, soft_reset=False, hard_reset=False):
return WorkflowApiSchema().dump(workflow_api_model)
def delete(workflow_id):
session.query(WorkflowModel).filter_by(id=workflow_id).delete()
session.commit()

View File

@ -95,8 +95,8 @@ class TaskSchema(ma.Schema):
class WorkflowApi(object):
def __init__(self, id, status, user_tasks, last_task, next_task, workflow_spec_id, spec_version,
is_latest_spec, is_active):
def __init__(self, id, status, user_tasks, last_task, next_task,
spec_version, is_latest_spec, workflow_spec_id):
self.id = id
self.status = status
self.user_tasks = user_tasks
@ -105,14 +105,12 @@ class WorkflowApi(object):
self.workflow_spec_id = workflow_spec_id
self.spec_version = spec_version
self.is_latest_spec = is_latest_spec
self.is_active = is_active
class WorkflowApiSchema(ma.Schema):
class Meta:
model = WorkflowApi
fields = ["id", "status", "user_tasks", "last_task", "next_task",
"workflow_spec_id", "spec_version", "is_latest_spec", "is_active"]
"workflow_spec_id", "spec_version", "is_latest_spec"]
unknown = INCLUDE
status = EnumField(WorkflowStatus)
@ -123,6 +121,6 @@ class WorkflowApiSchema(ma.Schema):
@marshmallow.post_load
def make_workflow(self, data, **kwargs):
keys = ['id', 'status', 'user_tasks', 'last_task', 'next_task',
'workflow_spec_id', 'spec_version', 'is_latest_spec', "is_active"]
'workflow_spec_id', 'spec_version', 'is_latest_spec']
filtered_fields = {key: data[key] for key in keys}
return WorkflowApi(**filtered_fields)

View File

@ -1,6 +1,6 @@
import enum
from marshmallow import INCLUDE
from marshmallow import INCLUDE, post_load
from crc import ma
@ -43,10 +43,13 @@ class ProtocolBuilderStudySchema(ma.Schema):
model = ProtocolBuilderStudy
unknown = INCLUDE
@post_load
def make_pbs(self, data, **kwargs):
return ProtocolBuilderStudy(**data)
class ProtocolBuilderInvestigator(object):
def __init__(self, STUDYID: int, NETBADGEID: str, INVESTIGATORTYPE: str, INVESTIGATORTYPEFULL: str):
self.STUDYID = STUDYID
def __init__(self, NETBADGEID: str, INVESTIGATORTYPE: str, INVESTIGATORTYPEFULL: str):
self.NETBADGEID = NETBADGEID
self.INVESTIGATORTYPE = INVESTIGATORTYPE
self.INVESTIGATORTYPEFULL = INVESTIGATORTYPEFULL
@ -57,6 +60,9 @@ class ProtocolBuilderInvestigatorSchema(ma.Schema):
model = ProtocolBuilderInvestigator
unknown = INCLUDE
@post_load
def make_inv(self, data, **kwargs):
return ProtocolBuilderInvestigator(**data)
class ProtocolBuilderRequiredDocument(object):
def __init__(self, AUXDOCID: str, AUXDOC: str):
@ -69,78 +75,80 @@ class ProtocolBuilderRequiredDocumentSchema(ma.Schema):
model = ProtocolBuilderRequiredDocument
unknown = INCLUDE
@post_load
def make_req(self, data, **kwargs):
return ProtocolBuilderRequiredDocument(**data)
class ProtocolBuilderStudyDetails(object):
def __init__(
self,
STUDYID: int,
IS_IND: int,
IND_1: str,
IND_2: str,
IND_3: str,
IS_UVA_IND: int,
IS_IDE: int,
IS_UVA_IDE: int,
IDE: str,
IS_CHART_REVIEW: int,
IS_RADIATION: int,
GCRC_NUMBER: str,
IS_GCRC: int,
IS_PRC_DSMP: int,
IS_PRC: int,
PRC_NUMBER: str,
IS_IBC: int,
IBC_NUMBER: str,
SPONSORS_PROTOCOL_REVISION_DATE: int,
IS_SPONSOR_MONITORING: int,
IS_AUX: int,
IS_SPONSOR: int,
IS_GRANT: int,
IS_COMMITTEE_CONFLICT: int,
DSMB: int,
DSMB_FREQUENCY: int,
IS_DB: int,
IS_UVA_DB: int,
IS_CENTRAL_REG_DB: int,
IS_CONSENT_WAIVER: int,
IS_HGT: int,
IS_GENE_TRANSFER: int,
IS_TISSUE_BANKING: int,
IS_SURROGATE_CONSENT: int,
IS_ADULT_PARTICIPANT: int,
IS_MINOR_PARTICIPANT: int,
IS_MINOR: int,
IS_BIOMEDICAL: int,
IS_QUALITATIVE: int,
IS_PI_SCHOOL: int,
IS_PRISONERS_POP: int,
IS_PREGNANT_POP: int,
IS_FETUS_POP: int,
IS_MENTAL_IMPAIRMENT_POP: int,
IS_ELDERLY_POP: int,
IS_OTHER_VULNERABLE_POP: int,
OTHER_VULNERABLE_DESC: str,
IS_MULTI_SITE: int,
IS_UVA_LOCATION: int,
NON_UVA_LOCATION: str,
MULTI_SITE_LOCATIONS: str,
IS_OUTSIDE_CONTRACT: int,
IS_UVA_PI_MULTI: int,
IS_NOT_PRC_WAIVER: int,
IS_CANCER_PATIENT: int,
UPLOAD_COMPLETE: int,
IS_FUNDING_SOURCE: int,
IS_PI_INITIATED: int,
IS_ENGAGED_RESEARCH: int,
IS_APPROVED_DEVICE: int,
IS_FINANCIAL_CONFLICT: int,
IS_NOT_CONSENT_WAIVER: int,
IS_FOR_CANCER_CENTER: int,
IS_REVIEW_BY_CENTRAL_IRB: int,
IRBREVIEWERADMIN: str
IS_IND: int = None,
IND_1: str = None,
IND_2: str = None,
IND_3: str = None,
IS_UVA_IND: int = None,
IS_IDE: int = None,
IS_UVA_IDE: int = None,
IDE: str = None,
IS_CHART_REVIEW: int = None,
IS_RADIATION: int = None,
GCRC_NUMBER: str = None,
IS_GCRC: int = None,
IS_PRC_DSMP: int = None,
IS_PRC: int = None,
PRC_NUMBER: str = None,
IS_IBC: int = None,
IBC_NUMBER: str = None,
SPONSORS_PROTOCOL_REVISION_DATE: int = None,
IS_SPONSOR_MONITORING: int = None,
IS_AUX: int = None,
IS_SPONSOR: int = None,
IS_GRANT: int = None,
IS_COMMITTEE_CONFLICT: int = None,
DSMB: int = None,
DSMB_FREQUENCY: int = None,
IS_DB: int = None,
IS_UVA_DB: int = None,
IS_CENTRAL_REG_DB: int = None,
IS_CONSENT_WAIVER: int = None,
IS_HGT: int = None,
IS_GENE_TRANSFER: int = None,
IS_TISSUE_BANKING: int = None,
IS_SURROGATE_CONSENT: int = None,
IS_ADULT_PARTICIPANT: int = None,
IS_MINOR_PARTICIPANT: int = None,
IS_MINOR: int = None,
IS_BIOMEDICAL: int = None,
IS_QUALITATIVE: int = None,
IS_PI_SCHOOL: int = None,
IS_PRISONERS_POP: int = None,
IS_PREGNANT_POP: int = None,
IS_FETUS_POP: int = None,
IS_MENTAL_IMPAIRMENT_POP: int = None,
IS_ELDERLY_POP: int = None,
IS_OTHER_VULNERABLE_POP: int = None,
OTHER_VULNERABLE_DESC: str = None,
IS_MULTI_SITE: int = None,
IS_UVA_LOCATION: int = None,
NON_UVA_LOCATION: str = None,
MULTI_SITE_LOCATIONS: str = None,
IS_OUTSIDE_CONTRACT: int = None,
IS_UVA_PI_MULTI: int = None,
IS_NOT_PRC_WAIVER: int = None,
IS_CANCER_PATIENT: int = None,
UPLOAD_COMPLETE: int = None,
IS_FUNDING_SOURCE: int = None,
IS_PI_INITIATED: int = None,
IS_ENGAGED_RESEARCH: int = None,
IS_APPROVED_DEVICE: int = None,
IS_FINANCIAL_CONFLICT: int = None,
IS_NOT_CONSENT_WAIVER: int = None,
IS_FOR_CANCER_CENTER: int = None,
IS_REVIEW_BY_CENTRAL_IRB: int = None,
IRBREVIEWERADMIN: str = None
):
self.STUDYID = STUDYID
self.IS_IND = IS_IND
self.IND_1 = IND_1
self.IND_2 = IND_2
@ -211,3 +219,7 @@ class ProtocolBuilderStudyDetailsSchema(ma.Schema):
class Meta:
model = ProtocolBuilderStudyDetails
unknown = INCLUDE
@post_load
def make_details(self, data, **kwargs):
return ProtocolBuilderStudyDetails(**data)

View File

@ -1,9 +1,13 @@
import marshmallow
from marshmallow import INCLUDE, fields
from marshmallow_enum import EnumField
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from sqlalchemy import func
from crc import db
from crc.models.protocol_builder import ProtocolBuilderStatus
from crc import db, ma
from crc.api.common import ApiErrorSchema
from crc.models.protocol_builder import ProtocolBuilderStatus, ProtocolBuilderStudy
from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowState, WorkflowStatus, WorkflowSpecModel, \
WorkflowModel
class StudyModel(db.Model):
@ -20,15 +24,132 @@ class StudyModel(db.Model):
investigator_uids = db.Column(db.ARRAY(db.String), nullable=True)
inactive = db.Column(db.Boolean, default=False)
requirements = db.Column(db.ARRAY(db.Integer), nullable=True)
status_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
status_spec_version = db.Column(db.String)
def update_from_protocol_builder(self, pbs: ProtocolBuilderStudy):
self.hsr_number = pbs.HSRNUMBER
self.title = pbs.TITLE
self.user_uid = pbs.NETBADGEID
self.last_updated = pbs.DATE_MODIFIED
self.protocol_builder_status = ProtocolBuilderStatus.DRAFT
self.inactive = False
if pbs.HSRNUMBER: # And Up load complete?
self.protocol_builder_status = ProtocolBuilderStatus.REVIEW_COMPLETE
elif pbs.Q_COMPLETE:
self.protocol_builder_status = ProtocolBuilderStatus.IN_PROCESS
class StudyModelSchema(SQLAlchemyAutoSchema):
class WorkflowMetadata(object):
def __init__(self, id, name, display_name, description, spec_version, category_id, state: WorkflowState, status: WorkflowStatus,
total_tasks, completed_tasks):
self.id = id
self.name = name
self.display_name = display_name
self.description = description
self.spec_version = spec_version
self.category_id = category_id
self.state = state
self.status = status
self.total_tasks = total_tasks
self.completed_tasks = completed_tasks
@classmethod
def from_workflow(cls, workflow: WorkflowModel):
instance = cls(
id=workflow.id,
name=workflow.workflow_spec.name,
display_name=workflow.workflow_spec.display_name,
description=workflow.workflow_spec.description,
spec_version=workflow.spec_version,
category_id=workflow.workflow_spec.category_id,
state=WorkflowState.optional,
status=workflow.status,
total_tasks=workflow.total_tasks,
completed_tasks=workflow.completed_tasks)
return instance
class WorkflowMetadataSchema(ma.Schema):
state = EnumField(WorkflowState)
status = EnumField(WorkflowStatus)
class Meta:
model = StudyModel
load_instance = True
include_relationships = True
include_fk = True # Includes foreign keys
model = WorkflowMetadata
additional = ["id", "name", "display_name", "description",
"total_tasks", "completed_tasks"]
unknown = INCLUDE
class Category(object):
def __init__(self, model: WorkflowSpecCategoryModel):
self.id = model.id
self.name = model.name
self.display_name = model.display_name
self.display_order = model.display_order
class CategorySchema(ma.Schema):
workflows = fields.List(fields.Nested(WorkflowMetadataSchema), dump_only=True)
class Meta:
model = Category
additional = ["id", "name", "display_name", "display_order"]
unknown = INCLUDE
class Study(object):
def __init__(self, id, title, last_updated, primary_investigator_id, user_uid,
protocol_builder_status=None,
sponsor="", hsr_number="", ind_number="", inactive=False, categories=[], **argsv):
self.id = id
self.user_uid = user_uid
self.title = title
self.last_updated = last_updated
self.protocol_builder_status = protocol_builder_status
self.primary_investigator_id = primary_investigator_id
self.sponsor = sponsor
self.hsr_number = hsr_number
self.ind_number = ind_number
self.inactive = inactive
self.categories = categories
self.warnings = []
@classmethod
def from_model(cls, study_model: StudyModel):
args = {k: v for k, v in study_model.__dict__.items() if not k.startswith('_')}
instance = cls(**args)
return instance
def update_model(self, study_model: StudyModel):
for k,v in self.__dict__.items():
if not k.startswith('_'):
study_model.__dict__[k] = v
def model_args(self):
"""Arguments that can be passed into the Study Model to update it."""
self_dict = self.__dict__.copy()
del self_dict["categories"]
del self_dict["warnings"]
return self_dict
class StudySchema(ma.Schema):
categories = fields.List(fields.Nested(CategorySchema), dump_only=True)
warnings = fields.List(fields.Nested(ApiErrorSchema), dump_only=True)
protocol_builder_status = EnumField(ProtocolBuilderStatus)
hsr_number = fields.String(allow_none=True)
class Meta:
model = Study
additional = ["id", "title", "last_updated", "primary_investigator_id", "user_uid",
"sponsor", "ind_number", "inactive"]
unknown = INCLUDE
@marshmallow.post_load
def make_study(self, data, **kwargs):
"""Can load the basic study data for updates to the database, but categories are write only"""
return Study(**data)

View File

@ -29,9 +29,9 @@ class WorkflowSpecModel(db.Model):
display_name = db.Column(db.String)
description = db.Column(db.Text)
primary_process_id = db.Column(db.String)
workflow_spec_category_id = db.Column(db.Integer, db.ForeignKey('workflow_spec_category.id'), nullable=True)
workflow_spec_category = db.relationship("WorkflowSpecCategoryModel")
is_status = db.Column(db.Boolean, default=False)
category_id = db.Column(db.Integer, db.ForeignKey('workflow_spec_category.id'), nullable=True)
category = db.relationship("WorkflowSpecCategoryModel")
is_master_spec = db.Column(db.Boolean, default=False)
class WorkflowSpecModelSchema(SQLAlchemyAutoSchema):
@ -42,11 +42,25 @@ class WorkflowSpecModelSchema(SQLAlchemyAutoSchema):
include_fk = True # Includes foreign keys
unknown = EXCLUDE
workflow_spec_category = marshmallow.fields.Nested(WorkflowSpecCategoryModelSchema, dump_only=True)
category = marshmallow.fields.Nested(WorkflowSpecCategoryModelSchema, dump_only=True)
class WorkflowState(enum.Enum):
hidden = "hidden"
disabled = "disabled"
required = "required"
optional = "optional"
@classmethod
def has_value(cls, value):
return value in cls._value2member_map_
@staticmethod
def list():
return list(map(lambda c: c.value, WorkflowState))
class WorkflowStatus(enum.Enum):
new = "new"
not_started = "not_started"
user_input_required = "user_input_required"
waiting = "waiting"
complete = "complete"
@ -59,4 +73,7 @@ class WorkflowModel(db.Model):
status = db.Column(db.Enum(WorkflowStatus))
study_id = db.Column(db.Integer, db.ForeignKey('study.id'))
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
workflow_spec = db.relationship("WorkflowSpecModel")
spec_version = db.Column(db.String)
total_tasks = db.Column(db.Integer, default=0)
completed_tasks = db.Column(db.Integer, default=0)

View File

@ -26,7 +26,23 @@ Takes two arguments:
2. The 'code' of the IRB Document as set in the irb_documents.xlsx file."
"""
def do_task_validate_only(self, task, study_id, *args, **kwargs):
    """Dry run for spec validation: render the template, discard the result.

    Unlike do_task, the generated document stream is never written to the
    file store or database.
    """
    self.process_template(task, study_id, *args, **kwargs)
def do_task(self, task, study_id, *args, **kwargs):
    """Render the docx template and persist it as an IRB file on this task.

    args[0] is the template file name, args[1] the IRB document code.
    """
    workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
    document_stream = self.process_template(task, study_id, *args, **kwargs)
    file_name, irb_doc_code = args[0], args[1]
    FileService.add_task_file(study_id=study_id,
                              workflow_id=workflow_id,
                              task_id=task.id,
                              name=file_name,
                              content_type=CONTENT_TYPES['docx'],
                              binary_data=document_stream.read(),
                              irb_doc_code=irb_doc_code)
def process_template(self, task, study_id, *args, **kwargs):
"""Entry point, mostly worried about wiring it all up."""
if len(args) != 2:
raise ApiError(code="missing_argument",
@ -34,10 +50,9 @@ Takes two arguments:
"the name of the docx template to use. The second "
"argument is a code for the document, as "
"set in the reference document %s. " % FileService.IRB_PRO_CATEGORIES_FILE)
file_name = args[0]
irb_doc_code = args[1]
workflow_spec_model = self.find_spec_model_in_db(task.workflow)
task_study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
file_name = args[0]
if task_study_id != study_id:
raise ApiError(code="invalid_argument",
@ -54,18 +69,12 @@ Takes two arguments:
if file_data_model is None:
raise ApiError(code="file_missing",
message="Can not find a file called '%s' "
"within workflow specification '%s'") % (args[0], workflow_spec_model.id)
message="Can not find a file called '%s' within workflow specification '%s'"
% (args[0], workflow_spec_model.id))
final_document_stream = self.make_template(BytesIO(file_data_model.data), task.data)
workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
FileService.add_task_file(study_id=study_id, workflow_id=workflow_id, task_id=task.id,
name=file_name,
content_type=CONTENT_TYPES['docx'],
binary_data=final_document_stream.read(),
irb_doc_code=irb_doc_code)
print("Complete Task was called with %s" % str(args))
return self.make_template(BytesIO(file_data_model.data), task.data)
def make_template(self, binary_stream, context):
doc = DocxTemplate(binary_stream)

View File

@ -20,6 +20,9 @@ class FactService(Script):
response = requests.get('https://api.chucknorris.io/jokes/random')
return response.json()['value']
def do_task_validate_only(self, task, study_id, **kwargs):
    """Validation mode simply runs do_task; it performs no external calls."""
    self.do_task(task, study_id, **kwargs)
def do_task(self, task, study_id, **kwargs):
print(task.data)

View File

@ -1,8 +1,4 @@
from pandas import ExcelFile
from crc import session, ma
from crc.api.common import ApiError
from crc.models.study import StudyModel, StudyModelSchema
from crc.scripts.script import Script, ScriptValidationError
from crc.services.file_service import FileService
from crc.services.protocol_builder import ProtocolBuilderService
@ -11,14 +7,13 @@ from crc.services.protocol_builder import ProtocolBuilderService
class RequiredDocs(Script):
"""Provides information about the documents required by Protocol Builder."""
pb = ProtocolBuilderService()
type_options = ['info', 'investigators', 'required_docs', 'details']
def get_description(self):
return """
Provides detailed information about the documents required by the Protocol Builder.
Makes an immediate call to the IRB Protocol Builder API to get a list of currently required
documents. It then collects all the information in a reference file called 'irb_pro_categories.xls',
if the Id from Protcol Builder matches an Id in this table, all data available in that row
if the Id from Protocol Builder matches an Id in this table, all data available in that row
is also provided.
This place a dictionary of values in the current task, where the key is the numeric id.
@ -39,28 +34,33 @@ For example:
}
```
"""
def do_task_validate_only(self, task, study_id, *args, **kwargs):
"""For validation only, pretend no results come back from pb"""
pb_docs = []
self.get_required_docs(study_id, pb_docs)
task.data["required_docs"] = self.get_required_docs(study_id, pb_docs)
def do_task(self, task, study_id, *args, **kwargs):
"""Takes data from the protocol builder, and merges it with data from the IRB Pro Categories
spreadsheet to return pertinent details about the required documents."""
self.get_required_docs(study_id)
task.data["required_docs"] = self.get_required_docs(study_id)
spreadsheet to return pertinent details about the required documents."""
pb_docs = self.pb.get_required_docs(study_id)
self.get_required_docs(study_id, pb_docs)
task.data["required_docs"] = self.get_required_docs(study_id, pb_docs)
def get_required_docs(self, study_id):
def get_required_docs(self, study_id, pb_docs):
"""Takes data from the protocol builder, and merges it with data from the IRB Pro Categories spreadsheet to return
pertinent details about the required documents."""
pb_docs = self.pb.get_required_docs(study_id)
doc_dictionary = FileService.get_file_reference_dictionary()
required_docs = []
required_docs = {}
for doc in pb_docs:
id = int(doc['AUXDOCID'])
required_doc = {'id': id, 'name': doc['AUXDOC'], 'required': True,
id = int(doc.AUXDOCID)
required_doc = {'id': id, 'name': doc.AUXDOC, 'required': True,
'count': 0}
if id in doc_dictionary:
required_doc = {**required_doc, **doc_dictionary[id]}
required_doc['count'] = self.get_count(study_id, doc_dictionary[id]["Code"])
required_docs.append(required_doc)
required_docs[id] = required_doc
return required_docs
def get_count(self, study_id, irb_doc_code):

View File

@ -15,9 +15,15 @@ class Script:
def do_task(self, task, study_id, **kwargs):
raise ApiError("invalid_script",
"This is an internal error. The script you are trying to execute " +
"This is an internal error. The script you are trying to execute '%s' " % self.__class__.__name__ +
"does not properly implement the do_task function.")
def do_task_validate_only(self, task, study_id, **kwargs):
    """Validation-mode counterpart of do_task; subclasses must override it.

    The override should mimic do_task without making external calls or
    database updates.

    Raises:
        ApiError: always, since the base class provides no implementation.
    """
    # Fixed garbled message text: "does must provide" -> "must provide".
    raise ApiError("invalid_script",
                   "This is an internal error. The script you are trying to execute '%s' " % self.__class__.__name__ +
                   "must provide a validate_only option that mimics the do_task, "
                   "but does not make external calls or database updates.")
def validate(self):
"""Override this method to perform an early check that the script has access to
everything it needs to properly process requests.

View File

@ -1,8 +1,9 @@
from crc import session
from crc.api.common import ApiError
from crc.models.study import StudyModel, StudyModelSchema
from crc.models.study import StudyModel, StudySchema
from crc.scripts.script import Script
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.workflow_processor import WorkflowProcessor
class StudyInfo(Script):
@ -20,11 +21,14 @@ class StudyInfo(Script):
this study.
"""
def do_task_validate_only(self, task, study_id, *args, **kwargs):
    """Validation mode: only verify the arguments; skip the protocol-builder calls."""
    self.check_args(args)
def do_task(self, task, study_id, *args, **kwargs):
if len(args) != 1 or (args[0] not in StudyInfo.type_options):
raise ApiError(code="missing_argument",
message="The StudyInfo script requires a single argument which must be "
"one of %s" % ",".join(StudyInfo.type_options))
self.check_args(args)
cmd = args[0]
study_info = {}
if "study" in task.data:
@ -32,10 +36,17 @@ class StudyInfo(Script):
if cmd == 'info':
study = session.query(StudyModel).filter_by(id=study_id).first()
schema = StudyModelSchema()
schema = StudySchema()
study_info["info"] = schema.dump(study)
if cmd == 'investigators':
study_info["investigators"] = self.pb.get_investigators(study_id)
study_info["investigators"] = self.pb.get_investigators(study_id, as_json=True)
if cmd == 'details':
study_info["details"] = self.pb.get_study_details(study_id)
study_info["details"] = self.pb.get_study_details(study_id, as_json=True)
task.data["study"] = study_info
def check_args(self, args):
    """Ensure exactly one argument was supplied and that it is a known type option."""
    valid = len(args) == 1 and args[0] in StudyInfo.type_options
    if not valid:
        raise ApiError(code="missing_argument",
                       message="The StudyInfo script requires a single argument which must be "
                               "one of %s" % ",".join(StudyInfo.type_options))

View File

@ -30,36 +30,45 @@ class ProtocolBuilderService(object):
(response.status_code, response.text))
@staticmethod
def get_investigators(study_id) -> Optional[List[ProtocolBuilderInvestigator]]:
def get_investigators(study_id, as_json=False) -> Optional[List[ProtocolBuilderInvestigator]]:
ProtocolBuilderService.check_args(study_id)
response = requests.get(ProtocolBuilderService.INVESTIGATOR_URL % study_id)
if response.ok and response.text:
pb_studies = ProtocolBuilderInvestigatorSchema(many=True).loads(response.text)
return pb_studies
if as_json:
return ProtocolBuilderInvestigatorSchema(many=True).dump(pb_studies)
else:
return pb_studies
else:
raise ApiError("protocol_builder_error",
"Received an invalid response from the protocol builder (status %s): %s" %
(response.status_code, response.text))
@staticmethod
def get_required_docs(study_id) -> Optional[List[ProtocolBuilderRequiredDocument]]:
def get_required_docs(study_id, as_json=False) -> Optional[List[ProtocolBuilderRequiredDocument]]:
ProtocolBuilderService.check_args(study_id)
response = requests.get(ProtocolBuilderService.REQUIRED_DOCS_URL % study_id)
if response.ok and response.text:
pb_studies = ProtocolBuilderRequiredDocumentSchema(many=True).loads(response.text)
return pb_studies
if as_json:
return ProtocolBuilderRequiredDocumentSchema(many=True).dump(pb_studies)
else:
return pb_studies
else:
raise ApiError("protocol_builder_error",
"Received an invalid response from the protocol builder (status %s): %s" %
(response.status_code, response.text))
@staticmethod
def get_study_details(study_id) -> Optional[ProtocolBuilderStudyDetails]:
def get_study_details(study_id, as_json=False) -> Optional[ProtocolBuilderStudyDetails]:
ProtocolBuilderService.check_args(study_id)
response = requests.get(ProtocolBuilderService.STUDY_DETAILS_URL % study_id)
if response.ok and response.text:
pb_study_details = ProtocolBuilderStudyDetailsSchema().loads(response.text)
return pb_study_details
if as_json:
return ProtocolBuilderStudyDetailsSchema().dump(pb_study_details)
else:
return pb_study_details
else:
raise ApiError("protocol_builder_error",
"Received an invalid response from the protocol builder (status %s): %s" %

View File

@ -0,0 +1,159 @@
from typing import List
from SpiffWorkflow import WorkflowException
from crc import db, session
from crc.api.common import ApiError
from crc.models.protocol_builder import ProtocolBuilderStudy, ProtocolBuilderStatus
from crc.models.study import StudyModel, Study, Category, WorkflowMetadata
from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowModel, WorkflowSpecModel, WorkflowState, \
WorkflowStatus
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.workflow_processor import WorkflowProcessor
class StudyService(object):
    """Provides common tools for working with a Study."""

    @staticmethod
    def get_studies_for_user(user):
        """Returns a list of all studies for the given user."""
        db_studies = session.query(StudyModel).filter_by(user_uid=user.uid).all()
        return [StudyService.get_study(model.id, model) for model in db_studies]

    @staticmethod
    def get_study(study_id, study_model: StudyModel = None):
        """Returns a study model that contains all the workflows organized by category.

        IMPORTANT: This is intended to be a lightweight call, it should never involve
        loading up and executing all the workflows in a study to calculate information.
        """
        if not study_model:
            study_model = session.query(StudyModel).filter_by(id=study_id).first()
        study = Study.from_model(study_model)
        study.categories = StudyService.get_categories()
        workflow_metas = StudyService.__get_workflow_metas(study_id)
        status = StudyService.__get_study_status(study_model)
        study.warnings = StudyService.__update_status_of_workflow_meta(workflow_metas, status)

        # Group the workflows into their categories.
        for category in study.categories:
            category.workflows = {w for w in workflow_metas if w.category_id == category.id}

        return study

    @staticmethod
    def delete_study(study_id):
        """Removes the study and any workflows that belong to it."""
        session.query(WorkflowModel).filter_by(study_id=study_id).delete()
        session.query(StudyModel).filter_by(id=study_id).delete()

    @staticmethod
    def get_categories():
        """Returns a list of category objects, in the correct display order."""
        cat_models = db.session.query(WorkflowSpecCategoryModel) \
            .order_by(WorkflowSpecCategoryModel.display_order).all()
        return [Category(cat_model) for cat_model in cat_models]

    @staticmethod
    def synch_all_studies_with_protocol_builder(user):
        """Assures that the studies we have locally for the given user are
        in sync with the studies available in protocol builder."""

        # Get studies matching this user from Protocol Builder
        pb_studies: List[ProtocolBuilderStudy] = ProtocolBuilderService.get_studies(user.uid)

        # Get studies from the database
        db_studies = session.query(StudyModel).filter_by(user_uid=user.uid).all()

        # Update all studies from the protocol builder, create new studies as needed.
        # Further assures that every active study (that does exist in the protocol builder)
        # has a reference to every available workflow (though some may not have started yet)
        for pb_study in pb_studies:
            db_study = next((s for s in db_studies if s.id == pb_study.STUDYID), None)
            if not db_study:
                db_study = StudyModel(id=pb_study.STUDYID)
                session.add(db_study)
                db_studies.append(db_study)
            db_study.update_from_protocol_builder(pb_study)
            StudyService._add_all_workflow_specs_to_study(db_study)

        # Mark studies as inactive that are no longer in Protocol Builder
        for study in db_studies:
            pb_study = next((pbs for pbs in pb_studies if pbs.STUDYID == study.id), None)
            if not pb_study:
                study.inactive = True
                study.protocol_builder_status = ProtocolBuilderStatus.INACTIVE

        db.session.commit()

    @staticmethod
    def __update_status_of_workflow_meta(workflow_metas, status):
        """Applies the master-spec status dictionary to each workflow meta.

        Returns a list of ApiError warnings for unknown or missing statuses."""
        warnings = []
        for wfm in workflow_metas:
            if wfm.name not in status:
                warnings.append(ApiError("missing_status", "No status specified for workflow %s" % wfm.name))
                continue
            if not WorkflowState.has_value(status[wfm.name]):
                warnings.append(ApiError("invalid_status",
                                         "Workflow '%s' can not be set to '%s', should be one of %s" % (
                                             wfm.name, status[wfm.name], ",".join(WorkflowState.list())
                                         )))
            else:
                # Look the state up by VALUE, matching the has_value() check above.
                # (Previously indexed by name, which only worked because the enum's
                # names happen to equal its values.)
                wfm.state = WorkflowState(status[wfm.name])
        return warnings

    @staticmethod
    def __get_workflow_metas(study_id):
        """Builds WorkflowMetadata objects for every non-master workflow in the study."""
        workflow_models = db.session.query(WorkflowModel).\
            join(WorkflowSpecModel).\
            filter(WorkflowSpecModel.is_master_spec == False).\
            filter(WorkflowModel.study_id == study_id).\
            all()
        return [WorkflowMetadata.from_workflow(workflow) for workflow in workflow_models]

    @staticmethod
    def __get_study_status(study_model):
        """Uses the Top Level Workflow to calculate the status of the study, and its
        workflow models."""
        master_specs = db.session.query(WorkflowSpecModel). \
            filter_by(is_master_spec=True).all()
        if len(master_specs) < 1:
            raise ApiError("missing_master_spec", "No specifications are currently marked as the master spec.")
        if len(master_specs) > 1:
            raise ApiError("multiple_master_specs",
                           "There is more than one master specification, and I don't know what to do.")

        return WorkflowProcessor.run_master_spec(master_specs[0], study_model)

    @staticmethod
    def _add_all_workflow_specs_to_study(study):
        """Creates a workflow model for every non-master spec the study does not yet reference.

        Returns a list of ApiError objects for specs that failed to initialize."""
        existing_models = session.query(WorkflowModel).filter(WorkflowModel.study_id == study.id).all()
        existing_specs = [m.workflow_spec_id for m in existing_models]
        new_specs = session.query(WorkflowSpecModel). \
            filter(WorkflowSpecModel.is_master_spec == False). \
            filter(WorkflowSpecModel.id.notin_(existing_specs)). \
            all()
        errors = []
        for workflow_spec in new_specs:
            try:
                StudyService._create_workflow_model(study, workflow_spec)
            except WorkflowException as we:
                errors.append(ApiError.from_task_spec("workflow_execution_exception", str(we), we.sender))
        return errors

    @staticmethod
    def _create_workflow_model(study, spec):
        """Persists a new, not-started workflow for the given study and spec."""
        workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
                                       study_id=study.id,
                                       workflow_spec_id=spec.id)
        session.add(workflow_model)
        session.commit()
        return workflow_model

View File

@ -1,8 +1,9 @@
import json
import random
import re
import string
import xml.etree.ElementTree as ElementTree
from SpiffWorkflow import Task as SpiffTask, Workflow
from SpiffWorkflow import Task as SpiffTask
from SpiffWorkflow.bpmn.BpmnScriptEngine import BpmnScriptEngine
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
@ -10,9 +11,11 @@ from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
from SpiffWorkflow.exceptions import WorkflowException
from SpiffWorkflow.operators import Operator
from SpiffWorkflow.specs import WorkflowSpec
from crc import session, db
from crc import session
from crc.api.common import ApiError
from crc.models.file import FileDataModel, FileModel, FileType
from crc.models.workflow import WorkflowStatus, WorkflowModel
@ -24,7 +27,7 @@ class CustomBpmnScriptEngine(BpmnScriptEngine):
Rather than execute arbitrary code, this assumes the script references a fully qualified python class
such as myapp.RandomFact. """
def execute(self, task:SpiffTask, script, **kwargs):
def execute(self, task: SpiffTask, script, **kwargs):
"""
Assume that the script read in from the BPMN file is a fully qualified python class. Instantiate
that class, pass in any data available to the current task so that it might act on it.
@ -45,14 +48,21 @@ class CustomBpmnScriptEngine(BpmnScriptEngine):
klass = getattr(mod, class_name)
study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
if not isinstance(klass(), Script):
raise ApiError("invalid_script",
"This is an internal error. The script '%s:%s' you called "
"does not properly implement the CRC Script class." %
(module_name, class_name))
klass().do_task(task, study_id, *commands[1:])
except ModuleNotFoundError as mnfe:
raise ApiError("invalid_script",
"Unable to locate Script: '%s:%s'" % (module_name, class_name), 400)
raise ApiError.from_task("invalid_script",
"This is an internal error. The script '%s:%s' you called " %
(module_name, class_name) +
"does not properly implement the CRC Script class.",
task=task)
if task.workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY]:
"""If this is running a validation, and not a normal process, then we want to
mimic running the script, but not make any external calls or database changes."""
klass().do_task_validate_only(task, study_id, *commands[1:])
else:
klass().do_task(task, study_id, *commands[1:])
except ModuleNotFoundError:
raise ApiError.from_task("invalid_script",
"Unable to locate Script: '%s:%s'" % (module_name, class_name),
task=task)
@staticmethod
def camel_to_snake(camel):
@ -71,12 +81,12 @@ class CustomBpmnScriptEngine(BpmnScriptEngine):
def _eval(self, task, expression, **kwargs):
locals().update(kwargs)
try :
try:
return eval(expression)
except NameError as ne:
raise ApiError('invalid_expression',
'The expression you provided does not exist:' + expression)
raise ApiError.from_task('invalid_expression',
'The expression you provided does not exist:' + expression,
task=task)
class MyCustomParser(BpmnDmnParser):
"""
@ -91,6 +101,7 @@ class WorkflowProcessor(object):
_serializer = BpmnSerializer()
WORKFLOW_ID_KEY = "workflow_id"
STUDY_ID_KEY = "study_id"
VALIDATION_PROCESS_KEY = "validate_only"
def __init__(self, workflow_model: WorkflowModel, soft_reset=False, hard_reset=False):
"""Create a Workflow Processor based on the serialized information available in the workflow model.
@ -99,31 +110,72 @@ class WorkflowProcessor(object):
completed task in the previous workflow.
If neither flag is set, it will use the same version of the specification that was used to originally
create the workflow model. """
self.workflow_model = workflow_model
orig_version = workflow_model.spec_version
if soft_reset:
spec = self.get_spec(workflow_model.workflow_spec_id)
workflow_model.spec_version = spec.description
else:
spec = self.get_spec(workflow_model.workflow_spec_id, workflow_model.spec_version)
self.workflow_spec_id = workflow_model.workflow_spec_id
try:
self.bpmn_workflow = self._serializer.deserialize_workflow(workflow_model.bpmn_workflow_json, workflow_spec=spec)
self.bpmn_workflow = self.__get_bpmn_workflow(workflow_model, spec)
self.bpmn_workflow.script_engine = self._script_engine
workflow_model.total_tasks = len(self.get_all_user_tasks())
workflow_model.completed_tasks = len(self.get_all_completed_tasks())
workflow_model.status = self.get_status()
session.add(workflow_model)
session.commit()
# Need to commit twice, first to get a unique id for the workflow model, and
# a second time to store the serialization so we can maintain this link within
# the spiff-workflow process.
self.bpmn_workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] = workflow_model.id
workflow_model.bpmn_workflow_json = WorkflowProcessor._serializer.serialize_workflow(self.bpmn_workflow)
session.add(workflow_model)
except KeyError as ke:
if soft_reset:
# Undo the soft-reset.
workflow_model.spec_version = orig_version
orig_version = workflow_model.spec_version
raise ApiError(code="unexpected_workflow_structure",
message="Failed to deserialize workflow '%s' version %s, due to a mis-placed or missing task '%s'" %
message="Failed to deserialize workflow"
" '%s' version %s, due to a mis-placed or missing task '%s'" %
(self.workflow_spec_id, workflow_model.spec_version, str(ke)) +
" This is very likely due to a soft reset where there was a structural change.")
self.bpmn_workflow.script_engine = self._script_engine
if hard_reset:
# Now that the spec is loaded, get the data and rebuild the bpmn with the new details
workflow_model.spec_version = self.hard_reset()
def __get_bpmn_workflow(self, workflow_model: WorkflowModel, spec: WorkflowSpec):
    """Deserialize the stored workflow, or start a fresh one if none exists yet.

    Side effect: stamps the model's spec_version from the spec description.
    """
    workflow_model.spec_version = spec.description  # Very naughty. But we keep the version in the spec desc.
    if workflow_model.bpmn_workflow_json:
        # Resume: rebuild the in-memory workflow from its serialized state.
        bpmn_workflow = self._serializer.deserialize_workflow(workflow_model.bpmn_workflow_json, workflow_spec=spec)
    else:
        # First run: create a new workflow, seed its data, and advance
        # through any fully automatic engine steps.
        bpmn_workflow = BpmnWorkflow(spec, script_engine=self._script_engine)
        bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = workflow_model.study_id
        bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = False
        bpmn_workflow.do_engine_steps()
    return bpmn_workflow
@staticmethod
def run_master_spec(spec_model, study):
    """Run the master BPMN spec against a study without persisting anything.

    Returns the data dictionary of the final task. Raises ApiError if the
    spec contains tasks that cannot complete automatically.
    """
    master_spec = WorkflowProcessor.get_spec(spec_model.id)
    workflow = BpmnWorkflow(master_spec, script_engine=WorkflowProcessor._script_engine)
    workflow.data[WorkflowProcessor.STUDY_ID_KEY] = study.id
    workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = False
    workflow.do_engine_steps()
    if not workflow.is_completed():
        raise ApiError("master_spec_not_automatic",
                       "The master spec should only contain fully automated tasks, it failed to complete.")
    return workflow.last_task.data
@staticmethod
def get_parser():
parser = MyCustomParser()
@ -152,7 +204,7 @@ class WorkflowProcessor(object):
major_version = file_data.version
else:
minor_version.append(file_data.version)
minor_version.insert(0, major_version) # Add major version to beginning.
minor_version.insert(0, major_version) # Add major version to beginning.
version = ".".join(str(x) for x in minor_version)
files = ".".join(str(x) for x in file_ids)
full_version = "v%s (%s)" % (version, files)
@ -168,8 +220,9 @@ class WorkflowProcessor(object):
.filter(FileDataModel.id.in_(file_ids)).all()
if len(files) != len(file_ids):
raise ApiError("invalid_version",
"The version '%s' of workflow specification '%s' is invalid. Unable to locate the correct files to recreate it." %
(version, workflow_spec_id))
"The version '%s' of workflow specification '%s' is invalid. " %
(version, workflow_spec_id) +
" Unable to locate the correct files to recreate it.")
return files
@staticmethod
@ -182,7 +235,6 @@ class WorkflowProcessor(object):
.order_by(FileModel.id)\
.all()
@staticmethod
def get_spec(workflow_spec_id, version=None):
"""Returns the requested version of the specification,
@ -211,10 +263,55 @@ class WorkflowProcessor(object):
except ValidationException as ve:
raise ApiError(code="workflow_validation_error",
message="Failed to parse Workflow Specification '%s' %s." % (workflow_spec_id, version) +
"Error is %s" % str(ve))
"Error is %s" % str(ve),
file_name=ve.filename,
task_id=ve.id,
tag=ve.tag)
spec.description = version
return spec
@classmethod
def test_spec(cls, spec_id):
    """Validate a spec by executing it end-to-end with random form data.

    Runs in validation mode so scripts skip external calls and DB writes.
    Raises ApiError if the workflow engine reports an execution problem.
    """
    spec = WorkflowProcessor.get_spec(spec_id)
    bpmn_workflow = BpmnWorkflow(spec, script_engine=cls._script_engine)
    bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = 1
    bpmn_workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] = spec_id
    bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = True

    while not bpmn_workflow.is_completed():
        try:
            bpmn_workflow.do_engine_steps()
            # Auto-fill and complete every form that is ready for user input.
            for ready_task in bpmn_workflow.get_tasks(SpiffTask.READY):
                WorkflowProcessor.populate_form_with_random_data(ready_task)
                ready_task.complete()
        except WorkflowException as we:
            raise ApiError.from_task_spec("workflow_execution_exception", str(we),
                                          we.sender)
@staticmethod
def populate_form_with_random_data(task):
"""populates a task with random data - useful for testing a spec."""
form_data = {}
for field in task.task_spec.form.fields:
if field.type == "enum":
form_data[field.id] = random.choice(field.options)
elif field.type == "long":
form_data[field.id] = random.randint(1, 1000)
else:
form_data[field.id] = WorkflowProcessor._random_string()
if task.data is None:
task.data = {}
task.data.update(form_data)
@staticmethod
def _random_string(string_length=10):
"""Generate a random string of fixed length """
letters = string.ascii_lowercase
return ''.join(random.choice(letters) for i in range(string_length))
@staticmethod
def status_of(bpmn_workflow):
if bpmn_workflow.is_completed():
@ -225,29 +322,6 @@ class WorkflowProcessor(object):
else:
return WorkflowStatus.waiting
@classmethod
def create(cls, study_id, workflow_spec_id):
    """Create, run initial engine steps for, and persist a new workflow.

    Returns a WorkflowProcessor wrapping the newly stored model.
    """
    spec = WorkflowProcessor.get_spec(workflow_spec_id)
    bpmn_workflow = BpmnWorkflow(spec, script_engine=cls._script_engine)
    bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = study_id
    bpmn_workflow.do_engine_steps()
    # spec.description carries the spec version string.
    workflow_model = WorkflowModel(status=WorkflowProcessor.status_of(bpmn_workflow),
                                   study_id=study_id,
                                   workflow_spec_id=workflow_spec_id,
                                   spec_version=spec.description)
    session.add(workflow_model)
    session.commit()
    # Need to commit twice, first to get a unique id for the workflow model, and
    # a second time to store the serialization so we can maintain this link within
    # the spiff-workflow process.
    bpmn_workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] = workflow_model.id
    workflow_model.bpmn_workflow_json = WorkflowProcessor._serializer.serialize_workflow(bpmn_workflow)
    session.add(workflow_model)
    session.commit()
    processor = cls(workflow_model)
    return processor
def hard_reset(self):
"""Recreate this workflow, but keep the data from the last completed task and add it back into the first task.
This may be useful when a workflow specification changes, and users need to review all the
@ -290,7 +364,6 @@ class WorkflowProcessor(object):
# If the whole blessed mess is done, return the end_event task in the tree
if self.bpmn_workflow.is_completed():
last_task = None
for task in SpiffTask.Iterator(self.bpmn_workflow.task_tree, SpiffTask.ANY_MASK):
if isinstance(task.task_spec, EndEvent):
return task
@ -313,6 +386,10 @@ class WorkflowProcessor(object):
def complete_task(self, task):
    """Mark the task complete and refresh the workflow model's progress counters.

    Stages (but does not commit) the updated model on the session.
    """
    self.bpmn_workflow.complete_task_from_id(task.id)
    model = self.workflow_model
    model.total_tasks = len(self.get_all_user_tasks())
    model.completed_tasks = len(self.get_all_completed_tasks())
    model.status = self.get_status()
    session.add(model)
def get_data(self):
    """Expose the underlying BPMN workflow's data dictionary."""
    return self.bpmn_workflow.data
@ -330,6 +407,11 @@ class WorkflowProcessor(object):
all_tasks = self.bpmn_workflow.get_tasks(SpiffTask.ANY_MASK)
return [t for t in all_tasks if not self.bpmn_workflow._is_engine_task(t.task_spec)]
def get_all_completed_tasks(self):
    """Return every non-engine task that has finished (completed or cancelled)."""
    workflow = self.bpmn_workflow
    candidates = workflow.get_tasks(SpiffTask.ANY_MASK)
    finished = []
    for t in candidates:
        if workflow._is_engine_task(t.task_spec):
            continue
        if t.state in (t.COMPLETED, t.CANCELLED):
            finished.append(t)
    return finished
@staticmethod
def get_process_id(et_root: ElementTree.Element):
process_elements = []
@ -353,3 +435,6 @@ class WorkflowProcessor(object):
raise ValidationException('No start event found in %s' % et_root.attrib['id'])
return process_elements[0].attrib['id']

View File

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_1wv9t3c" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_1wv9t3c" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<bpmn:process id="Process_19ej1y2" name="Data Securty Plan" isExecutable="true">
<bpmn:startEvent id="StartEvent_1co48s3">
<bpmn:outgoing>SequenceFlow_100w7co</bpmn:outgoing>
@ -634,7 +634,7 @@ Submit the step only when you are ready. After you "Submit" the step, the inform
<bpmn:scriptTask id="Task_0q2zf9x" name="Generate DSP">
<bpmn:incoming>SequenceFlow_07rwety</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0v51xcx</bpmn:outgoing>
<bpmn:script>scripts.CompleteTemplate NEW_DSP_template.docx</bpmn:script>
<bpmn:script>CompleteTemplate NEW_DSP_template.docx Study.DataSecurityPlan</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_1huif01" name="No" sourceRef="ExclusiveGateway_1ef1ype" targetRef="Activity_0kxjyy1">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">FormField_isCreateDSP == False</bpmn:conditionExpression>

View File

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<decision id="data_security_plan" name="Data Security Plan">
<extensionElements>
<biodi:bounds x="190" y="80" width="180" height="80" />
</extensionElements>
<decisionTable id="DecisionTable_1mjqwlv">
<input id="InputClause_18pwfqu" label="Required Doc Keys">
<inputExpression id="LiteralExpression_1y84stb" typeRef="string" expressionLanguage="feel">
<text>required_docs.keys()</text>
</inputExpression>
</input>
<output id="OutputClause_05y0j7c" label="data_security_plan" name="data_security_plan" typeRef="string" />
<rule id="DecisionRule_17xsr74">
<description></description>
<inputEntry id="UnaryTests_05ldcq4">
<text>contains(6)</text>
</inputEntry>
<outputEntry id="LiteralExpression_09oao3s">
<text>"required"</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<decision id="enter_core_info" name="Enter Core Info">
<extensionElements>
<biodi:bounds x="170" y="60" width="180" height="80" />
</extensionElements>
<decisionTable id="decisionTable_1">
<input id="InputClause_1ki80j6" label="required doc ids">
<inputExpression id="LiteralExpression_10mfcy7" typeRef="string" expressionLanguage="Python">
<text>required_docs.keys()</text>
</inputExpression>
</input>
<output id="output_1" label="enter_core_info" name="enter_core_info" typeRef="string" />
<rule id="DecisionRule_10oo3ms">
<description>Core information is always required.</description>
<inputEntry id="UnaryTests_1dtfw0r">
<text></text>
</inputEntry>
<outputEntry id="LiteralExpression_1d9565g">
<text>"required"</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<decision id="sponsor_funding_source" name="Sponsor Funding Source">
<extensionElements>
<biodi:bounds x="190" y="70" width="180" height="80" />
</extensionElements>
<decisionTable id="DecisionTable_00zdxg0">
<input id="InputClause_02n3ccs" label="Required Doc Ids">
<inputExpression id="LiteralExpression_1ju4o1o" typeRef="string" expressionLanguage="feel">
<text>required_docs.keys()</text>
</inputExpression>
</input>
<output id="OutputClause_1ybi1ud" label="sponsor_funding_source" name="eat_my_shorts" typeRef="string" />
<rule id="DecisionRule_1t97mw4">
<inputEntry id="UnaryTests_0ym4ln2">
<text>contains(12)</text>
</inputEntry>
<outputEntry id="LiteralExpression_1pweuqc">
<text>"required"</text>
</outputEntry>
</rule>
<rule id="DecisionRule_1q965wz">
<inputEntry id="UnaryTests_1mlhh3t">
<text>not contains(12)</text>
</inputEntry>
<outputEntry id="LiteralExpression_073vd6i">
<text>"disabled"</text>
</outputEntry>
</rule>
<rule id="DecisionRule_0zh6o60">
<inputEntry id="UnaryTests_059mk90">
<text></text>
</inputEntry>
<outputEntry id="LiteralExpression_18pfm2o">
<text>"hidden"</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@ -0,0 +1,150 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1kudwnk" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<bpmn:process id="Process_0jhpidf" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1ees8ka</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1ees8ka" sourceRef="StartEvent_1" targetRef="Task_Load_Requirements" />
<bpmn:endEvent id="Event_135x8jg">
<bpmn:incoming>Flow_0pwtiqm</bpmn:incoming>
</bpmn:endEvent>
<bpmn:scriptTask id="Task_Load_Requirements" name="Load Required Documents From PM">
<bpmn:incoming>SequenceFlow_1ees8ka</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_17ct47v</bpmn:outgoing>
<bpmn:script>RequiredDocs</bpmn:script>
</bpmn:scriptTask>
<bpmn:businessRuleTask id="Activity_1yqy50i" name="Enter Core Info&#10;" camunda:decisionRef="enter_core_info">
<bpmn:incoming>Flow_1m8285h</bpmn:incoming>
<bpmn:outgoing>Flow_1sggkit</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:sequenceFlow id="Flow_1sggkit" sourceRef="Activity_1yqy50i" targetRef="Gateway_12tpgcy" />
<bpmn:parallelGateway id="Gateway_12tpgcy">
<bpmn:incoming>Flow_1sggkit</bpmn:incoming>
<bpmn:incoming>Flow_1txrak2</bpmn:incoming>
<bpmn:incoming>Flow_0x9580l</bpmn:incoming>
<bpmn:outgoing>Flow_0pwtiqm</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:sequenceFlow id="Flow_0pwtiqm" sourceRef="Gateway_12tpgcy" targetRef="Event_135x8jg" />
<bpmn:parallelGateway id="Gateway_1nta7st">
<bpmn:incoming>SequenceFlow_17ct47v</bpmn:incoming>
<bpmn:outgoing>Flow_1m8285h</bpmn:outgoing>
<bpmn:outgoing>Flow_18pl92p</bpmn:outgoing>
<bpmn:outgoing>Flow_1nimppb</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:sequenceFlow id="Flow_1m8285h" sourceRef="Gateway_1nta7st" targetRef="Activity_1yqy50i" />
<bpmn:sequenceFlow id="SequenceFlow_17ct47v" sourceRef="Task_Load_Requirements" targetRef="Gateway_1nta7st" />
<bpmn:sequenceFlow id="Flow_18pl92p" sourceRef="Gateway_1nta7st" targetRef="Activity_16cm213" />
<bpmn:sequenceFlow id="Flow_1nimppb" sourceRef="Gateway_1nta7st" targetRef="Activity_1k5eeun" />
<bpmn:businessRuleTask id="Activity_1k5eeun" name="Data Security Plan" camunda:decisionRef="data_security_plan">
<bpmn:incoming>Flow_1nimppb</bpmn:incoming>
<bpmn:outgoing>Flow_1txrak2</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:sequenceFlow id="Flow_1txrak2" sourceRef="Activity_1k5eeun" targetRef="Gateway_12tpgcy" />
<bpmn:businessRuleTask id="Activity_16cm213" name="Sponsor Funding Source" camunda:decisionRef="sponsor_funding_source">
<bpmn:incoming>Flow_18pl92p</bpmn:incoming>
<bpmn:outgoing>Flow_0x9580l</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:sequenceFlow id="Flow_0x9580l" sourceRef="Activity_16cm213" targetRef="Gateway_12tpgcy" />
<bpmn:textAnnotation id="TextAnnotation_1pv8ygy">
<bpmn:text>Loads information from the Protocol Builder</bpmn:text>
</bpmn:textAnnotation>
<bpmn:association id="Association_0w69z3w" sourceRef="Task_Load_Requirements" targetRef="TextAnnotation_1pv8ygy" />
<bpmn:textAnnotation id="TextAnnotation_0ydnva4">
<bpmn:text>Include only automatic tasks, no user input is accepted for the Master workflow</bpmn:text>
</bpmn:textAnnotation>
<bpmn:association id="Association_0a41ixa" sourceRef="StartEvent_1" targetRef="TextAnnotation_0ydnva4" />
<bpmn:textAnnotation id="TextAnnotation_1f52jro">
      <bpmn:text>All workflows available in the system are considered "optional" by default.  Use decision tables here to alter that state if needed.  Alternate values include:  "hidden" (do not show them initially), "required" (must be completed), "disabled" (visible, but cannot be started yet)</bpmn:text>
</bpmn:textAnnotation>
<bpmn:association id="Association_1mzqzwj" sourceRef="Gateway_1nta7st" targetRef="TextAnnotation_1f52jro" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0jhpidf">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="192" y="421" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1ees8ka_di" bpmnElement="SequenceFlow_1ees8ka">
<di:waypoint x="228" y="439" />
<di:waypoint x="300" y="439" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Event_135x8jg_di" bpmnElement="Event_135x8jg">
<dc:Bounds x="862" y="421" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ScriptTask_0x4a3pe_di" bpmnElement="Task_Load_Requirements">
<dc:Bounds x="300" y="399" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1yqy50i_di" bpmnElement="Activity_1yqy50i">
<dc:Bounds x="640" y="290" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="TextAnnotation_1pv8ygy_di" bpmnElement="TextAnnotation_1pv8ygy">
<dc:Bounds x="300" y="247" width="100" height="68" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Association_0w69z3w_di" bpmnElement="Association_0w69z3w">
<di:waypoint x="350" y="399" />
<di:waypoint x="350" y="315" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="TextAnnotation_0ydnva4_di" bpmnElement="TextAnnotation_0ydnva4">
<dc:Bounds x="155" y="220" width="110" height="82" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Association_0a41ixa_di" bpmnElement="Association_0a41ixa">
<di:waypoint x="210" y="421" />
<di:waypoint x="210" y="302" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="TextAnnotation_1f52jro_di" bpmnElement="TextAnnotation_1f52jro">
<dc:Bounds x="461" y="80" width="243" height="124" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1sggkit_di" bpmnElement="Flow_1sggkit">
<di:waypoint x="740" y="330" />
<di:waypoint x="800" y="330" />
<di:waypoint x="800" y="414" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Gateway_1kk6x70_di" bpmnElement="Gateway_12tpgcy">
<dc:Bounds x="775" y="414" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0pwtiqm_di" bpmnElement="Flow_0pwtiqm">
<di:waypoint x="825" y="439" />
<di:waypoint x="862" y="439" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Gateway_1m22g4p_di" bpmnElement="Gateway_1nta7st">
<dc:Bounds x="558" y="414" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Association_1mzqzwj_di" bpmnElement="Association_1mzqzwj">
<di:waypoint x="583" y="414" />
<di:waypoint x="583" y="204" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1m8285h_di" bpmnElement="Flow_1m8285h">
<di:waypoint x="583" y="414" />
<di:waypoint x="583" y="330" />
<di:waypoint x="640" y="330" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_17ct47v_di" bpmnElement="SequenceFlow_17ct47v">
<di:waypoint x="400" y="439" />
<di:waypoint x="558" y="439" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_18pl92p_di" bpmnElement="Flow_18pl92p">
<di:waypoint x="583" y="464" />
<di:waypoint x="583" y="550" />
<di:waypoint x="640" y="550" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1nimppb_di" bpmnElement="Flow_1nimppb">
<di:waypoint x="608" y="439" />
<di:waypoint x="640" y="439" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_1k5eeun_di" bpmnElement="Activity_1k5eeun">
<dc:Bounds x="640" y="399" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1txrak2_di" bpmnElement="Flow_1txrak2">
<di:waypoint x="740" y="439" />
<di:waypoint x="775" y="439" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_16cm213_di" bpmnElement="Activity_16cm213">
<dc:Bounds x="640" y="510" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0x9580l_di" bpmnElement="Flow_0x9580l">
<di:waypoint x="740" y="550" />
<di:waypoint x="800" y="550" />
<di:waypoint x="800" y="464" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -7,7 +7,7 @@ from crc import app, db, session
from crc.models.file import FileType, FileModel, FileDataModel, CONTENT_TYPES
from crc.models.study import StudyModel
from crc.models.user import UserModel
from crc.models.workflow import WorkflowSpecModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecCategoryModel
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor
from crc.models.protocol_builder import ProtocolBuilderStatus
@ -25,81 +25,96 @@ class ExampleDataLoader:
self.load_reference_documents()
users = [
UserModel(
uid='dhf8r',
email_address='dhf8r@virginia.EDU',
display_name='Daniel Harold Funk',
affiliation='staff@virginia.edu;member@virginia.edu',
eppn='dhf8r@virginia.edu',
first_name='Daniel',
last_name='Funk',
title='SOFTWARE ENGINEER V'
)
]
db.session.add_all(users)
db.session.commit()
studies = [
StudyModel(
categories = [
WorkflowSpecCategoryModel(
id=0,
name='irb_review',
display_name='Pass IRB Review',
display_order=0
),
WorkflowSpecCategoryModel(
id=1,
title='The impact of fried pickles on beer consumption in bipedal software developers.',
last_updated=datetime.datetime.now(),
protocol_builder_status=ProtocolBuilderStatus.IN_PROCESS,
primary_investigator_id='dhf8r',
sponsor='Sartography Pharmaceuticals',
ind_number='1234',
user_uid='dhf8r'
name='core_info',
display_name='Enter Core Info',
display_order=1
),
StudyModel(
WorkflowSpecCategoryModel(
id=2,
title='Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
last_updated=datetime.datetime.now(),
protocol_builder_status=ProtocolBuilderStatus.IN_PROCESS,
primary_investigator_id='dhf8r',
sponsor='Makerspace & Co.',
ind_number='5678',
user_uid='dhf8r'
name='approvals',
display_name='Obtain Approvals',
display_order=2
),
WorkflowSpecCategoryModel(
id=3,
name='data_security_plan',
display_name='Enter Data Security Plan',
display_order=3
),
WorkflowSpecCategoryModel(
id=4,
name='finance',
display_name='Enter Finance Data',
display_order=4
),
WorkflowSpecCategoryModel(
id=5,
name='notifications',
display_name='View and Send Notifications',
display_order=5
),
]
db.session.add_all(studies)
db.session.add_all(categories)
db.session.commit()
self.create_spec(id="core_info",
name="core_info",
display_name="core_info",
description="core_info")
self.create_spec(id="data_security_plan",
name="data_security_plan",
display_name="data_security_plan",
description="data_security_plan")
self.create_spec(id="finance",
name="finance",
display_name="finance",
description="finance")
self.create_spec(id="ids",
name="ids",
display_name="ids",
description="ids")
self.create_spec(id="top_level_workflow",
name="top_level_workflow",
display_name="Top Level Workflow",
description="Determines the status of other workflows in a study",
category_id=0,
master_spec=True
)
self.create_spec(id="irb_api_details",
name="irb_api_details",
display_name="irb_api_details",
description="irb_api_details")
display_name="IRB API Details",
description="TBD",
category_id=0)
self.create_spec(id="irb_api_personnel",
name="irb_api_personnel",
display_name="irb_api_personnel",
description="irb_api_personnel")
display_name="IRB API Personnel",
description="TBD",
category_id=0)
# self.create_spec(id="irb_api_required_docs",
# name="irb_api_required_docs",
# display_name="irb_api_required_docs",
# description="irb_api_required_docs")
# display_name="IRB API Required Documents",
# description="TBD",
# category_id=0)
self.create_spec(id="core_info",
name="core_info",
display_name="Core Data",
description="TBD",
category_id=1)
self.create_spec(id="ids",
name="ids",
display_name="Investigative Drug Services (IDS)",
description="TBD",
category_id=2)
self.create_spec(id="data_security_plan",
name="data_security_plan",
display_name="Data Security Plan",
description="TBD",
category_id=3)
self.create_spec(id="sponsor_funding_source",
name="sponsor_funding_source",
display_name="sponsor_funding_source",
description="sponsor_funding_source")
display_name="Sponsor Funding Source",
description="TBD",
category_id=4)
self.create_spec(id="finance",
name="finance",
display_name="Finance",
description="TBD",
category_id=4)
def create_spec(self, id, name, display_name="", description="", filepath=None):
def create_spec(self, id, name, display_name="", description="", filepath=None, master_spec=False, category_id=None):
"""Assumes that a directory exists in static/bpmn with the same name as the given id.
further assumes that the [id].bpmn is the primary file for the workflow.
returns an array of data models to be added to the database."""
@ -109,7 +124,8 @@ class ExampleDataLoader:
name=name,
display_name=display_name,
description=description,
is_status=id == 'status')
is_master_spec=master_spec,
category_id=category_id)
db.session.add(spec)
db.session.commit()
if not filepath:

View File

@ -0,0 +1,25 @@
"""empty message
Revision ID: 7c0de7621a1f
Revises: 87af86338630
Create Date: 2020-03-30 08:42:21.483856
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7c0de7621a1f'
down_revision = '87af86338630'
branch_labels = None
depends_on = None
def upgrade():
op.execute("COMMIT")
op.execute("ALTER TYPE WorkflowStatus ADD VALUE 'not_started'")
def downgrade():
pass

View File

@ -0,0 +1,38 @@
"""empty message
Revision ID: 87af86338630
Revises: afecb64d2e66
Create Date: 2020-03-30 08:34:41.323688
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '87af86338630'
down_revision = 'afecb64d2e66'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('workflow', sa.Column('completed_tasks', sa.Integer(), nullable=True))
op.add_column('workflow', sa.Column('total_tasks', sa.Integer(), nullable=True))
op.add_column('workflow_spec', sa.Column('category_id', sa.Integer(), nullable=True))
op.drop_constraint('workflow_spec_workflow_spec_category_id_fkey', 'workflow_spec', type_='foreignkey')
op.create_foreign_key(None, 'workflow_spec', 'workflow_spec_category', ['category_id'], ['id'])
op.drop_column('workflow_spec', 'workflow_spec_category_id')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('workflow_spec', sa.Column('workflow_spec_category_id', sa.INTEGER(), autoincrement=False, nullable=True))
op.drop_constraint(None, 'workflow_spec', type_='foreignkey')
op.create_foreign_key('workflow_spec_workflow_spec_category_id_fkey', 'workflow_spec', 'workflow_spec_category', ['workflow_spec_category_id'], ['id'])
op.drop_column('workflow_spec', 'category_id')
op.drop_column('workflow', 'total_tasks')
op.drop_column('workflow', 'completed_tasks')
# ### end Alembic commands ###

View File

@ -0,0 +1,36 @@
"""empty message
Revision ID: afecb64d2e66
Revises: 5e0709e172fa
Create Date: 2020-03-27 12:18:54.378797
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'afecb64d2e66'
down_revision = '5e0709e172fa'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint('study_status_spec_id_fkey', 'study', type_='foreignkey')
op.drop_column('study', 'status_spec_version')
op.drop_column('study', 'status_spec_id')
op.add_column('workflow_spec', sa.Column('is_master_spec', sa.Boolean(), nullable=True))
op.drop_column('workflow_spec', 'is_status')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('workflow_spec', sa.Column('is_status', sa.BOOLEAN(), autoincrement=False, nullable=True))
op.drop_column('workflow_spec', 'is_master_spec')
op.add_column('study', sa.Column('status_spec_id', sa.VARCHAR(), autoincrement=False, nullable=True))
op.add_column('study', sa.Column('status_spec_version', sa.VARCHAR(), autoincrement=False, nullable=True))
op.create_foreign_key('study_status_spec_id_fkey', 'study', 'workflow_spec', ['status_spec_id'], ['id'])
# ### end Alembic commands ###

View File

@ -4,9 +4,12 @@ import json
import os
import unittest
import urllib.parse
import datetime
from crc.models.protocol_builder import ProtocolBuilderStatus
from crc.models.study import StudyModel
from crc.services.file_service import FileService
from crc.services.study_service import StudyService
from crc.services.workflow_processor import WorkflowProcessor
os.environ["TESTING"] = "true"
@ -32,6 +35,43 @@ class BaseTest(unittest.TestCase):
auths = {}
test_uid = "dhf8r"
users = [
{
'uid':'dhf8r',
'email_address':'dhf8r@virginia.EDU',
'display_name':'Daniel Harold Funk',
'affiliation':'staff@virginia.edu;member@virginia.edu',
'eppn':'dhf8r@virginia.edu',
'first_name':'Daniel',
'last_name':'Funk',
'title':'SOFTWARE ENGINEER V'
}
]
studies = [
{
'id':0,
'title':'The impact of fried pickles on beer consumption in bipedal software developers.',
'last_updated':datetime.datetime.now(),
'protocol_builder_status':ProtocolBuilderStatus.IN_PROCESS,
'primary_investigator_id':'dhf8r',
'sponsor':'Sartography Pharmaceuticals',
'ind_number':'1234',
'user_uid':'dhf8r'
},
{
'id':1,
'title':'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
'last_updated':datetime.datetime.now(),
'protocol_builder_status':ProtocolBuilderStatus.IN_PROCESS,
'primary_investigator_id':'dhf8r',
'sponsor':'Makerspace & Co.',
'ind_number':'5678',
'user_uid':'dhf8r'
}
]
@classmethod
def setUpClass(cls):
app.config.from_object('config.testing')
@ -77,6 +117,16 @@ class BaseTest(unittest.TestCase):
ExampleDataLoader.clean_db()
ExampleDataLoader().load_all()
for user_json in self.users:
db.session.add(UserModel(**user_json))
db.session.commit()
for study_json in self.studies:
study_model = StudyModel(**study_json)
db.session.add(study_model)
StudyService._add_all_workflow_specs_to_study(study_model)
db.session.commit()
db.session.flush()
specs = session.query(WorkflowSpecModel).all()
self.assertIsNotNone(specs)
@ -85,18 +135,23 @@ class BaseTest(unittest.TestCase):
self.assertIsNotNone(files)
self.assertGreater(len(files), 0)
for spec in specs:
files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all()
self.assertIsNotNone(files)
self.assertGreater(len(files), 0)
for file in files:
file_data = session.query(FileDataModel).filter_by(file_model_id=file.id).all()
self.assertIsNotNone(file_data)
self.assertGreater(len(file_data), 0)
@staticmethod
def load_test_spec(dir_name):
def load_test_spec(dir_name, master_spec=False, category_id=None):
"""Loads a spec into the database based on a directory in /tests/data"""
if session.query(WorkflowSpecModel).filter_by(id=dir_name).count() > 0:
return
filepath = os.path.join(app.root_path, '..', 'tests', 'data', dir_name, "*")
return ExampleDataLoader().create_spec(id=dir_name, name=dir_name, filepath=filepath)
return ExampleDataLoader().create_spec(id=dir_name, name=dir_name, filepath=filepath, master_spec=master_spec,
category_id=category_id)
@staticmethod
def protocol_builder_response(file_name):
@ -147,18 +202,14 @@ class BaseTest(unittest.TestCase):
content_type = CONTENT_TYPES[file_extension[1:]]
file_service.update_file(file_model, data, content_type)
def create_workflow(self, workflow_name):
study = session.query(StudyModel).first()
spec = self.load_test_spec(workflow_name)
processor = WorkflowProcessor.create(study.id, spec.id)
rv = self.app.post(
'/v1.0/study/%i/workflows' % study.id,
headers=self.logged_in_headers(),
content_type="application/json",
data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
self.assert_success(rv)
workflow = session.query(WorkflowModel).filter_by(study_id=study.id, workflow_spec_id=workflow_name).first()
return workflow
def create_workflow(self, workflow_name, study=None, category_id=None):
if study == None:
study = session.query(StudyModel).first()
spec = self.load_test_spec(workflow_name, category_id=category_id)
workflow_model = StudyService._create_workflow_model(study, spec)
#processor = WorkflowProcessor(workflow_model)
#workflow = session.query(WorkflowModel).filter_by(study_id=study.id, workflow_spec_id=workflow_name).first()
return workflow_model
def create_reference_document(self):
file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'reference', 'irb_documents.xlsx')

View File

@ -15,7 +15,7 @@
<bpmn:outgoing>SequenceFlow_1lmkn99</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="SequenceFlow_1lmkn99" sourceRef="Task_Has_Bananas" targetRef="ExclusiveGateway_003amsm" />
<bpmn:exclusiveGateway id="ExclusiveGateway_003amsm">
<bpmn:exclusiveGateway id="ExclusiveGateway_003amsm" name="Has Bananas Gateway">
<bpmn:incoming>SequenceFlow_1lmkn99</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_Yes_Bananas</bpmn:outgoing>
<bpmn:outgoing>SequenceFlow_No_Bananas</bpmn:outgoing>
@ -24,7 +24,7 @@
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">has_bananas == True</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="SequenceFlow_No_Bananas" name="no" sourceRef="ExclusiveGateway_003amsm" targetRef="Task_Why_No_Bananas">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">lower_case_true==true</bpmn:conditionExpression>
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">this_value_does_not_exist==true</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:userTask id="Task_Num_Bananas" name="Number of Bananas" camunda:formKey="banana_count">
<bpmn:extensionElements>
@ -71,6 +71,9 @@
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ExclusiveGateway_14wqqsi_di" bpmnElement="ExclusiveGateway_003amsm" isMarkerVisible="true">
<dc:Bounds x="425" y="92" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="417" y="62" width="67" height="27" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0f3vx1l_di" bpmnElement="SequenceFlow_Yes_Bananas">
<di:waypoint x="475" y="117" />

View File

@ -0,0 +1,39 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1j7idla" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<bpmn:process id="Process_18biih5" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1pnq3kg</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1pnq3kg" sourceRef="StartEvent_1" targetRef="Invalid_Script_Task" />
<bpmn:endEvent id="EndEvent_063bpg6">
<bpmn:incoming>SequenceFlow_12pf6um</bpmn:incoming>
</bpmn:endEvent>
<bpmn:scriptTask id="Invalid_Script_Task" name="An Invalid Script Reference">
<bpmn:incoming>SequenceFlow_1pnq3kg</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_12pf6um</bpmn:outgoing>
<bpmn:script>NoSuchScript withArg1</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_12pf6um" sourceRef="Invalid_Script_Task" targetRef="EndEvent_063bpg6" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_18biih5">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1pnq3kg_di" bpmnElement="SequenceFlow_1pnq3kg">
<di:waypoint x="215" y="117" />
<di:waypoint x="290" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="EndEvent_063bpg6_di" bpmnElement="EndEvent_063bpg6">
<dc:Bounds x="442" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ScriptTask_1imeym0_di" bpmnElement="Invalid_Script_Task">
<dc:Bounds x="290" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_12pf6um_di" bpmnElement="SequenceFlow_12pf6um">
<di:waypoint x="390" y="117" />
<di:waypoint x="442" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,55 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1kudwnk" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:process id="Process_0jhpidf" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1ees8ka</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1ees8ka" sourceRef="StartEvent_1" targetRef="Activity_00rh8pw" />
<bpmn:businessRuleTask id="Activity_1yqy50i" name="Set status" camunda:decisionRef="set_status">
<bpmn:incoming>Flow_1g180uv</bpmn:incoming>
<bpmn:outgoing>Flow_02wonvv</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:endEvent id="Event_135x8jg">
<bpmn:incoming>Flow_02wonvv</bpmn:incoming>
</bpmn:endEvent>
<bpmn:userTask id="Activity_00rh8pw" name="Enable specs" camunda:formKey="form_key">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="some_input" label="some_input" type="boolean" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_1ees8ka</bpmn:incoming>
<bpmn:outgoing>Flow_1g180uv</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_1g180uv" sourceRef="Activity_00rh8pw" targetRef="Activity_1yqy50i" />
<bpmn:sequenceFlow id="Flow_02wonvv" sourceRef="Activity_1yqy50i" targetRef="Event_135x8jg" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0jhpidf">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="152" y="101" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1ees8ka_di" bpmnElement="SequenceFlow_1ees8ka">
<di:waypoint x="188" y="119" />
<di:waypoint x="230" y="119" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_1yqy50i_di" bpmnElement="Activity_1yqy50i">
<dc:Bounds x="460" y="79" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_135x8jg_di" bpmnElement="Event_135x8jg">
<dc:Bounds x="682" y="101" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0vfs7g0_di" bpmnElement="Activity_00rh8pw">
<dc:Bounds x="230" y="79" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1g180uv_di" bpmnElement="Flow_1g180uv">
<di:waypoint x="330" y="119" />
<di:waypoint x="460" y="119" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_02wonvv_di" bpmnElement="Flow_02wonvv">
<di:waypoint x="560" y="119" />
<di:waypoint x="682" y="119" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,72 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<decision id="set_status" name="Set Workflow Spec Status">
<extensionElements>
<biodi:bounds x="190" y="70" width="180" height="80" />
</extensionElements>
<decisionTable id="DecisionTable_00zdxg0">
<input id="InputClause_02n3ccs" label="some_input">
<inputExpression id="LiteralExpression_1ju4o1o" typeRef="boolean" expressionLanguage="feel" />
</input>
<output id="OutputClause_1ybi1ud" label="core_info" name="Core Info" typeRef="boolean" />
<output id="OutputClause_1flmldc" label="data_security_plan" name="Data Security Plan" typeRef="boolean" />
<output id="OutputClause_0d82n6f" label="finance" name="Finance" typeRef="boolean" />
<output id="OutputClause_1odxpcj" label="ids" name="IDS" typeRef="boolean" />
<output id="OutputClause_0s9ssvl" label="irb_api_details" name="IRB API Details" typeRef="boolean" />
<output id="OutputClause_19akic4" label="irb_api_personnel" name="IRB API Personnel" typeRef="boolean" />
<output id="OutputClause_0snkhd8" label="sponsor_funding_source" name="Sponsor Funding Source" typeRef="boolean" />
<rule id="DecisionRule_1t97mw4">
<inputEntry id="UnaryTests_0ym4ln2">
<text>false</text>
</inputEntry>
<outputEntry id="LiteralExpression_1pweuqc">
<text>false</text>
</outputEntry>
<outputEntry id="LiteralExpression_0bybp69">
<text>false</text>
</outputEntry>
<outputEntry id="LiteralExpression_0t9f8go">
<text>false</text>
</outputEntry>
<outputEntry id="LiteralExpression_02avtl3">
<text>false</text>
</outputEntry>
<outputEntry id="LiteralExpression_10vzn1s">
<text>false</text>
</outputEntry>
<outputEntry id="LiteralExpression_0tzzekf">
<text>false</text>
</outputEntry>
<outputEntry id="LiteralExpression_0q6qmjy">
<text>false</text>
</outputEntry>
</rule>
<rule id="DecisionRule_0j9wah0">
<inputEntry id="UnaryTests_0dzq6w4">
<text>true</text>
</inputEntry>
<outputEntry id="LiteralExpression_119lstj">
<text>true</text>
</outputEntry>
<outputEntry id="LiteralExpression_030mbml">
<text>true</text>
</outputEntry>
<outputEntry id="LiteralExpression_1gsvr6s">
<text>true</text>
</outputEntry>
<outputEntry id="LiteralExpression_06q5wbg">
<text>true</text>
</outputEntry>
<outputEntry id="LiteralExpression_083h7la">
<text>true</text>
</outputEntry>
<outputEntry id="LiteralExpression_02loqh4">
<text>true</text>
</outputEntry>
<outputEntry id="LiteralExpression_00odcj9">
<text>true</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<decision id="data_security_plan" name="Data Security Plan">
<extensionElements>
<biodi:bounds x="190" y="80" width="180" height="80" />
</extensionElements>
<decisionTable id="DecisionTable_1mjqwlv">
<input id="InputClause_18pwfqu" label="Required Doc Keys">
<inputExpression id="LiteralExpression_1y84stb" typeRef="string" expressionLanguage="feel">
<text>required_docs.keys()</text>
</inputExpression>
</input>
<output id="OutputClause_05y0j7c" label="data_security_plan" name="data_security_plan" typeRef="string" />
<rule id="DecisionRule_17xsr74">
<description></description>
<inputEntry id="UnaryTests_05ldcq4">
<text>contains(6)</text>
</inputEntry>
<outputEntry id="LiteralExpression_09oao3s">
<text>"required"</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<decision id="enter_core_info" name="Enter Core Info">
<extensionElements>
<biodi:bounds x="170" y="60" width="180" height="80" />
</extensionElements>
<decisionTable id="decisionTable_1">
<input id="InputClause_1ki80j6" label="required doc ids">
<inputExpression id="LiteralExpression_10mfcy7" typeRef="string" expressionLanguage="Python">
<text>required_docs.keys()</text>
</inputExpression>
</input>
<output id="output_1" label="enter_core_info" name="enter_core_info" typeRef="string" />
<rule id="DecisionRule_10oo3ms">
<description>Core information is always required.</description>
<inputEntry id="UnaryTests_1dtfw0r">
<text></text>
</inputEntry>
<outputEntry id="LiteralExpression_1d9565g">
<text>"required"</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<decision id="sponsor_funding_source" name="Sponsor Funding Source">
<extensionElements>
<biodi:bounds x="190" y="70" width="180" height="80" />
</extensionElements>
<decisionTable id="DecisionTable_00zdxg0">
<input id="InputClause_02n3ccs" label="Required Doc Ids">
<inputExpression id="LiteralExpression_1ju4o1o" typeRef="string" expressionLanguage="feel">
<text>required_docs.keys()</text>
</inputExpression>
</input>
<output id="OutputClause_1ybi1ud" label="sponsor_funding_source" name="eat_my_shorts" typeRef="string" />
<rule id="DecisionRule_1t97mw4">
<inputEntry id="UnaryTests_0ym4ln2">
<text>contains(12)</text>
</inputEntry>
<outputEntry id="LiteralExpression_1pweuqc">
<text>"required"</text>
</outputEntry>
</rule>
<rule id="DecisionRule_1q965wz">
<inputEntry id="UnaryTests_1mlhh3t">
<text>not contains(12)</text>
</inputEntry>
<outputEntry id="LiteralExpression_073vd6i">
<text>"disabled"</text>
</outputEntry>
</rule>
<rule id="DecisionRule_0zh6o60">
<inputEntry id="UnaryTests_059mk90">
<text></text>
</inputEntry>
<outputEntry id="LiteralExpression_18pfm2o">
<text>"hidden"</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@ -0,0 +1,150 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1kudwnk" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<bpmn:process id="Process_0jhpidf" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1ees8ka</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1ees8ka" sourceRef="StartEvent_1" targetRef="Task_Load_Requirements" />
<bpmn:endEvent id="Event_135x8jg">
<bpmn:incoming>Flow_0pwtiqm</bpmn:incoming>
</bpmn:endEvent>
<bpmn:scriptTask id="Task_Load_Requirements" name="Load Required Documents From PM">
<bpmn:incoming>SequenceFlow_1ees8ka</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_17ct47v</bpmn:outgoing>
<bpmn:script>RequiredDocs</bpmn:script>
</bpmn:scriptTask>
<bpmn:businessRuleTask id="Activity_1yqy50i" name="Enter Core Info&#10;" camunda:decisionRef="enter_core_info">
<bpmn:incoming>Flow_1m8285h</bpmn:incoming>
<bpmn:outgoing>Flow_1sggkit</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:sequenceFlow id="Flow_1sggkit" sourceRef="Activity_1yqy50i" targetRef="Gateway_12tpgcy" />
<bpmn:parallelGateway id="Gateway_12tpgcy">
<bpmn:incoming>Flow_1sggkit</bpmn:incoming>
<bpmn:incoming>Flow_1txrak2</bpmn:incoming>
<bpmn:incoming>Flow_0x9580l</bpmn:incoming>
<bpmn:outgoing>Flow_0pwtiqm</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:sequenceFlow id="Flow_0pwtiqm" sourceRef="Gateway_12tpgcy" targetRef="Event_135x8jg" />
<bpmn:parallelGateway id="Gateway_1nta7st">
<bpmn:incoming>SequenceFlow_17ct47v</bpmn:incoming>
<bpmn:outgoing>Flow_1m8285h</bpmn:outgoing>
<bpmn:outgoing>Flow_18pl92p</bpmn:outgoing>
<bpmn:outgoing>Flow_1nimppb</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:sequenceFlow id="Flow_1m8285h" sourceRef="Gateway_1nta7st" targetRef="Activity_1yqy50i" />
<bpmn:sequenceFlow id="SequenceFlow_17ct47v" sourceRef="Task_Load_Requirements" targetRef="Gateway_1nta7st" />
<bpmn:sequenceFlow id="Flow_18pl92p" sourceRef="Gateway_1nta7st" targetRef="Activity_16cm213" />
<bpmn:sequenceFlow id="Flow_1nimppb" sourceRef="Gateway_1nta7st" targetRef="Activity_1k5eeun" />
<bpmn:businessRuleTask id="Activity_1k5eeun" name="Data Security Plan" camunda:decisionRef="data_security_plan">
<bpmn:incoming>Flow_1nimppb</bpmn:incoming>
<bpmn:outgoing>Flow_1txrak2</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:sequenceFlow id="Flow_1txrak2" sourceRef="Activity_1k5eeun" targetRef="Gateway_12tpgcy" />
<bpmn:businessRuleTask id="Activity_16cm213" name="Sponsor Funding Source" camunda:decisionRef="sponsor_funding_source">
<bpmn:incoming>Flow_18pl92p</bpmn:incoming>
<bpmn:outgoing>Flow_0x9580l</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:sequenceFlow id="Flow_0x9580l" sourceRef="Activity_16cm213" targetRef="Gateway_12tpgcy" />
<bpmn:textAnnotation id="TextAnnotation_1pv8ygy">
<bpmn:text>Loads information from the Protocol Builder</bpmn:text>
</bpmn:textAnnotation>
<bpmn:association id="Association_0w69z3w" sourceRef="Task_Load_Requirements" targetRef="TextAnnotation_1pv8ygy" />
<bpmn:textAnnotation id="TextAnnotation_0ydnva4">
<bpmn:text>Include only automatic tasks, no user input is accepted for the Master workflow</bpmn:text>
</bpmn:textAnnotation>
<bpmn:association id="Association_0a41ixa" sourceRef="StartEvent_1" targetRef="TextAnnotation_0ydnva4" />
<bpmn:textAnnotation id="TextAnnotation_1f52jro">
      <bpmn:text>All workflows available in the system are considered "optional" by default.  Use decision tables here to alter that state if needed.  Alternate values include:  "hidden" (do not show them initially), "required" (must be completed), "disabled" (visible, but can not be started yet)</bpmn:text>
</bpmn:textAnnotation>
<bpmn:association id="Association_1mzqzwj" sourceRef="Gateway_1nta7st" targetRef="TextAnnotation_1f52jro" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0jhpidf">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="192" y="421" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1ees8ka_di" bpmnElement="SequenceFlow_1ees8ka">
<di:waypoint x="228" y="439" />
<di:waypoint x="300" y="439" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Event_135x8jg_di" bpmnElement="Event_135x8jg">
<dc:Bounds x="862" y="421" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ScriptTask_0x4a3pe_di" bpmnElement="Task_Load_Requirements">
<dc:Bounds x="300" y="399" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1yqy50i_di" bpmnElement="Activity_1yqy50i">
<dc:Bounds x="640" y="290" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="TextAnnotation_1pv8ygy_di" bpmnElement="TextAnnotation_1pv8ygy">
<dc:Bounds x="300" y="247" width="100" height="68" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Association_0w69z3w_di" bpmnElement="Association_0w69z3w">
<di:waypoint x="350" y="399" />
<di:waypoint x="350" y="315" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="TextAnnotation_0ydnva4_di" bpmnElement="TextAnnotation_0ydnva4">
<dc:Bounds x="155" y="220" width="110" height="82" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Association_0a41ixa_di" bpmnElement="Association_0a41ixa">
<di:waypoint x="210" y="421" />
<di:waypoint x="210" y="302" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="TextAnnotation_1f52jro_di" bpmnElement="TextAnnotation_1f52jro">
<dc:Bounds x="461" y="80" width="243" height="124" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1sggkit_di" bpmnElement="Flow_1sggkit">
<di:waypoint x="740" y="330" />
<di:waypoint x="800" y="330" />
<di:waypoint x="800" y="414" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Gateway_1kk6x70_di" bpmnElement="Gateway_12tpgcy">
<dc:Bounds x="775" y="414" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0pwtiqm_di" bpmnElement="Flow_0pwtiqm">
<di:waypoint x="825" y="439" />
<di:waypoint x="862" y="439" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Gateway_1m22g4p_di" bpmnElement="Gateway_1nta7st">
<dc:Bounds x="558" y="414" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Association_1mzqzwj_di" bpmnElement="Association_1mzqzwj">
<di:waypoint x="583" y="414" />
<di:waypoint x="583" y="204" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1m8285h_di" bpmnElement="Flow_1m8285h">
<di:waypoint x="583" y="414" />
<di:waypoint x="583" y="330" />
<di:waypoint x="640" y="330" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_17ct47v_di" bpmnElement="SequenceFlow_17ct47v">
<di:waypoint x="400" y="439" />
<di:waypoint x="558" y="439" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_18pl92p_di" bpmnElement="Flow_18pl92p">
<di:waypoint x="583" y="464" />
<di:waypoint x="583" y="550" />
<di:waypoint x="640" y="550" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1nimppb_di" bpmnElement="Flow_1nimppb">
<di:waypoint x="608" y="439" />
<di:waypoint x="640" y="439" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_1k5eeun_di" bpmnElement="Activity_1k5eeun">
<dc:Bounds x="640" y="399" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1txrak2_di" bpmnElement="Flow_1txrak2">
<di:waypoint x="740" y="439" />
<di:waypoint x="775" y="439" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_16cm213_di" bpmnElement="Activity_16cm213">
<dc:Bounds x="640" y="510" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0x9580l_di" bpmnElement="Flow_0x9580l">
<di:waypoint x="740" y="550" />
<di:waypoint x="800" y="550" />
<di:waypoint x="800" y="464" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -26,7 +26,7 @@ class TestFilesApi(BaseTest):
def test_list_multiple_files_for_workflow_spec(self):
self.load_example_data()
spec = session.query(WorkflowSpecModel).first()
spec = self.load_test_spec("random_fact")
svgFile = FileModel(name="test.svg", type=FileType.svg,
primary=False, workflow_spec_id=spec.id)
session.add(svgFile)
@ -162,6 +162,11 @@ class TestFilesApi(BaseTest):
self.assertEqual("application/octet-stream", file.content_type)
self.assertEqual(spec.id, file.workflow_spec_id)
# Assure it is updated in the database and properly persisted.
file_model = session.query(FileModel).filter(FileModel.id == file.id).first()
self.assertEqual(2, file_model.latest_version)
rv = self.app.get('/v1.0/file/%i/data' % file.id, headers=self.logged_in_headers())
self.assert_success(rv)
data = rv.get_data()
@ -206,3 +211,4 @@ class TestFilesApi(BaseTest):
rv = self.app.delete('/v1.0/file/%i' % file.id, headers=self.logged_in_headers())
rv = self.app.get('/v1.0/file/%i' % file.id, headers=self.logged_in_headers())
self.assertEqual(404, rv.status_code)

View File

@ -1,16 +1,11 @@
import io
import os
import json
from unittest.mock import patch
from crc import app, db
from crc.models.file import CONTENT_TYPES, FileDataModel, FileModel
from crc.models.study import StudyModel
from crc.models.workflow import WorkflowSpecModel, WorkflowModel
from crc import db
from crc.models.file import FileDataModel, FileModel
from crc.models.protocol_builder import ProtocolBuilderRequiredDocumentSchema
from crc.scripts.required_docs import RequiredDocs
from crc.scripts.study_info import StudyInfo
from crc.services.file_service import FileService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.workflow_processor import WorkflowProcessor
from tests.base_test import BaseTest
@ -48,43 +43,38 @@ class TestRequiredDocsScript(BaseTest):
dict = FileService.get_file_reference_dictionary()
self.assertIsNotNone(dict)
@patch('crc.services.protocol_builder.requests.get')
def test_get_required_docs(self, mock_get):
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('required_docs.json')
def get_required_docs(self):
string_data = self.protocol_builder_response('required_docs.json')
return ProtocolBuilderRequiredDocumentSchema(many=True).loads(string_data)
def test_get_required_docs(self):
pb_docs = self.get_required_docs()
self.create_reference_document()
script = RequiredDocs()
required_docs = script.get_required_docs(12)
required_docs = script.get_required_docs(12, pb_docs) # Mocked out, any random study id works.
self.assertIsNotNone(required_docs)
self.assertTrue(len(required_docs) == 5)
self.assertEquals(6, required_docs[0]['id'])
self.assertEquals("Cancer Center's PRC Approval Form", required_docs[0]['name'])
self.assertEquals("UVA Compliance", required_docs[0]['category1'])
self.assertEquals("PRC Approval", required_docs[0]['category2'])
self.assertEquals("CRC", required_docs[0]['Who Uploads?'])
self.assertEquals(0, required_docs[0]['count'])
@patch('crc.services.protocol_builder.requests.get')
def test_get_required_docs_has_correct_count_when_a_file_exists(self, mock_get):
self.assertTrue(6 in required_docs.keys())
self.assertEquals("Cancer Center's PRC Approval Form", required_docs[6]['name'])
self.assertEquals("UVA Compliance", required_docs[6]['category1'])
self.assertEquals("PRC Approval", required_docs[6]['category2'])
self.assertEquals("CRC", required_docs[6]['Who Uploads?'])
self.assertEquals(0, required_docs[6]['count'])
def test_get_required_docs_has_correct_count_when_a_file_exists(self):
self.load_example_data()
# Mock out the protocol builder
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('required_docs.json')
# Make sure the xslt refernce document is in place.
pb_docs = self.get_required_docs()
# Make sure the xslt reference document is in place.
self.create_reference_document()
script = RequiredDocs()
# Add a document to the study with the correct code.
workflow = self.create_workflow('docx')
irb_code = "UVACompliance.PRCApproval" # The first file referenced in pb required docs.
FileService.add_task_file(study_id = workflow.study_id, workflow_id = workflow.id,
task_id ="fakingthisout",
irb_code = "UVACompliance.PRCApproval" # The first file referenced in pb required docs.
FileService.add_task_file(study_id=workflow.study_id, workflow_id=workflow.id,
task_id="fakingthisout",
name="anything.png", content_type="text",
binary_data=b'1234', irb_doc_code=irb_code)
required_docs = script.get_required_docs(workflow.study_id)
required_docs = script.get_required_docs(workflow.study_id, pb_docs)
self.assertIsNotNone(required_docs)
self.assertEquals(1, required_docs[0]['count'])
self.assertEquals(1, required_docs[6]['count'])

View File

@ -1,51 +1,98 @@
import json
from datetime import datetime, timezone
from unittest.mock import patch, Mock
from unittest.mock import patch
from crc import session
from crc.models.api_models import WorkflowApiSchema, WorkflowApi
from crc.models.study import StudyModel, StudyModelSchema
from crc.models.api_models import WorkflowApiSchema
from crc.models.protocol_builder import ProtocolBuilderStatus, ProtocolBuilderStudyDetailsSchema, \
ProtocolBuilderStudySchema, ProtocolBuilderInvestigatorSchema, ProtocolBuilderRequiredDocumentSchema
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowStatus
ProtocolBuilderStudySchema
from crc.models.study import StudyModel, StudySchema
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowStatus, \
WorkflowSpecCategoryModel
from tests.base_test import BaseTest
class TestStudyApi(BaseTest):
TEST_STUDY = {
"id": 12345,
"title": "Phase III Trial of Genuine People Personalities (GPP) Autonomous Intelligent Emotional Agents "
"for Interstellar Spacecraft",
"last_updated": datetime.now(tz=timezone.utc),
"protocol_builder_status": ProtocolBuilderStatus.IN_PROCESS,
"primary_investigator_id": "tricia.marie.mcmillan@heartofgold.edu",
"sponsor": "Sirius Cybernetics Corporation",
"ind_number": "567890",
"user_uid": "dhf8r",
}
def add_test_study(self):
rv = self.app.post('/v1.0/study',
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(StudySchema().dump(self.TEST_STUDY)))
self.assert_success(rv)
return json.loads(rv.get_data(as_text=True))
def test_study_basics(self):
self.load_example_data()
study = session.query(StudyModel).first()
self.assertIsNotNone(study)
def test_get_study(self):
"""Generic test, but pretty detailed, in that the study should return a categorized list of workflows
        This starts without loading the example data, to show that all the bases are covered from ground 0."""
new_study = self.add_test_study()
new_study = session.query(StudyModel).filter_by(id=new_study["id"]).first()
# Add a category
new_category = WorkflowSpecCategoryModel(id=21, name="test_cat", display_name="Test Category", display_order=0)
session.add(new_category)
session.commit()
# Create a workflow specification
self.create_workflow("random_fact", study=new_study, category_id=new_category.id)
# Assure there is a master specification, and it has the lookup files it needs.
spec = self.load_test_spec("top_level_workflow", master_spec=True)
self.create_reference_document()
api_response = self.app.get('/v1.0/study/%i' % new_study.id,
headers=self.logged_in_headers(), content_type="application/json")
self.assert_success(api_response)
study = StudySchema().loads(api_response.get_data(as_text=True))
self.assertEqual(study.title, self.TEST_STUDY['title'])
self.assertEqual(study.primary_investigator_id, self.TEST_STUDY['primary_investigator_id'])
self.assertEqual(study.sponsor, self.TEST_STUDY['sponsor'])
self.assertEqual(study.ind_number, self.TEST_STUDY['ind_number'])
self.assertEqual(study.user_uid, self.TEST_STUDY['user_uid'])
# Categories are read only, so switching to sub-scripting here.
category = [c for c in study.categories if c['name'] == "test_cat"][0]
self.assertEqual("test_cat", category['name'])
self.assertEqual("Test Category", category['display_name'])
self.assertEqual(1, len(category["workflows"]))
workflow = category["workflows"][0]
self.assertEqual("random_fact", workflow["name"])
self.assertEqual("optional", workflow["state"])
self.assertEqual("not_started", workflow["status"])
self.assertEqual(0, workflow["total_tasks"])
self.assertEqual(0, workflow["completed_tasks"])
def test_add_study(self):
self.load_example_data()
study = {
"id": 12345,
"title": "Phase III Trial of Genuine People Personalities (GPP) Autonomous Intelligent Emotional Agents "
"for Interstellar Spacecraft",
"last_updated": datetime.now(tz=timezone.utc),
"protocol_builder_status": ProtocolBuilderStatus.IN_PROCESS,
"primary_investigator_id": "tricia.marie.mcmillan@heartofgold.edu",
"sponsor": "Sirius Cybernetics Corporation",
"ind_number": "567890",
"user_uid": "dhf8r",
}
rv = self.app.post('/v1.0/study',
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(StudyModelSchema().dump(study)))
self.assert_success(rv)
study = self.add_test_study()
db_study = session.query(StudyModel).filter_by(id=12345).first()
self.assertIsNotNone(db_study)
self.assertEqual(study["title"], db_study.title)
self.assertAlmostEqual(study["last_updated"], db_study.last_updated)
self.assertEqual(study["protocol_builder_status"], db_study.protocol_builder_status)
self.assertEqual(study["primary_investigator_id"], db_study.primary_investigator_id)
self.assertEqual(study["sponsor"], db_study.sponsor)
self.assertEqual(study["ind_number"], db_study.ind_number)
self.assertEqual(study["user_uid"], db_study.user_uid)
workflow_spec_count =session.query(WorkflowSpecModel).filter(WorkflowSpecModel.is_master_spec == False).count()
workflow_count = session.query(WorkflowModel).filter(WorkflowModel.study_id == 12345).count()
error_count = len(study["errors"])
self.assertEquals(workflow_spec_count, workflow_count + error_count)
def test_update_study(self):
self.load_example_data()
study: StudyModel = session.query(StudyModel).first()
@ -54,41 +101,42 @@ class TestStudyApi(BaseTest):
rv = self.app.put('/v1.0/study/%i' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(StudyModelSchema().dump(study)))
data=json.dumps(StudySchema().dump(study)))
self.assert_success(rv)
db_study = session.query(StudyModel).filter_by(id=study.id).first()
self.assertIsNotNone(db_study)
self.assertEqual(study.title, db_study.title)
self.assertEqual(study.protocol_builder_status, db_study.protocol_builder_status)
json_data = json.loads(rv.get_data(as_text=True))
self.assertEqual(study.title, json_data['title'])
self.assertEqual(study.protocol_builder_status.name, json_data['protocol_builder_status'])
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_study_details') # mock_details
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_studies') # mock_studies
def test_get_all_studies(self, mock_studies, mock_details):
self.load_example_data()
db_studies_before = session.query(StudyModel).all()
num_db_studies_before = len(db_studies_before)
s = StudyModel(
id=54321, # This matches one of the ids from the study_details_json data.
title='The impact of pandemics on dog owner sanity after 12 days',
user_uid='dhf8r',
)
session.add(s)
session.commit()
# Mock Protocol Builder response
num_db_studies_before = session.query(StudyModel).count()
# Mock Protocol Builder responses
studies_response = self.protocol_builder_response('user_studies.json')
mock_studies.return_value = ProtocolBuilderStudySchema(many=True).loads(studies_response)
details_response = self.protocol_builder_response('study_details.json')
mock_details.return_value = ProtocolBuilderStudyDetailsSchema().loads(details_response)
self.load_example_data()
api_response = self.app.get('/v1.0/study',
follow_redirects=True,
headers=self.logged_in_headers(),
content_type="application/json")
# Make the api call to get all studies
api_response = self.app.get('/v1.0/study', headers=self.logged_in_headers(), content_type="application/json")
self.assert_success(api_response)
json_data = json.loads(api_response.get_data(as_text=True))
api_studies = StudyModelSchema(many=True).load(json_data, session=session)
num_inactive = 0
num_active = 0
for study in api_studies:
if study.inactive:
for study in json_data:
if study['inactive']:
num_inactive += 1
else:
num_active += 1
@ -98,10 +146,14 @@ class TestStudyApi(BaseTest):
self.assertGreater(num_db_studies_after, num_db_studies_before)
self.assertGreater(num_inactive, 0)
self.assertGreater(num_active, 0)
self.assertEqual(len(api_studies), num_db_studies_after)
self.assertEqual(len(json_data), num_db_studies_after)
self.assertEqual(num_active + num_inactive, num_db_studies_after)
def test_study_api_get_single_study(self):
# Assure that the existing study is properly updated.
test_study = session.query(StudyModel).filter_by(id=54321).first()
self.assertFalse(test_study.inactive)
def test_get_single_study(self):
self.load_example_data()
study = session.query(StudyModel).first()
rv = self.app.get('/v1.0/study/%i' % study.id,
@ -110,36 +162,12 @@ class TestStudyApi(BaseTest):
content_type="application/json")
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
study2 = StudyModelSchema().load(json_data, session=session)
self.assertEqual(study, study2)
self.assertEqual(study.id, study2.id)
self.assertEqual(study.title, study2.title)
self.assertEqual(study.last_updated, study2.last_updated)
self.assertEqual(study.protocol_builder_status, study2.protocol_builder_status)
self.assertEqual(study.primary_investigator_id, study2.primary_investigator_id)
self.assertEqual(study.sponsor, study2.sponsor)
self.assertEqual(study.ind_number, study2.ind_number)
def test_add_workflow_to_study(self):
self.load_example_data()
study = session.query(StudyModel).first()
self.assertEqual(0, session.query(WorkflowModel).count())
spec = session.query(WorkflowSpecModel).first()
rv = self.app.post('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
self.assert_success(rv)
self.assertEqual(1, session.query(WorkflowModel).count())
workflow_model = session.query(WorkflowModel).first()
self.assertEqual(study.id, workflow_model.study_id)
self.assertEqual(WorkflowStatus.user_input_required, workflow_model.status)
self.assertIsNotNone(workflow_model.bpmn_workflow_json)
self.assertEqual(spec.id, workflow_model.workflow_spec_id)
json_data = json.loads(rv.get_data(as_text=True))
workflow2 = WorkflowApiSchema().load(json_data)
self.assertEqual(workflow_model.id, workflow2.id)
self.assertEqual(study.id, json_data['id'])
self.assertEqual(study.title, json_data['title'])
self.assertEqual(study.protocol_builder_status.name, json_data['protocol_builder_status'])
self.assertEqual(study.primary_investigator_id, json_data['primary_investigator_id'])
self.assertEqual(study.sponsor, json_data['sponsor'])
self.assertEqual(study.ind_number, json_data['ind_number'])
def test_delete_study(self):
self.load_example_data()
@ -147,158 +175,73 @@ class TestStudyApi(BaseTest):
rv = self.app.delete('/v1.0/study/%i' % study.id, headers=self.logged_in_headers())
self.assert_success(rv)
def test_delete_study_with_workflow(self):
self.load_example_data()
study = session.query(StudyModel).first()
spec = session.query(WorkflowSpecModel).first()
rv = self.app.post('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
rv = self.app.delete('/v1.0/study/%i' % study.id, headers=self.logged_in_headers())
self.assert_failure(rv, error_code="study_integrity_error")
def test_delete_workflow(self):
self.load_example_data()
study = session.query(StudyModel).first()
spec = session.query(WorkflowSpecModel).first()
rv = self.app.post('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
self.assertEqual(1, session.query(WorkflowModel).count())
json_data = json.loads(rv.get_data(as_text=True))
workflow = WorkflowApiSchema().load(json_data)
rv = self.app.delete('/v1.0/workflow/%i' % workflow.id, headers=self.logged_in_headers())
self.assert_success(rv)
self.assertEqual(0, session.query(WorkflowModel).count())
def test_get_study_workflows(self):
self.load_example_data()
# Should have no workflows to start
study = session.query(StudyModel).first()
response_before = self.app.get('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers())
self.assert_success(response_before)
json_data_before = json.loads(response_before.get_data(as_text=True))
workflows_before = WorkflowApiSchema(many=True).load(json_data_before)
self.assertEqual(0, len(workflows_before))
# Add a workflow
spec = session.query(WorkflowSpecModel).first()
add_response = self.app.post('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
self.assert_success(add_response)
# Should have one workflow now
response_after = self.app.get('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers())
self.assert_success(response_after)
json_data_after = json.loads(response_after.get_data(as_text=True))
workflows_after = WorkflowApiSchema(many=True).load(json_data_after)
self.assertEqual(1, len(workflows_after))
"""
Workflow Specs that have been made available (or not) to a particular study via the status.bpmn should be flagged
as available (or not) when the list of a study's workflows is retrieved.
"""
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_studies')
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_investigators')
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_required_docs')
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_study_details')
def test_workflow_spec_status(self,
mock_details,
mock_required_docs,
mock_investigators,
mock_studies):
# Mock Protocol Builder response
studies_response = self.protocol_builder_response('user_studies.json')
mock_studies.return_value = ProtocolBuilderStudySchema(many=True).loads(studies_response)
investigators_response = self.protocol_builder_response('investigators.json')
mock_investigators.return_value = ProtocolBuilderInvestigatorSchema(many=True).loads(investigators_response)
required_docs_response = self.protocol_builder_response('required_docs.json')
mock_required_docs.return_value = ProtocolBuilderRequiredDocumentSchema(many=True).loads(required_docs_response)
details_response = self.protocol_builder_response('study_details.json')
mock_details.return_value = ProtocolBuilderStudyDetailsSchema().loads(details_response)
self.load_example_data()
study = session.query(StudyModel).first()
study_id = study.id
# Add status workflow
self.load_test_spec('status')
# Add status workflow to the study
status_spec = session.query(WorkflowSpecModel).filter_by(is_status=True).first()
add_status_response = self.app.post('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(WorkflowSpecModelSchema().dump(status_spec)))
self.assert_success(add_status_response)
json_data_status = json.loads(add_status_response.get_data(as_text=True))
status_workflow: WorkflowApi = WorkflowApiSchema().load(json_data_status)
self.assertIsNotNone(status_workflow)
self.assertIsNotNone(status_workflow.workflow_spec_id)
self.assertIsNotNone(status_workflow.spec_version)
self.assertIsNotNone(status_workflow.next_task)
self.assertIsNotNone(status_workflow.next_task['id'])
status_task_id = status_workflow.next_task['id']
# Verify that the study status spec is populated
updated_study: StudyModel = session.query(StudyModel).filter_by(id=study_id).first()
self.assertIsNotNone(updated_study)
self.assertIsNotNone(updated_study.status_spec_id)
self.assertIsNotNone(updated_study.status_spec_version)
self.assertEqual(updated_study.status_spec_id, status_workflow.workflow_spec_id)
self.assertEqual(updated_study.status_spec_version, status_workflow.spec_version)
# Add all available non-status workflows to the study
specs = session.query(WorkflowSpecModel).filter_by(is_status=False).all()
for spec in specs:
add_response = self.app.post('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
self.assert_success(add_response)
for is_active in [False, True]:
# Set all workflow specs to inactive|active
update_status_response = self.app.put('/v1.0/workflow/%i/task/%s/data' % (status_workflow.id, status_task_id),
headers=self.logged_in_headers(),
content_type="application/json",
data=json.dumps({'some_input': is_active}))
self.assert_success(update_status_response)
json_workflow_api = json.loads(update_status_response.get_data(as_text=True))
updated_workflow_api: WorkflowApi = WorkflowApiSchema().load(json_workflow_api)
self.assertIsNotNone(updated_workflow_api)
self.assertEqual(updated_workflow_api.status, WorkflowStatus.complete)
self.assertIsNotNone(updated_workflow_api.last_task)
self.assertIsNotNone(updated_workflow_api.last_task['data'])
self.assertIsNotNone(updated_workflow_api.last_task['data']['some_input'])
self.assertEqual(updated_workflow_api.last_task['data']['some_input'], is_active)
# List workflows for study
response_after = self.app.get('/v1.0/study/%i/workflows' % study.id,
content_type="application/json",
headers=self.logged_in_headers())
self.assert_success(response_after)
json_data_after = json.loads(response_after.get_data(as_text=True))
workflows_after = WorkflowApiSchema(many=True).load(json_data_after)
self.assertEqual(len(specs), len(workflows_after))
# All workflows should be inactive|active
for workflow in workflows_after:
self.assertEqual(workflow.is_active, is_active)
# """
# Workflow Specs that have been made available (or not) to a particular study via the status.bpmn should be flagged
# as available (or not) when the list of a study's workflows is retrieved.
# """
# @patch('crc.services.protocol_builder.ProtocolBuilderService.get_studies')
# @patch('crc.services.protocol_builder.ProtocolBuilderService.get_investigators')
# @patch('crc.services.protocol_builder.ProtocolBuilderService.get_required_docs')
# @patch('crc.services.protocol_builder.ProtocolBuilderService.get_study_details')
# def test_workflow_spec_status(self,
# mock_details,
# mock_required_docs,
# mock_investigators,
# mock_studies):
#
# # Mock Protocol Builder response
# studies_response = self.protocol_builder_response('user_studies.json')
# mock_studies.return_value = ProtocolBuilderStudySchema(many=True).loads(studies_response)
#
# investigators_response = self.protocol_builder_response('investigators.json')
# mock_investigators.return_value = ProtocolBuilderInvestigatorSchema(many=True).loads(investigators_response)
#
# required_docs_response = self.protocol_builder_response('required_docs.json')
# mock_required_docs.return_value = ProtocolBuilderRequiredDocumentSchema(many=True).loads(required_docs_response)
#
# details_response = self.protocol_builder_response('study_details.json')
# mock_details.return_value = ProtocolBuilderStudyDetailsSchema().loads(details_response)
#
# self.load_example_data()
# study = session.query(StudyModel).first()
# study_id = study.id
#
# # Add status workflow
# self.load_test_spec('top_level_workflow')
#
# # Assure the top_level_workflow is added to the study
# top_level_spec = session.query(WorkflowSpecModel).filter_by(is_master_spec=True).first()
# self.assertIsNotNone(top_level_spec)
#
#
#
# for is_active in [False, True]:
# # Set all workflow specs to inactive|active
# update_status_response = self.app.put('/v1.0/workflow/%i/task/%s/data' % (status_workflow.id, status_task_id),
# headers=self.logged_in_headers(),
# content_type="application/json",
# data=json.dumps({'some_input': is_active}))
# self.assert_success(update_status_response)
# json_workflow_api = json.loads(update_status_response.get_data(as_text=True))
# updated_workflow_api: WorkflowApi = WorkflowApiSchema().load(json_workflow_api)
# self.assertIsNotNone(updated_workflow_api)
# self.assertEqual(updated_workflow_api.status, WorkflowStatus.complete)
# self.assertIsNotNone(updated_workflow_api.last_task)
# self.assertIsNotNone(updated_workflow_api.last_task['data'])
# self.assertIsNotNone(updated_workflow_api.last_task['data']['some_input'])
# self.assertEqual(updated_workflow_api.last_task['data']['some_input'], is_active)
#
# # List workflows for study
# response_after = self.app.get('/v1.0/study/%i/workflows' % study.id,
# content_type="application/json",
# headers=self.logged_in_headers())
# self.assert_success(response_after)
#
# json_data_after = json.loads(response_after.get_data(as_text=True))
# workflows_after = WorkflowApiSchema(many=True).load(json_data_after)
# self.assertEqual(len(specs), len(workflows_after))
#
# # All workflows should be inactive|active
# for workflow in workflows_after:
# self.assertEqual(workflow.is_active, is_active)

View File

@ -0,0 +1,76 @@
import json
from datetime import datetime, timezone
from unittest.mock import patch
from crc import session, db
from crc.models.api_models import WorkflowApiSchema
from crc.models.protocol_builder import ProtocolBuilderStatus, ProtocolBuilderStudyDetailsSchema, \
ProtocolBuilderStudySchema
from crc.models.study import StudyModel, StudySchema
from crc.models.user import UserModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowStatus, \
WorkflowSpecCategoryModel
from crc.services.study_service import StudyService
from crc.services.workflow_processor import WorkflowProcessor
from example_data import ExampleDataLoader
from tests.base_test import BaseTest
class TestStudyService(BaseTest):
    """Largely tested via the test_study_api, and time is tight, but adding new tests here."""

    def test_total_tasks_updated(self):
        """Assure that a user's progress is available when getting a list of studies for that user.

        Builds the minimal fixture set by hand (user, study, category, one
        workflow), then checks that total/completed task counts track the
        workflow as it is initialized and as a task is completed.
        """
        # Assure some basic models are in place, This is a damn mess. Our database models need an overhaul to make
        # this easier - better relationship modeling is now critical.
        self.load_test_spec("top_level_workflow", master_spec=True)
        user = UserModel(uid="dhf8r", email_address="whatever@stuff.com", display_name="Stayathome Smellalots")
        db.session.add(user)
        db.session.commit()

        study = StudyModel(title="My title", protocol_builder_status=ProtocolBuilderStatus.IN_PROCESS, user_uid=user.uid)
        cat = WorkflowSpecCategoryModel(name="cat", display_name="cat", display_order=0)
        db.session.add_all([study, cat])
        db.session.commit()

        self.load_test_spec("random_fact", category_id=cat.id)
        workflow = WorkflowModel(workflow_spec_id="random_fact", study_id=study.id, status=WorkflowStatus.not_started)
        db.session.add(workflow)
        db.session.commit()
        # Assure there is a master specification, one standard spec, and lookup tables.
        ExampleDataLoader().load_reference_documents()

        # The load example data script should set us up a user and at least one study, one category, and one workflow.
        # assertEqual (not assertTrue on a comparison) so a failure reports the actual count.
        studies = StudyService.get_studies_for_user(user)
        self.assertEqual(1, len(studies))
        self.assertEqual(1, len(studies[0].categories))
        self.assertEqual(1, len(studies[0].categories[0].workflows))
        workflow = next(iter(studies[0].categories[0].workflows))  # Workflows is a set.

        # workflow should not be started, and it should have 0 completed tasks, and 0 total tasks.
        self.assertEqual(WorkflowStatus.not_started, workflow.status)
        self.assertIsNone(workflow.spec_version)
        self.assertEqual(0, workflow.total_tasks)
        self.assertEqual(0, workflow.completed_tasks)

        # Initialize the Workflow with the workflow processor.
        workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.id == workflow.id).first()
        processor = WorkflowProcessor(workflow_model)

        # Assure the workflow is now started, and knows the total and completed tasks.
        studies = StudyService.get_studies_for_user(user)
        workflow = next(iter(studies[0].categories[0].workflows))  # Workflows is a set.
        # self.assertEqual(WorkflowStatus.user_input_required, workflow.status)
        self.assertGreater(workflow.total_tasks, 0)
        self.assertEqual(0, workflow.completed_tasks)
        self.assertIsNotNone(workflow.spec_version)

        # Complete a task
        task = processor.next_task()
        processor.complete_task(task)

        # Assure the workflow has moved on to the next task.
        studies = StudyService.get_studies_for_user(user)
        workflow = next(iter(studies[0].categories[0].workflows))  # Workflows is a set.
        self.assertEqual(1, workflow.completed_tasks)

View File

@ -1,3 +1,4 @@
import logging
import os
import string
import random
@ -11,30 +12,25 @@ from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
from crc.models.study import StudyModel
from crc.models.workflow import WorkflowSpecModel, WorkflowStatus, WorkflowModel
from crc.services.file_service import FileService
from crc.services.study_service import StudyService
from tests.base_test import BaseTest
from crc.services.workflow_processor import WorkflowProcessor
class TestWorkflowProcessor(BaseTest):
def _randomString(self, stringLength=10):
"""Generate a random string of fixed length """
letters = string.ascii_lowercase
return ''.join(random.choice(letters) for i in range(stringLength))
def _populate_form_with_random_data(self, task):
form_data = {}
for field in task.task_spec.form.fields:
form_data[field.id] = self._randomString()
if task.data is None:
task.data = {}
task.data.update(form_data)
WorkflowProcessor.populate_form_with_random_data(task)
def get_processor(self, study_model, spec_model):
    """Create a WorkflowModel for the study/spec pair and wrap it in a WorkflowProcessor."""
    workflow_model = StudyService._create_workflow_model(study_model, spec_model)
    return WorkflowProcessor(workflow_model)
def test_create_and_complete_workflow(self):
self.load_example_data()
workflow_spec_model = self.load_test_spec("random_fact")
study = session.query(StudyModel).first()
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertEqual(study.id, processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY])
self.assertIsNotNone(processor)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
@ -60,7 +56,7 @@ class TestWorkflowProcessor(BaseTest):
workflow_spec_model = self.load_test_spec("decision_table")
files = session.query(FileModel).filter_by(workflow_spec_id='decision_table').all()
self.assertEqual(2, len(files))
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(1, len(next_user_tasks))
@ -84,7 +80,7 @@ class TestWorkflowProcessor(BaseTest):
self.load_example_data()
workflow_spec_model = self.load_test_spec("parallel_tasks")
study = session.query(StudyModel).first()
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
# Complete the first steps of the 4 parallel tasks
@ -125,7 +121,7 @@ class TestWorkflowProcessor(BaseTest):
self.load_example_data()
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("parallel_tasks")
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(4, len(next_user_tasks))
@ -146,7 +142,7 @@ class TestWorkflowProcessor(BaseTest):
self.load_example_data()
workflow_spec_model = self.load_test_spec("random_fact")
study = session.query(StudyModel).first()
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
task = processor.next_task()
task.data = {"type": "buzzword"}
@ -164,7 +160,7 @@ class TestWorkflowProcessor(BaseTest):
workflow_spec_model = self.load_test_spec("invalid_spec")
study = session.query(StudyModel).first()
with self.assertRaises(ApiError) as context:
WorkflowProcessor.create(study.id, workflow_spec_model.id)
self.get_processor(study, workflow_spec_model)
self.assertEqual("workflow_validation_error", context.exception.code)
self.assertTrue("bpmn:startEvent" in context.exception.message)
@ -176,9 +172,8 @@ class TestWorkflowProcessor(BaseTest):
self.load_example_data()
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("two_forms")
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.study_id == study.id).first()
self.assertEqual(workflow_model.workflow_spec_id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertEqual(processor.workflow_model.workflow_spec_id, workflow_spec_model.id)
task = processor.next_task()
task.data = {"color": "blue"}
processor.complete_task(task)
@ -189,18 +184,15 @@ class TestWorkflowProcessor(BaseTest):
# Attemping a soft update on a structural change should raise a sensible error.
with self.assertRaises(ApiError) as context:
processor3 = WorkflowProcessor(workflow_model, soft_reset=True)
processor3 = WorkflowProcessor(processor.workflow_model, soft_reset=True)
self.assertEqual("unexpected_workflow_structure", context.exception.code)
def test_workflow_with_bad_expression_raises_sensible_error(self):
self.load_example_data()
workflow_spec_model = self.load_test_spec("invalid_expression")
study = session.query(StudyModel).first()
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
next_user_tasks = processor.next_user_tasks()
self.assertEqual(1, len(next_user_tasks))
@ -217,7 +209,7 @@ class TestWorkflowProcessor(BaseTest):
files = session.query(FileModel).filter_by(workflow_spec_id='docx').all()
self.assertEqual(2, len(files))
workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id="docx").first()
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(1, len(next_user_tasks))
@ -242,7 +234,7 @@ class TestWorkflowProcessor(BaseTest):
self.load_example_data()
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("study_details")
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
task = processor.bpmn_workflow.last_task
self.assertIsNotNone(task.data)
@ -256,12 +248,12 @@ class TestWorkflowProcessor(BaseTest):
self.load_example_data()
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("decision_table")
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertTrue(processor.get_spec_version().startswith('v1.1'))
file_service = FileService()
file_service.add_workflow_spec_file(workflow_spec_model, "new_file.txt", "txt", b'blahblah')
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertTrue(processor.get_spec_version().startswith('v1.1.1'))
file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'docx', 'docx.bpmn')
@ -270,16 +262,15 @@ class TestWorkflowProcessor(BaseTest):
file_model = db.session.query(FileModel).filter(FileModel.name == "decision_table.bpmn").first()
file_service.update_file(file_model, data, "txt")
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertTrue(processor.get_spec_version().startswith('v2.1.1'))
def test_restart_workflow(self):
self.load_example_data()
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("two_forms")
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.study_id == study.id).first()
self.assertEqual(workflow_model.workflow_spec_id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertEqual(processor.workflow_model.workflow_spec_id, workflow_spec_model.id)
task = processor.next_task()
task.data = {"key": "Value"}
processor.complete_task(task)
@ -297,9 +288,8 @@ class TestWorkflowProcessor(BaseTest):
# Start the two_forms workflow, and enter some data in the first form.
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("two_forms")
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.study_id == study.id).first()
self.assertEqual(workflow_model.workflow_spec_id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertEqual(processor.workflow_model.workflow_spec_id, workflow_spec_model.id)
task = processor.next_task()
task.data = {"color": "blue"}
processor.complete_task(task)
@ -309,14 +299,14 @@ class TestWorkflowProcessor(BaseTest):
self.replace_file("two_forms.bpmn", file_path)
# Setting up another processor should not error out, but doesn't pick up the update.
workflow_model.bpmn_workflow_json = processor.serialize()
processor2 = WorkflowProcessor(workflow_model)
processor.workflow_model.bpmn_workflow_json = processor.serialize()
processor2 = WorkflowProcessor(processor.workflow_model)
self.assertEqual("Step 1", processor2.bpmn_workflow.last_task.task_spec.description)
self.assertNotEqual("# This is some documentation I wanted to add.",
processor2.bpmn_workflow.last_task.task_spec.documentation)
# You can do a soft update and get the right response.
processor3 = WorkflowProcessor(workflow_model, soft_reset=True)
processor3 = WorkflowProcessor(processor.workflow_model, soft_reset=True)
self.assertEqual("Step 1", processor3.bpmn_workflow.last_task.task_spec.description)
self.assertEqual("# This is some documentation I wanted to add.",
processor3.bpmn_workflow.last_task.task_spec.documentation)
@ -329,9 +319,8 @@ class TestWorkflowProcessor(BaseTest):
# Start the two_forms workflow, and enter some data in the first form.
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("two_forms")
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.study_id == study.id).first()
self.assertEqual(workflow_model.workflow_spec_id, workflow_spec_model.id)
processor = self.get_processor(study, workflow_spec_model)
self.assertEqual(processor.workflow_model.workflow_spec_id, workflow_spec_model.id)
task = processor.next_task()
task.data = {"color": "blue"}
processor.complete_task(task)
@ -343,12 +332,12 @@ class TestWorkflowProcessor(BaseTest):
self.replace_file("two_forms.bpmn", file_path)
# Assure that creating a new processor doesn't cause any issues, and maintains the spec version.
workflow_model.bpmn_workflow_json = processor.serialize()
processor2 = WorkflowProcessor(workflow_model)
processor.workflow_model.bpmn_workflow_json = processor.serialize()
processor2 = WorkflowProcessor(processor.workflow_model)
self.assertTrue(processor2.get_spec_version().startswith("v1 ")) # Still at version 1.
# Do a hard reset, which should bring us back to the beginning, but retain the data.
processor3 = WorkflowProcessor(workflow_model, hard_reset=True)
processor3 = WorkflowProcessor(processor.workflow_model, hard_reset=True)
self.assertEqual("Step 1", processor3.next_task().task_spec.description)
self.assertEqual({"color": "blue"}, processor3.next_task().data)
processor3.complete_task(processor3.next_task())
@ -360,29 +349,32 @@ class TestWorkflowProcessor(BaseTest):
version = WorkflowProcessor.get_latest_version_string("two_forms")
self.assertTrue(version.startswith("v1 "))
def test_status_bpmn(self):
@patch('crc.services.protocol_builder.requests.get')
def test_master_bpmn(self, mock_get):
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('required_docs.json')
self.load_example_data()
specs = session.query(WorkflowSpecModel).all()
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("status")
workflow_spec_model = db.session.query(WorkflowSpecModel).\
filter(WorkflowSpecModel.name=="top_level_workflow").first()
for enabled in [True, False]:
processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
task = processor.next_task()
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertTrue("Top level process is fully automatic.", processor.bpmn_workflow.is_completed())
data = processor.bpmn_workflow.last_task.data
# Turn all specs on or off
task.data = {"some_input": enabled}
processor.complete_task(task)
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
# Finish out rest of workflow
while processor.get_status() == WorkflowStatus.waiting:
task = processor.next_task()
processor.complete_task(task)
# It should mark Enter Core Data as required, because it is always required.
self.assertTrue("enter_core_info" in data)
self.assertEquals("required", data["enter_core_info"])
self.assertEqual(processor.get_status(), WorkflowStatus.complete)
# It should mark the Data Security Plan as required, because InfoSec Approval (24) is included in required docs.
self.assertTrue("data_security_plan" in data)
self.assertEquals("required", data["data_security_plan"])
# It should mark the sponsor funding source as disabled since the funding required (12) is not included in the required docs.
self.assertTrue("sponsor_funding_source" in data)
self.assertEquals("disabled", data["sponsor_funding_source"])
# Enabled status of all specs should match the value set in the first task
for spec in specs:
self.assertEqual(task.data[spec.id], enabled)

View File

@ -1,6 +1,7 @@
import json
from crc import session
from crc.api.common import ApiErrorSchema
from crc.models.file import FileModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel
from tests.base_test import BaseTest
@ -49,15 +50,16 @@ class TestWorkflowSpec(BaseTest):
def test_update_workflow_specification(self):
self.load_example_data()
category = WorkflowSpecCategoryModel(id=0, name='trap', display_name="It's a trap!", display_order=0)
category_id = 99
category = WorkflowSpecCategoryModel(id=category_id, name='trap', display_name="It's a trap!", display_order=0)
session.add(category)
session.commit()
db_spec_before: WorkflowSpecModel = session.query(WorkflowSpecModel).first()
spec_id = db_spec_before.id
self.assertIsNone(db_spec_before.workflow_spec_category_id)
self.assertNotEqual(db_spec_before.category_id, category_id)
db_spec_before.workflow_spec_category_id = 0
db_spec_before.category_id = category_id
rv = self.app.put('/v1.0/workflow-specification/%s' % spec_id,
content_type="application/json",
headers=self.logged_in_headers(),
@ -68,10 +70,10 @@ class TestWorkflowSpec(BaseTest):
self.assertEqual(db_spec_before, api_spec)
db_spec_after: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
self.assertIsNotNone(db_spec_after.workflow_spec_category_id)
self.assertIsNotNone(db_spec_after.workflow_spec_category)
self.assertEqual(db_spec_after.workflow_spec_category.display_name, category.display_name)
self.assertEqual(db_spec_after.workflow_spec_category.display_order, category.display_order)
self.assertIsNotNone(db_spec_after.category_id)
self.assertIsNotNone(db_spec_after.category)
self.assertEqual(db_spec_after.category.display_name, category.display_name)
self.assertEqual(db_spec_after.category.display_order, category.display_order)
def test_delete_workflow_specification(self):
self.load_example_data()
@ -94,3 +96,4 @@ class TestWorkflowSpec(BaseTest):
num_files_after = session.query(FileModel).filter_by(workflow_spec_id=spec_id).count()
num_workflows_after = session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id).count()
self.assertEqual(num_files_after + num_workflows_after, 0)

View File

@ -0,0 +1,68 @@
import json
import unittest
from crc import session
from crc.api.common import ApiErrorSchema
from crc.models.file import FileModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel
from tests.base_test import BaseTest
class TestWorkflowSpecValidation(BaseTest):
    """Exercises the /workflow-specification/<id>/validate endpoint for good and bad specs."""

    def validate_workflow(self, workflow_name):
        """Load the named test spec, validate it via the API, and return the list of error dicts."""
        self.load_example_data()
        spec_model = self.load_test_spec(workflow_name)
        rv = self.app.get('/v1.0/workflow-specification/%s/validate' % spec_model.id, headers=self.logged_in_headers())
        self.assert_success(rv)
        json_data = json.loads(rv.get_data(as_text=True))
        return ApiErrorSchema(many=True).load(json_data)

    def test_successful_validation_of_test_workflows(self):
        """All known-good test specs should validate with zero errors."""
        self.assertEqual(0, len(self.validate_workflow("parallel_tasks")))
        self.assertEqual(0, len(self.validate_workflow("decision_table")))
        self.assertEqual(0, len(self.validate_workflow("docx")))
        self.assertEqual(0, len(self.validate_workflow("exclusive_gateway")))
        self.assertEqual(0, len(self.validate_workflow("file_upload_form")))
        self.assertEqual(0, len(self.validate_workflow("random_fact")))
        self.assertEqual(0, len(self.validate_workflow("study_details")))
        self.assertEqual(0, len(self.validate_workflow("two_forms")))

    @unittest.skip("There is one workflow that is failing right now, and I want that visible after deployment.")
    def test_successful_validation_of_auto_loaded_workflows(self):
        """Every spec shipped with the example data should validate cleanly."""
        self.load_example_data()
        workflows = session.query(WorkflowSpecModel).all()
        errors = []
        for w in workflows:
            rv = self.app.get('/v1.0/workflow-specification/%s/validate' % w.id,
                              headers=self.logged_in_headers())
            self.assert_success(rv)
            json_data = json.loads(rv.get_data(as_text=True))
            errors.extend(ApiErrorSchema(many=True).load(json_data))
        # Dump the collected errors into the failure message for easier debugging.
        self.assertEqual(0, len(errors), json.dumps(errors))

    def test_invalid_expression(self):
        """A spec with a bad gateway expression reports exactly one well-located error."""
        # assertEqual, not the deprecated assertEquals alias (removed in Python 3.12).
        errors = self.validate_workflow("invalid_expression")
        self.assertEqual(1, len(errors))
        self.assertEqual("invalid_expression", errors[0]['code'])
        self.assertEqual("ExclusiveGateway_003amsm", errors[0]['task_id'])
        self.assertEqual("Has Bananas Gateway", errors[0]['task_name'])
        self.assertEqual("invalid_expression.bpmn", errors[0]['file_name'])
        self.assertEqual("The expression you provided does not exist:this_value_does_not_exist==true", errors[0]["message"])

    def test_validation_error(self):
        """A structurally invalid spec reports a workflow_validation_error pinned to its start event."""
        errors = self.validate_workflow("invalid_spec")
        self.assertEqual(1, len(errors))
        self.assertEqual("workflow_validation_error", errors[0]['code'])
        self.assertEqual("StartEvent_1", errors[0]['task_id'])
        self.assertEqual("invalid_spec.bpmn", errors[0]['file_name'])

    def test_invalid_script(self):
        """A spec referencing a missing script reports a workflow_execution_exception with the task details."""
        errors = self.validate_workflow("invalid_script")
        self.assertEqual(1, len(errors))
        self.assertEqual("workflow_execution_exception", errors[0]['code'])
        self.assertTrue("NoSuchScript" in errors[0]['message'])
        self.assertEqual("Invalid_Script_Task", errors[0]['task_id'])
        self.assertEqual("An Invalid Script Reference", errors[0]['task_name'])
        self.assertEqual("invalid_script.bpmn", errors[0]['file_name'])