Merge branch 'dev' into bug/missing_pi_name_246

# Conflicts:
#	crc/api.yml
#	crc/services/study_service.py
This commit is contained in:
NWalker4483 2021-07-20 11:44:11 -04:00
commit 4632c6374f
163 changed files with 6031 additions and 1718 deletions

View File

@ -39,15 +39,14 @@ requests = "*"
sentry-sdk = {extras = ["flask"],version = "==0.14.4"}
sphinx = "*"
swagger-ui-bundle = "*"
spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow.git",ref = "master"}
spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow.git"}
# spiffworkflow = {editable = true, path = "./../SpiffWorkflow"}
webtest = "*"
werkzeug = "*"
xlrd = "*"
xlsxwriter = "*"
pygithub = "*"
python-box = "*"
python-levenshtein = "*"
apscheduler = "*"
[requires]
python_version = "3.8"

1185
Pipfile.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -17,7 +17,10 @@ API_TOKEN = environ.get('API_TOKEN', default = 'af95596f327c9ecc007b60414fc84b61
NAME = "CR Connect Workflow"
DEFAULT_PORT = "5000"
FLASK_PORT = environ.get('PORT0') or environ.get('FLASK_PORT', default=DEFAULT_PORT)
CORS_ALLOW_ORIGINS = re.split(r',\s*', environ.get('CORS_ALLOW_ORIGINS', default="localhost:4200, localhost:5002"))
FRONTEND = environ.get('FRONTEND', default="localhost:4200")
BPMN = environ.get('BPMN', default="localhost:5002")
CORS_DEFAULT = f'{FRONTEND}, {BPMN}'
CORS_ALLOW_ORIGINS = re.split(r',\s*', environ.get('CORS_ALLOW_ORIGINS', default=CORS_DEFAULT))
TESTING = environ.get('TESTING', default="false") == "true"
PRODUCTION = (environ.get('PRODUCTION', default="false") == "true")
TEST_UID = environ.get('TEST_UID', default="dhf8r")
@ -50,7 +53,6 @@ SQLALCHEMY_DATABASE_URI = environ.get(
TOKEN_AUTH_TTL_HOURS = float(environ.get('TOKEN_AUTH_TTL_HOURS', default=24))
SECRET_KEY = environ.get('SECRET_KEY', default="Shhhh!!! This is secret! And better darn well not show up in prod.")
FRONTEND_AUTH_CALLBACK = environ.get('FRONTEND_AUTH_CALLBACK', default="http://localhost:4200/session")
SWAGGER_AUTH_KEY = environ.get('SWAGGER_AUTH_KEY', default="SWAGGER")
# %s/%i placeholders expected for uva_id and study_id in various calls.
PB_ENABLED = environ.get('PB_ENABLED', default="false") == "true"
@ -60,6 +62,8 @@ PB_INVESTIGATORS_URL = environ.get('PB_INVESTIGATORS_URL', default=PB_BASE_URL +
PB_REQUIRED_DOCS_URL = environ.get('PB_REQUIRED_DOCS_URL', default=PB_BASE_URL + "required_docs?studyid=%i")
PB_STUDY_DETAILS_URL = environ.get('PB_STUDY_DETAILS_URL', default=PB_BASE_URL + "study?studyid=%i")
PB_SPONSORS_URL = environ.get('PB_SPONSORS_URL', default=PB_BASE_URL + "sponsors?studyid=%i")
PB_IRB_INFO_URL = environ.get('PB_IRB_INFO_URL', default=PB_BASE_URL + "current_irb_info/%i")
PB_CHECK_STUDY_URL = environ.get('PB_CHECK_STUDY_URL', default=PB_BASE_URL + "check_study/%i")
# Ldap Configuration
LDAP_URL = environ.get('LDAP_URL', default="ldap.virginia.edu").strip('/') # No trailing slash or http://

View File

@ -12,6 +12,8 @@ from flask_mail import Mail
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from sentry_sdk.integrations.flask import FlaskIntegration
from apscheduler.schedulers.background import BackgroundScheduler
logging.basicConfig(level=logging.INFO)
@ -33,6 +35,7 @@ db = SQLAlchemy(app)
session = db.session
""":type: sqlalchemy.orm.Session"""
scheduler = BackgroundScheduler()
# Mail settings
mail = Mail(app)
@ -43,9 +46,18 @@ ma = Marshmallow(app)
from crc import models
from crc import api
from crc.api import admin
from crc.services.workflow_service import WorkflowService
connexion_app.add_api('api.yml', base_path='/v1.0')
# Defined here, rather than in a service module, to avoid a circular
# import: WorkflowService is imported above only after the app exists.
def process_waiting_tasks():
    """Scheduled job body: advance all workflows that are in a waiting state.

    Runs inside the Flask application context so that app-bound resources
    (config, db session) are available to WorkflowService.do_waiting().
    Registered below with the APScheduler BackgroundScheduler on a
    5-minute interval.
    """
    with app.app_context():
        WorkflowService.do_waiting()
scheduler.add_job(process_waiting_tasks,'interval',minutes=5)
scheduler.start()
# Convert list of allowed origins to list of regexes
origins_re = [r"^https?:\/\/%s(.*)" % o.replace('.', '\.') for o in app.config['CORS_ALLOW_ORIGINS']]
@ -112,3 +124,8 @@ def clear_db():
from example_data import ExampleDataLoader
ExampleDataLoader.clean_db()
@app.cli.command()
def sync_with_testing():
"""Load all the workflows currently on testing into this system."""
from crc.api import workflow_sync
workflow_sync.sync_all_changed_workflows("https://testing.crconnect.uvadcos.io/api")

View File

@ -65,7 +65,37 @@ paths:
application/json:
schema:
type: array
$ref: "#/components/schemas/User"
items:
$ref: "#/components/schemas/User"
# /v1.0/document_directory/{study_id}
/document_directory/{study_id}:
parameters :
- name : study_id
required : true
in : path
description : The unique id of a study.
schema :
type : integer
- name : workflow_id
in : query
required : false
schema :
type : integer
get:
operationId: crc.api.document.get_document_directory
summary: Returns a directory of all files for study in a nested structure
tags:
- Document Categories
responses:
'200':
description: A nested directory of all files for the study, arranged by document category.
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/DocumentDirectory"
# /v1.0/study
/study:
get:
@ -80,7 +110,8 @@ paths:
application/json:
schema:
type: array
$ref: "#/components/schemas/Study"
items:
$ref: "#/components/schemas/Study"
post:
operationId: crc.api.study.add_study
summary: Creates a new study with the given parameters.
@ -160,7 +191,7 @@ paths:
parameters:
- name: workflow_spec_id
in: path
required: false
required: true
description: The unique id of an existing workflow specification to modify.
schema:
type: string
@ -318,6 +349,12 @@ paths:
schema:
type: integer
format: int32
- name: update_status
in: query
required: false
description: If set to true, will sync the study with protocol builder and assure the status of all workflows is up to date (expensive).
schema:
type: boolean
get:
operationId: crc.api.study.get_study
summary: Provides a single study
@ -415,8 +452,8 @@ paths:
parameters:
- name: spec_id
in: path
required: false
description: The unique id of an existing workflow specification to modify.
required: true
description: The unique id of an existing workflow specification.
schema:
type: string
get:
@ -431,6 +468,18 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/WorkflowSpec"
post:
operationId: crc.api.workflow.get_workflow_from_spec
summary: Creates a workflow from a workflow spec and returns the workflow
tags:
- Workflow Specifications
responses:
'200':
description: Workflow generated successfully
content:
application/json:
schema:
$ref: "#/components/schemas/Workflow"
put:
operationId: crc.api.workflow.update_workflow_specification
security:
@ -460,14 +509,41 @@ paths:
responses:
'204':
description: The workflow specification has been removed.
/workflow-specification/standalone:
get:
operationId: crc.api.workflow.standalone_workflow_specs
summary: Provides a list of workflow specifications that can be run outside a study.
tags:
- Workflow Specifications
responses:
'200':
description: A list of workflow specifications
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/WorkflowSpec"
/workflow-specification/{spec_id}/validate:
parameters:
- name: spec_id
in: path
required: false
required: true
description: The unique id of an existing workflow specification to validate.
schema:
type: string
- name: study_id
in: query
required: false
description: Optional id of study to test under different scenarios
schema:
type: string
- name: test_until
in: query
required: false
description: Optional name of task to stop validating at
schema:
type: string
get:
operationId: crc.api.workflow.validate_workflow_specification
summary: Loads and attempts to execute a Workflow Specification, returning a list of errors encountered
@ -520,7 +596,7 @@ paths:
parameters:
- name: cat_id
in: path
required: false
required: true
description: The unique id of an existing workflow spec category to modify.
schema:
type: string
@ -585,6 +661,12 @@ paths:
description: The unique key of a workflow task form field. Make sure this matches a document in the irb_documents.xlsx reference sheet.
schema:
type: string
- name: study_id
in: query
required: false
description: The study that the files are related to
schema:
type: integer
get:
operationId: crc.api.file.get_files
summary: Provides a list of files that match the given parameters (such as a spec id) IMPORTANT, only includes metadata, not the file content.
@ -690,6 +772,41 @@ paths:
type: string
format: binary
example: '<?xml version="1.0" encoding="UTF-8"?><bpmn:definitions></bpmn:definitions>'
/file/{file_id}/download :
parameters :
- name : file_id
in : path
required : true
description : The id of the File requested
schema :
type : integer
- name : auth_token
in : query
required : true
description : User Auth Token
schema :
type : string
- name : version
in : query
required : false
description : The version of the file, or none for latest version
schema :
type : integer
get :
operationId : crc.api.file.get_file_data_link
summary : Returns only the file contents
security: []
tags :
- Files
responses :
'200' :
description : Returns the actual file
content :
application/octet-stream :
schema :
type : string
format : binary
example : '<?xml version="1.0" encoding="UTF-8"?><bpmn:definitions></bpmn:definitions>'
/file/{file_id}/data:
parameters:
- name: file_id
@ -894,6 +1011,12 @@ paths:
description: Set this to true to clear data when starting workflow.
schema:
type: boolean
- name: delete_files
in: query
required: false
description: Set this to true to delete associated files when starting workflow.
schema:
type: boolean
tags:
- Workflows and Tasks
responses:
@ -1281,6 +1404,27 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/DataStore"
/datastore/file/{file_id}:
parameters:
- name: file_id
in: path
required: true
description: The file id we are concerned with
schema:
type: string
format: string
get:
operationId: crc.api.data_store.file_multi_get
summary: Gets all datastore items by file_id
tags:
- DataStore
responses:
'200':
description: Get all values from the data store for a file_id
content:
application/json:
schema:
$ref: "#/components/schemas/DataStore"
components:
securitySchemes:
jwt:
@ -1478,6 +1622,29 @@ components:
type: string
x-nullable: true
example: "27b-6-1212"
DocumentDirectory:
properties:
level:
type: string
x-nullable: true
example: "Ancillary Document"
description:
type: string
x-nullable: true
example: "Documents that are ancillary to the study"
file:
$ref: "#/components/schemas/File"
x-nullable: true
expanded:
type: boolean
example: False
filecount:
type: integer
example: 1
children:
type: array
items:
$ref: "#/components/schemas/File"
DataStore:
properties:
id:
@ -1527,6 +1694,10 @@ components:
category_id:
type: integer
nullable: true
standalone:
type: boolean
example: false
default: false
workflow_spec_category:
$ref: "#/components/schemas/WorkflowSpecCategory"
is_status:
@ -1571,8 +1742,7 @@ components:
example: "random_fact"
x-nullable: true
file:
type: file
format: binary
type: string
Workflow:
properties:
id:
@ -1580,7 +1750,7 @@ components:
type: integer
format: int64
status:
type: enum
type: string
enum: ['new','user_input_required','waiting','complete']
navigation:
type: array
@ -1600,6 +1770,8 @@ components:
type: integer
num_tasks_incomplete:
type: integer
study_id:
type: integer
example:
id: 291234
@ -1634,7 +1806,7 @@ components:
data:
type: object
multi_instance_type:
type: enum
type: string
enum: ['none', 'looping', 'parallel', 'sequential']
multi_instance_count:
type: number
@ -1767,7 +1939,7 @@ components:
type: string
readOnly: true
type:
type: enum
type: string
enum: ['string', 'long', 'boolean', 'date', 'enum']
readOnly: true
label:
@ -1843,7 +2015,7 @@ components:
type: string
example: "Chuck Norris"
data:
type: any
type: string
NavigationItem:
properties:
id:
@ -1873,7 +2045,7 @@ components:
type: integer
example: 4
state:
type: enum
type: string
enum: ['FUTURE', 'WAITING', 'READY', 'CANCELLED', 'COMPLETED','LIKELY','MAYBE']
readOnly: true
is_decision:

View File

@ -10,7 +10,9 @@ import sentry_sdk
class ApiError(Exception):
def __init__(self, code, message, status_code=400,
file_name="", task_id="", task_name="", tag="", task_data = {}):
file_name="", task_id="", task_name="", tag="", task_data=None, error_type="", line_number=0, offset=0):
if task_data is None:
task_data = {}
self.status_code = status_code
self.code = code # a short consistent string describing the error.
self.message = message # A detailed message that provides more information.
@ -18,8 +20,11 @@ class ApiError(Exception):
self.task_name = task_name or "" # OPTIONAL: The name of the task in the BPMN Diagram.
self.file_name = file_name or "" # OPTIONAL: The file that caused the error.
self.tag = tag or "" # OPTIONAL: The XML Tag that caused the issue.
self.task_data = task_data or "" # OPTIONAL: A snapshot of data connected to the task when error ocurred.
if hasattr(g,'user'):
self.task_data = task_data or "" # OPTIONAL: A snapshot of data connected to the task when error occurred.
self.line_number = line_number
self.offset = offset
self.error_type = error_type
if hasattr(g, 'user'):
user = g.user.uid
else:
user = 'Unknown'
@ -29,12 +34,16 @@ class ApiError(Exception):
Exception.__init__(self, self.message)
@classmethod
def from_task(cls, code, message, task, status_code=400):
def from_task(cls, code, message, task, status_code=400, line_number=0, offset=0, error_type="", error_line=""):
"""Constructs an API Error with details pulled from the current task."""
instance = cls(code, message, status_code=status_code)
instance.task_id = task.task_spec.name or ""
instance.task_name = task.task_spec.description or ""
instance.file_name = task.workflow.spec.file or ""
instance.line_number = line_number
instance.offset = offset
instance.error_type = error_type
instance.error_line = error_line
# Fixme: spiffworkflow is doing something weird where task ends up referenced in the data in some cases.
if "task" in task.data:
@ -61,7 +70,11 @@ class ApiError(Exception):
so consolidating the code, and doing the best things
we can with the data we have."""
if isinstance(exp, WorkflowTaskExecException):
return ApiError.from_task(code, message, exp.task)
return ApiError.from_task(code, message, exp.task, line_number=exp.line_number,
offset=exp.offset,
error_type=exp.exception.__class__.__name__,
error_line=exp.error_line)
else:
return ApiError.from_task_spec(code, message, exp.sender)
@ -69,7 +82,7 @@ class ApiError(Exception):
class ApiErrorSchema(ma.Schema):
class Meta:
fields = ("code", "message", "workflow_name", "file_name", "task_name", "task_id",
"task_data", "task_user", "hint")
"task_data", "task_user", "hint", "line_number", "offset", "error_type", "error_line")
@app.errorhandler(ApiError)

View File

@ -4,33 +4,7 @@ from datetime import datetime
from crc import session
from crc.api.common import ApiError
from crc.models.data_store import DataStoreModel, DataStoreSchema
from crc.scripts.data_store_base import DataStoreBase
def study_data_set(study_id, key, value):
"""Set a study data value in the data_store, mimic the script endpoint"""
if study_id is None:
raise ApiError('unknown_study', 'Please provide a valid Study ID.')
if key is None:
raise ApiError('invalid_key', 'Please provide a valid key')
dsb = DataStoreBase()
retval = dsb.set_data_common('api', study_id, None, None, None, 'api_study_data_set', key, value)
json_value = json.dumps(retval, ensure_ascii=False, indent=2)
return json_value
def study_data_get(study_id, key, default=None):
"""Get a study data value in the data_store, mimic the script endpoint"""
if study_id is None:
raise ApiError('unknown_study', 'Please provide a valid Study ID.')
if key is None:
raise ApiError('invalid_key', 'Please provide a valid key')
dsb = DataStoreBase()
retval = dsb.get_data_common(study_id, None, 'api_study_data_get', key, default)
# json_value = json.dumps(retval, ensure_ascii=False, indent=2) # just return raw text
return retval
from crc.services.data_store_service import DataStoreBase
def study_multi_get(study_id):
@ -44,57 +18,6 @@ def study_multi_get(study_id):
return results
def study_data_del(study_id, key):
"""Delete a study data value in the data store"""
if study_id is None:
raise ApiError('unknown_study', 'Please provide a valid Study ID.')
if key is None:
raise ApiError('invalid_key', 'Please provide a valid key')
dsb = DataStoreBase()
dsb.del_data_common(study_id, None, 'api_study_data_get', key)
json_value = json.dumps('deleted', ensure_ascii=False, indent=2)
return json_value
def user_data_set(user_id, key, value):
"""Set a user data value in the data_store, mimic the script endpoint"""
if user_id is None:
raise ApiError('unknown_study', 'Please provide a valid UserID.')
if key is None:
raise ApiError('invalid_key', 'Please provide a valid key')
dsb = DataStoreBase()
retval = dsb.set_data_common('api',
None,
user_id,
None,
None,
'api_user_data_set',
key, value)
json_value = json.dumps(retval, ensure_ascii=False, indent=2)
return json_value
def user_data_get(user_id, key, default=None):
"""Get a user data value from the data_store, mimic the script endpoint"""
if user_id is None:
raise ApiError('unknown_study', 'Please provide a valid UserID.')
if key is None:
raise ApiError('invalid_key', 'Please provide a valid key')
dsb = DataStoreBase()
retval = dsb.get_data_common(None,
user_id,
'api_user_data_get',
key, default)
# json_value = json.dumps(retval, ensure_ascii=False, indent=2) # just return raw text
return retval
def user_multi_get(user_id):
"""Get all data values in the data_store for a userid"""
if user_id is None:
@ -107,8 +30,18 @@ def user_multi_get(user_id):
return results
def file_multi_get(file_id):
    """Return every data-store entry recorded against the given file id.

    :raises ApiError: 'unknown_file' when file_id is None.
    """
    if file_id is None:
        raise ApiError(code='unknown_file', message='Please provide a valid file id.')
    records = DataStoreBase().get_multi_common(None, None, file_id=file_id)
    return DataStoreSchema(many=True).dump(records)
def datastore_del(id):
"""Delete a data store item for a user_id and a key"""
"""Delete a data store item for a key"""
session.query(DataStoreModel).filter_by(id=id).delete()
session.commit()
json_value = json.dumps('deleted', ensure_ascii=False, indent=2)
@ -116,7 +49,7 @@ def datastore_del(id):
def datastore_get(id):
"""Delete a data store item for a user_id and a key"""
"""retrieve a data store item by a key"""
item = session.query(DataStoreModel).filter_by(id=id).first()
results = DataStoreSchema(many=False).dump(item)
return results
@ -130,13 +63,9 @@ def update_datastore(id, body):
item = session.query(DataStoreModel).filter_by(id=id).first()
if item is None:
raise ApiError('unknown_item', 'The item "' + id + '" is not recognized.')
print(body)
# I'm not sure if there is a generic way to use the
# schema to both parse the body and update the SQLAlchemy record
for key in body:
if hasattr(item, key):
setattr(item, key, body[key])
item.last_updated = datetime.now()
DataStoreSchema().load(body, instance=item, session=session)
item.last_updated = datetime.utcnow()
session.add(item)
session.commit()
return DataStoreSchema().dump(item)
@ -155,20 +84,20 @@ def add_datastore(body):
if 'value' not in body:
raise ApiError('no_value', 'You need to specify a value to add a datastore item')
if (not 'user_id' in body) and (not 'study_id' in body):
raise ApiError('conflicting_values', 'A datastore item should have either a study_id or a user_id')
if ('user_id' not in body) and ('study_id' not in body) and ('file_id' not in body):
raise ApiError('conflicting_values', 'A datastore item should have either a study_id, user_id or file_id ')
if 'user_id' in body and 'study_id' in body:
raise ApiError('conflicting_values', 'A datastore item should have either a study_id or a user_id, '
'but not both')
item = DataStoreModel(key=body['key'], value=body['value'])
# I'm not sure if there is a generic way to use the
# schema to both parse the body and update the SQLAlchemy record
for key in body:
if hasattr(item, key):
setattr(item, key, body[key])
item.last_updated = datetime.now()
present = 0
for field in ['user_id','study_id','file_id']:
if field in body:
present = present+1
if present > 1:
raise ApiError('conflicting_values', 'A datastore item should have one of a study_id, user_id or a file_id '
'but not more than one of these')
item = DataStoreSchema().load(body)
item.last_updated = datetime.utcnow()
session.add(item)
session.commit()
return DataStoreSchema().dump(item)

18
crc/api/document.py Normal file
View File

@ -0,0 +1,18 @@
from crc.models.api_models import DocumentDirectorySchema
from crc.models.file import File
from crc.services.document_service import DocumentService
from crc.services.file_service import FileService
from crc.services.lookup_service import LookupService
def get_document_directory(study_id, workflow_id=None):
    """Return a nested list of a study's files, grouped by the category
    hierarchy defined in the document dictionary.

    :param study_id: id of the study whose files are listed.
    :param workflow_id: optional workflow filter passed through to
        DocumentService.get_directory.
    """
    doc_dict = DocumentService.get_dictionary()
    files = [
        File.from_models(model, FileService.get_file_data(model.id), doc_dict)
        for model in FileService.get_files_for_study(study_id=study_id)
    ]
    directory = DocumentService.get_directory(doc_dict, files, workflow_id)
    return DocumentDirectorySchema(many=True).dump(directory)

View File

@ -6,24 +6,28 @@ from flask import send_file
from crc import session
from crc.api.common import ApiError
from crc.models.file import FileSchema, FileModel, File, FileModelSchema, FileDataModel
from crc.api.user import verify_token
from crc.models.file import FileSchema, FileModel, File, FileModelSchema, FileDataModel, FileType
from crc.models.workflow import WorkflowSpecModel
from crc.services.document_service import DocumentService
from crc.services.file_service import FileService
def to_file_api(file_model):
"""Converts a FileModel object to something we can return via the api"""
return File.from_models(file_model, FileService.get_file_data(file_model.id),
FileService.get_doc_dictionary())
DocumentService.get_dictionary())
def get_files(workflow_spec_id=None, workflow_id=None, form_field_key=None):
if all(v is None for v in [workflow_spec_id, workflow_id, form_field_key]):
def get_files(workflow_spec_id=None, workflow_id=None, form_field_key=None,study_id=None):
if all(v is None for v in [workflow_spec_id, workflow_id, form_field_key,study_id]):
raise ApiError('missing_parameter',
'Please specify either a workflow_spec_id or a '
'workflow_id with an optional form_field_key')
file_models = FileService.get_files(workflow_spec_id=workflow_spec_id,
if study_id is not None:
file_models = FileService.get_files_for_study(study_id=study_id, irb_doc_code=form_field_key)
else:
file_models = FileService.get_files(workflow_spec_id=workflow_spec_id,
workflow_id=workflow_id,
irb_doc_code=form_field_key)
@ -47,9 +51,16 @@ def add_file(workflow_spec_id=None, workflow_id=None, form_field_key=None):
name=file.filename, content_type=file.content_type,
binary_data=file.stream.read())
elif workflow_spec_id:
# check if we have a primary already
have_primary = FileModel.query.filter(FileModel.workflow_spec_id==workflow_spec_id, FileModel.type==FileType.bpmn, FileModel.primary==True).all()
# set this to primary if we don't already have one
if not have_primary:
primary = True
else:
primary = False
workflow_spec = session.query(WorkflowSpecModel).filter_by(id=workflow_spec_id).first()
file_model = FileService.add_workflow_spec_file(workflow_spec, file.filename, file.content_type,
file.stream.read())
file.stream.read(), primary=primary)
else:
raise ApiError("invalid_file", "You must supply either a workflow spec id or a workflow_id and form_field_key.")
@ -116,6 +127,22 @@ def get_file_data(file_id, version=None):
)
def get_file_data_link(file_id, auth_token, version=None):
    """Stream a file's raw contents to the caller as a download.

    Unlike the session-authenticated endpoints, the token is carried as a
    query parameter (auth_token) so the URL works as a direct download
    link; the route declares `security: []` in api.yml for that reason.

    :param file_id: id of the file to download.
    :param auth_token: token checked via crc.api.user.verify_token.
    :param version: specific file version, or None for the latest.
    :raises ApiError: 'not_authenticated' for a failed token check,
        'no_such_file' when no data exists for file_id.
    """
    if not verify_token(auth_token):
        raise ApiError('not_authenticated', 'You need to include an authorization token in the URL with this')
    file_data = FileService.get_file_data(file_id, version)
    if file_data is None:
        raise ApiError('no_such_file', 'The file id you provided does not exist')
    return send_file(
        io.BytesIO(file_data.data),
        attachment_filename=file_data.file_model.name,
        mimetype=file_data.file_model.content_type,
        cache_timeout=-1,  # Don't cache these files on the browser.
        last_modified=file_data.date_created,
        as_attachment = True
    )
def get_file_info(file_id):
file_model = session.query(FileModel).filter_by(id=file_id).with_for_update().first()
if file_model is None:

View File

@ -23,7 +23,7 @@ def add_study(body):
study_model = StudyModel(user_uid=UserService.current_user().uid,
title=body['title'],
primary_investigator_id=body['primary_investigator_id'],
last_updated=datetime.now(),
last_updated=datetime.utcnow(),
status=StudyStatus.in_progress)
session.add(study_model)
StudyService.add_study_update_event(study_model,
@ -33,7 +33,7 @@ def add_study(body):
errors = StudyService._add_all_workflow_specs_to_study(study_model)
session.commit()
study = StudyService().get_study(study_model.id)
study = StudyService().get_study(study_model.id, do_status=True)
study_data = StudySchema().dump(study)
study_data["errors"] = ApiErrorSchema(many=True).dump(errors)
return study_data
@ -51,7 +51,7 @@ def update_study(study_id, body):
study: Study = StudyForUpdateSchema().load(body)
status = StudyStatus(study.status)
study_model.last_updated = datetime.now()
study_model.last_updated = datetime.utcnow()
if study_model.status != status:
study_model.status = status
@ -74,8 +74,8 @@ def update_study(study_id, body):
return StudySchema().dump(study)
def get_study(study_id):
study = StudyService.get_study(study_id)
def get_study(study_id, update_status=False):
study = StudyService.get_study(study_id, do_status=update_status)
if (study is None):
raise ApiError("unknown_study", 'The study "' + study_id + '" is not recognized.', status_code=404)
return StudySchema().dump(study)
@ -97,7 +97,7 @@ def user_studies():
"""Returns all the studies associated with the current user. """
user = UserService.current_user(allow_admin_impersonate=True)
StudyService.synch_with_protocol_builder_if_enabled(user)
studies = StudyService.get_studies_for_user(user)
studies = StudyService().get_studies_for_user(user)
results = StudySchema(many=True).dump(studies)
return results

View File

@ -1,3 +1,4 @@
import hashlib
import io
import json
@ -78,16 +79,15 @@ def send_email(subject, address, body, data=None):
def evaluate_python_expression(body):
"""Evaluate the given python expression, returning its result. This is useful if the
front end application needs to do real-time processing on task data. If for instance
there is a hide expression that is based on a previous value in the same form."""
there is a hide expression that is based on a previous value in the same form.
The response includes both the result, and a hash of the original query, subsequent calls
of the same hash are unnecessary. """
try:
script_engine = CustomBpmnScriptEngine()
result = script_engine.eval(body['expression'], body['data'])
return {"result": result}
return {"result": result, "expression": body['expression'], "key": body['key']}
except Exception as e:
raise ApiError("expression_error", f"Failed to evaluate the expression '%s'. %s" %
(body['expression'], str(e)),
task_data = body["data"])
return {"result": False, "expression": body['expression'], "key": body['key'], "error": str(e)}
def send_test_email(subject, address, message, data=None):
rendered, wrapped = EmailService().get_rendered_content(message, data)

View File

@ -214,7 +214,7 @@ def _handle_login(user_info: LdapModel, redirect_url=None):
g.user = user
# Return the frontend auth callback URL, with auth token appended.
auth_token = user.encode_auth_token().decode()
auth_token = user.encode_auth_token()
g.token = auth_token
if redirect_url is not None:

View File

@ -6,7 +6,7 @@ from crc import session
from crc.api.common import ApiError, ApiErrorSchema
from crc.models.api_models import WorkflowApiSchema
from crc.models.file import FileModel, LookupDataSchema
from crc.models.study import StudyModel, WorkflowMetadata
from crc.models.study import StudyModel, WorkflowMetadata, StudyStatus
from crc.models.task_event import TaskEventModel, TaskEvent, TaskEventSchema
from crc.models.user import UserModelSchema
from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, WorkflowSpecModel, WorkflowSpecCategoryModel, \
@ -46,22 +46,15 @@ def get_workflow_specification(spec_id):
return WorkflowSpecModelSchema().dump(spec)
def validate_workflow_specification(spec_id):
errors = {}
def validate_workflow_specification(spec_id, study_id=None, test_until=None):
try:
WorkflowService.test_spec(spec_id)
WorkflowService.test_spec(spec_id, study_id, test_until)
WorkflowService.test_spec(spec_id, study_id, test_until, required_only=True)
except ApiError as ae:
ae.message = "When populating all fields ... \n" + ae.message
errors['all'] = ae
try:
# Run the validation twice, the second time, just populate the required fields.
WorkflowService.test_spec(spec_id, required_only=True)
except ApiError as ae:
ae.message = "When populating only required fields ... \n" + ae.message
errors['required'] = ae
interpreted_errors = ValidationErrorService.interpret_validation_errors(errors)
return ApiErrorSchema(many=True).dump(interpreted_errors)
error = ae
error = ValidationErrorService.interpret_validation_error(error)
return ApiErrorSchema(many=True).dump([error])
return []
def update_workflow_specification(spec_id, body):
if spec_id is None:
@ -101,6 +94,24 @@ def delete_workflow_specification(spec_id):
session.commit()
def get_workflow_from_spec(spec_id):
    """Create a new workflow instance from a spec and return it as an API model.

    NOTE(review): assumes g.user has been populated by the auth layer
    before this endpoint runs -- confirm against the security config.

    The call order below is deliberate: engine steps run and are saved
    before task assignments are updated and the API model is built.
    """
    workflow_model = WorkflowService.get_workflow_from_spec(spec_id, g.user)
    processor = WorkflowProcessor(workflow_model)
    processor.do_engine_steps()  # advance past any automatic/script tasks
    processor.save()
    WorkflowService.update_task_assignments(processor)
    workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
    return WorkflowApiSchema().dump(workflow_api_model)
def standalone_workflow_specs():
    """Return all workflow specifications that can be run outside a study."""
    specs = WorkflowService.get_standalone_workflow_specs()
    return WorkflowSpecModelSchema(many=True).dump(specs)
def get_workflow(workflow_id, do_engine_steps=True):
"""Retrieve workflow based on workflow_id, and return it in the last saved State.
If do_engine_steps is False, return the workflow without running any engine tasks or logging any events. """
@ -116,11 +127,11 @@ def get_workflow(workflow_id, do_engine_steps=True):
return WorkflowApiSchema().dump(workflow_api_model)
def restart_workflow(workflow_id, clear_data=False):
def restart_workflow(workflow_id, clear_data=False, delete_files=False):
"""Restart a workflow with the latest spec.
Clear data allows user to restart the workflow without previous data."""
workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
WorkflowProcessor.reset(workflow_model, clear_data=clear_data)
WorkflowProcessor.reset(workflow_model, clear_data=clear_data, delete_files=delete_files)
return get_workflow(workflow_model.id)
@ -145,7 +156,8 @@ def get_task_events(action = None, workflow = None, study = None):
study = session.query(StudyModel).filter(StudyModel.id == event.study_id).first()
workflow = session.query(WorkflowModel).filter(WorkflowModel.id == event.workflow_id).first()
workflow_meta = WorkflowMetadata.from_workflow(workflow)
task_events.append(TaskEvent(event, study, workflow_meta))
if study and study.status in [StudyStatus.open_for_enrollment, StudyStatus.in_progress]:
task_events.append(TaskEvent(event, study, workflow_meta))
return TaskEventSchema(many=True).dump(task_events)
@ -184,12 +196,10 @@ def update_task(workflow_id, task_id, body, terminate_loop=None, update_all=Fals
if workflow_model is None:
raise ApiError("invalid_workflow_id", "The given workflow id is not valid.", status_code=404)
elif workflow_model.study is None:
raise ApiError("invalid_study", "There is no study associated with the given workflow.", status_code=404)
processor = WorkflowProcessor(workflow_model)
task_id = uuid.UUID(task_id)
spiff_task = processor.bpmn_workflow.get_task(task_id)
spiff_task.workflow.script_engine = processor.bpmn_workflow.script_engine
_verify_user_and_role(processor, spiff_task)
user = UserService.current_user(allow_admin_impersonate=False) # Always log as the real user.
@ -199,7 +209,7 @@ def update_task(workflow_id, task_id, body, terminate_loop=None, update_all=Fals
raise ApiError("invalid_state", "You may not update a task unless it is in the READY state. "
"Consider calling a token reset to make this task Ready.")
if terminate_loop:
if terminate_loop and spiff_task.is_looping():
spiff_task.terminate_loop()
# Extract the details specific to the form submitted
@ -228,6 +238,7 @@ def __update_task(processor, task, data, user):
here because we need to do it multiple times when completing all tasks in
a multi-instance task"""
task.update_data(data)
WorkflowService.post_process_form(task) # some properties may update the data store.
processor.complete_task(task)
processor.do_engine_steps()
processor.save()

View File

@ -1,5 +1,7 @@
import hashlib
import pandas as pd
from pandas._libs.missing import NA
from crc import session, app
from crc.api.common import ApiError
from crc.models.file import FileModel, FileDataModel
@ -184,9 +186,12 @@ def update_or_create_current_file(remote,workflow_spec_id,updatefile):
currentfile.content_type = updatefile['content_type']
currentfile.primary_process_id = updatefile['primary_process_id']
session.add(currentfile)
content = WorkflowSyncService.get_remote_file_by_hash(remote, updatefile['md5_hash'])
FileService.update_file(currentfile, content, updatefile['type'])
try:
content = WorkflowSyncService.get_remote_file_by_hash(remote, updatefile['md5_hash'])
FileService.update_file(currentfile, content, updatefile['type'])
except ApiError:
# Remote file doesn't exist, so don't update it.
print("Remote file " + currentfile.name + " does not exist, so not syncing.")
def sync_changed_files(remote,workflow_spec_id):
"""
@ -285,7 +290,7 @@ def get_changed_files(remote,workflow_spec_id,as_df=False):
changedfiles['new'] = False
changedfiles.loc[changedfiles.index.isin(left['filename']), 'new'] = True
changedfiles.loc[changedfiles.index.isin(right['filename']),'new'] = True
changedfiles = changedfiles.replace({pd.np.nan: None})
changedfiles = changedfiles.replace({NA: None})
# return the list as a dict, let swagger convert it to json
if as_df:
return changedfiles

View File

@ -7,7 +7,7 @@ from marshmallow_enum import EnumField
from crc import ma
from crc.models.workflow import WorkflowStatus
from crc.models.file import FileSchema
class MultiInstanceType(enum.Enum):
none = "none"
@ -26,7 +26,8 @@ class Task(object):
PROP_EXTENSIONS_TITLE = "display_name"
# Autocomplete field
# Field Types
FIELD_TYPE_FILE = "file"
FIELD_TYPE_AUTO_COMPLETE = "autocomplete"
FIELD_PROP_AUTO_COMPLETE_MAX = "autocomplete_num" # Not used directly, passed in from the front end.
@ -59,6 +60,10 @@ class Task(object):
FIELD_PROP_REPLEAT_TITLE = "repeat_title"
FIELD_PROP_REPLEAT_BUTTON = "repeat_button_label"
# File specific field properties
FIELD_PROP_DOC_CODE = "doc_code" # to associate a file upload field with a doc code
FIELD_PROP_FILE_DATA = "file_data" # to associate a bit of data with a specific file upload file.
# Additional properties
FIELD_PROP_ENUM_TYPE = "enum_type"
FIELD_PROP_TEXT_AREA_ROWS = "rows"
@ -167,10 +172,31 @@ class NavigationItemSchema(ma.Schema):
item.spec_type = spec_type
return item
class DocumentDirectorySchema(ma.Schema):
    """Marshmallow schema for one node of the document directory tree.

    Serializes a DocumentDirectory node: its category level, the file at
    this node (if any), a rolled-up file count, the UI expansion flag, and
    nested child nodes of the same shape.
    """
    level = marshmallow.fields.String()
    file = marshmallow.fields.Nested(FileSchema)
    filecount = marshmallow.fields.Integer()
    expanded = marshmallow.fields.Boolean()
    # Nested("self") makes the schema recursive so the whole tree serializes.
    children = marshmallow.fields.Nested("self",many=True)
class DocumentDirectory(object):
    """One node of the document directory tree: a category level, an
    optional file, and any child nodes."""

    def __init__(self, level=None, file=None, children=None):
        self.level = level
        self.file = file
        self.expanded = False  # UI state: nodes start collapsed
        self.filecount = 0     # rolled-up count of files beneath this node
        # Default to a fresh list per instance (avoids shared mutable default).
        self.children = [] if children is None else children
class WorkflowApi(object):
def __init__(self, id, status, next_task, navigation,
spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks,
last_updated, is_review, title):
last_updated, is_review, title, study_id):
self.id = id
self.status = status
self.next_task = next_task # The next task that requires user input.
@ -183,13 +209,14 @@ class WorkflowApi(object):
self.last_updated = last_updated
self.title = title
self.is_review = is_review
self.study_id = study_id or ''
class WorkflowApiSchema(ma.Schema):
class Meta:
model = WorkflowApi
fields = ["id", "status", "next_task", "navigation",
"workflow_spec_id", "spec_version", "is_latest_spec", "total_tasks", "completed_tasks",
"last_updated", "is_review", "title"]
"last_updated", "is_review", "title", "study_id"]
unknown = INCLUDE
status = EnumField(WorkflowStatus)
@ -200,7 +227,7 @@ class WorkflowApiSchema(ma.Schema):
def make_workflow(self, data, **kwargs):
keys = ['id', 'status', 'next_task', 'navigation',
'workflow_spec_id', 'spec_version', 'is_latest_spec', "total_tasks", "completed_tasks",
"last_updated", "is_review", "title"]
"last_updated", "is_review", "title", "study_id"]
filtered_fields = {key: data[key] for key in keys}
filtered_fields['next_task'] = TaskSchema().make_task(data['next_task'])
return WorkflowApi(**filtered_fields)

View File

@ -1,31 +1,26 @@
from flask_marshmallow.sqla import SQLAlchemyAutoSchema
from marshmallow import EXCLUDE
from sqlalchemy import func
import marshmallow
from marshmallow import INCLUDE, fields
from crc import db, ma
from crc import db
class DataStoreModel(db.Model):
__tablename__ = 'data_store'
id = db.Column(db.Integer, primary_key=True)
last_updated = db.Column(db.DateTime(timezone=True), default=func.now())
last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now())
key = db.Column(db.String, nullable=False)
workflow_id = db.Column(db.Integer)
study_id = db.Column(db.Integer, nullable=True)
task_id = db.Column(db.String)
spec_id = db.Column(db.String)
user_id = db.Column(db.String, nullable=True)
file_id = db.Column(db.Integer, db.ForeignKey('file.id'), nullable=True)
value = db.Column(db.String)
class DataStoreSchema(ma.Schema):
id = fields.Integer(required=False)
key = fields.String(required=True)
last_updated = fields.DateTime(server_default=func.now(), onupdate=func.now())
workflow_id = fields.Integer()
study_id = fields.Integer(allow_none=True)
task_id = fields.String()
spec_id = fields.String()
user_id = fields.String(allow_none=True)
value = fields.String()
class DataStoreSchema(SQLAlchemyAutoSchema):
class Meta:
model = DataStoreModel
load_instance = True
include_fk = True
sqla_session = db.session

View File

@ -1,14 +1,14 @@
import enum
from typing import cast
from marshmallow import INCLUDE, EXCLUDE
from marshmallow import INCLUDE, EXCLUDE, Schema
from marshmallow_enum import EnumField
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from sqlalchemy import func, Index
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import deferred
from sqlalchemy.orm import deferred, relationship
from crc import db, ma
from crc.models.data_store import DataStoreModel
class FileType(enum.Enum):
@ -42,7 +42,7 @@ CONTENT_TYPES = {
"docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
"gif": "image/gif",
"jpg": "image/jpeg",
"md" : "text/plain",
"md": "text/plain",
"pdf": "application/pdf",
"png": "image/png",
"ppt": "application/vnd.ms-powerpoint",
@ -64,9 +64,11 @@ class FileDataModel(db.Model):
md5_hash = db.Column(UUID(as_uuid=True), unique=False, nullable=False)
data = deferred(db.Column(db.LargeBinary)) # Don't load it unless you have to.
version = db.Column(db.Integer, default=0)
date_created = db.Column(db.DateTime(timezone=True), default=func.now())
size = db.Column(db.Integer, default=0)
date_created = db.Column(db.DateTime(timezone=True), server_default=func.now())
file_model_id = db.Column(db.Integer, db.ForeignKey('file.id'))
file_model = db.relationship("FileModel", foreign_keys=[file_model_id])
user_uid = db.Column(db.String, db.ForeignKey('user.uid'), nullable=True)
class FileModel(db.Model):
@ -76,17 +78,19 @@ class FileModel(db.Model):
type = db.Column(db.Enum(FileType))
is_status = db.Column(db.Boolean)
content_type = db.Column(db.String)
is_reference = db.Column(db.Boolean, nullable=False, default=False) # A global reference file.
primary = db.Column(db.Boolean, nullable=False, default=False) # Is this the primary BPMN in a workflow?
primary_process_id = db.Column(db.String, nullable=True) # An id in the xml of BPMN documents, critical for primary BPMN.
is_reference = db.Column(db.Boolean, nullable=False, default=False) # A global reference file.
primary = db.Column(db.Boolean, nullable=False, default=False) # Is this the primary BPMN in a workflow?
primary_process_id = db.Column(db.String, nullable=True) # An id in the xml of BPMN documents, for primary BPMN.
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'), nullable=True)
workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=True)
irb_doc_code = db.Column(db.String, nullable=True) # Code reference to the irb_documents.xlsx reference file.
irb_doc_code = db.Column(db.String, nullable=True) # Code reference to the irb_documents.xlsx reference file.
# A request was made to delete the file, but we can't because there are
# active approvals or running workflows that depend on it. So we archive
# it instead, hide it in the interface.
is_review = db.Column(db.Boolean, default=False, nullable=True)
archived = db.Column(db.Boolean, default=False, nullable=False)
data_stores = relationship(DataStoreModel, cascade="all,delete", backref="file")
class File(object):
@classmethod
@ -103,23 +107,26 @@ class File(object):
instance.workflow_id = model.workflow_id
instance.irb_doc_code = model.irb_doc_code
instance.type = model.type
if model.irb_doc_code and model.irb_doc_code in doc_dictionary:
instance.category = "/".join(filter(None, [doc_dictionary[model.irb_doc_code]['category1'],
doc_dictionary[model.irb_doc_code]['category2'],
doc_dictionary[model.irb_doc_code]['category3']]))
instance.description = doc_dictionary[model.irb_doc_code]['description']
instance.download_name = ".".join([instance.category, model.type.value])
if model.irb_doc_code and model.irb_doc_code in doc_dictionary:
instance.document = doc_dictionary[model.irb_doc_code]
else:
instance.category = ""
instance.description = ""
instance.document = {}
if data_model:
instance.last_modified = data_model.date_created
instance.latest_version = data_model.version
instance.size = data_model.size
instance.user_uid = data_model.user_uid
else:
instance.last_modified = None
instance.latest_version = None
instance.data_store = {}
for ds in model.data_stores:
instance.data_store[ds.key] = ds.value
return instance
class FileModelSchema(SQLAlchemyAutoSchema):
class Meta:
model = FileModel
@ -130,13 +137,13 @@ class FileModelSchema(SQLAlchemyAutoSchema):
type = EnumField(FileType)
class FileSchema(ma.Schema):
class FileSchema(Schema):
class Meta:
model = File
fields = ["id", "name", "is_status", "is_reference", "content_type",
"primary", "primary_process_id", "workflow_spec_id", "workflow_id",
"irb_doc_code", "last_modified", "latest_version", "type", "categories",
"description", "category", "description", "download_name"]
"irb_doc_code", "last_modified", "latest_version", "type", "size", "data_store",
"document", "user_uid"]
unknown = INCLUDE
type = EnumField(FileType)
@ -152,7 +159,8 @@ class LookupFileModel(db.Model):
field_id = db.Column(db.String)
is_ldap = db.Column(db.Boolean) # Allows us to run an ldap query instead of a db lookup.
file_data_model_id = db.Column(db.Integer, db.ForeignKey('file_data.id'))
dependencies = db.relationship("LookupDataModel", lazy="select", backref="lookup_file_model", cascade="all, delete, delete-orphan")
dependencies = db.relationship("LookupDataModel", lazy="select", backref="lookup_file_model",
cascade="all, delete, delete-orphan")
class LookupDataModel(db.Model):
@ -162,7 +170,7 @@ class LookupDataModel(db.Model):
value = db.Column(db.String)
label = db.Column(db.String)
# In the future, we might allow adding an additional "search" column if we want to search things not in label.
data = db.Column(db.JSON) # all data for the row is stored in a json structure here, but not searched presently.
data = db.Column(db.JSON) # all data for the row is stored in a json structure here, but not searched presently.
# Assure there is a searchable index on the label column, so we can get fast results back.
# query with:
@ -185,7 +193,7 @@ class LookupDataSchema(SQLAlchemyAutoSchema):
load_instance = True
include_relationships = False
include_fk = False # Includes foreign keys
exclude = ['id'] # Do not include the id field, it should never be used via the API.
exclude = ['id'] # Do not include the id field, it should never be used via the API.
class SimpleFileSchema(ma.Schema):

View File

@ -15,7 +15,7 @@ class LdapModel(db.Model):
department = db.Column(db.String)
affiliation = db.Column(db.String)
sponsor_type = db.Column(db.String)
date_cached = db.Column(db.DateTime(timezone=True), default=func.now())
date_cached = db.Column(db.DateTime(timezone=True), server_default=func.now())
@classmethod
def from_entry(cls, entry):

View File

@ -41,13 +41,12 @@ class ProtocolBuilderStatus(enum.Enum):
class ProtocolBuilderStudy(object):
def __init__(
self, STUDYID: int, HSRNUMBER: str, TITLE: str, NETBADGEID: str,
Q_COMPLETE: bool, DATE_MODIFIED: str
DATE_MODIFIED: str
):
self.STUDYID = STUDYID
self.HSRNUMBER = HSRNUMBER
self.TITLE = TITLE
self.NETBADGEID = NETBADGEID
self.Q_COMPLETE = Q_COMPLETE
self.DATE_MODIFIED = DATE_MODIFIED
@ -56,7 +55,7 @@ class ProtocolBuilderStudySchema(ma.Schema):
model = ProtocolBuilderStudy
unknown = INCLUDE
fields = ["STUDYID", "HSRNUMBER", "TITLE", "NETBADGEID",
"Q_COMPLETE", "DATE_MODIFIED"]
"DATE_MODIFIED"]
@post_load
def make_pbs(self, data, **kwargs):

View File

@ -41,7 +41,7 @@ class StudyModel(db.Model):
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String)
short_title = db.Column(db.String, nullable=True)
last_updated = db.Column(db.DateTime(timezone=True), default=func.now())
last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now())
status = db.Column(db.Enum(StudyStatus))
irb_status = db.Column(db.Enum(IrbStatus))
primary_investigator_id = db.Column(db.String, nullable=True)
@ -89,7 +89,7 @@ class StudyEvent(db.Model):
id = db.Column(db.Integer, primary_key=True)
study_id = db.Column(db.Integer, db.ForeignKey(StudyModel.id), nullable=False)
study = db.relationship(StudyModel, back_populates='events_history')
create_date = db.Column(db.DateTime(timezone=True), default=func.now())
create_date = db.Column(db.DateTime(timezone=True), server_default=func.now())
status = db.Column(db.Enum(StudyStatus))
comment = db.Column(db.String, default='')
event_type = db.Column(db.Enum(StudyEventType))
@ -100,7 +100,7 @@ class WorkflowMetadata(object):
def __init__(self, id, name = None, display_name = None, description = None, spec_version = None,
category_id = None, category_display_name = None, state: WorkflowState = None,
status: WorkflowStatus = None, total_tasks = None, completed_tasks = None,
is_review=None,display_order = None):
is_review=None,display_order = None, state_message = None):
self.id = id
self.name = name
self.display_name = display_name
@ -109,6 +109,7 @@ class WorkflowMetadata(object):
self.category_id = category_id
self.category_display_name = category_display_name
self.state = state
self.state_message = state_message
self.status = status
self.total_tasks = total_tasks
self.completed_tasks = completed_tasks
@ -144,7 +145,7 @@ class WorkflowMetadataSchema(ma.Schema):
model = WorkflowMetadata
additional = ["id", "name", "display_name", "description",
"total_tasks", "completed_tasks", "display_order",
"category_id", "is_review", "category_display_name"]
"category_id", "is_review", "category_display_name", "state_message"]
unknown = INCLUDE

View File

@ -5,12 +5,13 @@ from crc import db, ma
from crc.models.study import StudyModel, StudySchema, WorkflowMetadataSchema, WorkflowMetadata
from crc.models.workflow import WorkflowModel
from crc.services.ldap_service import LdapService
from sqlalchemy import func
class TaskEventModel(db.Model):
__tablename__ = 'task_event'
id = db.Column(db.Integer, primary_key=True)
study_id = db.Column(db.Integer, db.ForeignKey('study.id'), nullable=False)
study_id = db.Column(db.Integer, db.ForeignKey('study.id'))
user_uid = db.Column(db.String, nullable=False) # In some cases the unique user id may not exist in the db yet.
workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=False)
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
@ -27,7 +28,7 @@ class TaskEventModel(db.Model):
mi_count = db.Column(db.Integer)
mi_index = db.Column(db.Integer)
process_name = db.Column(db.String)
date = db.Column(db.DateTime)
date = db.Column(db.DateTime(timezone=True),default=func.now())
class TaskEventModelSchema(SQLAlchemyAutoSchema):

View File

@ -3,6 +3,7 @@ import enum
import marshmallow
from marshmallow import EXCLUDE
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from sqlalchemy import func
from crc import db
from crc.models.file import FileModel, FileDataModel
@ -33,6 +34,7 @@ class WorkflowSpecModel(db.Model):
category_id = db.Column(db.Integer, db.ForeignKey('workflow_spec_category.id'), nullable=True)
category = db.relationship("WorkflowSpecCategoryModel")
is_master_spec = db.Column(db.Boolean, default=False)
standalone = db.Column(db.Boolean, default=False)
class WorkflowSpecModelSchema(SQLAlchemyAutoSchema):
@ -87,7 +89,8 @@ class WorkflowModel(db.Model):
workflow_spec = db.relationship("WorkflowSpecModel")
total_tasks = db.Column(db.Integer, default=0)
completed_tasks = db.Column(db.Integer, default=0)
last_updated = db.Column(db.DateTime)
last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now())
user_id = db.Column(db.String, default=None)
# Order By is important or generating hashes on reviews.
dependencies = db.relationship(WorkflowSpecDependencyFile, cascade="all, delete, delete-orphan",
order_by="WorkflowSpecDependencyFile.file_data_id")

View File

@ -0,0 +1,30 @@
from crc.scripts.script import Script
from crc.api.common import ApiError
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.study_service import StudyService
class CheckStudy(Script):
    """Script that returns Protocol Builder 'check study' data for a study."""

    pb = ProtocolBuilderService()

    def get_description(self):
        return """Returns the Check Study data for a Study"""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # During validation we only confirm the study exists; Protocol
        # Builder is not contacted.
        if not StudyService.get_study(study_id):
            raise ApiError.from_task(code='bad_study',
                                     message=f'No study for study_id {study_id}',
                                     task=task)
        return {"DETAIL": "Passed validation.", "STATUS": "No Error"}

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        check_study = self.pb.check_study(study_id)
        if not check_study:
            raise ApiError.from_task(code='missing_check_study',
                                     message='There was a problem checking information for this study.',
                                     task=task)
        return check_study

View File

@ -114,7 +114,10 @@ Takes two arguments:
doc_context = self.rich_text_update(doc_context)
doc_context = self.append_images(doc, doc_context, image_file_data)
jinja_env = jinja2.Environment(autoescape=True)
doc.render(doc_context, jinja_env)
try:
doc.render(doc_context, jinja_env)
except Exception as e:
print (e)
target_stream = BytesIO()
doc.save(target_stream)
target_stream.seek(0) # move to the beginning of the stream.

View File

@ -0,0 +1,62 @@
from crc import session
from crc.api.common import ApiError
from crc.models.file import FileModel
from crc.scripts.script import Script
from crc.services.document_service import DocumentService
from crc.services.file_service import FileService
class DeleteFile(Script):
    """Script to delete one or more IRB documents (identified by doc code)
    from the current workflow."""

    @staticmethod
    def process_document_deletion(doc_code, workflow_id, task):
        """Delete every file on this workflow carrying doc_code.

        Raises ApiError if the code is not a known document code, or if no
        matching files exist on the workflow."""
        if not DocumentService.is_allowed_document(doc_code):
            raise ApiError.from_task(code='invalid_document_code',
                                     message=f'{doc_code} is not a valid document code',
                                     task=task)
        result = session.query(FileModel).filter(
            FileModel.workflow_id == workflow_id, FileModel.irb_doc_code == doc_code).all()
        # .all() always yields a list of FileModel rows, so a simple truthiness
        # check replaces the redundant isinstance/len guards.
        if not result:
            raise ApiError.from_task(code='no_document_found',
                                     message=f'No document of type {doc_code} was found for this workflow.',
                                     task=task)
        for file in result:
            FileService.delete_file(file.id)

    def get_codes(self, task, args, kwargs):
        """Collect the document codes to delete, from the 'code' keyword
        argument (string or list) or from the positional arguments.

        Raises ApiError if no codes were supplied."""
        if 'code' in kwargs:
            codes = kwargs['code'] if isinstance(kwargs['code'], list) else [kwargs['code']]
        else:
            codes = []
            for arg in args:
                if isinstance(arg, list):
                    codes.extend(arg)
                else:
                    codes.append(arg)
        # codes is always a list here; only emptiness needs checking.
        if not codes:
            raise ApiError.from_task("invalid_argument", "Please provide a valid document code to delete. "
                                                         "No valid arguments found.", task=task)
        return codes

    def get_description(self):
        return """Delete an IRB document from a workflow"""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # Valid only when every requested code has at least one matching file.
        doc_codes = self.get_codes(task, args, kwargs)
        for code in doc_codes:
            result = session.query(FileModel).filter(
                FileModel.workflow_id == workflow_id, FileModel.irb_doc_code == code).all()
            if not result:
                return False
        return True

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        doc_codes = self.get_codes(task, args, kwargs)
        for doc_code in doc_codes:
            self.process_document_deletion(doc_code, workflow_id, task)

View File

@ -1,8 +1,7 @@
from crc import app
from crc.api.common import ApiError
from crc.models.user import UserModel
from crc.scripts.script import Script
# from crc.services.ldap_service import LdapService
from crc.services.ldap_service import LdapService
from crc.services.email_service import EmailService
from crc.services.study_service import StudyService
@ -101,13 +100,10 @@ email (subject="My Subject", recipients=["dhf8r@virginia.edu", pi.email], cc='as
@staticmethod
def get_associated_emails(study_id):
associated_uids = []
associated_emails = []
associates = StudyService.get_study_associates(study_id)
for associate in associates:
if associate['send_email'] is True:
associated_uids.append(associate['uid'])
returned = UserModel.query.filter(UserModel.uid.in_(associated_uids)).all()
for item in returned:
associated_emails.append(item.email_address)
user_info = LdapService.user_info(associate['uid'])
associated_emails.append(user_info.email_address)
return associated_emails

View File

@ -0,0 +1,38 @@
from flask import g
from crc.api.common import ApiError
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script
class FileDataGet(Script, DataStoreBase):
    """Script to read a value from the file data store."""

    def get_description(self):
        return """Gets user data from the data store - takes only two keyword arguments: 'file_id' and 'key' """

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # Validation only checks that the required keyword arguments exist;
        # the data store is not touched.
        self.validate_kw_args(**kwargs)
        return True

    def validate_kw_args(self, **kwargs):
        """Raise ApiError unless both 'key' and 'file_id' keyword arguments
        are supplied; otherwise return True."""
        if kwargs.get('key') is None:
            raise ApiError(code="missing_argument",
                           message="The 'file_data_get' script requires a keyword argument of 'key'")
        if kwargs.get('file_id') is None:
            raise ApiError(code="missing_argument",
                           message="The 'file_data_get' script requires a keyword argument of 'file_id'")
        return True

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        if self.validate_kw_args(**kwargs):
            myargs = [kwargs['key']]
            # An optional 'default' is forwarded to the data-store lookup so a
            # missing key need not raise.
            if 'default' in kwargs:
                myargs.append(kwargs['default'])
            return self.get_data_common(None,
                                        None,
                                        'file_data_get',
                                        kwargs['file_id'],
                                        *myargs)

View File

@ -0,0 +1,66 @@
from flask import g
from crc.api.common import ApiError
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script
from crc.services.document_service import DocumentService
from crc.services.file_service import FileService
class FileDataSet(Script, DataStoreBase):
    """Script to write a value into the file data store."""

    def get_description(self):
        return """Sets data in the data store - takes three keyword arguments: 'file_id' and 'key' and 'value'"""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # Validation only checks that the required keyword arguments exist;
        # nothing is written.
        self.validate_kw_args(**kwargs)
        return True

    def validate_kw_args(self, **kwargs):
        """Raise ApiError unless 'key', 'file_id' and 'value' are supplied.

        When setting an 'irb_code', the value must additionally be a known
        document code from the irb_documents reference file."""
        if kwargs.get('key') is None:
            raise ApiError(code="missing_argument",
                           message="The 'file_data_set' script requires a keyword argument of 'key'")
        if kwargs.get('file_id') is None:
            raise ApiError(code="missing_argument",
                           message="The 'file_data_set' script requires a keyword argument of 'file_id'")
        if kwargs.get('value') is None:
            raise ApiError(code="missing_argument",
                           message="The 'file_data_set' script requires a keyword argument of 'value'")
        if kwargs['key'] == 'irb_code' and not DocumentService.is_allowed_document(kwargs.get('value')):
            raise ApiError("invalid_form_field_key",
                           "When setting an irb_code, the form field id must match a known document in the "
                           "irb_documents.xlsx reference file. This code is not found in that file '%s'" %
                           kwargs.get('value'))
        return True

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        if self.validate_kw_args(**kwargs):
            myargs = [kwargs['key'], kwargs['value']]
            try:
                fileid = int(kwargs['file_id'])
            except (TypeError, ValueError):
                # Narrowed from a bare except: only conversion failures mean
                # the caller passed a bad file id.
                raise ApiError("invalid_file_id",
                               "Attempting to update DataStore for an invalid fileid '%s'" % kwargs['file_id'])
            # file_id is consumed here; do not forward it to the data store.
            del kwargs['file_id']
            if kwargs['key'] == 'irb_code':
                # Setting an irb_code also re-categorizes the file itself.
                FileService.update_irb_code(fileid, kwargs['value'])
            return self.set_data_common(task.id,
                                        None,
                                        None,
                                        workflow_id,
                                        None,
                                        'file_data_set',
                                        fileid,
                                        *myargs,
                                        **kwargs)

View File

@ -0,0 +1,16 @@
from crc.scripts.script import Script
from crc import app
class GetDashboardURL(Script):
    """Script that returns the URL of the main dashboard for this instance."""

    def get_description(self):
        # BUG FIX: this string was previously a bare docstring, so the method
        # returned None. Sibling scripts all *return* their description.
        return """Get the URL for the main dashboard. This should be system instance aware.
        I.e., dev, testing, production, etc."""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # Same computation as do_task; return its result instead of dropping it.
        return self.do_task(task, study_id, workflow_id, *args, **kwargs)

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        # FRONTEND config holds host[:port] without a scheme (see default config).
        frontend = app.config['FRONTEND']
        return f'https://{frontend}'

View File

@ -0,0 +1,24 @@
from crc.scripts.script import Script
from crc.api.common import ApiError
from crc.services.protocol_builder import ProtocolBuilderService
class IRBInfo(Script):
    """Script that fetches IRB Info for a study from Protocol Builder."""

    pb = ProtocolBuilderService()

    def get_description(self):
        return """Returns the IRB Info data for a Study"""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # For validation it is enough that we were handed a numeric study id.
        return isinstance(study_id, int)

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        irb_info = self.pb.get_irb_info(study_id)
        if not irb_info:
            raise ApiError.from_task(code='missing_irb_info',
                                     message=f'There was a problem retrieving IRB Info for study {study_id}.',
                                     task=task)
        return irb_info

View File

@ -9,23 +9,21 @@ class GetStudyAssociates(Script):
def get_description(self):
return """
Returns person assocated with study or an error if one is not associated.
Returns people associated with a study or an error if one is not associated.
example : get_study_associate('sbp3ey') => {'uid':'sbp3ey','role':'Unicorn Herder', 'send_email': False,
'access':True}
"""
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
if len(args)<1:
return False
return True
if len(args) < 1:
raise ApiError('no_user_id_specified', 'A uva uid is the sole argument to this function')
return {'uid': 'sbp3ey', 'role': 'Unicorn Herder', 'send_email': False, 'access': True}
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
if len(args)<1:
if len(args) < 1:
raise ApiError('no_user_id_specified', 'A uva uid is the sole argument to this function')
if not isinstance(args[0],type('')):
if not isinstance(args[0], str):
raise ApiError('argument_should_be_string', 'A uva uid is always a string, please check type')
return StudyService.get_study_associate(study_id=study_id,uid=args[0])
return StudyService.get_study_associate(study_id=study_id, uid=args[0])

View File

@ -19,8 +19,11 @@ example : get_study_associates() => [{'uid':'sbp3ey','role':'Unicorn Herder', 's
"""
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
return True
study_associates = [
{'uid': 'dhf8r', 'role': 'Chief Bee Keeper', 'send_email': True, 'access': True},
{'uid': 'lb3dp', 'role': 'Chief Cat Herder', 'send_email': True, 'access': True}
]
return study_associates
def do_task(self, task, study_id, workflow_id, *args, **kwargs):

View File

@ -0,0 +1,41 @@
from crc import session
from crc.api.common import ApiError
from crc.models.workflow import WorkflowModel, WorkflowSpecModel
from crc.scripts.script import Script
from crc.services.workflow_processor import WorkflowProcessor
class ResetWorkflow(Script):
    """Script to reset a named workflow to the latest spec. Run by the master
    workflow, e.g. to force a completed workflow to be re-run for a new PI."""

    def get_description(self):
        return """Reset a workflow. Run by master workflow.
    Designed for completed workflows where we need to force rerunning the workflow.
    I.e., a new PI"""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # BUG FIX: hasattr(kwargs, 'workflow_name') is always False on a dict;
        # dict membership is the correct presence test for a keyword argument.
        return 'workflow_name' in kwargs

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        # Guard clauses replace the nested if/else pyramid; error conditions
        # and codes are unchanged.
        if 'workflow_name' not in kwargs:
            raise ApiError(code='missing_workflow_name',
                           message='Reset workflow requires a workflow name')
        workflow_name = kwargs['workflow_name']
        workflow_spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(name=workflow_name).first()
        if workflow_spec is None:
            raise ApiError(code='missing_workflow_spec',
                           message=f'No WorkflowSpecModel returned. name: {workflow_name}')
        workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(
            workflow_spec_id=workflow_spec.id,
            study_id=study_id).first()
        if workflow_model is None:
            raise ApiError(code='missing_workflow_model',
                           message=f'No WorkflowModel returned. '
                                   f'workflow_spec_id: {workflow_spec.id} study_id: {study_id}')
        return WorkflowProcessor.reset(workflow_model, clear_data=False, delete_files=False)

View File

@ -1,4 +1,4 @@
from crc.scripts.data_store_base import DataStoreBase
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script
@ -13,5 +13,6 @@ class StudyDataGet(Script,DataStoreBase):
return self.get_data_common(study_id,
None,
'study_data_get',
None,
*args)

View File

@ -1,4 +1,4 @@
from crc.scripts.data_store_base import DataStoreBase
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script
@ -11,6 +11,7 @@ class StudyDataSet(Script,DataStoreBase):
workflow_id,
None,
'study_data_set',
None,
*args)
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
@ -20,6 +21,7 @@ class StudyDataSet(Script,DataStoreBase):
workflow_id,
None,
'study_data_set',
None,
*args,
**kwargs)

View File

@ -1,21 +1,26 @@
import json
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.util.metrics import timeit
from crc import session
from crc.api.common import ApiError
from crc.api.workflow import get_workflow
from crc.models.protocol_builder import ProtocolBuilderInvestigatorType
from crc.models.study import StudyModel, StudySchema
from crc.models.workflow import WorkflowStatus
from crc.api import workflow as workflow_api
from crc.scripts.script import Script
from crc.services.document_service import DocumentService
from crc.services.file_service import FileService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.study_service import StudyService
from box import Box
class StudyInfo(Script):
"""Please see the detailed description that is provided below. """
pb = ProtocolBuilderService()
type_options = ['info', 'investigators', 'roles', 'details', 'approvals', 'documents', 'protocol','sponsors']
type_options = ['info', 'investigators', 'roles', 'details', 'documents', 'sponsors']
# This is used for test/workflow validation, as well as documentation.
example_data = {
@ -30,31 +35,31 @@ class StudyInfo(Script):
"inactive": False
},
"sponsors": [
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 2453,
"SP_NAME": "Abbott Ltd",
"SP_TYPE": "Private",
"SP_TYPE_GROUP_NAME": None,
"SS_STUDY": 2
},
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 2387,
"SP_NAME": "Abbott-Price",
"SP_TYPE": "Incoming Sub Award",
"SP_TYPE_GROUP_NAME": "Government",
"SS_STUDY": 2
},
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 1996,
"SP_NAME": "Abernathy-Heidenreich",
"SP_TYPE": "Foundation/Not for Profit",
"SP_TYPE_GROUP_NAME": "Other External Funding",
"SS_STUDY": 2
}
],
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 2453,
"SP_NAME": "Abbott Ltd",
"SP_TYPE": "Private",
"SP_TYPE_GROUP_NAME": None,
"SS_STUDY": 2
},
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 2387,
"SP_NAME": "Abbott-Price",
"SP_TYPE": "Incoming Sub Award",
"SP_TYPE_GROUP_NAME": "Government",
"SS_STUDY": 2
},
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 1996,
"SP_NAME": "Abernathy-Heidenreich",
"SP_TYPE": "Foundation/Not for Profit",
"SP_TYPE_GROUP_NAME": "Other External Funding",
"SS_STUDY": 2
}
],
"investigators": {
'PI': {
@ -80,37 +85,33 @@ class StudyInfo(Script):
'display': 'Optional',
'unique': 'Yes',
'user_id': 'asd3v',
'error': 'Unable to locate a user with id asd3v in LDAP'}
'error': 'Unable to locate a user with id asd3v in LDAP'},
'DEPT_CH': {
'label': 'Department Chair',
'display': 'Always',
'unique': 'Yes',
'user_id': 'lb3dp'}
},
"documents": {
'AD_CoCApp': {'category1': 'Ancillary Document', 'category2': 'CoC Application', 'category3': '',
'Who Uploads?': 'CRC', 'id': '12',
'description': 'Certificate of Confidentiality Application', 'required': False,
'study_id': 1, 'code': 'AD_CoCApp', 'display_name': 'Ancillary Document / CoC Application',
'count': 0, 'files': []},
'Who Uploads?': 'CRC', 'id': '12',
'description': 'Certificate of Confidentiality Application', 'required': False,
'study_id': 1, 'code': 'AD_CoCApp',
'display_name': 'Ancillary Document / CoC Application',
'count': 0, 'files': []},
'UVACompl_PRCAppr': {'category1': 'UVA Compliance', 'category2': 'PRC Approval', 'category3': '',
'Who Uploads?': 'CRC', 'id': '6', 'description': "Cancer Center's PRC Approval Form",
'required': True, 'study_id': 1, 'code': 'UVACompl_PRCAppr',
'display_name': 'UVA Compliance / PRC Approval', 'count': 1, 'files': [
{'file_id': 10,
'task_id': 'fakingthisout',
'workflow_id': 2,
'workflow_spec_id': 'docx'}],
'status': 'complete'}
'Who Uploads?': 'CRC', 'id': '6',
'description': "Cancer Center's PRC Approval Form",
'required': True, 'study_id': 1, 'code': 'UVACompl_PRCAppr',
'display_name': 'UVA Compliance / PRC Approval', 'count': 1, 'files': [
{'file_id': 10,
'task_id': 'fakingthisout',
'workflow_id': 2,
'workflow_spec_id': 'docx'}],
'status': 'complete'}
},
"details":
{},
"approvals": {
"study_id": 12,
"workflow_id": 321,
"display_name": "IRB API Details",
"name": "irb_api_details",
"status": WorkflowStatus.not_started.value,
"workflow_spec_id": "irb_api_details",
},
'protocol': {
id: 0,
}
}
}
@ -119,8 +120,7 @@ class StudyInfo(Script):
def get_description(self):
return """
StudyInfo [TYPE], where TYPE is one of 'info', 'investigators', 'details', 'approvals',
'documents' or 'protocol'.
StudyInfo [TYPE], where TYPE is one of 'info', 'investigators', 'details', or 'documents'.
Adds details about the current study to the Task Data. The type of information required should be
provided as an argument. The following arguments are available:
@ -151,12 +151,6 @@ that just those that were set in Protocol Builder.
### Details ###
Returns detailed information about variable keys read in from the Protocol Builder.
### Approvals ###
Returns data about the status of approvals related to a study.
```
{approvals_example}
```
### Documents ###
Returns a list of all documents that might be related to a study, reading all columns from the irb_documents.xsl
file. Including information about any files that were uploaded or generated that relate to a given document.
@ -165,223 +159,26 @@ Please note this is just a few examples, ALL known document types are returned i
{documents_example}
```
### Protocol ###
Returns information specific to the protocol.
""".format(info_example=self.example_to_string("info"),
investigators_example=self.example_to_string("investigators"),
approvals_example=self.example_to_string("approvals"),
documents_example=self.example_to_string("documents"),
)
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
"""For validation only, pretend no results come back from pb"""
self.check_args(args,2)
self.check_args(args, 2)
# Assure the reference file exists (a bit hacky, but we want to raise this error early, and cleanly.)
FileService.get_reference_file_data(FileService.DOCUMENT_LIST)
FileService.get_reference_file_data(FileService.INVESTIGATOR_LIST)
data = Box({
"study":{
"info": {
"id": 12,
"title": "test",
"short_title": "tst",
"primary_investigator_id":21,
"user_uid": "dif84",
"sponsor": "sponsor",
"ind_number": "1234",
"inactive": False
},
"sponsors": [
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 2453,
"SP_NAME": "Abbott Ltd",
"SP_TYPE": "Private",
"SP_TYPE_GROUP_NAME": None,
"SS_STUDY": 2
},
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 2387,
"SP_NAME": "Abbott-Price",
"SP_TYPE": "Incoming Sub Award",
"SP_TYPE_GROUP_NAME": "Government",
"SS_STUDY": 2
},
{
"COMMONRULEAGENCY": None,
"SPONSOR_ID": 1996,
"SP_NAME": "Abernathy-Heidenreich",
"SP_TYPE": "Foundation/Not for Profit",
"SP_TYPE_GROUP_NAME": "Other External Funding",
"SS_STUDY": 2
}
],
"investigators": {
"PI": {
"label": ProtocolBuilderInvestigatorType.PI.value,
"display": "Always",
"unique": "Yes",
"user_id": "dhf8r",
"title": "",
"display_name": "Daniel Harold Funk",
"sponsor_type": "Contractor",
"telephone_number": "0000000000",
"department": "",
"email_address": "dhf8r@virginia.edu",
"given_name": "Daniel",
"uid": "dhf8r",
"affiliation": "",
"date_cached": "2020-08-04T19:32:08.006128+00:00"
},
"SC_I": {
"label": ProtocolBuilderInvestigatorType.SC_I.value,
"display": "Always",
"unique": "Yes",
"user_id": "ajl2j",
"title": "",
"display_name": "Aaron Louie",
"sponsor_type": "Contractor",
"telephone_number": "0000000000",
"department": "",
"email_address": "ajl2j@virginia.edu",
"given_name": "Aaron",
"uid": "ajl2j",
"affiliation": "sponsored",
"date_cached": "2020-08-04T19:32:10.699666+00:00"
},
"SC_II": {
"label": ProtocolBuilderInvestigatorType.SC_II.value,
"display": "Optional",
"unique": "Yes",
"user_id": "cah3us",
"title": "",
"display_name": "Alex Herron",
"sponsor_type": "Contractor",
"telephone_number": "0000000000",
"department": "",
"email_address": "cah3us@virginia.edu",
"given_name": "Alex",
"uid": "cah3us",
"affiliation": "sponsored",
"date_cached": "2020-08-04T19:32:10.075852+00:00"
},
},
"pi": {
"PI": {
"label": ProtocolBuilderInvestigatorType.PI.value,
"display": "Always",
"unique": "Yes",
"user_id": "dhf8r",
"title": "",
"display_name": "Daniel Harold Funk",
"sponsor_type": "Contractor",
"telephone_number": "0000000000",
"department": "",
"email_address": "dhf8r@virginia.edu",
"given_name": "Daniel",
"uid": "dhf8r",
"affiliation": "",
"date_cached": "2020-08-04T19:32:08.006128+00:00"
}
},
"roles":
{
"INVESTIGATORTYPE": "PI",
"INVESTIGATORTYPEFULL": ProtocolBuilderInvestigatorType.PI.value,
"NETBADGEID": "dhf8r"
},
"details":
{
"DSMB": None,
"DSMB_FREQUENCY": None,
"GCRC_NUMBER": None,
"IBC_NUMBER": None,
"IDE": None,
"IND_1": 1234,
"IND_2": None,
"IND_3": None,
"IRBREVIEWERADMIN": None,
"IS_ADULT_PARTICIPANT": None,
"IS_APPROVED_DEVICE": None,
"IS_AUX": None,
"IS_BIOMEDICAL": None,
"IS_CANCER_PATIENT": None,
"IS_CENTRAL_REG_DB": None,
"IS_CHART_REVIEW": None,
"IS_COMMITTEE_CONFLICT": None,
"IS_CONSENT_WAIVER": None,
"IS_DB": None,
"IS_ELDERLY_POP": None,
"IS_ENGAGED_RESEARCH": None,
"IS_FETUS_POP": None,
"IS_FINANCIAL_CONFLICT": None,
"IS_FOR_CANCER_CENTER": None,
"IS_FUNDING_SOURCE": None,
"IS_GCRC": None,
"IS_GENE_TRANSFER": None,
"IS_GRANT": None,
"IS_HGT": None,
"IS_IBC": None,
"IS_IDE": None,
"IS_IND": 1,
"IS_MENTAL_IMPAIRMENT_POP": None,
"IS_MINOR": None,
"IS_MINOR_PARTICIPANT": None,
"IS_MULTI_SITE": None,
"IS_NOT_CONSENT_WAIVER": None,
"IS_NOT_PRC_WAIVER": None,
"IS_OTHER_VULNERABLE_POP": None,
"IS_OUTSIDE_CONTRACT": None,
"IS_PI_INITIATED": None,
"IS_PI_SCHOOL": None,
"IS_PRC": None,
"IS_PRC_DSMP": None,
"IS_PREGNANT_POP": None,
"IS_PRISONERS_POP": None,
"IS_QUALITATIVE": None,
"IS_RADIATION": None,
"IS_REVIEW_BY_CENTRAL_IRB": None,
"IS_SPONSOR": None,
"IS_SPONSOR_MONITORING": None,
"IS_SURROGATE_CONSENT": None,
"IS_TISSUE_BANKING": None,
"IS_UVA_DB": None,
"IS_UVA_IDE": None,
"IS_UVA_IND": None,
"IS_UVA_LOCATION": None,
"IS_UVA_PI_MULTI": None,
"MULTI_SITE_LOCATIONS": None,
"NON_UVA_LOCATION": None,
"OTHER_VULNERABLE_DESC": None,
"PRC_NUMBER": None,
"SPONSORS_PROTOCOL_REVISION_DATE": None,
"UPLOAD_COMPLETE": None
},
"approvals": {
"study_id": 12,
"workflow_id": 321,
"display_name": "IRB API Details",
"name": "irb_api_details",
"status": WorkflowStatus.not_started.value,
"workflow_spec_id": "irb_api_details",
},
'protocol': {
'id': 0,
}
}
})
if args[0]=='documents':
return StudyService().get_documents_status(study_id)
return data['study'][args[0]]
#self.add_data_to_task(task=task, data=data["study"])
#self.add_data_to_task(task, {"documents": StudyService().get_documents_status(study_id)})
FileService.get_reference_file_data(DocumentService.DOCUMENT_LIST)
FileService.get_reference_file_data(StudyService.INVESTIGATOR_LIST)
# we call the real do_task so we can
# seed workflow validations with settings from studies in PB Mock
# in order to test multiple paths thru the workflow
return self.do_task(task, study_id, workflow_id, args[0])
@timeit
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
self.check_args(args,2)
self.check_args(args, 2)
prefix = None
if len(args) > 1:
prefix = args[1]
@ -402,21 +199,18 @@ Returns information specific to the protocol.
retval = self.pb.get_study_details(study_id)
if cmd == 'sponsors':
retval = self.pb.get_sponsors(study_id)
if cmd == 'approvals':
retval = StudyService().get_approvals(study_id)
if cmd == 'documents':
retval = StudyService().get_documents_status(study_id)
if cmd == 'protocol':
retval = StudyService().get_protocol(study_id)
if isinstance(retval, list):
retval = [Box(item) for item in retval]
if isinstance(retval,dict) and prefix is not None:
return Box({x:retval[x] for x in retval.keys() if x[:len(prefix)] == prefix})
elif isinstance(retval,dict) :
return Box(retval)
else:
return retval
return self.box_it(retval, prefix)
def box_it(self, retval, prefix = None):
if isinstance(retval, list):
return [Box(item) for item in retval]
if isinstance(retval, dict) and prefix is not None:
return Box({x: retval[x] for x in retval.keys() if x[:len(prefix)] == prefix})
elif isinstance(retval, dict):
return Box(retval)
def check_args(self, args, maxlen=1):
@ -424,5 +218,3 @@ Returns information specific to the protocol.
raise ApiError(code="missing_argument",
message="The StudyInfo script requires a single argument which must be "
"one of %s" % ",".join(StudyInfo.type_options))

View File

@ -4,13 +4,11 @@ from crc.services.study_service import StudyService
class UpdateStudyAssociates(Script):
argument_error_message = "You must supply at least one argument to the " \
"update_study_associates task, an array of objects in the form " \
"{'uid':'someid', 'role': 'text', 'send_email: 'boolean', " \
"'access':'boolean'} "
def get_description(self):
return """
Allows you to associate other users with a study - only 'uid' is required in the
@ -26,20 +24,26 @@ associations already in place.
example : update_study_associates([{'uid':'sbp3ey','role':'Unicorn Herder', 'send_email': False, 'access':True}])
"""
def validate_arg(self,arg):
if not isinstance(arg,list):
def validate_arg(self, arg):
if not isinstance(arg, list):
raise ApiError("invalid parameter", "This function is expecting a list of dictionaries")
if not len(arg) > 0 and not isinstance(arg[0],dict):
raise ApiError("invalid paramemter","This function is expecting a list of dictionaries")
if len(arg[0]) > 0:
if not len(arg) > 0 and not isinstance(arg[0], dict):
raise ApiError("invalid paramemter", "This function is expecting a list of dictionaries")
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
items = args[0]
self.validate_arg(items)
return all([x.get('uid',False) for x in items])
if len(args) == 0:
items = []
else:
items = args[0]
self.validate_arg(items)
return all([x.get('uid', False) for x in items])
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
access_list = args[0]
self.validate_arg(access_list)
return StudyService.update_study_associates(study_id,access_list)
if len(args) == 0:
access_list = []
else:
access_list = args[0]
self.validate_arg(access_list)
return StudyService.update_study_associates(study_id, access_list)

View File

@ -1,6 +1,6 @@
from flask import g
from crc.scripts.data_store_base import DataStoreBase
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script
@ -15,4 +15,5 @@ class UserDataGet(Script, DataStoreBase):
return self.get_data_common(None,
g.user.uid,
'user_data_get',
None,
*args)

View File

@ -1,6 +1,6 @@
from flask import g
from crc.scripts.data_store_base import DataStoreBase
from crc.services.data_store_service import DataStoreBase
from crc.scripts.script import Script
@ -15,6 +15,7 @@ class UserDataSet(Script,DataStoreBase):
workflow_id,
g.user.uid,
'user_data_set',
None,
*args)
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
@ -24,6 +25,7 @@ class UserDataSet(Script,DataStoreBase):
workflow_id,
None,
'user_data_set',
None,
*args,
**kwargs)

View File

@ -0,0 +1,37 @@
import time
from SpiffWorkflow import Task
# Module-level store mapping cache key -> {'value': ..., 'timeout': expiry (epoch seconds)}.
# First pass - intentionally down and dirty; not thread-safe.
cache_store = {}


def purge_cache(now):
    """Remove every cached entry whose expiry time is earlier than *now* (epoch seconds)."""
    expired = [key for key, entry in cache_store.items() if entry['timeout'] < now]
    for key in expired:
        del cache_store[key]


def cache(f, timeout=60):
    """Decorator that caches the return values of *f* for *timeout* minutes.

    The cache key combines the function name with the string form of its
    positional and keyword arguments, so results are cached per argument
    combination.  Expired entries are purged lazily on each call.

    Note: passing a custom timeout still requires calling cache(f, timeout=...)
    directly rather than using @cache with arguments.
    """
    from functools import wraps  # local import; keeps module-level deps unchanged

    @wraps(f)  # preserve f's __name__/__doc__ on the wrapper
    def cached(*args, **kw):
        now = time.time()
        purge_cache(now)
        key = f.__name__ + str(args) + str(kw)
        if key in cache_store:
            return cache_store[key]['value']
        result = f(*args, **kw)
        # timeout is expressed in minutes; store the absolute expiry in epoch seconds
        cache_store[key] = {'value': result, 'timeout': now + timeout * 60}
        return result

    return cached

View File

@ -1,6 +1,3 @@
import importlib
import os
import pkgutil
from crc import session
from crc.api.common import ApiError
from crc.models.data_store import DataStoreModel
@ -20,10 +17,11 @@ class DataStoreBase(object):
overwritten = True
return overwritten
def set_validate_common(self, study_id, workflow_id, user_id, script_name, *args):
def set_validate_common(self, study_id, workflow_id, user_id, script_name, file_id, *args):
self.check_args_2(args, script_name)
workflow = session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first()
self.get_prev_value(study_id=study_id, user_id=user_id, key=args[0])
self.get_prev_value(study_id=study_id, user_id=user_id, file_id=file_id, key=args[0])
def check_args(self, args, maxlen=1, script_name='study_data_get'):
if len(args) < 1 or len(args) > maxlen:
@ -37,14 +35,29 @@ class DataStoreBase(object):
message=f"The {script_name} script takes two arguments, starting with the key and a "
"value for the key")
def get_prev_value(self, study_id, user_id, key):
study = session.query(DataStoreModel).filter_by(study_id=study_id, user_id=user_id, key=key).first()
def get_prev_value(self, study_id, user_id, key, file_id):
study = session.query(DataStoreModel).filter_by(study_id=study_id,
user_id=user_id,
file_id=file_id,
key=key).first()
return study
def set_data_common(self, task_id, study_id, user_id, workflow_id, workflow_spec_id, script_name, *args, **kwargs):
def set_data_common(self,
task_id,
study_id,
user_id,
workflow_id,
workflow_spec_id,
script_name,
file_id,
*args,
**kwargs):
self.check_args_2(args, script_name=script_name)
study = self.get_prev_value(study_id=study_id, user_id=user_id, key=args[0])
study = self.get_prev_value(study_id=study_id,
user_id=user_id,
file_id=file_id,
key=args[0])
if workflow_spec_id is None and workflow_id is not None:
workflow = session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first()
workflow_spec_id = workflow.workflow_spec_id
@ -56,10 +69,11 @@ class DataStoreBase(object):
study_id=study_id,
task_id=task_id,
user_id=user_id, # Make this available to any User
file_id=file_id,
workflow_id=workflow_id,
spec_id=workflow_spec_id)
study.value = args[1]
study.last_updated = datetime.now()
study.last_updated = datetime.utcnow()
overwritten = self.overwritten(study.value, prev_value)
session.add(study)
session.commit()
@ -67,14 +81,20 @@ class DataStoreBase(object):
'old_value': prev_value,
'overwritten': overwritten}
def get_data_common(self, study_id, user_id, script_name, *args):
def get_data_common(self, study_id, user_id, script_name, file_id=None, *args):
self.check_args(args, 2, script_name)
study = session.query(DataStoreModel).filter_by(study_id=study_id, user_id=user_id, key=args[0]).first()
study = session.query(DataStoreModel).filter_by(study_id=study_id,
user_id=user_id,
file_id=file_id,
key=args[
0]).first()
if study:
return study.value
else:
return args[1]
def get_multi_common(self, study_id, user_id):
study = session.query(DataStoreModel).filter_by(study_id=study_id, user_id=user_id)
def get_multi_common(self, study_id, user_id, file_id=None):
study = session.query(DataStoreModel).filter_by(study_id=study_id,
user_id=user_id,
file_id=file_id)
return study

View File

@ -0,0 +1,98 @@
from crc.api.common import ApiError
from crc.models.api_models import DocumentDirectory
from crc.services.file_service import FileService
from crc.services.lookup_service import LookupService
class DocumentService(object):
    """The document service provides details about the types of documents that can be uploaded to a workflow.
    This metadata about different document types is managed in an Excel spreadsheet, which can be uploaded at any
    time to change which documents are accepted, and it allows us to categorize these documents. At a minimum,
    the spreadsheet should contain the columns 'code', 'category1', 'category2', 'category3', 'description' and 'id',
    code is required for all rows in the table, the other fields are optional. """

    DOCUMENT_LIST = "documents.xlsx"

    @staticmethod
    def is_allowed_document(code):
        """Return True when *code* appears in the reference document dictionary."""
        doc_dict = DocumentService.get_dictionary()
        return code in doc_dict

    @staticmethod
    def verify_doc_dictionary(dd):
        """
        We are currently getting structured information from an XLS file, if someone accidentally
        changes a header we will have problems later, so we will verify we have the headers we need
        here.

        Raises ApiError when a required column is missing.
        """
        required_fields = ['category1', 'category2', 'category3', 'description']

        # we only need to check the first item, as all of the keys should be the same
        key = list(dd.keys())[0]
        for field in required_fields:
            if field not in dd[key].keys():
                raise ApiError(code="Invalid document list %s" % DocumentService.DOCUMENT_LIST,
                               message='Please check the headers in %s' % DocumentService.DOCUMENT_LIST)

    @staticmethod
    def get_dictionary():
        """Returns a dictionary of document details keyed on the doc_code."""
        file_data = FileService.get_reference_file_data(DocumentService.DOCUMENT_LIST)
        lookup_model = LookupService.get_lookup_model_for_file_data(file_data, 'code', 'description')
        doc_dict = {}
        for lookup_data in lookup_model.dependencies:
            doc_dict[lookup_data.value] = lookup_data.data
        return doc_dict

    @staticmethod
    def get_directory(doc_dict, files, workflow_id):
        """Returns a list of directories, hierarchically nested by category, with files at the deepest level.
        Empty directories are not included."""
        directory = []
        if files:
            for file in files:
                if file.irb_doc_code in doc_dict:
                    doc_code = doc_dict[file.irb_doc_code]
                else:
                    doc_code = {'category1': "Unknown", 'category2': None, 'category3': None}
                # Expand only the directory nodes holding files from the workflow being viewed.
                if workflow_id:
                    expand = file.workflow_id == int(workflow_id)
                else:
                    expand = False
                # Drop empty category levels; the file object terminates the path.
                categories = [x for x in [doc_code['category1'], doc_code['category2'], doc_code['category3'], file] if x]
                DocumentService.ensure_exists(directory, categories, expanded=expand)
        return directory

    @staticmethod
    def ensure_exists(output, categories, expanded):
        """
        This is a recursive function, it expects a list of
        levels with a file object at the end (kinda like duck,duck,duck,goose)

        for each level, it makes sure that level is already in the structure and if it is not
        it will add it

        function terminates upon getting an entry that is a file object ( or really anything but string)
        """
        current_item = categories[0]
        found = False
        if isinstance(current_item, str):
            for item in output:
                if item.level == current_item:
                    found = True
                    item.filecount = item.filecount + 1
                    # keep a node expanded once any contained file requested expansion
                    item.expanded = expanded | item.expanded
                    DocumentService.ensure_exists(item.children, categories[1:], expanded)
            if not found:
                new_level = DocumentDirectory(level=current_item)
                new_level.filecount = 1
                new_level.expanded = expanded
                output.append(new_level)
                DocumentService.ensure_exists(new_level.children, categories[1:], expanded)
        else:
            # reached the file object at the end of the category path
            new_level = DocumentDirectory(file=current_item)
            new_level.expanded = expanded
            output.append(new_level)

View File

@ -1,6 +1,5 @@
import re
generic_message = """Workflow validation failed. For more information about the error, see below."""
# known_errors is a dictionary of errors from validation that we want to give users a hint for solving their problem.
# The key is the known error, or part of the known error. It is a string.
@ -14,12 +13,13 @@ generic_message = """Workflow validation failed. For more information about the
# I know this explanation is confusing. If you have ideas for clarification, pull request welcome.
known_errors = {'Error is Non-default exclusive outgoing sequence flow without condition':
known_errors = {'Non-default exclusive outgoing sequence flow without condition':
{'hint': 'Add a Condition Type to your gateway path.'},
'Could not set task title on task (\w+) with \'(.*)\' property because \\1: Error evaluating expression \'(.*)\', "\'Box\' object has no attribute \'\\2\'"$':
{'hint': 'You are overriding the title for task `{task_id}`, using the `{property}` extension, and it is causing an error. Look under the extensions tab for the task, and check the value you are setting for the property.',
'groups': {'task_id': 0, 'property': 1}}}
'Could not set task title on task .*':
{'hint': 'You are overriding the title using an extension and it is causing this error. '
'Look under the extensions tab for the task, and check the value you are setting '
'for the property.'}}
class ValidationErrorService(object):
@ -28,37 +28,16 @@ class ValidationErrorService(object):
Validation is run twice,
once where we try to fill in all form fields
and a second time where we only fill in the required fields.
We get a list that contains possible errors from the validation."""
@staticmethod
def interpret_validation_errors(errors):
if len(errors) == 0:
return ()
interpreted_errors = []
for error_type in ['all', 'required']:
if error_type in errors:
hint = generic_message
for known_key in known_errors:
regex = re.compile(known_key)
result = regex.search(errors[error_type].message)
if result is not None:
if 'hint' in known_errors[known_key]:
if 'groups' in known_errors[known_key]:
caught = {}
for group in known_errors[known_key]['groups']:
group_id = known_errors[known_key]['groups'][group]
group_value = result.groups()[group_id]
caught[group] = group_value
hint = known_errors[known_key]['hint'].format(**caught)
else:
hint = known_errors[known_key]['hint']
errors[error_type].hint = hint
interpreted_errors.append(errors[error_type])
return interpreted_errors
def interpret_validation_error(error):
    """Attach a human-readable hint to *error* when its message matches a known pattern.

    Scans the module-level ``known_errors`` mapping (regex string -> {'hint': ...})
    and, for each entry whose regex matches ``error.message``, copies that entry's
    hint onto ``error.hint``.  Returns the (possibly annotated) error, or None
    when *error* is None.
    """
    if error is None:
        return
    for known_key in known_errors:
        # each known_errors key is a regular-expression string
        regex = re.compile(known_key)
        result = regex.search(error.message)
        if result is not None:
            if 'hint' in known_errors[known_key]:
                # NOTE(review): a later matching entry overwrites an earlier hint
                error.hint = known_errors[known_key]['hint']
    return error

View File

@ -2,34 +2,41 @@ import hashlib
import json
import os
from datetime import datetime
import pandas as pd
from github import Github, GithubObject, UnknownObjectException
from uuid import UUID
from lxml import etree
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from lxml.etree import XMLSyntaxError
from pandas import ExcelFile
from sqlalchemy import desc
from sqlalchemy.exc import IntegrityError
from crc import session, app
from crc.api.common import ApiError
from crc.models.data_store import DataStoreModel
from crc.models.file import FileType, FileDataModel, FileModel, LookupFileModel, LookupDataModel
from crc.models.workflow import WorkflowSpecModel, WorkflowModel, WorkflowSpecDependencyFile
from crc.services.cache_service import cache
from crc.services.user_service import UserService
import re
def camel_to_snake(camel):
    """
    Convert a camelCase string to snake_case.

    A few extra things are thrown in: we had a case where we were parsing a
    spreadsheet and using the headings as keys in an object, and one of the
    headings was "Who Uploads?" - so spaces and question marks are stripped
    before the conversion.
    """
    camel = camel.strip()
    camel = camel.replace(' ', '')
    # '?' is a regex metacharacter; re.sub('?', ...) raises re.error
    # ("nothing to repeat"), so strip it with a plain string replace instead.
    camel = camel.replace('?', '')
    return re.sub(r'(?<!^)(?=[A-Z])', '_', camel).lower()
class FileService(object):
"""Provides consistent management and rules for storing, retrieving and processing files."""
DOCUMENT_LIST = "irb_documents.xlsx"
INVESTIGATOR_LIST = "investigators.xlsx"
__doc_dictionary = None
@staticmethod
def get_doc_dictionary():
if not FileService.__doc_dictionary:
FileService.__doc_dictionary = FileService.get_reference_data(FileService.DOCUMENT_LIST, 'code', ['id'])
return FileService.__doc_dictionary
@staticmethod
def add_workflow_spec_file(workflow_spec: WorkflowSpecModel,
@ -52,30 +59,33 @@ class FileService(object):
return FileService.update_file(file_model, binary_data, content_type)
@staticmethod
def is_allowed_document(code):
doc_dict = FileService.get_doc_dictionary()
return code in doc_dict
@staticmethod
@cache
def is_workflow_review(workflow_spec_id):
files = session.query(FileModel).filter(FileModel.workflow_spec_id==workflow_spec_id).all()
review = any([f.is_review for f in files])
return review
@staticmethod
def update_irb_code(file_id, irb_doc_code):
"""Create a new file and associate it with the workflow
Please note that the irb_doc_code MUST be a known file in the irb_documents.xslx reference document."""
file_model = session.query(FileModel)\
.filter(FileModel.id == file_id).first()
if file_model is None:
raise ApiError("invalid_file_id",
"When updating the irb_doc_code for a file, that file_id must already exist "
"This file_id is not found in the database '%d'" % file_id)
file_model.irb_doc_code = irb_doc_code
session.commit()
return True
@staticmethod
def add_workflow_file(workflow_id, irb_doc_code, name, content_type, binary_data):
"""Create a new file and associate it with the workflow
Please note that the irb_doc_code MUST be a known file in the irb_documents.xslx reference document."""
if not FileService.is_allowed_document(irb_doc_code):
raise ApiError("invalid_form_field_key",
"When uploading files, the form field id must match a known document in the "
"irb_docunents.xslx reference file. This code is not found in that file '%s'" % irb_doc_code)
"""Assure this is unique to the workflow, task, and document code AND the Name
Because we will allow users to upload multiple files for the same form field
in some cases """
file_model = session.query(FileModel)\
.filter(FileModel.workflow_id == workflow_id)\
.filter(FileModel.name == name)\
@ -89,24 +99,6 @@ class FileService(object):
)
return FileService.update_file(file_model, binary_data, content_type)
@staticmethod
def get_reference_data(reference_file_name, index_column, int_columns=[]):
""" Opens a reference file (assumes that it is xls file) and returns the data as a
dictionary, each row keyed on the given index_column name. If there are columns
that should be represented as integers, pass these as an array of int_columns, lest
you get '1.0' rather than '1'
fixme: This is stupid stupid slow. Place it in the database and just check if it is up to date."""
data_model = FileService.get_reference_file_data(reference_file_name)
xls = ExcelFile(data_model.data, engine='openpyxl')
df = xls.parse(xls.sheet_names[0])
for c in int_columns:
df[c] = df[c].fillna(0)
df = df.astype({c: 'Int64'})
df = df.fillna('')
df = df.applymap(str)
df = df.set_index(index_column)
return json.loads(df.to_json(orient='index'))
@staticmethod
def get_workflow_files(workflow_id):
"""Returns all the file models associated with a running workflow."""
@ -142,6 +134,8 @@ class FileService(object):
order_by(desc(FileDataModel.date_created)).first()
md5_checksum = UUID(hashlib.md5(binary_data).hexdigest())
size = len(binary_data)
if (latest_data_model is not None) and (md5_checksum == latest_data_model.md5_hash):
# This file does not need to be updated, it's the same file. If it is arhived,
# then de-arvhive it.
@ -175,9 +169,14 @@ class FileService(object):
except XMLSyntaxError as xse:
raise ApiError("invalid_xml", "Failed to parse xml: " + str(xse), file_name=file_model.name)
try:
user_uid = UserService.current_user().uid
except ApiError as ae:
user_uid = None
new_file_data_model = FileDataModel(
data=binary_data, file_model_id=file_model.id, file_model=file_model,
version=version, md5_hash=md5_checksum, date_created=datetime.now()
version=version, md5_hash=md5_checksum, date_created=datetime.utcnow(),
size=size, user_uid=user_uid
)
session.add_all([file_model, new_file_data_model])
session.commit()
@ -356,6 +355,7 @@ class FileService(object):
session.query(LookupDataModel).filter_by(lookup_file_model_id=lf.id).delete()
session.query(LookupFileModel).filter_by(id=lf.id).delete()
session.query(FileDataModel).filter_by(file_model_id=file_id).delete()
session.query(DataStoreModel).filter_by(file_id=file_id).delete()
session.query(FileModel).filter_by(id=file_id).delete()
session.commit()
except IntegrityError as ie:

View File

@ -3,14 +3,16 @@ import re
from collections import OrderedDict
import pandas as pd
from pandas import ExcelFile, np
import numpy
from pandas import ExcelFile
from pandas._libs.missing import NA
from sqlalchemy import desc
from sqlalchemy.sql.functions import GenericFunction
from crc import db
from crc.api.common import ApiError
from crc.models.api_models import Task
from crc.models.file import FileDataModel, LookupFileModel, LookupDataModel
from crc.models.file import FileModel, FileDataModel, LookupFileModel, LookupDataModel
from crc.models.workflow import WorkflowModel, WorkflowSpecDependencyFile
from crc.services.file_service import FileService
from crc.services.ldap_service import LdapService
@ -23,11 +25,14 @@ class TSRank(GenericFunction):
class LookupService(object):
"""Provides tools for doing lookups for auto-complete fields.
This can currently take two forms:
"""Provides tools for doing lookups for auto-complete fields, and rapid access to any
uploaded spreadsheets.
This can currently take three forms:
1) Lookup from spreadsheet data associated with a workflow specification.
in which case we store the spreadsheet data in a lookup table with full
text indexing enabled, and run searches against that table.
2) Lookup from spreadsheet data associated with a specific file. This allows us
to get a lookup model for a specific file object, such as a reference file.
2) Lookup from LDAP records. In which case we call out to an external service
to pull back detailed records and return them.
@ -42,6 +47,14 @@ class LookupService(object):
workflow = db.session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first()
return LookupService.__get_lookup_model(workflow, spiff_task.task_spec.name, field.id)
@staticmethod
def get_lookup_model_for_file_data(file_data: FileDataModel, value_column, label_column):
    """Return the LookupFileModel associated with the given file data record.

    If no lookup model exists yet, build one (an expensive operation that
    parses the underlying spreadsheet) and return the freshly created model.
    """
    existing = db.session.query(LookupFileModel) \
        .filter(LookupFileModel.file_data_model_id == file_data.id) \
        .first()
    if existing:
        return existing
    logging.warning("!!!! Making a very expensive call to update the lookup model.")
    return LookupService.build_lookup_table(file_data, value_column, label_column)
@staticmethod
def __get_lookup_model(workflow, task_spec_id, field_id):
lookup_model = db.session.query(LookupFileModel) \
@ -137,14 +150,19 @@ class LookupService(object):
return lookup_model
@staticmethod
def build_lookup_table(data_model: FileDataModel, value_column, label_column, workflow_spec_id, task_spec_id, field_id):
def build_lookup_table(data_model: FileDataModel, value_column, label_column,
workflow_spec_id=None, task_spec_id=None, field_id=None):
""" In some cases the lookup table can be very large. This method will add all values to the database
in a way that can be searched and returned via an api call - rather than sending the full set of
options along with the form. It will only open the file and process the options if something has
changed. """
xls = ExcelFile(data_model.data)
xls = ExcelFile(data_model.data, engine='openpyxl')
df = xls.parse(xls.sheet_names[0]) # Currently we only look at the fist sheet.
df = pd.DataFrame(df).replace({np.nan: None})
df = df.convert_dtypes()
df = df.loc[:, ~df.columns.str.contains('^Unnamed')] # Drop unnamed columns.
df = pd.DataFrame(df).dropna(how='all') # Drop null rows
df = pd.DataFrame(df).replace({NA: ''})
if value_column not in df:
raise ApiError("invalid_enum",
"The file %s does not contain a column named % s" % (data_model.file_model.name,

View File

@ -14,6 +14,8 @@ class ProtocolBuilderService(object):
REQUIRED_DOCS_URL = app.config['PB_REQUIRED_DOCS_URL']
STUDY_DETAILS_URL = app.config['PB_STUDY_DETAILS_URL']
SPONSORS_URL = app.config['PB_SPONSORS_URL']
IRB_INFO_URL = app.config['PB_IRB_INFO_URL']
CHECK_STUDY_URL = app.config['PB_CHECK_STUDY_URL']
@staticmethod
def is_enabled():
@ -55,10 +57,18 @@ class ProtocolBuilderService(object):
def get_study_details(study_id) -> {}:
return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.STUDY_DETAILS_URL)
@staticmethod
def get_irb_info(study_id) -> dict:
    """Fetch the IRB information record for the given study from Protocol Builder."""
    return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.IRB_INFO_URL)
@staticmethod
def get_sponsors(study_id) -> {}:
return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.SPONSORS_URL)
@staticmethod
def check_study(study_id) -> dict:
    """Query Protocol Builder's check-study endpoint for the given study."""
    return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.CHECK_STUDY_URL)
@staticmethod
def __enabled_or_raise():
if not ProtocolBuilderService.is_enabled():

View File

@ -1,34 +1,48 @@
import urllib
from copy import copy
from datetime import datetime
from typing import List
import flask
import requests
from SpiffWorkflow import WorkflowException
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.exceptions import WorkflowTaskExecException
from ldap3.core.exceptions import LDAPSocketOpenError
from crc import db, session, app
from crc.api.common import ApiError
from crc.models.data_store import DataStoreModel
from crc.models.email import EmailModel
from crc.models.file import FileDataModel, FileModel, FileModelSchema, File, LookupFileModel, LookupDataModel
from crc.models.file import FileModel, File
from crc.models.ldap import LdapSchema
from crc.models.protocol_builder import ProtocolBuilderStudy, ProtocolBuilderStatus
from crc.models.study import StudyModel, Study, StudyStatus, Category, WorkflowMetadata, StudyEventType, StudyEvent, \
IrbStatus, StudyAssociated, StudyAssociatedSchema
from crc.models.task_event import TaskEventModel, TaskEvent
from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowModel, WorkflowSpecModel, WorkflowState, \
WorkflowStatus, WorkflowSpecDependencyFile
from crc.services.document_service import DocumentService
from crc.services.file_service import FileService
from crc.services.ldap_service import LdapService
from crc.services.lookup_service import LookupService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.workflow_processor import WorkflowProcessor
from SpiffWorkflow import Task as SpiffTask
class StudyService(object):
"""Provides common tools for working with a Study"""
INVESTIGATOR_LIST = "investigators.xlsx" # A reference document containing details about what investigators to show, and when.
@staticmethod
def get_studies_for_user(user):
def _is_valid_study(study_id):
    """Return True if the study's Protocol Builder REVIEW_TYPE is one we handle (2, 3, 23 or 24).

    :param study_id: the Protocol Builder study id to check.
    """
    study_info = ProtocolBuilderService().get_study_details(study_id)
    # .get() replaces the unidiomatic `'REVIEW_TYPE' in study_info.keys()` test:
    # a missing REVIEW_TYPE yields None, which is not in the tuple, so we
    # still return False in that case.
    return study_info.get('REVIEW_TYPE') in (2, 3, 23, 24)
def get_studies_for_user(self, user):
"""Returns a list of all studies for the given user."""
associated = session.query(StudyAssociated).filter_by(uid=user.uid,access=True).all()
associated_studies = [x.study_id for x in associated]
@ -37,7 +51,8 @@ class StudyService(object):
studies = []
for study_model in db_studies:
studies.append(StudyService.get_study(study_model.id, study_model,do_status=False))
if self._is_valid_study(study_model.id):
studies.append(StudyService.get_study(study_model.id, study_model,do_status=False))
return studies
@staticmethod
@ -52,7 +67,7 @@ class StudyService(object):
return studies
@staticmethod
def get_study(study_id, study_model: StudyModel = None, do_status=True):
def get_study(study_id, study_model: StudyModel = None, do_status=False):
"""Returns a study model that contains all the workflows organized by category.
IMPORTANT: This is intended to be a lightweight call, it should never involve
loading up and executing all the workflows in a study to calculate information."""
@ -71,10 +86,10 @@ class StudyService(object):
study.last_activity_user = LdapService.user_info(last_event.user_uid).display_name
study.last_activity_date = last_event.date
study.categories = StudyService.get_categories()
workflow_metas = StudyService.__get_workflow_metas(study_id)
workflow_metas = StudyService._get_workflow_metas(study_id)
files = FileService.get_files_for_study(study.id)
files = (File.from_models(model, FileService.get_file_data(model.id),
FileService.get_doc_dictionary()) for model in files)
DocumentService.get_dictionary()) for model in files)
study.files = list(files)
# Calling this line repeatedly is very very slow. It creates the
# master spec and runs it. Don't execute this for Abandoned studies, as
@ -83,8 +98,9 @@ class StudyService(object):
# this line is taking 99% of the time that is used in get_study.
# see ticket #196
if do_status:
status = StudyService.__get_study_status(study_model)
study.warnings = StudyService.__update_status_of_workflow_meta(workflow_metas, status)
# __get_study_status() runs the master workflow to generate the status dictionary
status = StudyService._get_study_status(study_model)
study.warnings = StudyService._update_status_of_workflow_meta(workflow_metas, status)
# Group the workflows into their categories.
for category in study.categories:
@ -128,9 +144,9 @@ class StudyService(object):
raise ApiError('study_not_found','No study found with id = %d'%study_id)
ownerid = study.user_uid
people = db.session.query(StudyAssociated).filter(StudyAssociated.study_id == study_id)
people_list = [{'uid':ownerid,'role':'owner','send_email':True,'access':True}]
people_list += StudyAssociatedSchema().dump(people, many=True)
return people_list
@ -254,14 +270,14 @@ class StudyService(object):
# Loop through all known document types, get the counts for those files,
# and use pb_docs to mark those as required.
doc_dictionary = FileService.get_reference_data(FileService.DOCUMENT_LIST, 'code', ['id'])
doc_dictionary = DocumentService.get_dictionary()
documents = {}
for code, doc in doc_dictionary.items():
if ProtocolBuilderService.is_enabled():
doc['required'] = False
if ProtocolBuilderService.is_enabled() and doc['id']:
pb_data = next((item for item in pb_docs if int(item['AUXDOCID']) == int(doc['id'])), None)
doc['required'] = False
if pb_data:
doc['required'] = True
@ -271,7 +287,7 @@ class StudyService(object):
# Make a display name out of categories
name_list = []
for cat_key in ['category1', 'category2', 'category3']:
if doc[cat_key] not in ['', 'NULL']:
if doc[cat_key] not in ['', 'NULL', None]:
name_list.append(doc[cat_key])
doc['display_name'] = ' / '.join(name_list)
@ -279,24 +295,51 @@ class StudyService(object):
doc_files = FileService.get_files_for_study(study_id=study_id, irb_doc_code=code)
doc['count'] = len(doc_files)
doc['files'] = []
for file in doc_files:
doc['files'].append({'file_id': file.id,
'workflow_id': file.workflow_id})
# when we run tests - it doesn't look like the user is available
# so we return a bogus token
token = 'not_available'
if hasattr(flask.g,'user'):
token = flask.g.user.encode_auth_token()
for file in doc_files:
file_data = {'file_id': file.id,
'name': file.name,
'url': app.config['APPLICATION_ROOT']+
'file/' + str(file.id) +
'/download?auth_token='+
urllib.parse.quote_plus(token),
'workflow_id': file.workflow_id
}
data = db.session.query(DataStoreModel).filter(DataStoreModel.file_id==file.id).all()
data_store_data = {}
for d in data:
data_store_data[d.key] = d.value
file_data["data_store"] = data_store_data
doc['files'].append(Box(file_data))
# update the document status to match the status of the workflow it is in.
if 'status' not in doc or doc['status'] is None:
workflow: WorkflowModel = session.query(WorkflowModel).filter_by(id=file.workflow_id).first()
doc['status'] = workflow.status.value
documents[code] = doc
return documents
return Box(documents)
@staticmethod
def get_investigator_dictionary():
    """Return a dictionary of investigator details keyed on the investigator code.

    Reads the investigators reference spreadsheet through the lookup service
    so repeated calls hit the cached lookup model rather than re-parsing the file.
    """
    file_data = FileService.get_reference_file_data(StudyService.INVESTIGATOR_LIST)
    lookup_model = LookupService.get_lookup_model_for_file_data(file_data, 'code', 'label')
    return {entry.value: entry.data for entry in lookup_model.dependencies}
@staticmethod
def get_investigators(study_id, all=False):
"""Convert array of investigators from protocol builder into a dictionary keyed on the type. """
# Loop through all known investigator types as set in the reference file
inv_dictionary = FileService.get_reference_data(FileService.INVESTIGATOR_LIST, 'code')
inv_dictionary = StudyService.get_investigator_dictionary()
# Get PB required docs
pb_investigators = ProtocolBuilderService.get_investigators(study_id=study_id)
@ -333,16 +376,6 @@ class StudyService(object):
app.logger.info("Failed to connect to LDAP Server.")
return {}
@staticmethod
def get_protocol(study_id):
    """Return the serialized study protocol file, if one has been uploaded.

    Yields the schema dump of None (i.e. None) when no protocol exists.
    """
    # `file_model` avoids shadowing the `file` builtin.
    file_model = db.session.query(FileModel) \
        .filter_by(study_id=study_id, form_field_key='Study_Protocol_Document') \
        .first()
    return FileModelSchema().dump(file_model)
@staticmethod
def synch_with_protocol_builder_if_enabled(user):
"""Assures that the studies we have locally for the given user are
@ -409,24 +442,36 @@ class StudyService(object):
db.session.commit()
@staticmethod
def __update_status_of_workflow_meta(workflow_metas, status):
def _update_status_of_workflow_meta(workflow_metas, status):
# Update the status on each workflow
warnings = []
for wfm in workflow_metas:
if wfm.name in status.keys():
if not WorkflowState.has_value(status[wfm.name]):
warnings.append(ApiError("invalid_status",
"Workflow '%s' can not be set to '%s', should be one of %s" % (
wfm.name, status[wfm.name], ",".join(WorkflowState.list())
)))
else:
wfm.state = WorkflowState[status[wfm.name]]
else:
wfm.state_message = ''
# do we have a status for you
if wfm.name not in status.keys():
warnings.append(ApiError("missing_status", "No status specified for workflow %s" % wfm.name))
continue
if not isinstance(status[wfm.name], dict):
warnings.append(ApiError(code='invalid_status',
message=f'Status must be a dictionary with "status" and "message" keys. Name is {wfm.name}. Status is {status[wfm.name]}'))
continue
if 'status' not in status[wfm.name].keys():
warnings.append(ApiError("missing_status",
"Workflow '%s' does not have a status setting" % wfm.name))
continue
if not WorkflowState.has_value(status[wfm.name]['status']):
warnings.append(ApiError("invalid_state",
"Workflow '%s' can not be set to '%s', should be one of %s" % (
wfm.name, status[wfm.name]['status'], ",".join(WorkflowState.list())
)))
continue
wfm.state = WorkflowState[status[wfm.name]['status']]
if 'message' in status[wfm.name].keys():
wfm.state_message = status[wfm.name]['message']
return warnings
@staticmethod
def __get_workflow_metas(study_id):
def _get_workflow_metas(study_id):
# Add in the Workflows for each category
workflow_models = db.session.query(WorkflowModel). \
join(WorkflowSpecModel). \
@ -439,7 +484,7 @@ class StudyService(object):
return workflow_metas
@staticmethod
def __get_study_status(study_model):
def _get_study_status(study_model):
"""Uses the Top Level Workflow to calculate the status of the study, and it's
workflow models."""
master_specs = db.session.query(WorkflowSpecModel). \
@ -474,8 +519,9 @@ class StudyService(object):
def _create_workflow_model(study: StudyModel, spec):
workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
study=study,
user_id=None,
workflow_spec_id=spec.id,
last_updated=datetime.now())
last_updated=datetime.utcnow())
session.add(workflow_model)
session.commit()
return workflow_model

View File

@ -1,12 +1,13 @@
import re
from SpiffWorkflow.serializer.exceptions import MissingSpecError
from SpiffWorkflow.util.metrics import timeit, firsttime, sincetime
from lxml import etree
import shlex
from datetime import datetime
from typing import List
from SpiffWorkflow import Task as SpiffTask, WorkflowException
from SpiffWorkflow import Task as SpiffTask, WorkflowException, Task
from SpiffWorkflow.bpmn.BpmnScriptEngine import BpmnScriptEngine
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
@ -29,6 +30,7 @@ from crc.services.file_service import FileService
from crc import app
from crc.services.user_service import UserService
from difflib import SequenceMatcher
class CustomBpmnScriptEngine(BpmnScriptEngine):
"""This is a custom script processor that can be easily injected into Spiff Workflow.
@ -42,7 +44,7 @@ class CustomBpmnScriptEngine(BpmnScriptEngine):
"""
return self.evaluate_expression(task, expression)
@timeit
def execute(self, task: SpiffTask, script, data):
study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
@ -50,24 +52,16 @@ class CustomBpmnScriptEngine(BpmnScriptEngine):
workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
else:
workflow_id = None
try:
if task.workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY]:
augmentMethods = Script.generate_augmented_validate_list(task, study_id, workflow_id)
augment_methods = Script.generate_augmented_validate_list(task, study_id, workflow_id)
else:
augmentMethods = Script.generate_augmented_list(task, study_id, workflow_id)
super().execute(task, script, data, externalMethods=augmentMethods)
except SyntaxError as e:
raise ApiError('syntax_error',
f'Something is wrong with your python script '
f'please correct the following:'
f' {script}, {e.msg}')
except NameError as e:
raise ApiError('name_error',
f'something you are referencing does not exist:'
f' {script}, {e}')
augment_methods = Script.generate_augmented_list(task, study_id, workflow_id)
super().execute(task, script, data, external_methods=augment_methods)
except WorkflowException as e:
raise e
except Exception as e:
raise WorkflowTaskExecException(task, f' {script}, {e}', e)
def evaluate_expression(self, task, expression):
"""
@ -86,7 +80,7 @@ class CustomBpmnScriptEngine(BpmnScriptEngine):
else:
augmentMethods = Script.generate_augmented_list(task, study_id, workflow_id)
exp, valid = self.validateExpression(expression)
return self._eval(exp, externalMethods=augmentMethods, **task.data)
return self._eval(exp, external_methods=augmentMethods, **task.data)
except Exception as e:
raise WorkflowTaskExecException(task,
@ -98,10 +92,6 @@ class CustomBpmnScriptEngine(BpmnScriptEngine):
@staticmethod
def camel_to_snake(camel):
camel = camel.strip()
return re.sub(r'(?<!^)(?=[A-Z])', '_', camel).lower()
class MyCustomParser(BpmnDmnParser):
@ -174,7 +164,7 @@ class WorkflowProcessor(object):
self.is_latest_spec = False
@staticmethod
def reset(workflow_model, clear_data=False):
def reset(workflow_model, clear_data=False, delete_files=False):
print('WorkflowProcessor: reset: ')
# Try to execute a cancel notify
@ -194,6 +184,10 @@ class WorkflowProcessor(object):
for task_event in task_events:
task_event.form_data = {}
session.add(task_event)
if delete_files:
files = FileModel.query.filter(FileModel.workflow_id == workflow_model.id).all()
for file in files:
FileService.delete_file(file.id)
session.commit()
return WorkflowProcessor(workflow_model)
@ -219,7 +213,7 @@ class WorkflowProcessor(object):
self.workflow_model.status = self.get_status()
self.workflow_model.total_tasks = len(tasks)
self.workflow_model.completed_tasks = sum(1 for t in tasks if t.state in complete_states)
self.workflow_model.last_updated = datetime.now()
self.workflow_model.last_updated = datetime.utcnow()
self.update_dependencies(self.spec_data_files)
session.add(self.workflow_model)
session.commit()
@ -277,16 +271,22 @@ class WorkflowProcessor(object):
self.workflow_model.dependencies.append(WorkflowSpecDependencyFile(file_data_id=file_data.id))
@staticmethod
@timeit
def run_master_spec(spec_model, study):
"""Executes a BPMN specification for the given study, without recording any information to the database
Useful for running the master specification, which should not persist. """
lasttime = firsttime()
spec_data_files = FileService.get_spec_data_files(spec_model.id)
lasttime = sincetime('load Files', lasttime)
spec = WorkflowProcessor.get_spec(spec_data_files, spec_model.id)
lasttime = sincetime('get spec', lasttime)
try:
bpmn_workflow = BpmnWorkflow(spec, script_engine=WorkflowProcessor._script_engine)
bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = study.id
bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = False
lasttime = sincetime('get_workflow', lasttime)
bpmn_workflow.do_engine_steps()
lasttime = sincetime('run steps', lasttime)
except WorkflowException as we:
raise ApiError.from_task_spec("error_running_master_spec", str(we), we.sender)
@ -325,8 +325,8 @@ class WorkflowProcessor(object):
spec = parser.get_spec(process_id)
except ValidationException as ve:
raise ApiError(code="workflow_validation_error",
message="Failed to parse Workflow Specification '%s'. \n" % workflow_spec_id +
"Error is %s. \n" % str(ve),
message="Failed to parse the Workflow Specification. " +
"Error is '%s.'" % str(ve),
file_name=ve.filename,
task_id=ve.id,
tag=ve.tag)
@ -337,6 +337,9 @@ class WorkflowProcessor(object):
if bpmn_workflow.is_completed():
return WorkflowStatus.complete
user_tasks = bpmn_workflow.get_ready_user_tasks()
waiting_tasks = bpmn_workflow.get_tasks(Task.WAITING)
if len(waiting_tasks) > 0:
return WorkflowStatus.waiting
if len(user_tasks) > 0:
return WorkflowStatus.user_input_required
else:
@ -358,9 +361,10 @@ class WorkflowProcessor(object):
def get_status(self):
    """Return the WorkflowStatus of this processor's BPMN workflow instance."""
    return self.status_of(self.bpmn_workflow)
def do_engine_steps(self):
def do_engine_steps(self, exit_at = None):
try:
self.bpmn_workflow.do_engine_steps()
self.bpmn_workflow.refresh_waiting_tasks()
self.bpmn_workflow.do_engine_steps(exit_at = exit_at)
except WorkflowTaskExecException as we:
raise ApiError.from_task("task_error", str(we), we.task)
@ -391,12 +395,17 @@ class WorkflowProcessor(object):
return task
# If there are ready tasks to complete, return the next ready task, but return the one
# in the active parallel path if possible.
# in the active parallel path if possible. In some cases the active parallel path may itself be
# a parallel gateway with multiple tasks, so prefer ones that share a parent.
ready_tasks = self.bpmn_workflow.get_tasks(SpiffTask.READY)
if len(ready_tasks) > 0:
for task in ready_tasks:
if task.parent == self.bpmn_workflow.last_task:
return task
for task in ready_tasks:
if self.bpmn_workflow.last_task and task.parent == self.bpmn_workflow.last_task.parent:
return task
return ready_tasks[0]
# If there are no ready tasks, but the thing isn't complete yet, find the first non-complete task

View File

@ -7,6 +7,7 @@ from typing import List
import jinja2
from SpiffWorkflow import Task as SpiffTask, WorkflowException, NavItem
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask
from SpiffWorkflow.bpmn.specs.MultiInstanceTask import MultiInstanceTask
@ -16,17 +17,20 @@ from SpiffWorkflow.bpmn.specs.UserTask import UserTask
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
from SpiffWorkflow.specs import CancelTask, StartTask, MultiChoice
from SpiffWorkflow.util.deep_merge import DeepMerge
from box import Box
from SpiffWorkflow.util.metrics import timeit
from jinja2 import Template
from crc import db, app
from crc.api.common import ApiError
from crc.models.api_models import Task, MultiInstanceType, WorkflowApi
from crc.models.file import LookupDataModel
from crc.models.data_store import DataStoreModel
from crc.models.file import LookupDataModel, FileModel, File, FileSchema
from crc.models.study import StudyModel
from crc.models.task_event import TaskEventModel
from crc.models.user import UserModel, UserModelSchema
from crc.models.workflow import WorkflowModel, WorkflowStatus, WorkflowSpecModel
from crc.services.document_service import DocumentService
from crc.services.file_service import FileService
from crc.services.lookup_service import LookupService
from crc.services.study_service import StudyService
@ -52,77 +56,119 @@ class WorkflowService(object):
handles the testing of a workflow specification by completing it with
random selections, attempting to mimic a front end as much as possible. """
from crc.services.user_service import UserService
@staticmethod
def make_test_workflow(spec_id):
user = db.session.query(UserModel).filter_by(uid="test").first()
def make_test_workflow(spec_id, validate_study_id=None):
try:
user = UserService.current_user()
except ApiError as e:
user = None
if not user:
user = db.session.query(UserModel).filter_by(uid="test").first()
if not user:
db.session.add(UserModel(uid="test"))
study = db.session.query(StudyModel).filter_by(user_uid="test").first()
if not study:
db.session.add(StudyModel(user_uid="test", title="test"))
db.session.commit()
user = db.session.query(UserModel).filter_by(uid="test").first()
if validate_study_id:
study = db.session.query(StudyModel).filter_by(id=validate_study_id).first()
else:
study = db.session.query(StudyModel).filter_by(user_uid=user.uid).first()
if not study:
db.session.add(StudyModel(user_uid=user.uid, title="test"))
db.session.commit()
study = db.session.query(StudyModel).filter_by(user_uid=user.uid).first()
workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
workflow_spec_id=spec_id,
last_updated=datetime.now(),
last_updated=datetime.utcnow(),
study=study)
return workflow_model
@staticmethod
def delete_test_data():
def delete_test_data(workflow: WorkflowModel):
db.session.delete(workflow)
# Also, delete any test study or user models that may have been created.
for study in db.session.query(StudyModel).filter(StudyModel.user_uid == "test"):
StudyService.delete_study(study.id)
db.session.commit()
user = db.session.query(UserModel).filter_by(uid="test").first()
if user:
db.session.delete(user)
db.session.commit()
@staticmethod
def test_spec(spec_id, required_only=False):
def do_waiting():
    """Advance every workflow currently in the 'waiting' state.

    Refreshes waiting tasks and runs engine steps for each waiting workflow;
    a failure in one workflow is logged and does not stop the others.
    """
    records = db.session.query(WorkflowModel).filter(WorkflowModel.status==WorkflowStatus.waiting).all()
    for workflow_model in records:
        try:
            # Lazy %-style logger args: formatting happens only if the record is emitted.
            app.logger.info('Processing workflow %s', workflow_model.id)
            processor = WorkflowProcessor(workflow_model)
            processor.bpmn_workflow.refresh_waiting_tasks()
            processor.bpmn_workflow.do_engine_steps()
            processor.save()
        except Exception as e:
            # Broad catch is deliberate: one bad workflow must not block the rest.
            # (Original mixed an f-string prefix with '%' formatting; the f-prefix
            # was a no-op and the message was formatted eagerly.)
            app.logger.error("Error running waiting task for workflow #%i (%s) for study #%i. %s",
                             workflow_model.id,
                             workflow_model.workflow_spec.name,
                             workflow_model.study_id,
                             str(e))
@staticmethod
@timeit
def test_spec(spec_id, validate_study_id=None, test_until=None, required_only=False):
"""Runs a spec through it's paces to see if it results in any errors.
Not fool-proof, but a good sanity check. Returns the final data
output form the last task if successful.
test_until
required_only can be set to true, in which case this will run the
spec, only completing the required fields, rather than everything.
"""
workflow_model = WorkflowService.make_test_workflow(spec_id)
workflow_model = WorkflowService.make_test_workflow(spec_id, validate_study_id)
try:
processor = WorkflowProcessor(workflow_model, validate_only=True)
count = 0
while not processor.bpmn_workflow.is_completed():
processor.bpmn_workflow.get_deep_nav_list() # Assure no errors with navigation.
exit_task = processor.bpmn_workflow.do_engine_steps(exit_at=test_until)
if (exit_task != None):
raise ApiError.from_task("validation_break",
f"The validation has been exited early on task '{exit_task.task_spec.name}' and was parented by ",
exit_task.parent)
tasks = processor.bpmn_workflow.get_tasks(SpiffTask.READY)
for task in tasks:
if task.task_spec.lane is not None and task.task_spec.lane not in task.data:
raise ApiError.from_task("invalid_role",
f"This task is in a lane called '{task.task_spec.lane}', The "
f" current task data must have information mapping this role to "
f" a unique user id.", task)
task_api = WorkflowService.spiff_task_to_api_task(
task,
add_docs_and_forms=True) # Assure we try to process the documentation, and raise those errors.
# make sure forms have a form key
if hasattr(task_api, 'form') and task_api.form is not None and task_api.form.key == '':
raise ApiError(code='missing_form_key',
message='Forms must include a Form Key.',
task_id=task.id,
task_name=task.get_name())
WorkflowService.populate_form_with_random_data(task, task_api, required_only)
processor.complete_task(task)
if test_until == task.task_spec.name:
raise ApiError.from_task("validation_break",
f"The validation has been exited early on task '{task.task_spec.name}' and was parented by ",
task.parent)
count += 1
if count >= 100:
raise ApiError.from_task(code='validation_loop',
message=f'There appears to be an infinite loop in the validation. Task is {task.task_spec.description}',
task=task)
WorkflowService._process_documentation(processor.bpmn_workflow.last_task.parent.parent)
except WorkflowException as we:
WorkflowService.delete_test_data()
raise ApiError.from_workflow_exception("workflow_validation_exception", str(we), we)
while not processor.bpmn_workflow.is_completed():
try:
processor.bpmn_workflow.get_deep_nav_list() # Assure no errors with navigation.
processor.bpmn_workflow.do_engine_steps()
tasks = processor.bpmn_workflow.get_tasks(SpiffTask.READY)
for task in tasks:
if task.task_spec.lane is not None and task.task_spec.lane not in task.data:
raise ApiError.from_task("invalid_role",
f"This task is in a lane called '{task.task_spec.lane}', The "
f" current task data must have information mapping this role to "
f" a unique user id.", task)
task_api = WorkflowService.spiff_task_to_api_task(
task,
add_docs_and_forms=True) # Assure we try to process the documentation, and raise those errors.
# make sure forms have a form key
if hasattr(task_api, 'form') and task_api.form is not None and task_api.form.key == '':
raise ApiError(code='missing_form_key',
message='Forms must include a Form Key.',
task_id=task.id,
task_name=task.get_name())
WorkflowService.populate_form_with_random_data(task, task_api, required_only)
processor.complete_task(task)
except WorkflowException as we:
WorkflowService.delete_test_data()
raise ApiError.from_workflow_exception("workflow_validation_exception", str(we), we)
WorkflowService.delete_test_data()
WorkflowService._process_documentation(processor.bpmn_workflow.last_task.parent.parent)
finally:
WorkflowService.delete_test_data(workflow_model)
return processor.bpmn_workflow.last_task.data
@staticmethod
@ -233,11 +279,53 @@ class WorkflowService(object):
f'The field {field.id} contains an unsupported '
f'property: {name}', task=task)
@staticmethod
def post_process_form(task):
    """Look through the fields in a submitted form, acting on any properties.

    For repeating fields, each entry of the repeat group is post-processed
    individually; all other fields are processed against the task's full data.
    """
    if not hasattr(task.task_spec, 'form'):
        return
    data = task.data  # invariant; hoisted out of the field loop
    for field in task.task_spec.form.fields:
        if field.has_property(Task.FIELD_PROP_REPEAT):
            # The repeat group may be absent when the user added no entries;
            # default to an empty list instead of raising a KeyError.
            repeat_array = data.get(field.get_property(Task.FIELD_PROP_REPEAT), [])
            for repeat_data in repeat_array:
                WorkflowService.__post_process_field(task, field, repeat_data)
        else:
            WorkflowService.__post_process_field(task, field, data)
@staticmethod
def __post_process_field(task, field, data):
    """Apply side effects for a single submitted field.

    * doc_code property: re-evaluate the document code and stamp it on the
      uploaded file record (stop-gap for files uploaded before the code was set).
    * file_data property: persist the field's value as a DataStoreModel entry
      attached to the referenced file.
    """
    if field.has_property(Task.FIELD_PROP_DOC_CODE) and field.id in data:
        # This is generally handled by the front end, but it is possible that the file was uploaded BEFORE
        # the doc_code was correctly set, so this is a stop gap measure to assure we still hit it correctly.
        file_id = data[field.id]["id"]
        doc_code = task.workflow.script_engine.eval(field.get_property(Task.FIELD_PROP_DOC_CODE), data)
        file = db.session.query(FileModel).filter(FileModel.id == file_id).first()
        if file:
            file.irb_doc_code = doc_code
            db.session.commit()
        else:
            # The file was removed but is still referenced in the data, so drop
            # the dangling reference. (The previous `data = {}` only rebound the
            # local name and never actually cleared the caller's dictionary.)
            data.pop(field.id, None)
    if field.has_property(Task.FIELD_PROP_FILE_DATA) and \
            field.get_property(Task.FIELD_PROP_FILE_DATA) in data and \
            field.id in data:
        file_id = data[field.get_property(Task.FIELD_PROP_FILE_DATA)]["id"]
        data_store = DataStoreModel(file_id=file_id, key=field.id, value=data[field.id])
        db.session.add(data_store)
@staticmethod
def evaluate_property(property_name, field, task):
expression = field.get_property(property_name)
data = task.data
if field.has_property(Task.FIELD_PROP_REPEAT):
# Then you must evaluate the expression based on the data within the group only.
group = field.get_property(Task.FIELD_PROP_REPEAT)
if group in task.data:
data = task.data[group][0]
try:
return task.workflow.script_engine.evaluate_expression(task, expression)
return task.workflow.script_engine.eval(expression, data)
except Exception as e:
message = f"The field {field.id} contains an invalid expression. {e}"
raise ApiError.from_task(f'invalid_{property_name}', message, task=task)
@ -266,7 +354,11 @@ class WorkflowService(object):
# Note: if default is False, we don't want to execute this code
if default is None or (isinstance(default, str) and default.strip() == ''):
if field.type == "enum" or field.type == "autocomplete":
return {'value': None, 'label': None}
# Return empty arrays for multi-select enums, otherwise do a value of None.
if field.has_property(Task.FIELD_PROP_ENUM_TYPE) and field.get_property(Task.FIELD_PROP_ENUM_TYPE) == "checkbox":
return []
else:
return {'value': None, 'label': None}
else:
return None
@ -314,7 +406,7 @@ class WorkflowService(object):
if len(field.options) > 0:
random_choice = random.choice(field.options)
if isinstance(random_choice, dict):
return {'value': random_choice['id'], 'label': random_choice['name']}
return {'value': random_choice['id'], 'label': random_choice['name'], 'data': random_choice['data']}
else:
# fixme: why it is sometimes an EnumFormFieldOption, and other times not?
return {'value': random_choice.id, 'label': random_choice.name}
@ -344,9 +436,14 @@ class WorkflowService(object):
elif field.type == 'boolean':
return random.choice([True, False])
elif field.type == 'file':
# fixme: produce some something sensible for files.
return random.randint(1, 100)
# fixme: produce some something sensible for files.
doc_code = field.id
if field.has_property('doc_code'):
doc_code = WorkflowService.evaluate_property('doc_code', field, task)
file_model = FileModel(name="test.png",
irb_doc_code = field.id)
doc_dict = DocumentService.get_dictionary()
file = File.from_models(file_model, None, doc_dict)
return FileSchema().dump(file)
elif field.type == 'files':
return random.randrange(1, 100)
else:
@ -397,7 +494,8 @@ class WorkflowService(object):
completed_tasks=processor.workflow_model.completed_tasks,
last_updated=processor.workflow_model.last_updated,
is_review=is_review,
title=spec.display_name
title=spec.display_name,
study_id=processor.workflow_model.study_id or None
)
if not next_task: # The Next Task can be requested to be a certain task, useful for parallel tasks.
# This may or may not work, sometimes there is no next task to complete.
@ -634,7 +732,7 @@ class WorkflowService(object):
raise ApiError.from_task("invalid_enum", f"The label column '{label_column}' does not exist for item {item}",
task=spiff_task)
options.append({"id": item[value_column], "name": item[label_column], "data": item})
options.append(Box({"id": item[value_column], "name": item[label_column], "data": item}))
return options
@staticmethod
@ -656,30 +754,39 @@ class WorkflowService(object):
@staticmethod
def get_users_assigned_to_task(processor, spiff_task) -> List[str]:
    """Return the list of user uids who may act on the given task.

    Reconstructed from merge residue: the block previously contained both
    the pre-merge and post-merge variants interleaved; this is the merged
    (standalone-workflow-aware) version.

    :param processor: workflow processor whose workflow_model carries
        study_id / user_id.
    :param spiff_task: the Spiff task, possibly lane-restricted.
    :raises ApiError: if the workflow has neither a study nor a user, or a
        lane entry is malformed.
    """
    if processor.workflow_model.study_id is None and processor.workflow_model.user_id is None:
        raise ApiError.from_task(code='invalid_workflow',
                                 message='A workflow must have either a study_id or a user_id.',
                                 task=spiff_task)
    # Standalone workflow - we only care about the current user
    elif processor.workflow_model.study_id is None and processor.workflow_model.user_id is not None:
        return [processor.workflow_model.user_id]
    # Workflow associated with a study - get all the users
    else:
        if not hasattr(spiff_task.task_spec, 'lane') or spiff_task.task_spec.lane is None:
            # No lane: anyone associated with the study who has access.
            associated = StudyService.get_study_associates(processor.workflow_model.study.id)
            return [user['uid'] for user in associated if user['access']]
        if spiff_task.task_spec.lane not in spiff_task.data:
            return []  # No users are assignable to the task at this moment
        lane_users = spiff_task.data[spiff_task.task_spec.lane]
        if not isinstance(lane_users, list):
            lane_users = [lane_users]
        lane_uids = []
        for user in lane_users:
            if isinstance(user, dict):
                if 'value' in user and user['value'] is not None:
                    lane_uids.append(user['value'])
                else:
                    raise ApiError.from_task(
                        code="task_lane_user_error",
                        message="Spiff Task %s lane user dict must have a key called "
                                "'value' with the user's uid in it." % spiff_task.task_spec.name,
                        task=spiff_task)
            elif isinstance(user, str):
                lane_uids.append(user)
            else:
                raise ApiError.from_task(
                    code="task_lane_user_error",
                    message="Spiff Task %s lane user is not a string or dict" % spiff_task.task_spec.name,
                    task=spiff_task)
        return lane_uids
@staticmethod
def log_task_action(user_uid, processor, spiff_task, action):
@ -703,7 +810,7 @@ class WorkflowService(object):
mi_count=task.multi_instance_count, # This is the number of times the task could repeat.
mi_index=task.multi_instance_index, # And the index of the currently repeating task.
process_name=task.process_name,
date=datetime.now(),
# date=datetime.utcnow(), <=== For future reference, NEVER do this. Let the database set the time.
)
db.session.add(task_event)
db.session.commit()
@ -716,10 +823,7 @@ class WorkflowService(object):
if hasattr(task.task_spec, 'form'):
for field in task.task_spec.form.fields:
if field.has_property(Task.FIELD_PROP_READ_ONLY) and \
field.get_property(Task.FIELD_PROP_READ_ONLY).lower().strip() == "true":
continue # Don't add read-only data
elif field.has_property(Task.FIELD_PROP_REPEAT):
if field.has_property(Task.FIELD_PROP_REPEAT):
group = field.get_property(Task.FIELD_PROP_REPEAT)
if group in latest_data:
data[group] = latest_data[group]
@ -772,3 +876,28 @@ class WorkflowService(object):
for workflow in workflows:
if workflow.status == WorkflowStatus.user_input_required or workflow.status == WorkflowStatus.waiting:
WorkflowProcessor.reset(workflow, clear_data=False)
@staticmethod
def get_workflow_from_spec(workflow_spec_id, user):
    """Create, persist, and return a standalone workflow for the given spec.

    :param workflow_spec_id: id of the spec to instantiate.
    :param user: the user the standalone workflow belongs to (study is None).
    """
    # Deliberately not setting last_updated in the application: this file's
    # own convention says to let the database stamp times (the column has a
    # server-side default), avoiding app-clock/timezone inconsistencies.
    workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
                                   study=None,
                                   user_id=user.uid,
                                   workflow_spec_id=workflow_spec_id)
    db.session.add(workflow_model)
    db.session.commit()
    return workflow_model
@staticmethod
def get_standalone_workflow_specs():
    """Return every workflow spec flagged as standalone (runnable without a study)."""
    return db.session.query(WorkflowSpecModel).filter_by(standalone=True).all()
@staticmethod
def get_primary_workflow(workflow_spec_id):
    """Return the FileModel of the primary workflow file for a spec, or None.

    :param workflow_spec_id: id of the workflow spec to look up.
    """
    # .first() already yields None when there is no match, so the previous
    # primary=None / if-file scaffolding was redundant.
    return db.session.query(FileModel).filter(
        FileModel.workflow_spec_id == workflow_spec_id,
        FileModel.primary == True).first()  # noqa: E712 -- SQLAlchemy column comparison

View File

@ -47,6 +47,6 @@ class WorkflowSyncService(object):
return json.loads(response.text)
else:
raise ApiError("workflow_sync_error",
"Received an invalid response from the protocol builder (status %s): %s when calling "
"Received an invalid response from the remote CR-Connect API (status %s): %s when calling "
"url '%s'." %
(response.status_code, response.text, url))

View File

@ -0,0 +1,6 @@
from crc.api.file import get_document_directory
def render_files(study_id, irb_codes):
    """Fetch the document directory for a study and print it.

    NOTE(review): ``irb_codes`` is currently unused and nothing is returned;
    this reads like a work-in-progress stub -- confirm intent.
    """
    document_directory = get_document_directory(study_id)
    print(document_directory)

View File

@ -89,6 +89,10 @@
padding-top: 10px;
}
td#logo-td {
width: 50px;
}
.footer, .header {
clear: both;
margin-top: 10px;
@ -361,7 +365,7 @@
<table role="presentation">
<tr>
<th role="presentation"></th>
<td>
<td id="logo-td">
<img class="logo"
src="{{ url_for('static', filename='uva_rotunda.svg', _external=True) }}"
alt="University of Virginia">

View File

@ -34,10 +34,10 @@ imagesize==1.2.0
inflection==0.5.1
itsdangerous==1.1.0
jdcal==1.4.1
jinja2==2.11.2
jinja2==2.11.3
jsonschema==3.2.0
ldap3==2.8.1
lxml==4.6.2
lxml==4.6.3
mako==1.1.3
markdown==3.3.3
markupsafe==1.1.1
@ -53,7 +53,7 @@ psycopg2-binary==2.8.6
pyasn1==0.4.8
pycparser==2.20
pygithub==1.53
pygments==2.7.2
pygments==2.7.4
pyjwt==1.7.1
pyparsing==2.4.7
pyrsistent==0.17.3
@ -61,9 +61,8 @@ python-box==5.2.0
python-dateutil==2.8.1
python-docx==0.8.10
python-editor==1.0.4
python-levenshtein==0.12.0
pytz==2020.4
pyyaml==5.3.1
pyyaml==5.4
recommonmark==0.6.0
requests==2.25.0
sentry-sdk==0.14.4
@ -80,7 +79,7 @@ sphinxcontrib-serializinghtml==1.1.4
spiffworkflow
sqlalchemy==1.3.20
swagger-ui-bundle==0.0.8
urllib3==1.26.2
urllib3==1.26.5
waitress==1.4.4
webob==1.8.6
webtest==2.0.35

View File

@ -7,7 +7,9 @@ from crc.models.file import CONTENT_TYPES
from crc.models.ldap import LdapModel
from crc.models.user import UserModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecCategoryModel
from crc.services.document_service import DocumentService
from crc.services.file_service import FileService
from crc.services.study_service import StudyService
class ExampleDataLoader:
@ -266,7 +268,7 @@ class ExampleDataLoader:
from_tests=True)
def create_spec(self, id, name, display_name="", description="", filepath=None, master_spec=False,
category_id=None, display_order=None, from_tests=False):
category_id=None, display_order=None, from_tests=False, standalone=False):
"""Assumes that a directory exists in static/bpmn with the same name as the given id.
further assumes that the [id].bpmn is the primary file for the workflow.
returns an array of data models to be added to the database."""
@ -278,7 +280,8 @@ class ExampleDataLoader:
description=description,
is_master_spec=master_spec,
category_id=category_id,
display_order=display_order)
display_order=display_order,
standalone=standalone)
db.session.add(spec)
db.session.commit()
if not filepath and not from_tests:
@ -314,14 +317,14 @@ class ExampleDataLoader:
def load_reference_documents(self):
file_path = os.path.join(app.root_path, 'static', 'reference', 'irb_documents.xlsx')
file = open(file_path, "rb")
FileService.add_reference_file(FileService.DOCUMENT_LIST,
FileService.add_reference_file(DocumentService.DOCUMENT_LIST,
binary_data=file.read(),
content_type=CONTENT_TYPES['xls'])
file.close()
file_path = os.path.join(app.root_path, 'static', 'reference', 'investigators.xlsx')
file = open(file_path, "rb")
FileService.add_reference_file(FileService.INVESTIGATOR_LIST,
FileService.add_reference_file(StudyService.INVESTIGATOR_LIST,
binary_data=file.read(),
content_type=CONTENT_TYPES['xls'])
file.close()

View File

@ -0,0 +1,27 @@
"""add user_uid column to file_data table
Revision ID: 30e017a03948
Revises: bbf064082623
Create Date: 2021-07-06 10:39:04.661704
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '30e017a03948'
down_revision = 'bbf064082623'
branch_labels = None
depends_on = None
def upgrade():
    """Track which user uploaded each file_data row."""
    op.add_column('file_data', sa.Column('user_uid', sa.String(), nullable=True))
    # NOTE(review): passing None lets Alembic auto-generate the constraint
    # name, which is why downgrade() cannot drop it by name -- consider
    # giving the foreign key an explicit name.
    op.create_foreign_key(None, 'file_data', 'user', ['user_uid'], ['uid'])
def downgrade():
    """Remove the user_uid column (its FK constraint is dropped with it)."""
    # The FK was created with an auto-generated name in upgrade(), so it is
    # not dropped by name here; dropping the column removes the dependent
    # constraint as well.
    # op.drop_constraint('file_data_user_uid_fkey', 'file_data', type_='foreignkey')
    # op.execute("update file_data set user_uid = NULL WHERE user_uid IS NOT NULL")
    op.drop_column('file_data', 'user_uid')

View File

@ -0,0 +1,28 @@
"""empty message
Revision ID: 62910318009f
Revises: 665624ac29f1
Create Date: 2021-04-28 14:09:57.648732
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '62910318009f'
down_revision = '665624ac29f1'
branch_labels = None
depends_on = None
def upgrade():
    """Add a nullable integer ``size`` column to file_data (presumably the
    stored file's size in bytes -- confirm against callers)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('file_data', sa.Column('size', sa.Integer(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Drop the ``size`` column added in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('file_data', 'size')
    # ### end Alembic commands ###

View File

@ -0,0 +1,29 @@
"""empty message
Revision ID: 665624ac29f1
Revises: c872232ebdcb
Create Date: 2021-04-26 15:10:34.000646
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '665624ac29f1'
down_revision = 'c872232ebdcb'
branch_labels = None
depends_on = None
def upgrade():
    """Link data_store rows to files via a nullable file_id foreign key."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('data_store', sa.Column('file_id', sa.Integer(), nullable=True))
    # Explicitly named so downgrade() can drop it by name.
    op.create_foreign_key('file_id_key', 'data_store', 'file', ['file_id'], ['id'])
    # ### end Alembic commands ###
def downgrade():
    """Remove the data_store -> file link added in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    # The constraint must be dropped before the column it references.
    op.drop_constraint('file_id_key', 'data_store', type_='foreignkey')
    op.drop_column('data_store', 'file_id')
    # ### end Alembic commands ###

View File

@ -0,0 +1,30 @@
"""empty message
Revision ID: 8b976945a54e
Revises: c872232ebdcb
Create Date: 2021-04-18 11:42:41.894378
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8b976945a54e'
down_revision = 'c872232ebdcb'
branch_labels = None
depends_on = None
def upgrade():
    """Support standalone (study-less) workflows.

    Adds workflow.user_id, adds workflow_spec.standalone (backfilled to
    False), and relaxes task_event.study_id to allow NULL.
    """
    op.add_column('workflow', sa.Column('user_id', sa.String(), nullable=True))
    op.add_column('workflow_spec', sa.Column('standalone', sa.Boolean(), default=False))
    # default=False above is a Python-side default only; backfill existing
    # rows so no NULLs remain.
    op.execute("UPDATE workflow_spec SET standalone=False WHERE standalone is null;")
    # Standalone workflows produce task events with no study.
    op.execute("ALTER TABLE task_event ALTER COLUMN study_id DROP NOT NULL")
def downgrade():
    """Revert standalone-workflow support added in upgrade()."""
    # Clear user_id values before dropping the column (kept as written;
    # strictly the DROP alone would suffice).
    op.execute("UPDATE workflow SET user_id=NULL WHERE user_id is not NULL")
    op.drop_column('workflow', 'user_id')
    op.drop_column('workflow_spec', 'standalone')
    # NOTE(review): this will fail if any task_event rows now have a NULL
    # study_id -- those would need cleanup before restoring NOT NULL.
    op.execute("ALTER TABLE task_event ALTER COLUMN study_id SET NOT NULL ")

View File

@ -0,0 +1,28 @@
"""update type on task_events table and workflow table
Revision ID: abeffe547305
Revises: 665624ac29f1
Create Date: 2021-04-28 08:51:16.220260
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'abeffe547305'
down_revision = '665624ac29f1'
branch_labels = None
depends_on = None
def upgrade():
    """Store these timestamps with time zone information so values compare
    correctly across servers in different time zones."""
    # Removed a redundant trailing `pass` left after the real statements.
    op.execute("alter table task_event alter column date type timestamp with time zone")
    op.execute("alter table workflow alter column last_updated type timestamp with time zone")
def downgrade():
    """Revert the timestamp columns to time-zone-naive types."""
    # Removed a redundant trailing `pass` left after the real statements.
    op.execute("alter table task_event alter column date type timestamp without time zone")
    op.execute("alter table workflow alter column last_updated type timestamp without time zone")

View File

@ -0,0 +1,38 @@
"""empty message
Revision ID: bbf064082623
Revises: c1449d1d1681
Create Date: 2021-05-13 15:07:44.463757
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
from sqlalchemy import func
revision = 'bbf064082623'
down_revision = 'c1449d1d1681'
branch_labels = None
depends_on = None
def upgrade():
    """Let the database stamp timestamps itself via server-side defaults,
    keeping times consistent regardless of the app server's clock."""
    # Removed a duplicated alter of data_store.last_updated (the same
    # statement appeared twice in the original).
    op.alter_column('data_store', 'last_updated', server_default=func.now())
    op.alter_column('file_data', 'date_created', server_default=func.now())
    op.alter_column('ldap_model', 'date_cached', server_default=func.now())
    op.alter_column('study', 'last_updated', server_default=func.now())
    op.alter_column('study_event', 'create_date', server_default=func.now())
    op.alter_column('workflow', 'last_updated', server_default=func.now())
def downgrade():
    """Remove the server-side timestamp defaults added in upgrade()."""
    # Removed a duplicated alter of data_store.last_updated (the same
    # statement appeared twice in the original).
    op.alter_column('data_store', 'last_updated', server_default=None)
    op.alter_column('file_data', 'date_created', server_default=None)
    op.alter_column('ldap_model', 'date_cached', server_default=None)
    op.alter_column('study', 'last_updated', server_default=None)
    op.alter_column('study_event', 'create_date', server_default=None)
    op.alter_column('workflow', 'last_updated', server_default=None)

View File

@ -0,0 +1,24 @@
"""empty message
Revision ID: c1449d1d1681
Revises: abeffe547305, 8b976945a54e, 62910318009f
Create Date: 2021-05-04 13:20:55.447143
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c1449d1d1681'
down_revision = ('abeffe547305', '8b976945a54e', '62910318009f')
branch_labels = None
depends_on = None
def upgrade():
    """Intentionally empty: this revision only merges three migration heads."""
    pass
def downgrade():
    """Intentionally empty: nothing to revert for a merge-point revision."""
    pass

View File

@ -0,0 +1,25 @@
"""change irb_documents to documents
Revision ID: c16d3047abbe
Revises: bbf064082623
Create Date: 2021-07-07 13:07:53.966102
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c16d3047abbe'
down_revision = 'bbf064082623'
branch_labels = None
depends_on = None
def upgrade():
    """Rename the reference file now that it covers more than IRB documents."""
    # Removed a stray `pass` that preceded the real statement.
    op.execute("update file set name = 'documents.xlsx' where name='irb_documents.xlsx'")
def downgrade():
    """Restore the original reference-file name."""
    op.execute("update file set name = 'irb_documents.xlsx' where name='documents.xlsx'")

View File

@ -0,0 +1,24 @@
"""merge 30e017a03948 and c16d3047abbe
Revision ID: dc30b8f6571c
Revises: 30e017a03948, c16d3047abbe
Create Date: 2021-07-12 11:11:47.410647
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'dc30b8f6571c'
down_revision = ('30e017a03948', 'c16d3047abbe')
branch_labels = None
depends_on = None
def upgrade():
    """Intentionally empty: this revision only merges two migration heads."""
    pass
def downgrade():
    """Intentionally empty: nothing to revert for a merge-point revision."""
    pass

View File

@ -10,3 +10,5 @@ services:
- POSTGRES_USER=${DB_USER}
- POSTGRES_PASSWORD=${DB_PASS}
- POSTGRES_MULTIPLE_DATABASES=crc_dev,crc_test,pb,pb_test
- TZ=America/New_York
- PGTZ=America/New_York

481
postgres/package-lock.json generated Normal file
View File

@ -0,0 +1,481 @@
{
"requires": true,
"lockfileVersion": 1,
"dependencies": {
"@types/node": {
"version": "14.14.41",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.41.tgz",
"integrity": "sha512-dueRKfaJL4RTtSa7bWeTK1M+VH+Gns73oCgzvYfHZywRCoPSd8EkXBL0mZ9unPTveBn+D9phZBaxuzpwjWkW0g==",
"dev": true,
"optional": true
},
"@types/yauzl": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.9.1.tgz",
"integrity": "sha512-A1b8SU4D10uoPjwb0lnHmmu8wZhR9d+9o2PKBQT2jU5YPTKsxac6M2qGAdY7VcL+dHHhARVUDmeg0rOrcd9EjA==",
"dev": true,
"optional": true,
"requires": {
"@types/node": "*"
}
},
"agent-base": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
"integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
"dev": true,
"requires": {
"debug": "4"
}
},
"balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
"dev": true
},
"base64-js": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
"dev": true
},
"bl": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
"integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
"dev": true,
"requires": {
"buffer": "^5.5.0",
"inherits": "^2.0.4",
"readable-stream": "^3.4.0"
}
},
"brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"buffer": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
"integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
"dev": true,
"requires": {
"base64-js": "^1.3.1",
"ieee754": "^1.1.13"
}
},
"buffer-crc32": {
"version": "0.2.13",
"resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
"integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=",
"dev": true
},
"chownr": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
"integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==",
"dev": true
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
"dev": true
},
"debug": {
"version": "4.3.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz",
"integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==",
"dev": true,
"requires": {
"ms": "2.1.2"
}
},
"devtools-protocol": {
"version": "0.0.854822",
"resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.854822.tgz",
"integrity": "sha512-xd4D8kHQtB0KtWW0c9xBZD5LVtm9chkMOfs/3Yn01RhT/sFIsVtzTtypfKoFfWBaL+7xCYLxjOLkhwPXaX/Kcg==",
"dev": true
},
"end-of-stream": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
"integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
"dev": true,
"requires": {
"once": "^1.4.0"
}
},
"extract-zip": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz",
"integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==",
"dev": true,
"requires": {
"@types/yauzl": "^2.9.1",
"debug": "^4.1.1",
"get-stream": "^5.1.0",
"yauzl": "^2.10.0"
}
},
"fd-slicer": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz",
"integrity": "sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4=",
"dev": true,
"requires": {
"pend": "~1.2.0"
}
},
"find-up": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
"integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
"dev": true,
"requires": {
"locate-path": "^5.0.0",
"path-exists": "^4.0.0"
}
},
"fs-constants": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
"integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==",
"dev": true
},
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
"dev": true
},
"get-stream": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz",
"integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==",
"dev": true,
"requires": {
"pump": "^3.0.0"
}
},
"glob": {
"version": "7.1.6",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
"integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
"dev": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"https-proxy-agent": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz",
"integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==",
"dev": true,
"requires": {
"agent-base": "6",
"debug": "4"
}
},
"ieee754": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
"integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
"dev": true
},
"inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
"dev": true,
"requires": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
"dev": true
},
"isexe": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
"integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=",
"dev": true
},
"karma-chrome-launcher": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/karma-chrome-launcher/-/karma-chrome-launcher-3.1.0.tgz",
"integrity": "sha512-3dPs/n7vgz1rxxtynpzZTvb9y/GIaW8xjAwcIGttLbycqoFtI7yo1NGnQi6oFTherRE+GIhCAHZC4vEqWGhNvg==",
"dev": true,
"requires": {
"which": "^1.2.1"
}
},
"locate-path": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
"integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
"dev": true,
"requires": {
"p-locate": "^4.1.0"
}
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dev": true,
"requires": {
"brace-expansion": "^1.1.7"
}
},
"mkdirp-classic": {
"version": "0.5.3",
"resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz",
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==",
"dev": true
},
"ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
"dev": true
},
"node-fetch": {
"version": "2.6.1",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz",
"integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==",
"dev": true
},
"once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
"dev": true,
"requires": {
"wrappy": "1"
}
},
"p-limit": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
"integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
"dev": true,
"requires": {
"p-try": "^2.0.0"
}
},
"p-locate": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
"integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
"dev": true,
"requires": {
"p-limit": "^2.2.0"
}
},
"p-try": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
"integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
"dev": true
},
"path-exists": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
"dev": true
},
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
"dev": true
},
"pend": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz",
"integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=",
"dev": true
},
"pkg-dir": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz",
"integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==",
"dev": true,
"requires": {
"find-up": "^4.0.0"
}
},
"progress": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz",
"integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==",
"dev": true
},
"proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
"dev": true
},
"pump": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
"integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
"dev": true,
"requires": {
"end-of-stream": "^1.1.0",
"once": "^1.3.1"
}
},
"puppeteer": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/puppeteer/-/puppeteer-8.0.0.tgz",
"integrity": "sha512-D0RzSWlepeWkxPPdK3xhTcefj8rjah1791GE82Pdjsri49sy11ci/JQsAO8K2NRukqvwEtcI+ImP5F4ZiMvtIQ==",
"dev": true,
"requires": {
"debug": "^4.1.0",
"devtools-protocol": "0.0.854822",
"extract-zip": "^2.0.0",
"https-proxy-agent": "^5.0.0",
"node-fetch": "^2.6.1",
"pkg-dir": "^4.2.0",
"progress": "^2.0.1",
"proxy-from-env": "^1.1.0",
"rimraf": "^3.0.2",
"tar-fs": "^2.0.0",
"unbzip2-stream": "^1.3.3",
"ws": "^7.2.3"
}
},
"readable-stream": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
"dev": true,
"requires": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
}
},
"rimraf": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
"integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
"dev": true,
"requires": {
"glob": "^7.1.3"
}
},
"safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
"dev": true
},
"string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"dev": true,
"requires": {
"safe-buffer": "~5.2.0"
}
},
"tar-fs": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz",
"integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==",
"dev": true,
"requires": {
"chownr": "^1.1.1",
"mkdirp-classic": "^0.5.2",
"pump": "^3.0.0",
"tar-stream": "^2.1.4"
}
},
"tar-stream": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz",
"integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
"dev": true,
"requires": {
"bl": "^4.0.3",
"end-of-stream": "^1.4.1",
"fs-constants": "^1.0.0",
"inherits": "^2.0.3",
"readable-stream": "^3.1.1"
}
},
"through": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=",
"dev": true
},
"unbzip2-stream": {
"version": "1.4.3",
"resolved": "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz",
"integrity": "sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg==",
"dev": true,
"requires": {
"buffer": "^5.2.1",
"through": "^2.3.8"
}
},
"util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=",
"dev": true
},
"which": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz",
"integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==",
"dev": true,
"requires": {
"isexe": "^2.0.0"
}
},
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
"dev": true
},
"ws": {
"version": "7.4.4",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.4.4.tgz",
"integrity": "sha512-Qm8k8ojNQIMx7S+Zp8u/uHOx7Qazv3Yv4q68MiWWWOJhiwG5W3x7iqmRtJo8xxrciZUY4vRxUTJCKuRnF28ZZw==",
"dev": true
},
"yauzl": {
"version": "2.10.0",
"resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
"integrity": "sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=",
"dev": true,
"requires": {
"buffer-crc32": "~0.2.3",
"fd-slicer": "~1.1.0"
}
}
}
}

View File

@ -2,6 +2,7 @@
# IMPORTANT - Environment must be loaded before app, models, etc....
import os
os.environ["TESTING"] = "true"
import json
@ -23,6 +24,7 @@ from crc.services.file_service import FileService
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
from crc.services.workflow_service import WorkflowService
from crc.services.document_service import DocumentService
from example_data import ExampleDataLoader
# UNCOMMENT THIS FOR DEBUGGING SQL ALCHEMY QUERIES
@ -70,7 +72,7 @@ class BaseTest(unittest.TestCase):
{
'id': 0,
'title': 'The impact of fried pickles on beer consumption in bipedal software developers.',
'last_updated': datetime.datetime.now(),
'last_updated': datetime.datetime.utcnow(),
'status': StudyStatus.in_progress,
'primary_investigator_id': 'dhf8r',
'sponsor': 'Sartography Pharmaceuticals',
@ -80,7 +82,7 @@ class BaseTest(unittest.TestCase):
{
'id': 1,
'title': 'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
'last_updated': datetime.datetime.now(),
'last_updated': datetime.datetime.utcnow(),
'status': StudyStatus.in_progress,
'primary_investigator_id': 'dhf8r',
'sponsor': 'Makerspace & Co.',
@ -131,15 +133,14 @@ class BaseTest(unittest.TestCase):
user = UserService.current_user(allow_admin_impersonate=True)
self.assertEqual(uid, user.uid, 'Logged in user should match given user uid')
return dict(Authorization='Bearer ' + user_model.encode_auth_token().decode())
return dict(Authorization='Bearer ' + user_model.encode_auth_token())
def delete_example_data(self, use_crc_data=False, use_rrt_data=False):
"""
delete everything that matters in the local database - this is used to
test ground zero copy of workflow specs.
"""
session.execute("delete from workflow; delete from file_data; delete from file; delete from workflow_spec;")
session.commit()
ExampleDataLoader.clean_db()
def load_example_data(self, use_crc_data=False, use_rrt_data=False):
"""use_crc_data will cause this to load the mammoth collection of documents
@ -147,13 +148,6 @@ class BaseTest(unittest.TestCase):
otherwise it depends on a small setup for running tests."""
from example_data import ExampleDataLoader
ExampleDataLoader.clean_db()
if use_crc_data:
ExampleDataLoader().load_all()
elif use_rrt_data:
ExampleDataLoader().load_rrt()
else:
ExampleDataLoader().load_test_data()
# If in production mode, only add the first user.
if app.config['PRODUCTION']:
session.add(UserModel(**self.users[0]))
@ -161,23 +155,27 @@ class BaseTest(unittest.TestCase):
for user_json in self.users:
session.add(UserModel(**user_json))
if use_crc_data:
ExampleDataLoader().load_all()
elif use_rrt_data:
ExampleDataLoader().load_rrt()
else:
ExampleDataLoader().load_test_data()
session.commit()
for study_json in self.studies:
study_model = StudyModel(**study_json)
session.add(study_model)
StudyService._add_all_workflow_specs_to_study(study_model)
session.execute(Sequence(StudyModel.__tablename__ + '_id_seq'))
session.commit()
session.commit()
update_seq = f"ALTER SEQUENCE %s RESTART WITH %s" % (StudyModel.__tablename__ + '_id_seq', study_model.id + 1)
print("Update Sequence." + update_seq)
session.execute(update_seq)
session.flush()
specs = session.query(WorkflowSpecModel).all()
self.assertIsNotNone(specs)
for spec in specs:
files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all()
self.assertIsNotNone(files)
self.assertGreater(len(files), 0)
for spec in specs:
files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all()
self.assertIsNotNone(files)
@ -285,28 +283,6 @@ class BaseTest(unittest.TestCase):
session.commit()
return study
def _create_study_workflow_approvals(self, user_uid, title, primary_investigator_id, approver_uids, statuses,
workflow_spec_name="random_fact"):
study = self.create_study(uid=user_uid, title=title, primary_investigator_id=primary_investigator_id)
workflow = self.create_workflow(workflow_name=workflow_spec_name, study=study)
approvals = []
for i in range(len(approver_uids)):
approvals.append(self.create_approval(
study=study,
workflow=workflow,
approver_uid=approver_uids[i],
status=statuses[i],
version=1
))
full_study = {
'study': study,
'workflow': workflow,
'approvals': approvals,
}
return full_study
def create_workflow(self, workflow_name, display_name=None, study=None, category_id=None, as_user="dhf8r"):
session.flush()
@ -323,30 +299,11 @@ class BaseTest(unittest.TestCase):
def create_reference_document(self):
file_path = os.path.join(app.root_path, 'static', 'reference', 'irb_documents.xlsx')
file = open(file_path, "rb")
FileService.add_reference_file(FileService.DOCUMENT_LIST,
FileService.add_reference_file(DocumentService.DOCUMENT_LIST,
binary_data=file.read(),
content_type=CONTENT_TYPES['xls'])
content_type=CONTENT_TYPES['xlsx'])
file.close()
def create_approval(
self,
study=None,
workflow=None,
approver_uid=None,
status=None,
version=None,
):
study = study or self.create_study()
workflow = workflow or self.create_workflow()
approver_uid = approver_uid or self.test_uid
status = status or ApprovalStatus.PENDING.value
version = version or 1
approval = ApprovalModel(study=study, workflow=workflow, approver_uid=approver_uid, status=status,
version=version)
session.add(approval)
session.commit()
return approval
def get_workflow_common(self, url, user):
rv = self.app.get(url,
headers=self.logged_in_headers(user),
@ -365,17 +322,22 @@ class BaseTest(unittest.TestCase):
self.assertEqual(workflow.workflow_spec_id, workflow_api.workflow_spec_id)
return workflow_api
def restart_workflow_api(self, workflow, clear_data=False, user_uid="dhf8r"):
def restart_workflow_api(self, workflow, clear_data=False, delete_files=False, user_uid="dhf8r"):
user = session.query(UserModel).filter_by(uid=user_uid).first()
self.assertIsNotNone(user)
url = (f'/v1.0/workflow/{workflow.id}/restart'
f'?clear_data={str(clear_data)}')
f'?clear_data={str(clear_data)}'
f'&delete_files={str(delete_files)}')
workflow_api = self.get_workflow_common(url, user)
self.assertEqual(workflow.workflow_spec_id, workflow_api.workflow_spec_id)
return workflow_api
def complete_form(self, workflow_in, task_in, dict_data, update_all=False, error_code=None, terminate_loop=None,
user_uid="dhf8r"):
# workflow_in should be a workflow, not a workflow_api
# we were passing in workflow_api in many of our tests, and
# this caused problems testing standalone workflows
standalone = getattr(workflow_in.workflow_spec, 'standalone', False)
prev_completed_task_count = workflow_in.completed_tasks
if isinstance(task_in, dict):
task_id = task_in["id"]
@ -418,7 +380,8 @@ class BaseTest(unittest.TestCase):
.order_by(TaskEventModel.date.desc()).all()
self.assertGreater(len(task_events), 0)
event = task_events[0]
self.assertIsNotNone(event.study_id)
if not standalone:
self.assertIsNotNone(event.study_id)
self.assertEqual(user_uid, event.user_uid)
self.assertEqual(workflow.id, event.workflow_id)
self.assertEqual(workflow.workflow_spec_id, event.workflow_spec_id)
@ -454,3 +417,9 @@ class BaseTest(unittest.TestCase):
if 'impersonate_user' in g:
del g.impersonate_user
def minimal_bpmn(self, content):
"""Returns a bytesIO object of a well formed BPMN xml file with some string content of your choosing."""
minimal_dbpm = "<x><process id='1' isExecutable='false'><startEvent id='a'/></process>%s</x>"
return (minimal_dbpm % content).encode()

View File

@ -0,0 +1,56 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_3d948db" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="Process_44b3aca" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1j6i6nv</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1j6i6nv" sourceRef="StartEvent_1" targetRef="Activity_WhichIRBDocument" />
<bpmn:sequenceFlow id="Flow_1rexoi9" sourceRef="Activity_DeleteIRBDocument" targetRef="Event_06rfn6m" />
<bpmn:endEvent id="Event_06rfn6m">
<bpmn:incoming>Flow_1rexoi9</bpmn:incoming>
</bpmn:endEvent>
<bpmn:userTask id="Activity_WhichIRBDocument" name="Which IRB Document" camunda:formKey="UploadIRBDoc">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="irb_document" label="IRB Document" type="string" defaultValue="Study_Protocol_Document" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1j6i6nv</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1mmief6</bpmn:outgoing>
</bpmn:userTask>
<bpmn:scriptTask id="Activity_DeleteIRBDocument" name="Delete IRB Document">
<bpmn:incoming>SequenceFlow_1mmief6</bpmn:incoming>
<bpmn:outgoing>Flow_1rexoi9</bpmn:outgoing>
<bpmn:script>delete_file(irb_document)</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_1mmief6" sourceRef="Activity_WhichIRBDocument" targetRef="Activity_DeleteIRBDocument" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_44b3aca">
<bpmndi:BPMNEdge id="SequenceFlow_1mmief6_di" bpmnElement="SequenceFlow_1mmief6">
<di:waypoint x="360" y="117" />
<di:waypoint x="400" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1rexoi9_di" bpmnElement="Flow_1rexoi9">
<di:waypoint x="500" y="117" />
<di:waypoint x="562" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1j6i6nv_di" bpmnElement="Flow_1j6i6nv">
<di:waypoint x="215" y="117" />
<di:waypoint x="260" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_06rfn6m_di" bpmnElement="Event_06rfn6m">
<dc:Bounds x="562" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_176crcy_di" bpmnElement="Activity_WhichIRBDocument">
<dc:Bounds x="260" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0om2kg5_di" bpmnElement="Activity_DeleteIRBDocument">
<dc:Bounds x="400" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,53 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_3fd9241" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_9d7b2c2" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_17nzcku</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_17nzcku" sourceRef="StartEvent_1" targetRef="Activity_GetCheckStudy" />
<bpmn:scriptTask id="Activity_GetCheckStudy" name="Get Check Study">
<bpmn:incoming>Flow_17nzcku</bpmn:incoming>
<bpmn:outgoing>Flow_0oozrfg</bpmn:outgoing>
<bpmn:script>check_study = check_study()</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0oozrfg" sourceRef="Activity_GetCheckStudy" targetRef="Activity_DisplayCheckStudy" />
<bpmn:manualTask id="Activity_DisplayCheckStudy" name="Display Check Study">
<bpmn:documentation># Check Study
&lt;div&gt;&lt;span&gt;{{check_study}}&lt;/span&gt;&lt;/div&gt;</bpmn:documentation>
<bpmn:incoming>Flow_0oozrfg</bpmn:incoming>
<bpmn:outgoing>Flow_10sc31i</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:endEvent id="Event_0embsc7">
<bpmn:incoming>Flow_10sc31i</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_10sc31i" sourceRef="Activity_DisplayCheckStudy" targetRef="Event_0embsc7" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_9d7b2c2">
<bpmndi:BPMNEdge id="Flow_10sc31i_di" bpmnElement="Flow_10sc31i">
<di:waypoint x="530" y="177" />
<di:waypoint x="592" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0oozrfg_di" bpmnElement="Flow_0oozrfg">
<di:waypoint x="370" y="177" />
<di:waypoint x="430" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_17nzcku_di" bpmnElement="Flow_17nzcku">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1f9d5ew_di" bpmnElement="Activity_GetCheckStudy">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_01vscea_di" bpmnElement="Activity_DisplayCheckStudy">
<dc:Bounds x="430" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0embsc7_di" bpmnElement="Event_0embsc7">
<dc:Bounds x="592" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,71 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_09rv9vf" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_StatusMessage" name="Status Message" isExecutable="true">
<bpmn:documentation>Testing Workflow Status Messages</bpmn:documentation>
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_0x4n744</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_0x4n744" sourceRef="StartEvent_1" targetRef="Task_SetName" />
<bpmn:sequenceFlow id="SequenceFlow_1o630oy" sourceRef="Task_SetName" targetRef="Task_Decision" />
<bpmn:businessRuleTask id="Task_Decision" name="Make Decision" camunda:decisionRef="Decision_Dog">
<bpmn:incoming>SequenceFlow_1o630oy</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1foyag7</bpmn:outgoing>
</bpmn:businessRuleTask>
<bpmn:sequenceFlow id="SequenceFlow_1foyag7" sourceRef="Task_Decision" targetRef="Task_GoodBye" />
<bpmn:manualTask id="Task_GoodBye" name="Say Good Bye">
<bpmn:documentation>&lt;div&gt;&lt;span&gt;Good Bye {{ dog.name }}&lt;/span&gt;&lt;/div&gt;
&lt;div&gt;&lt;span&gt;You are such a good {{ dog.breed }}&lt;/span&gt;&lt;/div&gt;
</bpmn:documentation>
<bpmn:incoming>SequenceFlow_1foyag7</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1bc1ugw</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:endEvent id="EndEvent_19dasnt">
<bpmn:incoming>SequenceFlow_1bc1ugw</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_1bc1ugw" sourceRef="Task_GoodBye" targetRef="EndEvent_19dasnt" />
<bpmn:userTask id="Task_SetName" name="Set Name" camunda:formKey="NameForm">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="name" label="Name" type="string" defaultValue="Layla" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0x4n744</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1o630oy</bpmn:outgoing>
</bpmn:userTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_StatusMessage">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="165" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0x4n744_di" bpmnElement="SequenceFlow_0x4n744">
<di:waypoint x="201" y="117" />
<di:waypoint x="260" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1o630oy_di" bpmnElement="SequenceFlow_1o630oy">
<di:waypoint x="360" y="117" />
<di:waypoint x="420" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="BusinessRuleTask_0dwwkqn_di" bpmnElement="Task_Decision">
<dc:Bounds x="420" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1foyag7_di" bpmnElement="SequenceFlow_1foyag7">
<di:waypoint x="520" y="117" />
<di:waypoint x="580" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ManualTask_0nb6k7f_di" bpmnElement="Task_GoodBye">
<dc:Bounds x="580" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_19dasnt_di" bpmnElement="EndEvent_19dasnt">
<dc:Bounds x="742" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1bc1ugw_di" bpmnElement="SequenceFlow_1bc1ugw">
<di:waypoint x="680" y="117" />
<di:waypoint x="742" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="UserTask_1h3sio1_di" bpmnElement="Task_SetName">
<dc:Bounds x="260" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,69 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" id="Definitions_1eg3sxk" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<decision id="Decision_Dog" name="Dogs">
<decisionTable id="decisionTable_1">
<input id="input_1" label="Name">
<inputExpression id="inputExpression_1" typeRef="string">
<text>name</text>
</inputExpression>
</input>
<output id="output_1" label="Name" name="dog.name" typeRef="string" />
<output id="OutputClause_1wqk5xi" label="Breed" name="dog.breed" typeRef="string" />
<rule id="DecisionRule_1h6w5qu">
<inputEntry id="UnaryTests_0m8eblt">
<text>'Layla'</text>
</inputEntry>
<outputEntry id="LiteralExpression_186aovp">
<text>'Layla'</text>
</outputEntry>
<outputEntry id="LiteralExpression_10jbx5v">
<text>'Aussie'</text>
</outputEntry>
</rule>
<rule id="DecisionRule_0ziemrx">
<inputEntry id="UnaryTests_0okkroj">
<text>'Mona'</text>
</inputEntry>
<outputEntry id="LiteralExpression_0r1apkh">
<text>'Mona'</text>
</outputEntry>
<outputEntry id="LiteralExpression_08vl869">
<text>'Aussie Mix'</text>
</outputEntry>
</rule>
<rule id="DecisionRule_0fykwob">
<inputEntry id="UnaryTests_044ophk">
<text>'Jerry'</text>
</inputEntry>
<outputEntry id="LiteralExpression_0508umo">
<text>'Jerry'</text>
</outputEntry>
<outputEntry id="LiteralExpression_0ysgqib">
<text>'Aussie Mix'</text>
</outputEntry>
</rule>
<rule id="DecisionRule_05jugdn">
<inputEntry id="UnaryTests_1jri40s">
<text>'Zoey'</text>
</inputEntry>
<outputEntry id="LiteralExpression_1r5jrzq">
<text>'Zoey'</text>
</outputEntry>
<outputEntry id="LiteralExpression_0aqjmjy">
<text>'Healer'</text>
</outputEntry>
</rule>
<rule id="DecisionRule_0gehtk4">
<inputEntry id="UnaryTests_09f1t9t">
<text>'Etta'</text>
</inputEntry>
<outputEntry id="LiteralExpression_0kp8mvr">
<text>'Etta'</text>
</outputEntry>
<outputEntry id="LiteralExpression_0wwry9c">
<text>'Healer Mix'</text>
</outputEntry>
</rule>
</decisionTable>
</decision>
</definitions>

View File

@ -0,0 +1,53 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_024561a" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_1796d29" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0c51a4b</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0c51a4b" sourceRef="StartEvent_1" targetRef="Activity_GetURL" />
<bpmn:sequenceFlow id="Flow_1ker6ik" sourceRef="Activity_GetURL" targetRef="Activity_EmailURL" />
<bpmn:endEvent id="Event_17hmyob">
<bpmn:incoming>Flow_1rfvzi5</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1rfvzi5" sourceRef="Activity_EmailURL" targetRef="Event_17hmyob" />
<bpmn:scriptTask id="Activity_GetURL" name="Get Dashboard URL&#10;">
<bpmn:incoming>Flow_0c51a4b</bpmn:incoming>
<bpmn:outgoing>Flow_1ker6ik</bpmn:outgoing>
<bpmn:script>dashboard_url = get_dashboard_url()</bpmn:script>
</bpmn:scriptTask>
<bpmn:scriptTask id="Activity_EmailURL" name="Email Dashboard URL">
<bpmn:documentation>&lt;a href="{{dashboard_url}}"&gt;{{dashboard_url}}&lt;/a&gt;</bpmn:documentation>
<bpmn:incoming>Flow_1ker6ik</bpmn:incoming>
<bpmn:outgoing>Flow_1rfvzi5</bpmn:outgoing>
<bpmn:script>email(subject='My Email Subject', recipients="test@example.com")</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1796d29">
<bpmndi:BPMNEdge id="Flow_1rfvzi5_di" bpmnElement="Flow_1rfvzi5">
<di:waypoint x="530" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1ker6ik_di" bpmnElement="Flow_1ker6ik">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0c51a4b_di" bpmnElement="Flow_0c51a4b">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_17hmyob_di" bpmnElement="Event_17hmyob">
<dc:Bounds x="592" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1n7b49v_di" bpmnElement="Activity_GetURL">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1dn6kw2_di" bpmnElement="Activity_EmailURL">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

Binary file not shown.

View File

@ -10,7 +10,7 @@
<camunda:formData>
<camunda:formField id="empty_select" label="Select One" type="enum">
<camunda:properties>
<camunda:property id="spreadsheet.name" value="empty_spreadsheet.xls" />
<camunda:property id="spreadsheet.name" value="empty_spreadsheet.xlsx" />
<camunda:property id="spreadsheet.value.column" value="COMPANY_ID" />
<camunda:property id="spreadsheet.label.column" value="COMPANY_NAME" />
</camunda:properties>

Binary file not shown.

View File

@ -14,7 +14,7 @@
<camunda:formData>
<camunda:formField id="AllTheNames" label="Select a value" type="enum">
<camunda:properties>
<camunda:property id="spreadsheet.name" value="customer_list.xls" />
<camunda:property id="spreadsheet.name" value="customer_list.xlsx" />
<camunda:property id="spreadsheet.value.column" value="CUSTOMER_NUMBER" />
<camunda:property id="spreadsheet.label.column" value="CUSTOMER_NAME" />
</camunda:properties>

View File

@ -14,7 +14,7 @@
<camunda:formData>
<camunda:formField id="sponsor" label="Select a value" type="autocomplete">
<camunda:properties>
<camunda:property id="spreadsheet.name" value="sponsors.xls" />
<camunda:property id="spreadsheet.name" value="sponsors.xlsx" />
<camunda:property id="spreadsheet.value.column" value="CUSTOMER_NUMBER" />
<camunda:property id="spreadsheet.label.column" value="CUSTOMER_NAME" />
</camunda:properties>

Binary file not shown.

View File

@ -0,0 +1,64 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_0vm4ua3" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="Process_EnumResults" name="Empty Enum" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_08cjvuw</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_08cjvuw" sourceRef="StartEvent_1" targetRef="Activity_EmptyEnum" />
<bpmn:userTask id="Activity_EmptyEnum" name="Empty Results" camunda:formKey="EnumForm">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="checkbox" label="Select One" type="enum">
<camunda:properties>
<camunda:property id="enum_type" value="checkbox" />
</camunda:properties>
<camunda:value id="option_one" name="Option One" />
</camunda:formField>
<camunda:formField id="radio" type="enum">
<camunda:value id="option_one" name="Option One" />
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_08cjvuw</bpmn:incoming>
<bpmn:outgoing>Flow_0qm71qa</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_0qm71qa" sourceRef="Activity_EmptyEnum" targetRef="Activity_GoodBye" />
<bpmn:endEvent id="Event_034utr4">
<bpmn:incoming>Flow_0ynk21r</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0ynk21r" sourceRef="Activity_GoodBye" targetRef="Event_034utr4" />
<bpmn:manualTask id="Activity_GoodBye" name="Good Bye">
<bpmn:documentation>&lt;H1&gt;Good Bye&lt;/H1&gt;</bpmn:documentation>
<bpmn:incoming>Flow_0qm71qa</bpmn:incoming>
<bpmn:outgoing>Flow_0ynk21r</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_EnumResults">
<bpmndi:BPMNEdge id="Flow_0ynk21r_di" bpmnElement="Flow_0ynk21r">
<di:waypoint x="530" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0qm71qa_di" bpmnElement="Flow_0qm71qa">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_08cjvuw_di" bpmnElement="Flow_08cjvuw">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1mqyx7y_di" bpmnElement="Activity_EmptyEnum">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_034utr4_di" bpmnElement="Event_034utr4">
<dc:Bounds x="592" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0obcp1b_di" bpmnElement="Activity_GoodBye">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,86 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1j7idla" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:process id="Process_18biih5" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1pnq3kg</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1pnq3kg" sourceRef="StartEvent_1" targetRef="Task_Has_Bananas" />
<bpmn:scriptTask id="Task_Has_Bananas" name="get Documents">
<bpmn:incoming>SequenceFlow_1pnq3kg</bpmn:incoming>
<bpmn:outgoing>Flow_1xqewuk</bpmn:outgoing>
<bpmn:script>documents = study_info('documents')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1xqewuk" sourceRef="Task_Has_Bananas" targetRef="Activity_0yikdu7" />
<bpmn:scriptTask id="Activity_0yikdu7" name="save arbitrary value">
<bpmn:incoming>Flow_1xqewuk</bpmn:incoming>
<bpmn:outgoing>Flow_0z7kamo</bpmn:outgoing>
<bpmn:script>filelist = list(documents.keys())
fileid = documents['UVACompl_PRCAppr'].files[0]['file_id']
fileurl = documents['UVACompl_PRCAppr'].files[0]['url']
filename = documents['UVACompl_PRCAppr'].files[0]['name']
file_data_set(file_id=fileid,key='test',value='me')</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="Event_1pdyoyv">
<bpmn:incoming>Flow_02bgcrp</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0z7kamo" sourceRef="Activity_0yikdu7" targetRef="Activity_19x6e2e" />
<bpmn:sequenceFlow id="Flow_15mmymi" sourceRef="Activity_19x6e2e" targetRef="Activity_0oaeqxs" />
<bpmn:scriptTask id="Activity_19x6e2e" name="get output">
<bpmn:incoming>Flow_0z7kamo</bpmn:incoming>
<bpmn:outgoing>Flow_15mmymi</bpmn:outgoing>
<bpmn:script>output=file_data_get(file_id=fileid,key='test')</bpmn:script>
</bpmn:scriptTask>
<bpmn:scriptTask id="Activity_0oaeqxs" name="get output2">
<bpmn:incoming>Flow_15mmymi</bpmn:incoming>
<bpmn:outgoing>Flow_02bgcrp</bpmn:outgoing>
<bpmn:script>output2=file_data_get(file_id=fileid,key='unobtainium',default='nope')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_02bgcrp" sourceRef="Activity_0oaeqxs" targetRef="Event_1pdyoyv" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_18biih5">
<bpmndi:BPMNEdge id="SequenceFlow_1pnq3kg_di" bpmnElement="SequenceFlow_1pnq3kg">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_01ekdl8_di" bpmnElement="Task_Has_Bananas">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1xqewuk_di" bpmnElement="Flow_1xqewuk">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0g5namy_di" bpmnElement="Activity_0yikdu7">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1pdyoyv_di" bpmnElement="Event_1pdyoyv">
<dc:Bounds x="962" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0z7kamo_di" bpmnElement="Flow_0z7kamo">
<di:waypoint x="530" y="117" />
<di:waypoint x="590" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_15mmymi_di" bpmnElement="Flow_15mmymi">
<di:waypoint x="690" y="117" />
<di:waypoint x="760" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0ma7ela_di" bpmnElement="Activity_19x6e2e">
<dc:Bounds x="590" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0oaeqxs_di" bpmnElement="Activity_0oaeqxs">
<dc:Bounds x="760" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_02bgcrp_di" bpmnElement="Flow_02bgcrp">
<di:waypoint x="860" y="117" />
<di:waypoint x="962" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,13 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1wrlvk8" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1wrlvk8" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="Finance" isExecutable="true">
<bpmn:startEvent id="StartEvent_1p6s47e">
<bpmn:outgoing>SequenceFlow_0ea9hvd</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:endEvent id="EndEvent_14p904o">
<bpmn:incoming>SequenceFlow_1h0d349</bpmn:incoming>
<bpmn:incoming>Flow_0t55959</bpmn:incoming>
</bpmn:endEvent>
<bpmn:userTask id="Task_112migv" name="Upload Executed Non-Funded" camunda:formKey="FormKey_ExecutedNonFunded">
<bpmn:sequenceFlow id="SequenceFlow_0ea9hvd" sourceRef="StartEvent_1p6s47e" targetRef="Activity_0neioh9" />
<bpmn:userTask id="Activity_0neioh9" name="Upload Executed Non-Funded" camunda:formKey="FormKey_ExecutedNonFunded">
<bpmn:documentation>#### Non-Funded Executed Agreement
@ -15,40 +16,55 @@
OGC will upload the Non-Funded Executed Agreement after it has been negotiated by OSP contract negotiator.</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="UVACompl_PRCAppr" label="Non-Funded Executed Agreement" type="file">
<camunda:formField id="Date" label="Version Date" type="date">
<camunda:properties>
<camunda:property id="group" value="upload" />
<camunda:property id="repeat" value="upload" />
<camunda:property id="group" value="PCRApproval" />
<camunda:property id="file_data" value="Some_File" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="file_type" type="enum" defaultValue="AD_CoCApp">
<camunda:value id="AD_CoCApp" name="Ancillary Documents / Case Report Form" />
<camunda:value id="AD_CoCAppr" name="Ancillary Documents / CoC Approval" />
</camunda:formField>
<camunda:formField id="Some_File" label="Upload File" type="file">
<camunda:properties>
<camunda:property id="group" value="PCRApproval" />
<camunda:property id="doc_code" value="file_type" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="Language" label="Language" type="string" defaultValue="Engish">
<camunda:properties>
<camunda:property id="group" value="PCRApproval" />
<camunda:property id="file_data" value="Some_File" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0ea9hvd</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1h0d349</bpmn:outgoing>
<bpmn:outgoing>Flow_0t55959</bpmn:outgoing>
<bpmn:standardLoopCharacteristics />
</bpmn:userTask>
<bpmn:sequenceFlow id="SequenceFlow_0ea9hvd" sourceRef="StartEvent_1p6s47e" targetRef="Task_112migv" />
<bpmn:sequenceFlow id="SequenceFlow_1h0d349" sourceRef="Task_112migv" targetRef="EndEvent_14p904o" />
<bpmn:sequenceFlow id="Flow_0t55959" sourceRef="Activity_0neioh9" targetRef="EndEvent_14p904o" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Finance">
<bpmndi:BPMNEdge id="Flow_0t55959_di" bpmnElement="Flow_0t55959">
<di:waypoint x="310" y="117" />
<di:waypoint x="392" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_0ea9hvd_di" bpmnElement="SequenceFlow_0ea9hvd">
<di:waypoint x="148" y="117" />
<di:waypoint x="210" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="StartEvent_1p6s47e_di" bpmnElement="StartEvent_1p6s47e">
<dc:Bounds x="112" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_14p904o_di" bpmnElement="EndEvent_14p904o">
<dc:Bounds x="682" y="99" width="36" height="36" />
<dc:Bounds x="392" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="UserTask_1peopdt_di" bpmnElement="Task_112migv">
<dc:Bounds x="350" y="77" width="100" height="80" />
<bpmndi:BPMNShape id="Activity_0neioh9_di" bpmnElement="Activity_0neioh9">
<dc:Bounds x="210" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0ea9hvd_di" bpmnElement="SequenceFlow_0ea9hvd">
<di:waypoint x="148" y="117" />
<di:waypoint x="350" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1h0d349_di" bpmnElement="SequenceFlow_1h0d349">
<di:waypoint x="450" y="117" />
<di:waypoint x="682" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>
</bpmn:definitions>

View File

@ -0,0 +1,60 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1wrlvk8" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="Finance" isExecutable="true">
<bpmn:startEvent id="StartEvent_1p6s47e">
<bpmn:outgoing>SequenceFlow_0ea9hvd</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:endEvent id="EndEvent_14p904o">
<bpmn:incoming>Flow_0t55959</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_0ea9hvd" sourceRef="StartEvent_1p6s47e" targetRef="Activity_0neioh9" />
<bpmn:userTask id="Activity_0neioh9" name="Upload Executed Non-Funded" camunda:formKey="FormKey_ExecutedNonFunded">
<bpmn:documentation>#### Non-Funded Executed Agreement
#### Process:
OGC will upload the Non-Funded Executed Agreement after it has been negotiated by OSP contract negotiator.</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="AD_CoCApp" label="Upload File" type="file" />
<camunda:formField id="Language" label="Language" type="string" defaultValue="Engish">
<camunda:properties>
<camunda:property id="group" value="PCRApproval" />
<camunda:property id="file_data" value="AD_CoCApp" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="Date" label="Version Date" type="date">
<camunda:properties>
<camunda:property id="group" value="PCRApproval" />
<camunda:property id="file_data" value="AD_CoCApp" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0ea9hvd</bpmn:incoming>
<bpmn:outgoing>Flow_0t55959</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_0t55959" sourceRef="Activity_0neioh9" targetRef="EndEvent_14p904o" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Finance">
<bpmndi:BPMNEdge id="Flow_0t55959_di" bpmnElement="Flow_0t55959">
<di:waypoint x="310" y="117" />
<di:waypoint x="392" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_0ea9hvd_di" bpmnElement="SequenceFlow_0ea9hvd">
<di:waypoint x="148" y="117" />
<di:waypoint x="210" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="StartEvent_1p6s47e_di" bpmnElement="StartEvent_1p6s47e">
<dc:Bounds x="112" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_14p904o_di" bpmnElement="EndEvent_14p904o">
<dc:Bounds x="392" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0neioh9_di" bpmnElement="Activity_0neioh9">
<dc:Bounds x="210" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,62 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_6e97803" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_d8c1062" name="GetValidation" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1aycav1</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1aycav1" sourceRef="StartEvent_1" targetRef="Activity_RunScript" />
<bpmn:scriptTask id="Activity_RunScript" name="Run Script">
<bpmn:incoming>Flow_1aycav1</bpmn:incoming>
<bpmn:outgoing>Flow_0wkyatv</bpmn:outgoing>
<bpmn:script>pi = study_info('investigators').get('PI', False)
if pi:
try:
pi_assc = get_study_associate(pi.user_id)
except:
pi_assc_chk = False
else:
if pi_assc['role'] == "Primary Investigator":
pi_assc_chk = True
else:
pi_assc_chk = False</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0wkyatv" sourceRef="Activity_RunScript" targetRef="Activity_DisplayValue" />
<bpmn:manualTask id="Activity_DisplayValue" name="Display Value">
<bpmn:documentation>pi_assc_chk is {{pi_assc_chk}}</bpmn:documentation>
<bpmn:incoming>Flow_0wkyatv</bpmn:incoming>
<bpmn:outgoing>Flow_0784fc6</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:endEvent id="Event_0hdelnp">
<bpmn:incoming>Flow_0784fc6</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0784fc6" sourceRef="Activity_DisplayValue" targetRef="Event_0hdelnp" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_d8c1062">
<bpmndi:BPMNEdge id="Flow_0784fc6_di" bpmnElement="Flow_0784fc6">
<di:waypoint x="530" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0wkyatv_di" bpmnElement="Flow_0wkyatv">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1aycav1_di" bpmnElement="Flow_1aycav1">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1ah3917_di" bpmnElement="Activity_RunScript">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1sysjzs_di" bpmnElement="Activity_DisplayValue">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0hdelnp_di" bpmnElement="Event_0hdelnp">
<dc:Bounds x="592" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,58 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_0ixyfs0" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_HelloWorld" name="Hello World Process" isExecutable="true">
<bpmn:documentation>This workflow asks for a name and says hello</bpmn:documentation>
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_0qyd2b7</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_0qyd2b7" sourceRef="StartEvent_1" targetRef="Task_GetName" />
<bpmn:userTask id="Task_GetName" name="Get Name" camunda:formKey="Name">
<bpmn:documentation>Hello</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="name" label="Name" type="string" defaultValue="World" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0qyd2b7</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1h46b40</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="SequenceFlow_1h46b40" sourceRef="Task_GetName" targetRef="Task_SayHello" />
<bpmn:manualTask id="Task_SayHello" name="Say Hello">
<bpmn:documentation>Hello {{name}}</bpmn:documentation>
<bpmn:incoming>SequenceFlow_1h46b40</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0lqrc6e</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:endEvent id="EndEvent_1l03lqw">
<bpmn:incoming>SequenceFlow_0lqrc6e</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_0lqrc6e" sourceRef="Task_SayHello" targetRef="EndEvent_1l03lqw" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_HelloWorld">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0qyd2b7_di" bpmnElement="SequenceFlow_0qyd2b7">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="UserTask_0fbucz7_di" bpmnElement="Task_GetName">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1h46b40_di" bpmnElement="SequenceFlow_1h46b40">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ManualTask_1tia2zr_di" bpmnElement="Task_SayHello">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_1l03lqw_di" bpmnElement="EndEvent_1l03lqw">
<dc:Bounds x="592" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0lqrc6e_di" bpmnElement="SequenceFlow_0lqrc6e">
<di:waypoint x="530" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,97 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_81799d0" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_InfiniteLoop" name="Infinite Loop" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0ldlhrt</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0ldlhrt" sourceRef="StartEvent_1" targetRef="Activity_StudyInfo" />
<bpmn:scriptTask id="Activity_StudyInfo" name="Get Study Info">
<bpmn:incoming>Flow_0ldlhrt</bpmn:incoming>
<bpmn:incoming>Flow_05mrx8v</bpmn:incoming>
<bpmn:outgoing>Flow_0pddur1</bpmn:outgoing>
<bpmn:script>investigators = study_info('investigators')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0pddur1" sourceRef="Activity_StudyInfo" targetRef="Activity_DisplayInvestigators" />
<bpmn:manualTask id="Activity_DisplayInvestigators" name="Display Investigators">
<bpmn:documentation>Investigators: {{ investigators }}</bpmn:documentation>
<bpmn:incoming>Flow_0pddur1</bpmn:incoming>
<bpmn:outgoing>Flow_03m3cuy</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:exclusiveGateway id="Gateway_0n9lzir" name="Test DEPT_CH" default="Flow_05mrx8v">
<bpmn:incoming>Flow_03m3cuy</bpmn:incoming>
<bpmn:outgoing>Flow_05mrx8v</bpmn:outgoing>
<bpmn:outgoing>Flow_1212fe2</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_03m3cuy" sourceRef="Activity_DisplayInvestigators" targetRef="Gateway_0n9lzir" />
<bpmn:sequenceFlow id="Flow_05mrx8v" name="not has DEPT_CH" sourceRef="Gateway_0n9lzir" targetRef="Activity_StudyInfo" />
<bpmn:sequenceFlow id="Flow_1212fe2" name="Has DEPT_CH" sourceRef="Gateway_0n9lzir" targetRef="Activity_GoodBye">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">hasattr(investigators, 'DEPT_CH')</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:endEvent id="Event_0azm9il">
<bpmn:incoming>Flow_14jn215</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_14jn215" sourceRef="Activity_GoodBye" targetRef="Event_0azm9il" />
<bpmn:manualTask id="Activity_GoodBye" name="Good Bye">
<bpmn:documentation># Thank You</bpmn:documentation>
<bpmn:incoming>Flow_1212fe2</bpmn:incoming>
<bpmn:outgoing>Flow_14jn215</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_InfiniteLoop">
<bpmndi:BPMNEdge id="Flow_14jn215_di" bpmnElement="Flow_14jn215">
<di:waypoint x="810" y="177" />
<di:waypoint x="882" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1212fe2_di" bpmnElement="Flow_1212fe2">
<di:waypoint x="645" y="177" />
<di:waypoint x="710" y="177" />
<bpmndi:BPMNLabel>
<dc:Bounds x="645" y="159" width="74" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_05mrx8v_di" bpmnElement="Flow_05mrx8v">
<di:waypoint x="620" y="152" />
<di:waypoint x="620" y="80" />
<di:waypoint x="320" y="80" />
<di:waypoint x="320" y="137" />
<bpmndi:BPMNLabel>
<dc:Bounds x="447" y="62" width="52" height="27" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_03m3cuy_di" bpmnElement="Flow_03m3cuy">
<di:waypoint x="530" y="177" />
<di:waypoint x="595" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0pddur1_di" bpmnElement="Flow_0pddur1">
<di:waypoint x="370" y="177" />
<di:waypoint x="430" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0ldlhrt_di" bpmnElement="Flow_0ldlhrt">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1ueb1ky_di" bpmnElement="Activity_StudyInfo">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1y5mgz2_di" bpmnElement="Activity_DisplayInvestigators">
<dc:Bounds x="430" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0n9lzir_di" bpmnElement="Gateway_0n9lzir" isMarkerVisible="true">
<dc:Bounds x="595" y="152" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="584" y="209" width="75" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0azm9il_di" bpmnElement="Event_0azm9il">
<dc:Bounds x="882" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0wbzf51_di" bpmnElement="Activity_GoodBye">
<dc:Bounds x="710" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1j7idla" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1j7idla" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="Process_18biih5" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1pnq3kg</bpmn:outgoing>
@ -8,32 +8,34 @@
<bpmn:endEvent id="EndEvent_063bpg6">
<bpmn:incoming>SequenceFlow_12pf6um</bpmn:incoming>
</bpmn:endEvent>
<bpmn:scriptTask id="Invalid_Script_Task" name="An Invalid Script Reference">
<bpmn:scriptTask id="Invalid_Script_Task" name="A Syntax Error">
<bpmn:incoming>SequenceFlow_1pnq3kg</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_12pf6um</bpmn:outgoing>
<bpmn:script>a really bad error that should fail</bpmn:script>
<bpmn:script>x = 1
y = 2
x + y === a</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_12pf6um" sourceRef="Invalid_Script_Task" targetRef="EndEvent_063bpg6" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_18biih5">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_12pf6um_di" bpmnElement="SequenceFlow_12pf6um">
<di:waypoint x="390" y="117" />
<di:waypoint x="442" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1pnq3kg_di" bpmnElement="SequenceFlow_1pnq3kg">
<di:waypoint x="215" y="117" />
<di:waypoint x="290" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_063bpg6_di" bpmnElement="EndEvent_063bpg6">
<dc:Bounds x="442" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ScriptTask_1imeym0_di" bpmnElement="Invalid_Script_Task">
<dc:Bounds x="290" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_12pf6um_di" bpmnElement="SequenceFlow_12pf6um">
<di:waypoint x="390" y="117" />
<di:waypoint x="442" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1j7idla" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="Process_18biih5" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1pnq3kg</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1pnq3kg" sourceRef="StartEvent_1" targetRef="Invalid_Script_Task" />
<bpmn:endEvent id="EndEvent_063bpg6">
<bpmn:incoming>SequenceFlow_12pf6um</bpmn:incoming>
</bpmn:endEvent>
<bpmn:scriptTask id="Invalid_Script_Task" name="An Invalid Variable">
<bpmn:incoming>SequenceFlow_1pnq3kg</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_12pf6um</bpmn:outgoing>
<bpmn:script>x = 1
y = 2
x + a == 3</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_12pf6um" sourceRef="Invalid_Script_Task" targetRef="EndEvent_063bpg6" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_18biih5">
<bpmndi:BPMNEdge id="SequenceFlow_12pf6um_di" bpmnElement="SequenceFlow_12pf6um">
<di:waypoint x="390" y="117" />
<di:waypoint x="442" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1pnq3kg_di" bpmnElement="SequenceFlow_1pnq3kg">
<di:waypoint x="215" y="117" />
<di:waypoint x="290" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_063bpg6_di" bpmnElement="EndEvent_063bpg6">
<dc:Bounds x="442" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ScriptTask_1imeym0_di" bpmnElement="Invalid_Script_Task">
<dc:Bounds x="290" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,52 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1e8c8os" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_GetIRBInfo" name="Get IRB Info" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_0xey0zw</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_0xey0zw" sourceRef="StartEvent_1" targetRef="Task_GetInfo" />
<bpmn:scriptTask id="Task_GetInfo" name="Get IRB Info">
<bpmn:incoming>SequenceFlow_0xey0zw</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_03hympo</bpmn:outgoing>
<bpmn:script>irb_info = get_irb_info()</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_03hympo" sourceRef="Task_GetInfo" targetRef="Task_PrintInfo" />
<bpmn:endEvent id="EndEvent_0qdzlqr">
<bpmn:incoming>SequenceFlow_1s6cthx</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_1s6cthx" sourceRef="Task_PrintInfo" targetRef="EndEvent_0qdzlqr" />
<bpmn:manualTask id="Task_PrintInfo" name="Print IRB Info">
<bpmn:documentation>IRB Info: {{irb_info}}</bpmn:documentation>
<bpmn:incoming>SequenceFlow_03hympo</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1s6cthx</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_GetIRBInfo">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0xey0zw_di" bpmnElement="SequenceFlow_0xey0zw">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ScriptTask_0xmrk10_di" bpmnElement="Task_GetInfo">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_03hympo_di" bpmnElement="SequenceFlow_03hympo">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="EndEvent_0qdzlqr_di" bpmnElement="EndEvent_0qdzlqr">
<dc:Bounds x="592" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1s6cthx_di" bpmnElement="SequenceFlow_1s6cthx">
<di:waypoint x="530" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ManualTask_1uh1r6q_di" bpmnElement="Task_PrintInfo">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,165 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_06fh26c" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="Process_0nnx0ky" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0bjov2v</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0bjov2v" sourceRef="StartEvent_1" targetRef="Gateway_1vey4sb" />
<bpmn:sequenceFlow id="Flow_0st2uhj" sourceRef="A1" targetRef="A2" />
<bpmn:endEvent id="Event_0favmpp">
<bpmn:incoming>Flow_0vtwntu</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1a7jgry" sourceRef="A2" targetRef="Gateway_0hvmyie" />
<bpmn:sequenceFlow id="Flow_15q2q5u" sourceRef="Gateway_1vey4sb" targetRef="Setup_MI_For_A" />
<bpmn:parallelGateway id="Gateway_1vey4sb">
<bpmn:incoming>Flow_0bjov2v</bpmn:incoming>
<bpmn:outgoing>Flow_15q2q5u</bpmn:outgoing>
<bpmn:outgoing>Flow_00cshtp</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:sequenceFlow id="Flow_00cshtp" sourceRef="Gateway_1vey4sb" targetRef="Setup_MI_For_B" />
<bpmn:sequenceFlow id="Flow_13cl181" sourceRef="B1" targetRef="B2" />
<bpmn:sequenceFlow id="Flow_08b9xb7" sourceRef="B2" targetRef="B3" />
<bpmn:userTask id="A1" name="A1">
<bpmn:incoming>Flow_1fqofjk</bpmn:incoming>
<bpmn:outgoing>Flow_0st2uhj</bpmn:outgoing>
<bpmn:multiInstanceLoopCharacteristics camunda:collection="movies" />
</bpmn:userTask>
<bpmn:userTask id="A2" name="A2">
<bpmn:incoming>Flow_0st2uhj</bpmn:incoming>
<bpmn:outgoing>Flow_1a7jgry</bpmn:outgoing>
</bpmn:userTask>
<bpmn:userTask id="B1" name="B1">
<bpmn:incoming>Flow_03221ql</bpmn:incoming>
<bpmn:outgoing>Flow_13cl181</bpmn:outgoing>
<bpmn:multiInstanceLoopCharacteristics camunda:collection="shows" />
</bpmn:userTask>
<bpmn:userTask id="B2" name="B2">
<bpmn:incoming>Flow_13cl181</bpmn:incoming>
<bpmn:outgoing>Flow_08b9xb7</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_1fqofjk" sourceRef="Setup_MI_For_A" targetRef="A1" />
<bpmn:scriptTask id="Setup_MI_For_A" name="Setup MI For A">
<bpmn:incoming>Flow_15q2q5u</bpmn:incoming>
<bpmn:outgoing>Flow_1fqofjk</bpmn:outgoing>
<bpmn:script>movies = {
"mi1": "mission impossible",
"mi2": "mission impossible 2"
} </bpmn:script>
</bpmn:scriptTask>
<bpmn:scriptTask id="Setup_MI_For_B" name="Setup MI For B">
<bpmn:incoming>Flow_00cshtp</bpmn:incoming>
<bpmn:outgoing>Flow_03221ql</bpmn:outgoing>
<bpmn:script>shows = {
"s1": "Simpsons Episode 1",
"s2": "Simpsons Episode 2"
} </bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_03221ql" sourceRef="Setup_MI_For_B" targetRef="B1" />
<bpmn:parallelGateway id="Gateway_0hvmyie">
<bpmn:incoming>Flow_1a7jgry</bpmn:incoming>
<bpmn:incoming>Flow_0z40k4b</bpmn:incoming>
<bpmn:outgoing>Flow_0vtwntu</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:sequenceFlow id="Flow_0vtwntu" sourceRef="Gateway_0hvmyie" targetRef="Event_0favmpp" />
<bpmn:task id="B4" name="B4">
<bpmn:incoming>Flow_1wivplb</bpmn:incoming>
<bpmn:outgoing>Flow_0z40k4b</bpmn:outgoing>
</bpmn:task>
<bpmn:sequenceFlow id="Flow_0z40k4b" sourceRef="B4" targetRef="Gateway_0hvmyie" />
<bpmn:task id="B3" name="B3">
<bpmn:incoming>Flow_08b9xb7</bpmn:incoming>
<bpmn:outgoing>Flow_1wivplb</bpmn:outgoing>
</bpmn:task>
<bpmn:sequenceFlow id="Flow_1wivplb" sourceRef="B3" targetRef="B4" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0nnx0ky">
<bpmndi:BPMNEdge id="Flow_0bjov2v_di" bpmnElement="Flow_0bjov2v">
<di:waypoint x="188" y="260" />
<di:waypoint x="225" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0st2uhj_di" bpmnElement="Flow_0st2uhj">
<di:waypoint x="620" y="117" />
<di:waypoint x="670" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1a7jgry_di" bpmnElement="Flow_1a7jgry">
<di:waypoint x="770" y="117" />
<di:waypoint x="1070" y="117" />
<di:waypoint x="1070" y="235" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_15q2q5u_di" bpmnElement="Flow_15q2q5u">
<di:waypoint x="250" y="235" />
<di:waypoint x="250" y="117" />
<di:waypoint x="340" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_00cshtp_di" bpmnElement="Flow_00cshtp">
<di:waypoint x="275" y="260" />
<di:waypoint x="340" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_13cl181_di" bpmnElement="Flow_13cl181">
<di:waypoint x="620" y="260" />
<di:waypoint x="670" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_08b9xb7_di" bpmnElement="Flow_08b9xb7">
<di:waypoint x="770" y="260" />
<di:waypoint x="790" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1fqofjk_di" bpmnElement="Flow_1fqofjk">
<di:waypoint x="440" y="117" />
<di:waypoint x="520" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_03221ql_di" bpmnElement="Flow_03221ql">
<di:waypoint x="440" y="260" />
<di:waypoint x="520" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0vtwntu_di" bpmnElement="Flow_0vtwntu">
<di:waypoint x="1095" y="260" />
<di:waypoint x="1202" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0z40k4b_di" bpmnElement="Flow_0z40k4b">
<di:waypoint x="1020" y="260" />
<di:waypoint x="1045" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1wivplb_di" bpmnElement="Flow_1wivplb">
<di:waypoint x="890" y="260" />
<di:waypoint x="920" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0mfhj9s_di" bpmnElement="A1">
<dc:Bounds x="520" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0gxwv6p_di" bpmnElement="B1">
<dc:Bounds x="520" y="220" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0xre4js_di" bpmnElement="B2">
<dc:Bounds x="670" y="220" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0anfpzl_di" bpmnElement="A2">
<dc:Bounds x="670" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="152" y="242" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0ugjrco_di" bpmnElement="Gateway_1vey4sb">
<dc:Bounds x="225" y="235" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0b6ruvt_di" bpmnElement="Setup_MI_For_A">
<dc:Bounds x="340" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1375jh1_di" bpmnElement="Setup_MI_For_B">
<dc:Bounds x="340" y="220" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0favmpp_di" bpmnElement="Event_0favmpp">
<dc:Bounds x="1202" y="242" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_1lpgh3b_di" bpmnElement="Gateway_0hvmyie">
<dc:Bounds x="1045" y="235" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0baovqi_di" bpmnElement="B4">
<dc:Bounds x="920" y="220" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0olvxvx_di" bpmnElement="B3">
<dc:Bounds x="790" y="220" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,3 @@
[
{"DETAIL": "Passed validation.", "STATUS": "No Error"}
]

View File

@ -0,0 +1,38 @@
[
{
"AGENDA_DATE": "2021-04-15T00:00:00+00:00",
"DATE_MODIFIED": "2021-04-15T00:00:00+00:00",
"IRBEVENT": "IRB Event 1",
"IRB_ADMINISTRATIVE_REVIEWER": "IRB Admin Reviewer 1",
"IRB_OF_RECORD": "IRB of Record 1",
"IRB_REVIEW_TYPE": "IRB Review Type 1",
"IRB_STATUS": "IRB Status 1",
"STUDYIRBREVIEWERADMIN": "Study IRB Review Admin 1",
"UVA_IRB_HSR_IS_IRB_OF_RECORD_FOR_ALL_SITES": 1,
"UVA_STUDY_TRACKING": "UVA Study Tracking 1"
},
{
"AGENDA_DATE": "2021-04-15T00:00:00+00:00",
"DATE_MODIFIED": "2021-04-15T00:00:00+00:00",
"IRBEVENT": "IRB Event 2",
"IRB_ADMINISTRATIVE_REVIEWER": "IRB Admin Reviewer 2",
"IRB_OF_RECORD": "IRB of Record 2",
"IRB_REVIEW_TYPE": "IRB Review Type 2",
"IRB_STATUS": "IRB Status 2",
"STUDYIRBREVIEWERADMIN": "Study IRB Review Admin 2",
"UVA_IRB_HSR_IS_IRB_OF_RECORD_FOR_ALL_SITES": 2,
"UVA_STUDY_TRACKING": "UVA Study Tracking 2"
},
{
"AGENDA_DATE": "2021-04-15T00:00:00+00:00",
"DATE_MODIFIED": "2021-04-15T00:00:00+00:00",
"IRBEVENT": "IRB Event 3",
"IRB_ADMINISTRATIVE_REVIEWER": "IRB Admin Reviewer 3",
"IRB_OF_RECORD": "IRB of Record 3",
"IRB_REVIEW_TYPE": "IRB Review Type 3",
"IRB_STATUS": "IRB Status 3",
"STUDYIRBREVIEWERADMIN": "Study IRB Review Admin 3",
"UVA_IRB_HSR_IS_IRB_OF_RECORD_FOR_ALL_SITES": 3,
"UVA_STUDY_TRACKING": "UVA Study Tracking 3"
}
]

View File

@ -1,12 +1,12 @@
{
"DSMB": null,
"DSMB_FREQUENCY": null,
"GCRC_NUMBER": null,
"IBC_NUMBER": null,
"IDE": null,
"IND_1": 1234,
"IND_2": null,
"IND_3": null,
"DSMB": 1,
"DSMB_FREQUENCY": 2,
"GCRC_NUMBER": "9",
"IBC_NUMBER": "7",
"IDE": "12345",
"IND_1": "1234",
"IND_2": "2345",
"IND_3": "3456",
"IRBREVIEWERADMIN": null,
"IS_ADULT_PARTICIPANT": null,
"IS_APPROVED_DEVICE": null,
@ -61,6 +61,7 @@
"NON_UVA_LOCATION": null,
"OTHER_VULNERABLE_DESC": null,
"PRC_NUMBER": null,
"SPONSORS_PROTOCOL_REVISION_DATE": null,
"UPLOAD_COMPLETE": null
"SPONSORS_PROTOCOL_REVISION_DATE": "2021-04-20",
"UPLOAD_COMPLETE": null,
"REVIEW_TYPE": 2
}

View File

@ -0,0 +1,67 @@
{
"DSMB": 1,
"DSMB_FREQUENCY": 2,
"GCRC_NUMBER": "9",
"IBC_NUMBER": "7",
"IDE": "12345",
"IND_1": "1234",
"IND_2": "2345",
"IND_3": "3456",
"IRBREVIEWERADMIN": null,
"IS_ADULT_PARTICIPANT": null,
"IS_APPROVED_DEVICE": null,
"IS_AUX": null,
"IS_BIOMEDICAL": null,
"IS_CANCER_PATIENT": null,
"IS_CENTRAL_REG_DB": null,
"IS_CHART_REVIEW": null,
"IS_COMMITTEE_CONFLICT": null,
"IS_CONSENT_WAIVER": null,
"IS_DB": null,
"IS_ELDERLY_POP": null,
"IS_ENGAGED_RESEARCH": null,
"IS_FETUS_POP": null,
"IS_FINANCIAL_CONFLICT": null,
"IS_FOR_CANCER_CENTER": null,
"IS_FUNDING_SOURCE": null,
"IS_GCRC": null,
"IS_GENE_TRANSFER": null,
"IS_GRANT": null,
"IS_HGT": null,
"IS_IBC": null,
"IS_IDE": null,
"IS_IND": null,
"IS_MENTAL_IMPAIRMENT_POP": null,
"IS_MINOR": null,
"IS_MINOR_PARTICIPANT": null,
"IS_MULTI_SITE": null,
"IS_NOT_CONSENT_WAIVER": null,
"IS_NOT_PRC_WAIVER": null,
"IS_OTHER_VULNERABLE_POP": null,
"IS_OUTSIDE_CONTRACT": null,
"IS_PI_INITIATED": null,
"IS_PI_SCHOOL": null,
"IS_PRC": null,
"IS_PRC_DSMP": null,
"IS_PREGNANT_POP": null,
"IS_PRISONERS_POP": null,
"IS_QUALITATIVE": null,
"IS_RADIATION": null,
"IS_REVIEW_BY_CENTRAL_IRB": null,
"IS_SPONSOR": null,
"IS_SPONSOR_MONITORING": null,
"IS_SURROGATE_CONSENT": null,
"IS_TISSUE_BANKING": null,
"IS_UVA_DB": null,
"IS_UVA_IDE": null,
"IS_UVA_IND": null,
"IS_UVA_LOCATION": null,
"IS_UVA_PI_MULTI": null,
"MULTI_SITE_LOCATIONS": null,
"NON_UVA_LOCATION": null,
"OTHER_VULNERABLE_DESC": null,
"PRC_NUMBER": null,
"SPONSORS_PROTOCOL_REVISION_DATE": "2021-04-20",
"UPLOAD_COMPLETE": null,
"REVIEW_TYPE": 99
}

View File

@ -3,7 +3,6 @@
"DATE_MODIFIED": "2020-02-19T14:26:49.127756",
"HSRNUMBER": "12345",
"NETBADGEID": "dhf8r",
"Q_COMPLETE": true,
"STUDYID": 54321,
"TITLE": "Another study about the effect of a naked mannequin on software productivity"
},
@ -11,7 +10,6 @@
"DATE_MODIFIED": "2020-02-19T14:24:55.101695",
"HSRNUMBER": "",
"NETBADGEID": "dhf8r",
"Q_COMPLETE": true,
"STUDYID": 65432,
"TITLE": "Peanut butter consumption among quiet dogs"
},
@ -19,7 +17,6 @@
"DATE_MODIFIED": "2020-02-19T14:24:55.101695",
"HSRNUMBER": "",
"NETBADGEID": "dhf8r",
"Q_COMPLETE": false,
"STUDYID": 1,
"TITLE": "Efficacy of xenomorph bio-augmented circuits on dexterity of cybernetic prostheses"
}

View File

@ -0,0 +1,77 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_5e40639" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_ReadOnlyField" name="Test Read Only Field" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0to8etb</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0to8etb" sourceRef="StartEvent_1" targetRef="Activity_SetData" />
<bpmn:sequenceFlow id="Flow_04r75ca" sourceRef="Activity_SetData" targetRef="Activity_DisplayOnlyField" />
<bpmn:sequenceFlow id="Flow_0g25v76" sourceRef="Activity_DisplayOnlyField" targetRef="Activity_CheckData" />
<bpmn:endEvent id="Event_0cfckhy">
<bpmn:incoming>Flow_0a95kns</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0a95kns" sourceRef="Activity_CheckData" targetRef="Event_0cfckhy" />
<bpmn:scriptTask id="Activity_SetData" name="Set Data">
<bpmn:incoming>Flow_0to8etb</bpmn:incoming>
<bpmn:outgoing>Flow_04r75ca</bpmn:outgoing>
<bpmn:script>string_value = 'asdf'</bpmn:script>
</bpmn:scriptTask>
<bpmn:userTask id="Activity_DisplayOnlyField" name="Display Only Field&#10;" camunda:formKey="ReadOnlyFormField">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="read_only_field" label="Read Only" type="string">
<camunda:properties>
<camunda:property id="read_only" value="True" />
<camunda:property id="value_expression" value="string_value" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_04r75ca</bpmn:incoming>
<bpmn:outgoing>Flow_0g25v76</bpmn:outgoing>
</bpmn:userTask>
<bpmn:manualTask id="Activity_CheckData" name="Check Data Persistence">
<bpmn:documentation>Read only is {{ read_only_field }}</bpmn:documentation>
<bpmn:incoming>Flow_0g25v76</bpmn:incoming>
<bpmn:outgoing>Flow_0a95kns</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_ReadOnlyField">
<bpmndi:BPMNEdge id="Flow_0a95kns_di" bpmnElement="Flow_0a95kns">
<di:waypoint x="690" y="177" />
<di:waypoint x="752" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0g25v76_di" bpmnElement="Flow_0g25v76">
<di:waypoint x="530" y="177" />
<di:waypoint x="590" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_04r75ca_di" bpmnElement="Flow_04r75ca">
<di:waypoint x="370" y="177" />
<di:waypoint x="430" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0to8etb_di" bpmnElement="Flow_0to8etb">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0cfckhy_di" bpmnElement="Event_0cfckhy">
<dc:Bounds x="752" y="159" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="733" y="202" width="76" height="27" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_10544ek_di" bpmnElement="Activity_SetData">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0ho1wsm_di" bpmnElement="Activity_DisplayOnlyField">
<dc:Bounds x="430" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_10ds6m4_di" bpmnElement="Activity_CheckData">
<dc:Bounds x="590" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,82 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_0vny0hv" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_ResetWorkflow" name="Reset Workflow" isExecutable="true">
<bpmn:documentation>Use this process to reset a workflow for the current study. You must enter the name of the workflow. I.e., lower case with underscores.</bpmn:documentation>
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_0i872g2</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_0i872g2" sourceRef="StartEvent_1" targetRef="Task_GetWorkflow" />
<bpmn:sequenceFlow id="SequenceFlow_1q2ton3" sourceRef="Task_GetWorkflow" targetRef="Task_ResetWorkflow" />
<bpmn:sequenceFlow id="SequenceFlow_0x127gc" sourceRef="Task_ResetWorkflow" targetRef="Task_DisplayWorkflow" />
<bpmn:endEvent id="EndEvent_0fdym05">
<bpmn:incoming>SequenceFlow_0yy50p2</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_0yy50p2" sourceRef="Task_DisplayWorkflow" targetRef="EndEvent_0fdym05" />
<bpmn:userTask id="Task_GetWorkflow" name="Get Workflow" camunda:formKey="WorkflowForm">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="workflow_name" label="Workflow Name" type="string">
<camunda:validation>
<camunda:constraint name="required" config="True" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0i872g2</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1q2ton3</bpmn:outgoing>
</bpmn:userTask>
<bpmn:scriptTask id="Task_ResetWorkflow" name="Reset Workflow">
<bpmn:incoming>SequenceFlow_1q2ton3</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0x127gc</bpmn:outgoing>
<bpmn:script>value = reset_workflow(workflow_name=workflow_name)</bpmn:script>
</bpmn:scriptTask>
<bpmn:manualTask id="Task_DisplayWorkflow" name="Display Workflow">
<bpmn:documentation># Reset Workflow
&lt;div&gt;
{% if value %}
&lt;span&gt;Workflow {{workflow_name}} was reset.&lt;/span&gt;
{% else %}
&lt;span&gt;There was a problem resetting workflow {{workflow_name}}.&lt;/span&gt;
{% endif %}
&lt;/div&gt;
</bpmn:documentation>
<bpmn:incoming>SequenceFlow_0x127gc</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0yy50p2</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_ResetWorkflow">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0i872g2_di" bpmnElement="SequenceFlow_0i872g2">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1q2ton3_di" bpmnElement="SequenceFlow_1q2ton3">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_0x127gc_di" bpmnElement="SequenceFlow_0x127gc">
<di:waypoint x="530" y="117" />
<di:waypoint x="590" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="EndEvent_0fdym05_di" bpmnElement="EndEvent_0fdym05">
<dc:Bounds x="752" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0yy50p2_di" bpmnElement="SequenceFlow_0yy50p2">
<di:waypoint x="690" y="117" />
<di:waypoint x="752" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="UserTask_0li5ksb_di" bpmnElement="Task_GetWorkflow">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ScriptTask_07qq4pl_di" bpmnElement="Task_ResetWorkflow">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ManualTask_0ianu3f_di" bpmnElement="Task_DisplayWorkflow">
<dc:Bounds x="590" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

Some files were not shown because too many files have changed in this diff Show More