Merge branch 'dev' into file-cleanup-466

This commit is contained in:
mike cullerton 2021-09-30 14:11:59 -04:00
commit 64e3cd044f
19 changed files with 377 additions and 88 deletions

98
Pipfile.lock generated
View File

@ -48,11 +48,11 @@
}, },
"apscheduler": { "apscheduler": {
"hashes": [ "hashes": [
"sha256:1cab7f2521e107d07127b042155b632b7a1cd5e02c34be5a28ff62f77c900c6a", "sha256:793b2d37c52ece53e34626619e6142e99b20b59a12155f39e1e6932e324f079d",
"sha256:c06cc796d5bb9eb3c4f77727f6223476eb67749e7eea074d1587550702a7fbe3" "sha256:82d6d21b2f0343510d07bb35618333a794653439a265635555b12935647b460a"
], ],
"index": "pypi", "index": "pypi",
"version": "==3.7.0" "version": "==3.8.0"
}, },
"attrs": { "attrs": {
"hashes": [ "hashes": [
@ -1039,49 +1039,49 @@
}, },
"regex": { "regex": {
"hashes": [ "hashes": [
"sha256:04f6b9749e335bb0d2f68c707f23bb1773c3fb6ecd10edf0f04df12a8920d468", "sha256:0628ed7d6334e8f896f882a5c1240de8c4d9b0dd7c7fb8e9f4692f5684b7d656",
"sha256:08d74bfaa4c7731b8dac0a992c63673a2782758f7cfad34cf9c1b9184f911354", "sha256:09eb62654030f39f3ba46bc6726bea464069c29d00a9709e28c9ee9623a8da4a",
"sha256:0fc1f8f06977c2d4f5e3d3f0d4a08089be783973fc6b6e278bde01f0544ff308", "sha256:0bba1f6df4eafe79db2ecf38835c2626dbd47911e0516f6962c806f83e7a99ae",
"sha256:121f4b3185feaade3f85f70294aef3f777199e9b5c0c0245c774ae884b110a2d", "sha256:10a7a9cbe30bd90b7d9a1b4749ef20e13a3528e4215a2852be35784b6bd070f0",
"sha256:1413b5022ed6ac0d504ba425ef02549a57d0f4276de58e3ab7e82437892704fc", "sha256:17310b181902e0bb42b29c700e2c2346b8d81f26e900b1328f642e225c88bce1",
"sha256:1743345e30917e8c574f273f51679c294effba6ad372db1967852f12c76759d8", "sha256:1e8d1898d4fb817120a5f684363b30108d7b0b46c7261264b100d14ec90a70e7",
"sha256:28fc475f560d8f67cc8767b94db4c9440210f6958495aeae70fac8faec631797", "sha256:2054dea683f1bda3a804fcfdb0c1c74821acb968093d0be16233873190d459e3",
"sha256:31a99a4796bf5aefc8351e98507b09e1b09115574f7c9dbb9cf2111f7220d2e2", "sha256:29385c4dbb3f8b3a55ce13de6a97a3d21bd00de66acd7cdfc0b49cb2f08c906c",
"sha256:328a1fad67445550b982caa2a2a850da5989fd6595e858f02d04636e7f8b0b13", "sha256:295bc8a13554a25ad31e44c4bedabd3c3e28bba027e4feeb9bb157647a2344a7",
"sha256:473858730ef6d6ff7f7d5f19452184cd0caa062a20047f6d6f3e135a4648865d", "sha256:2cdb3789736f91d0b3333ac54d12a7e4f9efbc98f53cb905d3496259a893a8b3",
"sha256:4cde065ab33bcaab774d84096fae266d9301d1a2f5519d7bd58fc55274afbf7a", "sha256:3baf3eaa41044d4ced2463fd5d23bf7bd4b03d68739c6c99a59ce1f95599a673",
"sha256:5f6a808044faae658f546dd5f525e921de9fa409de7a5570865467f03a626fc0", "sha256:4e61100200fa6ab7c99b61476f9f9653962ae71b931391d0264acfb4d9527d9c",
"sha256:610b690b406653c84b7cb6091facb3033500ee81089867ee7d59e675f9ca2b73", "sha256:6266fde576e12357b25096351aac2b4b880b0066263e7bc7a9a1b4307991bb0e",
"sha256:66256b6391c057305e5ae9209941ef63c33a476b73772ca967d4a2df70520ec1", "sha256:650c4f1fc4273f4e783e1d8e8b51a3e2311c2488ba0fcae6425b1e2c248a189d",
"sha256:6eebf512aa90751d5ef6a7c2ac9d60113f32e86e5687326a50d7686e309f66ed", "sha256:658e3477676009083422042c4bac2bdad77b696e932a3de001c42cc046f8eda2",
"sha256:79aef6b5cd41feff359acaf98e040844613ff5298d0d19c455b3d9ae0bc8c35a", "sha256:6adc1bd68f81968c9d249aab8c09cdc2cbe384bf2d2cb7f190f56875000cdc72",
"sha256:808ee5834e06f57978da3e003ad9d6292de69d2bf6263662a1a8ae30788e080b", "sha256:6c4d83d21d23dd854ffbc8154cf293f4e43ba630aa9bd2539c899343d7f59da3",
"sha256:8e44769068d33e0ea6ccdf4b84d80c5afffe5207aa4d1881a629cf0ef3ec398f", "sha256:6f74b6d8f59f3cfb8237e25c532b11f794b96f5c89a6f4a25857d85f84fbef11",
"sha256:999ad08220467b6ad4bd3dd34e65329dd5d0df9b31e47106105e407954965256", "sha256:7783d89bd5413d183a38761fbc68279b984b9afcfbb39fa89d91f63763fbfb90",
"sha256:9b006628fe43aa69259ec04ca258d88ed19b64791693df59c422b607b6ece8bb", "sha256:7e3536f305f42ad6d31fc86636c54c7dafce8d634e56fef790fbacb59d499dd5",
"sha256:9d05ad5367c90814099000442b2125535e9d77581855b9bee8780f1b41f2b1a2", "sha256:821e10b73e0898544807a0692a276e539e5bafe0a055506a6882814b6a02c3ec",
"sha256:a577a21de2ef8059b58f79ff76a4da81c45a75fe0bfb09bc8b7bb4293fa18983", "sha256:835962f432bce92dc9bf22903d46c50003c8d11b1dc64084c8fae63bca98564a",
"sha256:a617593aeacc7a691cc4af4a4410031654f2909053bd8c8e7db837f179a630eb", "sha256:85c61bee5957e2d7be390392feac7e1d7abd3a49cbaed0c8cee1541b784c8561",
"sha256:abb48494d88e8a82601af905143e0de838c776c1241d92021e9256d5515b3645", "sha256:86f9931eb92e521809d4b64ec8514f18faa8e11e97d6c2d1afa1bcf6c20a8eab",
"sha256:ac88856a8cbccfc14f1b2d0b829af354cc1743cb375e7f04251ae73b2af6adf8", "sha256:8a5c2250c0a74428fd5507ae8853706fdde0f23bfb62ee1ec9418eeacf216078",
"sha256:b4c220a1fe0d2c622493b0a1fd48f8f991998fb447d3cd368033a4b86cf1127a", "sha256:8aec4b4da165c4a64ea80443c16e49e3b15df0f56c124ac5f2f8708a65a0eddc",
"sha256:b844fb09bd9936ed158ff9df0ab601e2045b316b17aa8b931857365ea8586906", "sha256:8c268e78d175798cd71d29114b0a1f1391c7d011995267d3b62319ec1a4ecaa1",
"sha256:bdc178caebd0f338d57ae445ef8e9b737ddf8fbc3ea187603f65aec5b041248f", "sha256:8d80087320632457aefc73f686f66139801959bf5b066b4419b92be85be3543c",
"sha256:c206587c83e795d417ed3adc8453a791f6d36b67c81416676cad053b4104152c", "sha256:95e89a8558c8c48626dcffdf9c8abac26b7c251d352688e7ab9baf351e1c7da6",
"sha256:c61dcc1cf9fd165127a2853e2c31eb4fb961a4f26b394ac9fe5669c7a6592892", "sha256:9c371dd326289d85906c27ec2bc1dcdedd9d0be12b543d16e37bad35754bde48",
"sha256:c7cb4c512d2d3b0870e00fbbac2f291d4b4bf2634d59a31176a87afe2777c6f0", "sha256:9c7cb25adba814d5f419733fe565f3289d6fa629ab9e0b78f6dff5fa94ab0456",
"sha256:d4a332404baa6665b54e5d283b4262f41f2103c255897084ec8f5487ce7b9e8e", "sha256:a731552729ee8ae9c546fb1c651c97bf5f759018fdd40d0e9b4d129e1e3a44c8",
"sha256:d5111d4c843d80202e62b4fdbb4920db1dcee4f9366d6b03294f45ed7b18b42e", "sha256:aea4006b73b555fc5bdb650a8b92cf486d678afa168cf9b38402bb60bf0f9c18",
"sha256:e1e8406b895aba6caa63d9fd1b6b1700d7e4825f78ccb1e5260551d168db38ed", "sha256:b0e3f59d3c772f2c3baaef2db425e6fc4149d35a052d874bb95ccfca10a1b9f4",
"sha256:e8690ed94481f219a7a967c118abaf71ccc440f69acd583cab721b90eeedb77c", "sha256:b15dc34273aefe522df25096d5d087abc626e388a28a28ac75a4404bb7668736",
"sha256:ed283ab3a01d8b53de3a05bfdf4473ae24e43caee7dcb5584e86f3f3e5ab4374", "sha256:c000635fd78400a558bd7a3c2981bb2a430005ebaa909d31e6e300719739a949",
"sha256:ed4b50355b066796dacdd1cf538f2ce57275d001838f9b132fab80b75e8c84dd", "sha256:c31f35a984caffb75f00a86852951a337540b44e4a22171354fb760cefa09346",
"sha256:ee329d0387b5b41a5dddbb6243a21cb7896587a651bebb957e2d2bb8b63c0791", "sha256:c50a6379763c733562b1fee877372234d271e5c78cd13ade5f25978aa06744db",
"sha256:f3bf1bc02bc421047bfec3343729c4bbbea42605bcfd6d6bfe2c07ade8b12d2a", "sha256:c94722bf403b8da744b7d0bb87e1f2529383003ceec92e754f768ef9323f69ad",
"sha256:f585cbbeecb35f35609edccb95efd95a3e35824cd7752b586503f7e6087303f1", "sha256:dcbbc9cfa147d55a577d285fd479b43103188855074552708df7acc31a476dd9",
"sha256:f60667673ff9c249709160529ab39667d1ae9fd38634e006bec95611f632e759" "sha256:fb9f5844db480e2ef9fce3a72e71122dd010ab7b2920f777966ba25f7eb63819"
], ],
"version": "==2021.8.28" "version": "==2021.9.24"
}, },
"requests": { "requests": {
"hashes": [ "hashes": [
@ -1183,7 +1183,7 @@
}, },
"spiffworkflow": { "spiffworkflow": {
"git": "https://github.com/sartography/SpiffWorkflow", "git": "https://github.com/sartography/SpiffWorkflow",
"ref": "3330f2a3d098737ec79350a0b853aca7493e3957" "ref": "d911632569d73ec5e039504005a740c3d4d50451"
}, },
"sqlalchemy": { "sqlalchemy": {
"hashes": [ "hashes": [
@ -1322,11 +1322,11 @@
}, },
"zipp": { "zipp": {
"hashes": [ "hashes": [
"sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3", "sha256:1fc9641b26f3bd81069b7738b039f2819cab6e3fc3399a953e19d92cc81eff4d",
"sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4" "sha256:8dc6c4d5a809d659067cc713f76bcf42fae8ae641db12fddfa93694a15abc96b"
], ],
"markers": "python_version < '3.10'", "markers": "python_version < '3.10'",
"version": "==3.5.0" "version": "==3.5.1"
} }
}, },
"develop": { "develop": {

View File

@ -145,6 +145,7 @@ paths:
description: The remote endpoint description: The remote endpoint
schema: schema:
type: string type: string
example: https://testing.crconnect.uvadcos.io/api
tags: tags:
- Workflow Sync API - Workflow Sync API
responses: responses:

View File

@ -159,6 +159,9 @@ def create_or_update_local_spec(remote,workflow_spec_id):
session.add(local_category) session.add(local_category)
local_spec.category = local_category local_spec.category = local_category
# Add the local spec to the database, then we can link the libraries.
session.add(local_spec)
# Set the libraries # Set the libraries
session.query(WorkflowLibraryModel).filter(WorkflowLibraryModel.workflow_spec_id == local_spec.id).delete() session.query(WorkflowLibraryModel).filter(WorkflowLibraryModel.workflow_spec_id == local_spec.id).delete()
for library in specdict['libraries']: for library in specdict['libraries']:
@ -167,7 +170,7 @@ def create_or_update_local_spec(remote,workflow_spec_id):
local_lib = WorkflowLibraryModel(workflow_spec_id=local_spec.id, local_lib = WorkflowLibraryModel(workflow_spec_id=local_spec.id,
library_spec_id=library['id']) library_spec_id=library['id'])
session.add(local_lib) session.add(local_lib)
session.add(local_spec) session.commit()
def update_or_create_current_file(remote,workflow_spec_id,updatefile): def update_or_create_current_file(remote,workflow_spec_id,updatefile):
currentfile = file_get(workflow_spec_id, updatefile['filename']) currentfile = file_get(workflow_spec_id, updatefile['filename'])

View File

@ -27,8 +27,19 @@ class Task(object):
# Field Types # Field Types
FIELD_TYPE_FILE = "file" FIELD_TYPE_STRING = "string"
FIELD_TYPE_LONG = "long"
FIELD_TYPE_BOOLEAN = "boolean"
FIELD_TYPE_DATE = "date"
FIELD_TYPE_ENUM = "enum"
FIELD_TYPE_TEXTAREA = "textarea" # textarea: Multiple lines of text
FIELD_TYPE_AUTO_COMPLETE = "autocomplete" FIELD_TYPE_AUTO_COMPLETE = "autocomplete"
FIELD_TYPE_FILE = "file"
FIELD_TYPE_FILES = "files" # files: Multiple files
FIELD_TYPE_TEL = "tel" # tel: Phone number
FIELD_TYPE_EMAIL = "email" # email: Email address
FIELD_TYPE_URL = "url" # url: Website address
FIELD_PROP_AUTO_COMPLETE_MAX = "autocomplete_num" # Not used directly, passed in from the front end. FIELD_PROP_AUTO_COMPLETE_MAX = "autocomplete_num" # Not used directly, passed in from the front end.
# Required field # Required field
@ -77,8 +88,6 @@ class Task(object):
FIELD_PROP_HELP = "help" FIELD_PROP_HELP = "help"
########################################################################## ##########################################################################
def __init__(self, id, name, title, type, state, lane, form, documentation, data, def __init__(self, id, name, title, type, state, lane, form, documentation, data,
@ -103,6 +112,11 @@ class Task(object):
def valid_property_names(cls): def valid_property_names(cls):
return [value for name, value in vars(cls).items() if name.startswith('FIELD_PROP')] return [value for name, value in vars(cls).items() if name.startswith('FIELD_PROP')]
@classmethod
def valid_field_types(cls):
return [value for name, value in vars(cls).items() if name.startswith('FIELD_TYPE')]
class OptionSchema(ma.Schema): class OptionSchema(ma.Schema):
class Meta: class Meta:
fields = ["id", "name", "data"] fields = ["id", "name", "data"]

View File

@ -1,6 +1,9 @@
import enum import enum
import urllib import urllib
import connexion
import flask import flask
from flask import url_for
from marshmallow import INCLUDE, EXCLUDE, Schema from marshmallow import INCLUDE, EXCLUDE, Schema
from marshmallow.fields import Method from marshmallow.fields import Method
from marshmallow_enum import EnumField from marshmallow_enum import EnumField
@ -153,10 +156,14 @@ class FileSchema(Schema):
def get_url(self, obj): def get_url(self, obj):
token = 'not_available' token = 'not_available'
base_url = connexion.request.host_url
if obj.id is None:
return "" # We can't return a url for a file that isn't stored yet.
file_url = url_for("/v1_0.crc_api_file_get_file_data_link", file_id=obj.id)
if hasattr(flask.g, 'user'): if hasattr(flask.g, 'user'):
token = flask.g.user.encode_auth_token() token = flask.g.user.encode_auth_token()
return (app.config['APPLICATION_ROOT'] + 'file/' + url = base_url + file_url + '?auth_token=' + urllib.parse.quote_plus(token)
str(obj.id) + '/download?auth_token=' + urllib.parse.quote_plus(token)) return url
class LookupFileModel(db.Model): class LookupFileModel(db.Model):

View File

@ -162,13 +162,14 @@ class Category(object):
self.name = model.name self.name = model.name
self.display_name = model.display_name self.display_name = model.display_name
self.display_order = model.display_order self.display_order = model.display_order
self.admin = model.admin
class CategorySchema(ma.Schema): class CategorySchema(ma.Schema):
workflows = fields.List(fields.Nested(WorkflowMetadataSchema), dump_only=True) workflows = fields.List(fields.Nested(WorkflowMetadataSchema), dump_only=True)
class Meta: class Meta:
model = Category model = Category
additional = ["id", "name", "display_name", "display_order"] additional = ["id", "name", "display_name", "display_order", "admin"]
unknown = INCLUDE unknown = INCLUDE

View File

@ -16,7 +16,7 @@ class WorkflowSpecCategoryModel(db.Model):
name = db.Column(db.String) name = db.Column(db.String)
display_name = db.Column(db.String) display_name = db.Column(db.String)
display_order = db.Column(db.Integer) display_order = db.Column(db.Integer)
admin = db.Column(db.Boolean)
class WorkflowSpecCategoryModelSchema(SQLAlchemyAutoSchema): class WorkflowSpecCategoryModelSchema(SQLAlchemyAutoSchema):
class Meta: class Meta:

View File

@ -1,5 +1,6 @@
import re import re
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.serializer.exceptions import MissingSpecError from SpiffWorkflow.serializer.exceptions import MissingSpecError
from SpiffWorkflow.util.metrics import timeit, firsttime, sincetime from SpiffWorkflow.util.metrics import timeit, firsttime, sincetime
from lxml import etree from lxml import etree
@ -8,7 +9,6 @@ from datetime import datetime
from typing import List from typing import List
from SpiffWorkflow import Task as SpiffTask, WorkflowException, Task from SpiffWorkflow import Task as SpiffTask, WorkflowException, Task
from SpiffWorkflow.bpmn.BpmnScriptEngine import BpmnScriptEngine
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
@ -30,9 +30,8 @@ from crc.services.file_service import FileService
from crc import app from crc import app
from crc.services.user_service import UserService from crc.services.user_service import UserService
from difflib import SequenceMatcher
class CustomBpmnScriptEngine(BpmnScriptEngine): class CustomBpmnScriptEngine(PythonScriptEngine):
"""This is a custom script processor that can be easily injected into Spiff Workflow. """This is a custom script processor that can be easily injected into Spiff Workflow.
It will execute python code read in from the bpmn. It will also make any scripts in the It will execute python code read in from the bpmn. It will also make any scripts in the
scripts directory available for execution. """ scripts directory available for execution. """
@ -79,7 +78,7 @@ class CustomBpmnScriptEngine(BpmnScriptEngine):
augmentMethods = Script.generate_augmented_validate_list(task, study_id, workflow_id) augmentMethods = Script.generate_augmented_validate_list(task, study_id, workflow_id)
else: else:
augmentMethods = Script.generate_augmented_list(task, study_id, workflow_id) augmentMethods = Script.generate_augmented_list(task, study_id, workflow_id)
exp, valid = self.validateExpression(expression) exp, valid = self.validate_expression(expression)
return self._eval(exp, external_methods=augmentMethods, **task.data) return self._eval(exp, external_methods=augmentMethods, **task.data)
except Exception as e: except Exception as e:

View File

@ -132,6 +132,13 @@ class WorkflowService(object):
spec, only completing the required fields, rather than everything. spec, only completing the required fields, rather than everything.
""" """
# Get workflow state dictionary, make sure workflow is not disabled.
if validate_study_id is not None:
study_model = session.query(StudyModel).filter(StudyModel.id == validate_study_id).first()
spec_model = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.id == spec_id).first()
status = StudyService._get_study_status(study_model)
if status[spec_model.name]['status'] == 'disabled':
raise ApiError(code='disabled_workflow', message=f"This workflow is disabled. {status[spec_model.name]['message']}")
workflow_model = WorkflowService.make_test_workflow(spec_id, validate_study_id) workflow_model = WorkflowService.make_test_workflow(spec_id, validate_study_id)
try: try:
processor = WorkflowProcessor(workflow_model, validate_only=True) processor = WorkflowProcessor(workflow_model, validate_only=True)
@ -203,6 +210,7 @@ class WorkflowService(object):
task_name = task.get_name()) task_name = task.get_name())
# Assure field has valid properties # Assure field has valid properties
WorkflowService.check_field_properties(field, task) WorkflowService.check_field_properties(field, task)
WorkflowService.check_field_type(field, task)
# Process the label of the field if it is dynamic. # Process the label of the field if it is dynamic.
if field.has_property(Task.FIELD_PROP_LABEL_EXPRESSION): if field.has_property(Task.FIELD_PROP_LABEL_EXPRESSION):
@ -294,6 +302,14 @@ class WorkflowService(object):
f'The field {field.id} contains an unsupported ' f'The field {field.id} contains an unsupported '
f'property: {name}', task=task) f'property: {name}', task=task)
@staticmethod
def check_field_type(field, task):
"""Assures that the field type is valid."""
valid_types = Task.valid_field_types()
if field.type not in valid_types:
raise ApiError.from_task("invalid_field_type",
f'The field {field.id} has an unknown field type '
f'{field.type}, valid types include {valid_types}', task=task)
@staticmethod @staticmethod
def post_process_form(task): def post_process_form(task):
@ -434,10 +450,15 @@ class WorkflowService(object):
if len(field.options) > 0: if len(field.options) > 0:
random_choice = random.choice(field.options) random_choice = random.choice(field.options)
if isinstance(random_choice, dict): if isinstance(random_choice, dict):
return {'value': random_choice['id'], 'label': random_choice['name'], 'data': random_choice['data']} random_value = {'value': random_choice['id'], 'label': random_choice['name'], 'data': random_choice['data']}
else: else:
# fixme: why it is sometimes an EnumFormFieldOption, and other times not? # fixme: why it is sometimes an EnumFormFieldOption, and other times not?
return {'value': random_choice.id, 'label': random_choice.name} random_value = {'value': random_choice.id, 'label': random_choice.name}
if field.has_property(Task.FIELD_PROP_ENUM_TYPE) and field.get_property(Task.FIELD_PROP_ENUM_TYPE) == 'checkbox':
return [random_value]
else:
return random_value
else: else:
raise ApiError.from_task("invalid_enum", "You specified an enumeration field (%s)," raise ApiError.from_task("invalid_enum", "You specified an enumeration field (%s),"
" with no options" % field.id, task) " with no options" % field.id, task)
@ -446,19 +467,25 @@ class WorkflowService(object):
# from the lookup model # from the lookup model
lookup_model = LookupService.get_lookup_model(task, field) lookup_model = LookupService.get_lookup_model(task, field)
if field.has_property(Task.FIELD_PROP_LDAP_LOOKUP): # All ldap records get the same person. if field.has_property(Task.FIELD_PROP_LDAP_LOOKUP): # All ldap records get the same person.
return WorkflowService._random_ldap_record() random_value = WorkflowService._random_ldap_record()
elif lookup_model: elif lookup_model:
data = db.session.query(LookupDataModel).filter( data = db.session.query(LookupDataModel).filter(
LookupDataModel.lookup_file_model == lookup_model).limit(10).all() LookupDataModel.lookup_file_model == lookup_model).limit(10).all()
options = [{"value": d.value, "label": d.label, "data": d.data} for d in data] options = [{"value": d.value, "label": d.label, "data": d.data} for d in data]
if len(options) > 0: if len(options) > 0:
return random.choice(options) random_value = random.choice(options)
else: else:
raise ApiError.from_task("invalid enum", "You specified an enumeration field (%s)," raise ApiError.from_task("invalid enum", "You specified an enumeration field (%s),"
" with no options" % field.id, task) " with no options" % field.id, task)
else: else:
raise ApiError.from_task("unknown_lookup_option", "The settings for this auto complete field " raise ApiError.from_task("unknown_lookup_option", "The settings for this auto complete field "
"are incorrect: %s " % field.id, task) "are incorrect: %s " % field.id, task)
if field.has_property(Task.FIELD_PROP_ENUM_TYPE) and field.get_property(Task.FIELD_PROP_ENUM_TYPE) == 'checkbox':
return [random_value]
else:
return random_value
elif field.type == "long": elif field.type == "long":
return random.randint(1, 1000) return random.randint(1, 1000)
elif field.type == 'boolean': elif field.type == 'boolean':

View File

@ -1,9 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.7.0"> <definitions xmlns="https://www.omg.org/spec/DMN/20191111/MODEL/" xmlns:dmndi="https://www.omg.org/spec/DMN/20191111/DMNDI/" xmlns:dc="http://www.omg.org/spec/DMN/20180521/DC/" id="Definitions_1p34ouw" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="4.10.0">
<decision id="sponsor_funding_source" name="Sponsor Funding Source"> <decision id="sponsor_funding_source" name="Sponsor Funding Source">
<extensionElements>
<biodi:bounds x="280" y="90" width="180" height="80" />
</extensionElements>
<decisionTable id="DecisionTable_00zdxg0"> <decisionTable id="DecisionTable_00zdxg0">
<input id="InputClause_02n3ccs" label="CoCApplication Required?"> <input id="InputClause_02n3ccs" label="CoCApplication Required?">
<inputExpression id="LiteralExpression_1ju4o1o" typeRef="boolean" expressionLanguage="feel"> <inputExpression id="LiteralExpression_1ju4o1o" typeRef="boolean" expressionLanguage="feel">
@ -13,7 +10,7 @@
<output id="OutputClause_1ybi1ud" label="sponsor_funding_source" name="sponsor_funding_source" typeRef="string" /> <output id="OutputClause_1ybi1ud" label="sponsor_funding_source" name="sponsor_funding_source" typeRef="string" />
<rule id="DecisionRule_1t97mw4"> <rule id="DecisionRule_1t97mw4">
<inputEntry id="UnaryTests_0ym4ln2"> <inputEntry id="UnaryTests_0ym4ln2">
<text>true</text> <text>True</text>
</inputEntry> </inputEntry>
<outputEntry id="LiteralExpression_1pweuqc"> <outputEntry id="LiteralExpression_1pweuqc">
<text>"required"</text> <text>"required"</text>
@ -22,7 +19,7 @@
<rule id="DecisionRule_1q965wz"> <rule id="DecisionRule_1q965wz">
<description>was "disabled"</description> <description>was "disabled"</description>
<inputEntry id="UnaryTests_1mlhh3t"> <inputEntry id="UnaryTests_1mlhh3t">
<text>false</text> <text>False</text>
</inputEntry> </inputEntry>
<outputEntry id="LiteralExpression_073vd6i"> <outputEntry id="LiteralExpression_073vd6i">
<text>"required"</text> <text>"required"</text>
@ -38,4 +35,11 @@
</rule> </rule>
</decisionTable> </decisionTable>
</decision> </decision>
<dmndi:DMNDI>
<dmndi:DMNDiagram id="DMNDiagram_1vnvmg2">
<dmndi:DMNShape id="DMNShape_04xj0eb" dmnElementRef="sponsor_funding_source">
<dc:Bounds height="80" width="180" x="280" y="90" />
</dmndi:DMNShape>
</dmndi:DMNDiagram>
</dmndi:DMNDI>
</definitions> </definitions>

View File

@ -249,7 +249,8 @@ class ExampleDataLoader:
id=0, id=0,
name='test_category', name='test_category',
display_name='Test Category', display_name='Test Category',
display_order=0 display_order=0,
admin=False
) )
db.session.add(category) db.session.add(category)
db.session.commit() db.session.commit()
@ -394,4 +395,4 @@ for k in investigators.keys():
del investigator del investigator
cnt_subs = len(subs.keys()) cnt_subs = len(subs.keys())
del investigators del investigators

View File

@ -0,0 +1,43 @@
"""empty message
Revision ID: 5c63a89ee7b7
Revises: 9afbd55082a0
Create Date: 2021-09-29 10:24:20.413807
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5c63a89ee7b7'
down_revision = '9afbd55082a0'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_foreign_key(None, 'user', 'ldap_model', ['uid'], ['uid'])
op.drop_column('user', 'affiliation')
op.drop_column('user', 'email_address')
op.drop_column('user', 'eppn')
op.drop_column('user', 'title')
op.drop_column('user', 'first_name')
op.drop_column('user', 'last_name')
op.drop_column('user', 'display_name')
op.add_column('workflow_spec_category', sa.Column('admin', sa.Boolean(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('workflow_spec_category', 'admin')
op.add_column('user', sa.Column('display_name', sa.VARCHAR(), autoincrement=False, nullable=True))
op.add_column('user', sa.Column('last_name', sa.VARCHAR(), autoincrement=False, nullable=True))
op.add_column('user', sa.Column('first_name', sa.VARCHAR(), autoincrement=False, nullable=True))
op.add_column('user', sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=True))
op.add_column('user', sa.Column('eppn', sa.VARCHAR(), autoincrement=False, nullable=True))
op.add_column('user', sa.Column('email_address', sa.VARCHAR(), autoincrement=False, nullable=True))
op.add_column('user', sa.Column('affiliation', sa.VARCHAR(), autoincrement=False, nullable=True))
# ### end Alembic commands ###

View File

@ -0,0 +1,67 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_0vabmzb" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.2.0">
<bpmn:process id="Process_0ubt44i" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1ui50vr</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1ui50vr" sourceRef="StartEvent_1" targetRef="Activity_GetData" />
<bpmn:userTask id="Activity_GetData" name="Get Data" camunda:formKey="DataForm">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="some_field" label="Select" type="enum">
<camunda:properties>
<camunda:property id="enum_type" value="checkbox" />
</camunda:properties>
<camunda:value id="value_1" name="value_1" />
<camunda:value id="value_2" name="value_2" />
<camunda:value id="value_3" name="value_3" />
<camunda:value id="value_4" name="value_4" />
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1ui50vr</bpmn:incoming>
<bpmn:outgoing>Flow_07pr9lr</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_07pr9lr" sourceRef="Activity_GetData" targetRef="Activity_DisplayData" />
<bpmn:manualTask id="Activity_DisplayData" name="Display Data">
<bpmn:documentation># Enum data
{% for i in range(some_field | length) %}
{{ some_field[i] }}
{% endfor %}</bpmn:documentation>
<bpmn:incoming>Flow_07pr9lr</bpmn:incoming>
<bpmn:outgoing>Flow_13oillk</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:endEvent id="Event_0nm59tf">
<bpmn:incoming>Flow_13oillk</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_13oillk" sourceRef="Activity_DisplayData" targetRef="Event_0nm59tf" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0ubt44i">
<bpmndi:BPMNEdge id="Flow_1ui50vr_di" bpmnElement="Flow_1ui50vr">
<di:waypoint x="215" y="117" />
<di:waypoint x="271" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_07pr9lr_di" bpmnElement="Flow_07pr9lr">
<di:waypoint x="371" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_13oillk_di" bpmnElement="Flow_13oillk">
<di:waypoint x="530" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1aq717a_di" bpmnElement="Activity_DisplayData">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0nm59tf_di" bpmnElement="Event_0nm59tf">
<dc:Bounds x="592" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0krkqig_di" bpmnElement="Activity_GetData">
<dc:Bounds x="271" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,43 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_08a4c34" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.10.0">
<bpmn:process id="Process_DeleteTaskData" name="Delete Task Data" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_12ulmn8</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:endEvent id="Event_19ssfc0">
<bpmn:incoming>SequenceFlow_06786ls</bpmn:incoming>
</bpmn:endEvent>
<bpmn:userTask id="Activity_InvalidField" name="Invalid Custom Field" camunda:formKey="SomeForm">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="TextBadArea" label="Short Description" type="noSuchType" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_12ulmn8</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_06786ls</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="SequenceFlow_12ulmn8" sourceRef="StartEvent_1" targetRef="Activity_InvalidField" />
<bpmn:sequenceFlow id="SequenceFlow_06786ls" sourceRef="Activity_InvalidField" targetRef="Event_19ssfc0" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_DeleteTaskData">
<bpmndi:BPMNEdge id="SequenceFlow_06786ls_di" bpmnElement="SequenceFlow_06786ls">
<di:waypoint x="390" y="120" />
<di:waypoint x="512" y="120" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_12ulmn8_di" bpmnElement="SequenceFlow_12ulmn8">
<di:waypoint x="215" y="120" />
<di:waypoint x="290" y="120" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="102" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_19ssfc0_di" bpmnElement="Event_19ssfc0">
<dc:Bounds x="512" y="102" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1hrox53_di" bpmnElement="Activity_InvalidField">
<dc:Bounds x="290" y="80" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,5 @@
[{
"core_info": {"status": "required", "message": "This workflow is always required and recommended that it is completed after your Protocol Builder entries are done and the Personnel workflow completed"},
"protocol": {"status": "required", "message": "required"},
"data_security_plan": {"status": "disabled", "message": "This is my mocked disable message."}
}]

View File

@ -78,7 +78,8 @@ class TestFilesApi(BaseTest):
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')} data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_spec_name=%s&form_field_key=%s' % rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_spec_name=%s&form_field_key=%s' %
(workflow.study_id, workflow.id, task.get_name(), correct_name), data=data, follow_redirects=True, (workflow.study_id, workflow.id, task.get_name(), correct_name), data=data,
follow_redirects=True,
content_type='multipart/form-data', headers=self.logged_in_headers()) content_type='multipart/form-data', headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
@ -94,7 +95,8 @@ class TestFilesApi(BaseTest):
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')} data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_spec_name=%s&form_field_key=%s' % rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_spec_name=%s&form_field_key=%s' %
(workflow.study_id, workflow.id, task.get_name(), correct_name), data=data, follow_redirects=True, (workflow.study_id, workflow.id, task.get_name(), correct_name), data=data,
follow_redirects=True,
content_type='multipart/form-data', headers=self.logged_in_headers()) content_type='multipart/form-data', headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
@ -176,13 +178,25 @@ class TestFilesApi(BaseTest):
file.name = "silly_new_name.bpmn" file.name = "silly_new_name.bpmn"
rv = self.app.put('/v1.0/file/%i' % file.id, rv = self.app.put('/v1.0/file/%i' % file.id,
content_type="application/json", content_type="application/json",
data=json.dumps(FileModelSchema().dump(file)), headers=self.logged_in_headers()) data=json.dumps(FileModelSchema().dump(file)), headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
db_file = session.query(FileModel).filter_by(id=file.id).first() db_file = session.query(FileModel).filter_by(id=file.id).first()
self.assertIsNotNone(db_file) self.assertIsNotNone(db_file)
self.assertEqual(file.name, db_file.name) self.assertEqual(file.name, db_file.name)
def test_load_valid_url_for_files(self):
    """A file record returned by the API should carry a 'url' field that
    can itself be fetched successfully to retrieve the file's data.

    Fix: removed a leftover debug ``print(file_json)`` statement.
    """
    self.load_example_data()
    self.create_reference_document()
    # Pick any non-reference file to request through the API.
    file: FileModel = session.query(FileModel).filter(FileModel.is_reference == False).first()
    rv = self.app.get('/v1.0/file/%i' % file.id, content_type="application/json", headers=self.logged_in_headers())
    self.assert_success(rv)
    file_json = json.loads(rv.get_data(as_text=True))
    self.assertIsNotNone(file_json['url'])
    # The advertised url must actually serve the file content.
    file_data_rv = self.app.get(file_json['url'])
    self.assert_success(file_data_rv)
def test_update_file_data(self): def test_update_file_data(self):
self.load_example_data() self.load_example_data()
spec = session.query(WorkflowSpecModel).first() spec = session.query(WorkflowSpecModel).first()
@ -209,7 +223,7 @@ class TestFilesApi(BaseTest):
file_data = FileService.get_file_data(file_model.id) file_data = FileService.get_file_data(file_model.id)
self.assertEqual(2, file_data.version) self.assertEqual(2, file_data.version)
rv = self.app.get('/v1.0/file/%i/data' % file_json['id'], headers=self.logged_in_headers()) rv = self.app.get('/v1.0/file/%i/data' % file_json['id'], headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
data = rv.get_data() data = rv.get_data()
self.assertIsNotNone(data) self.assertIsNotNone(data)
@ -262,7 +276,8 @@ class TestFilesApi(BaseTest):
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')} data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_spec_name=%s&form_field_key=%s' % rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_spec_name=%s&form_field_key=%s' %
(workflow.study_id, workflow.id, task.get_name(), correct_name), data=data, follow_redirects=True, (workflow.study_id, workflow.id, task.get_name(), correct_name), data=data,
follow_redirects=True,
content_type='multipart/form-data', headers=self.logged_in_headers()) content_type='multipart/form-data', headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
@ -296,7 +311,8 @@ class TestFilesApi(BaseTest):
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')} data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_spec_name=%s&form_field_key=%s' % rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_spec_name=%s&form_field_key=%s' %
(workflow.study_id, workflow.id, task.get_name(), correct_name), data=data, follow_redirects=True, (workflow.study_id, workflow.id, task.get_name(), correct_name), data=data,
follow_redirects=True,
content_type='multipart/form-data', headers=self.logged_in_headers()) content_type='multipart/form-data', headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True)) json_data = json.loads(rv.get_data(as_text=True))
@ -315,8 +331,6 @@ class TestFilesApi(BaseTest):
rv = self.app.get('/v1.0/file/%i' % file_id, headers=self.logged_in_headers()) rv = self.app.get('/v1.0/file/%i' % file_id, headers=self.logged_in_headers())
self.assertEqual(404, rv.status_code) self.assertEqual(404, rv.status_code)
def test_change_primary_bpmn(self): def test_change_primary_bpmn(self):
self.load_example_data() self.load_example_data()
spec = session.query(WorkflowSpecModel).first() spec = session.query(WorkflowSpecModel).first()
@ -332,19 +346,17 @@ class TestFilesApi(BaseTest):
file = FileModelSchema().load(json_data, session=session) file = FileModelSchema().load(json_data, session=session)
# Delete the primary BPMN file for the workflow. # Delete the primary BPMN file for the workflow.
orig_model = session.query(FileModel).\ orig_model = session.query(FileModel). \
filter(FileModel.primary == True).\ filter(FileModel.primary == True). \
filter(FileModel.workflow_spec_id == spec.id).first() filter(FileModel.workflow_spec_id == spec.id).first()
rv = self.app.delete('/v1.0/file?file_id=%s' % orig_model.id, headers=self.logged_in_headers()) rv = self.app.delete('/v1.0/file?file_id=%s' % orig_model.id, headers=self.logged_in_headers())
# Set that new file to be the primary BPMN, assure it has a primary_process_id # Set that new file to be the primary BPMN, assure it has a primary_process_id
file.primary = True file.primary = True
rv = self.app.put('/v1.0/file/%i' % file.id, rv = self.app.put('/v1.0/file/%i' % file.id,
content_type="application/json", content_type="application/json",
data=json.dumps(FileModelSchema().dump(file)), headers=self.logged_in_headers()) data=json.dumps(FileModelSchema().dump(file)), headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True)) json_data = json.loads(rv.get_data(as_text=True))
self.assertTrue(json_data['primary']) self.assertTrue(json_data['primary'])
self.assertIsNotNone(json_data['primary_process_id']) self.assertIsNotNone(json_data['primary_process_id'])

View File

@ -69,6 +69,7 @@ class TestStudyApi(BaseTest):
category = study.categories[0] category = study.categories[0]
self.assertEqual("test_category", category['name']) self.assertEqual("test_category", category['name'])
self.assertEqual("Test Category", category['display_name']) self.assertEqual("Test Category", category['display_name'])
self.assertEqual(False, category['admin'])
self.assertEqual(1, len(category["workflows"])) self.assertEqual(1, len(category["workflows"]))
workflow = category["workflows"][0] workflow = category["workflows"][0]
self.assertEqual("random_fact", workflow["name"]) self.assertEqual("random_fact", workflow["name"])

View File

@ -0,0 +1,22 @@
from tests.base_test import BaseTest
class TestEnumCheckbox(BaseTest):
    """Exercises the 'enum_checkbox' workflow spec: spec validation and
    submission of a multi-select (checkbox) enum form field."""

    def test_enum_checkbox_validation(self):
        """The spec should validate with no errors reported."""
        spec = self.load_test_spec('enum_checkbox')
        response = self.app.get('/v1.0/workflow-specification/%s/validate' % spec.id,
                                headers=self.logged_in_headers())
        self.assertEqual([], response.json)

    def test_enum_checkbox(self):
        """Values selected in the checkbox field should surface in the
        following task's documentation."""
        workflow = self.create_workflow('enum_checkbox')
        api = self.get_workflow_api(workflow)
        selections = [{'value': 'value_1', 'label': 'value_1'},
                      {'value': 'value_3', 'label': 'value_3'}]
        self.complete_form(workflow, api.next_task, {'some_field': selections})
        api = self.get_workflow_api(workflow)
        documentation = api.next_task.documentation
        self.assertIn("{'value': 'value_1', 'label': 'value_1'}", documentation)
        self.assertIn("{'value': 'value_3', 'label': 'value_3'}", documentation)

View File

@ -9,6 +9,7 @@ from tests.base_test import BaseTest
from crc import session, app from crc import session, app
from crc.api.common import ApiErrorSchema from crc.api.common import ApiErrorSchema
from crc.models.protocol_builder import ProtocolBuilderStudySchema from crc.models.protocol_builder import ProtocolBuilderStudySchema
from crc.models.study import StudyModel
from crc.models.workflow import WorkflowSpecModel, WorkflowModel from crc.models.workflow import WorkflowSpecModel, WorkflowModel
from crc.services.workflow_service import WorkflowService from crc.services.workflow_service import WorkflowService
@ -147,3 +148,41 @@ class TestWorkflowSpecValidation(BaseTest):
self.assertIn('enum_with_default', final_data) self.assertIn('enum_with_default', final_data)
self.assertEqual('maybe', final_data['enum_with_default']['value']) self.assertEqual('maybe', final_data['enum_with_default']['value'])
def test_invalid_custom_field(self):
    """A spec whose form declares an unknown field type should produce
    exactly one 'invalid_field_type' validation error."""
    self.load_example_data()
    validation_errors = self.validate_workflow("invalid_custom_field")
    self.assertEqual(1, len(validation_errors))
    self.assertEqual("invalid_field_type", validation_errors[0]['code'])
@patch('crc.services.study_service.StudyService._get_study_status')
def test_disabled_spec_validation(self, mock_status):
    """Validating a workflow spec that the study's status marks as disabled
    should succeed at the HTTP level but report a single 'disabled_workflow'
    API error in the response body."""
    app.config['PB_ENABLED'] = True  # status checks only apply with Protocol Builder enabled
    self.load_example_data()
    study = session.query(StudyModel).first()

    # Register a spec whose id matches the 'data_security_plan' entry in the
    # mocked status payload below.
    disabled_spec = WorkflowSpecModel(id='data_security_plan',
                                      name='data_security_plan',
                                      display_name='Data Security Plan',
                                      description='Data Security Plan',
                                      is_master_spec=False,
                                      category_id=0,
                                      display_order=0,
                                      standalone=False,
                                      library=False)
    session.add(disabled_spec)
    session.commit()

    # Mocked Protocol Builder response: sets data_security_plan to disabled.
    mock_status.return_value = json.loads(self.protocol_builder_response('_get_study_status.json'))[0]

    # The resulting ApiError is returned in the json body, not as a failure status.
    rv = self.app.get('/v1.0/workflow-specification/%s/validate?study_id=%s' % (disabled_spec.id, study.id),
                      headers=self.logged_in_headers())
    self.assert_success(rv)
    errors = json.loads(rv.get_data())
    self.assertEqual(1, len(errors))
    self.assertEqual('disabled_workflow', errors[0]['code'])
    self.assertEqual('This workflow is disabled. This is my mocked disable message.', errors[0]['message'])