Merge branch 'dev' into 346-waiting-task-schedule

# Conflicts:
#	Pipfile.lock
#	crc/services/workflow_service.py
Dan 2021-06-30 10:14:37 -04:00
commit 9a32fadc2f
37 changed files with 538 additions and 252 deletions

View File

@@ -40,6 +40,7 @@ sentry-sdk = {extras = ["flask"],version = "==0.14.4"}
sphinx = "*"
swagger-ui-bundle = "*"
spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow.git",ref = "master"}
# spiffworkflow = {editable = true, path = "./../SpiffWorkflow"}
webtest = "*"
werkzeug = "*"
xlrd = "*"

Pipfile.lock generated
View File

@@ -244,14 +244,6 @@
"index": "pypi",
"version": "==5.5"
},
"dataclasses": {
"hashes": [
"sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf",
"sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"
],
"markers": "python_version < '3.7'",
"version": "==0.8"
},
"deprecated": {
"hashes": [
"sha256:08452d69b6b5bc66e8330adde0a4f8642e969b9e1702904d137eeb29c8ffc771",
@@ -433,14 +425,6 @@
],
"version": "==1.2.0"
},
"importlib-metadata": {
"hashes": [
"sha256:4a5611fea3768d3d967c447ab4e93f567d95db92225b43b7b238dbfb855d70bb",
"sha256:c6513572926a96458f8c8f725bf0e00108fba0c9583ade9bd15b869c9d726e33"
],
"markers": "python_version < '3.8'",
"version": "==4.6.0"
},
"inflection": {
"hashes": [
"sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417",
@@ -616,42 +600,36 @@
},
"numpy": {
"hashes": [
"sha256:012426a41bc9ab63bb158635aecccc7610e3eff5d31d1eb43bc099debc979d94",
"sha256:06fab248a088e439402141ea04f0fffb203723148f6ee791e9c75b3e9e82f080",
"sha256:0eef32ca3132a48e43f6a0f5a82cb508f22ce5a3d6f67a8329c81c8e226d3f6e",
"sha256:1ded4fce9cfaaf24e7a0ab51b7a87be9038ea1ace7f34b841fe3b6894c721d1c",
"sha256:2e55195bc1c6b705bfd8ad6f288b38b11b1af32f3c8289d6c50d47f950c12e76",
"sha256:2ea52bd92ab9f768cc64a4c3ef8f4b2580a17af0a5436f6126b08efbd1838371",
"sha256:36674959eed6957e61f11c912f71e78857a8d0604171dfd9ce9ad5cbf41c511c",
"sha256:384ec0463d1c2671170901994aeb6dce126de0a95ccc3976c43b0038a37329c2",
"sha256:39b70c19ec771805081578cc936bbe95336798b7edf4732ed102e7a43ec5c07a",
"sha256:400580cbd3cff6ffa6293df2278c75aef2d58d8d93d3c5614cd67981dae68ceb",
"sha256:43d4c81d5ffdff6bae58d66a3cd7f54a7acd9a0e7b18d97abb255defc09e3140",
"sha256:50a4a0ad0111cc1b71fa32dedd05fa239f7fb5a43a40663269bb5dc7877cfd28",
"sha256:603aa0706be710eea8884af807b1b3bc9fb2e49b9f4da439e76000f3b3c6ff0f",
"sha256:6149a185cece5ee78d1d196938b2a8f9d09f5a5ebfbba66969302a778d5ddd1d",
"sha256:759e4095edc3c1b3ac031f34d9459fa781777a93ccc633a472a5468587a190ff",
"sha256:7fb43004bce0ca31d8f13a6eb5e943fa73371381e53f7074ed21a4cb786c32f8",
"sha256:811daee36a58dc79cf3d8bdd4a490e4277d0e4b7d103a001a4e73ddb48e7e6aa",
"sha256:8b5e972b43c8fc27d56550b4120fe6257fdc15f9301914380b27f74856299fea",
"sha256:99abf4f353c3d1a0c7a5f27699482c987cf663b1eac20db59b8c7b061eabd7fc",
"sha256:a0d53e51a6cb6f0d9082decb7a4cb6dfb33055308c4c44f53103c073f649af73",
"sha256:a12ff4c8ddfee61f90a1633a4c4afd3f7bcb32b11c52026c92a12e1325922d0d",
"sha256:a4646724fba402aa7504cd48b4b50e783296b5e10a524c7a6da62e4a8ac9698d",
"sha256:a76f502430dd98d7546e1ea2250a7360c065a5fdea52b2dffe8ae7180909b6f4",
"sha256:a9d17f2be3b427fbb2bce61e596cf555d6f8a56c222bd2ca148baeeb5e5c783c",
"sha256:ab83f24d5c52d60dbc8cd0528759532736b56db58adaa7b5f1f76ad551416a1e",
"sha256:aeb9ed923be74e659984e321f609b9ba54a48354bfd168d21a2b072ed1e833ea",
"sha256:c843b3f50d1ab7361ca4f0b3639bf691569493a56808a0b0c54a051d260b7dbd",
"sha256:cae865b1cae1ec2663d8ea56ef6ff185bad091a5e33ebbadd98de2cfa3fa668f",
"sha256:cc6bd4fd593cb261332568485e20a0712883cf631f6f5e8e86a52caa8b2b50ff",
"sha256:cf2402002d3d9f91c8b01e66fbb436a4ed01c6498fffed0e4c7566da1d40ee1e",
"sha256:d051ec1c64b85ecc69531e1137bb9751c6830772ee5c1c426dbcfe98ef5788d7",
"sha256:d6631f2e867676b13026e2846180e2c13c1e11289d67da08d71cacb2cd93d4aa",
"sha256:dbd18bcf4889b720ba13a27ec2f2aac1981bd41203b3a3b27ba7a33f88ae4827",
"sha256:df609c82f18c5b9f6cb97271f03315ff0dbe481a2a02e56aeb1b1a985ce38e60"
"sha256:1a784e8ff7ea2a32e393cc53eb0003eca1597c7ca628227e34ce34eb11645a0e",
"sha256:2ba579dde0563f47021dcd652253103d6fd66165b18011dce1a0609215b2791e",
"sha256:3537b967b350ad17633b35c2f4b1a1bbd258c018910b518c30b48c8e41272717",
"sha256:3c40e6b860220ed862e8097b8f81c9af6d7405b723f4a7af24a267b46f90e461",
"sha256:598fe100b2948465cf3ed64b1a326424b5e4be2670552066e17dfaa67246011d",
"sha256:620732f42259eb2c4642761bd324462a01cdd13dd111740ce3d344992dd8492f",
"sha256:709884863def34d72b183d074d8ba5cfe042bc3ff8898f1ffad0209161caaa99",
"sha256:75579acbadbf74e3afd1153da6177f846212ea2a0cc77de53523ae02c9256513",
"sha256:7c55407f739f0bfcec67d0df49103f9333edc870061358ac8a8c9e37ea02fcd2",
"sha256:a1f2fb2da242568af0271455b89aee0f71e4e032086ee2b4c5098945d0e11cf6",
"sha256:a290989cd671cd0605e9c91a70e6df660f73ae87484218e8285c6522d29f6e38",
"sha256:ac4fd578322842dbda8d968e3962e9f22e862b6ec6e3378e7415625915e2da4d",
"sha256:ad09f55cc95ed8d80d8ab2052f78cc21cb231764de73e229140d81ff49d8145e",
"sha256:b9205711e5440954f861ceeea8f1b415d7dd15214add2e878b4d1cf2bcb1a914",
"sha256:bba474a87496d96e61461f7306fba2ebba127bed7836212c360f144d1e72ac54",
"sha256:bebab3eaf0641bba26039fb0b2c5bf9b99407924b53b1ea86e03c32c64ef5aef",
"sha256:cc367c86eb87e5b7c9592935620f22d13b090c609f1b27e49600cd033b529f54",
"sha256:ccc6c650f8700ce1e3a77668bb7c43e45c20ac06ae00d22bdf6760b38958c883",
"sha256:cf680682ad0a3bef56dae200dbcbac2d57294a73e5b0f9864955e7dd7c2c2491",
"sha256:d2910d0a075caed95de1a605df00ee03b599de5419d0b95d55342e9a33ad1fb3",
"sha256:d5caa946a9f55511e76446e170bdad1d12d6b54e17a2afe7b189112ed4412bb8",
"sha256:d89b0dc7f005090e32bb4f9bf796e1dcca6b52243caf1803fdd2b748d8561f63",
"sha256:d95d16204cd51ff1a1c8d5f9958ce90ae190be81d348b514f9be39f878b8044a",
"sha256:e4d5a86a5257843a18fb1220c5f1c199532bc5d24e849ed4b0289fb59fbd4d8f",
"sha256:e58ddb53a7b4959932f5582ac455ff90dcb05fac3f8dcc8079498d43afbbde6c",
"sha256:e80fe25cba41c124d04c662f33f6364909b985f2eb5998aaa5ae4b9587242cce",
"sha256:eda2829af498946c59d8585a9fd74da3f810866e05f8df03a86f70079c7531dd",
"sha256:fd0a359c1c17f00cb37de2969984a74320970e0ceef4808c32e00773b06649d9"
],
"version": "==1.19.5"
"version": "==1.21.0"
},
"openapi-schema-validator": {
"hashes": [
@@ -686,33 +664,27 @@
},
"pandas": {
"hashes": [
"sha256:0a643bae4283a37732ddfcecab3f62dd082996021b980f580903f4e8e01b3c5b",
"sha256:0de3ddb414d30798cbf56e642d82cac30a80223ad6fe484d66c0ce01a84d6f2f",
"sha256:19a2148a1d02791352e9fa637899a78e371a3516ac6da5c4edc718f60cbae648",
"sha256:21b5a2b033380adbdd36b3116faaf9a4663e375325831dac1b519a44f9e439bb",
"sha256:24c7f8d4aee71bfa6401faeba367dd654f696a77151a8a28bc2013f7ced4af98",
"sha256:26fa92d3ac743a149a31b21d6f4337b0594b6302ea5575b37af9ca9611e8981a",
"sha256:2860a97cbb25444ffc0088b457da0a79dc79f9c601238a3e0644312fcc14bf11",
"sha256:2b1c6cd28a0dfda75c7b5957363333f01d370936e4c6276b7b8e696dd500582a",
"sha256:2c2f7c670ea4e60318e4b7e474d56447cf0c7d83b3c2a5405a0dbb2600b9c48e",
"sha256:3be7a7a0ca71a2640e81d9276f526bca63505850add10206d0da2e8a0a325dae",
"sha256:4c62e94d5d49db116bef1bd5c2486723a292d79409fc9abd51adf9e05329101d",
"sha256:5008374ebb990dad9ed48b0f5d0038124c73748f5384cc8c46904dace27082d9",
"sha256:5447ea7af4005b0daf695a316a423b96374c9c73ffbd4533209c5ddc369e644b",
"sha256:573fba5b05bf2c69271a32e52399c8de599e4a15ab7cec47d3b9c904125ab788",
"sha256:5a780260afc88268a9d3ac3511d8f494fdcf637eece62fb9eb656a63d53eb7ca",
"sha256:70865f96bb38fec46f7ebd66d4b5cfd0aa6b842073f298d621385ae3898d28b5",
"sha256:731568be71fba1e13cae212c362f3d2ca8932e83cb1b85e3f1b4dd77d019254a",
"sha256:b61080750d19a0122469ab59b087380721d6b72a4e7d962e4d7e63e0c4504814",
"sha256:bf23a3b54d128b50f4f9d4675b3c1857a688cc6731a32f931837d72effb2698d",
"sha256:c16d59c15d946111d2716856dd5479221c9e4f2f5c7bc2d617f39d870031e086",
"sha256:c61c043aafb69329d0f961b19faa30b1dab709dd34c9388143fc55680059e55a",
"sha256:c94ff2780a1fd89f190390130d6d36173ca59fcfb3fe0ff596f9a56518191ccb",
"sha256:edda9bacc3843dfbeebaf7a701763e68e741b08fccb889c003b0a52f0ee95782",
"sha256:f10fc41ee3c75a474d3bdf68d396f10782d013d7f67db99c0efbfd0acb99701b"
"sha256:0c34b89215f984a9e4956446e0a29330d720085efa08ea72022387ee37d8b373",
"sha256:0dbd125b0e44e5068163cbc9080a00db1756a5e36309329ae14fd259747f2300",
"sha256:1102d719038e134e648e7920672188a00375f3908f0383fd3b202fbb9d2c3a95",
"sha256:14abb8ea73fce8aebbb1fb44bec809163f1c55241bcc1db91c2c780e97265033",
"sha256:25fc8ef6c6beb51c9224284a1ad89dfb591832f23ceff78845f182de35c52356",
"sha256:38e7486410de23069392bdf1dc7297ae75d2d67531750753f3149c871cd1c6e3",
"sha256:4bfbf62b00460f78a8bc4407112965c5ab44324f34551e8e1f4cac271a07706c",
"sha256:78de96c1174bcfdbe8dece9c38c2d7994e407fd8bb62146bb46c61294bcc06ef",
"sha256:7b09293c7119ab22ab3f7f086f813ac2acbfa3bcaaaeb650f4cddfb5b9fa9be4",
"sha256:821d92466fcd2826656374a9b6fe4f2ec2ba5e370cce71d5a990577929d948df",
"sha256:9244fb0904512b074d8c6362fb13aac1da6c4db94372760ddb2565c620240264",
"sha256:94ca6ea3f46f44a979a38a4d5a70a88cee734f7248d7aeeed202e6b3ba485af1",
"sha256:a67227e17236442c6bc31c02cb713b5277b26eee204eac14b5aecba52492e3a3",
"sha256:c862cd72353921c102166784fc4db749f1c3b691dd017fc36d9df2c67a9afe4e",
"sha256:d9e6edddeac9a8e473391d2d2067bb3c9dc7ad79fd137af26a39ee425c2b4c78",
"sha256:e36515163829e0e95a6af10820f178dd8768102482c01872bff8ae592e508e58",
"sha256:f20e4b8a7909f5a0c0a9e745091e3ea18b45af9f73496a4d498688badbdac7ea",
"sha256:fc9215dd1dd836ff26b896654e66b2dfcf4bbb18aa4c1089a79bab527b665a90"
],
"index": "pypi",
"version": "==1.1.5"
"version": "==1.2.5"
},
"psycopg2-binary": {
"hashes": [
@@ -1058,15 +1030,6 @@
"index": "pypi",
"version": "==0.0.8"
},
"typing-extensions": {
"hashes": [
"sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497",
"sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342",
"sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"
],
"markers": "python_version < '3.8'",
"version": "==3.10.0.0"
},
"tzlocal": {
"hashes": [
"sha256:643c97c5294aedc737780a49d9df30889321cbe1204eac2c2ec6134035a92e44",
@@ -1139,13 +1102,6 @@
],
"index": "pypi",
"version": "==1.4.3"
},
"zipp": {
"hashes": [
"sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76",
"sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"
],
"version": "==3.4.1"
}
},
"develop": {
@@ -1214,14 +1170,6 @@
"index": "pypi",
"version": "==5.5"
},
"importlib-metadata": {
"hashes": [
"sha256:4a5611fea3768d3d967c447ab4e93f567d95db92225b43b7b238dbfb855d70bb",
"sha256:c6513572926a96458f8c8f725bf0e00108fba0c9583ade9bd15b869c9d726e33"
],
"markers": "python_version < '3.8'",
"version": "==4.6.0"
},
"iniconfig": {
"hashes": [
"sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3",
@@ -1279,22 +1227,6 @@
"sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
],
"version": "==0.10.2"
},
"typing-extensions": {
"hashes": [
"sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497",
"sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342",
"sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"
],
"markers": "python_version < '3.8'",
"version": "==3.10.0.0"
},
"zipp": {
"hashes": [
"sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76",
"sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"
],
"version": "==3.4.1"
}
}
}

View File

@@ -83,8 +83,6 @@ paths:
type : integer
get:
operationId: crc.api.file.get_document_directory
security:
- auth_admin: ['secret']
summary: Returns a directory of all files for study in a nested structure
tags:
- Document Categories

View File

@@ -1,3 +1,4 @@
import hashlib
import io
import json
@@ -78,16 +79,15 @@ def send_email(subject, address, body, data=None):
def evaluate_python_expression(body):
"""Evaluate the given python expression, returning its result. This is useful if the
front end application needs to do real-time processing on task data. If for instance
there is a hide expression that is based on a previous value in the same form."""
there is a hide expression that is based on a previous value in the same form.
The response includes both the result and a hash of the original query; subsequent calls
with the same hash are unnecessary. """
try:
script_engine = CustomBpmnScriptEngine()
result = script_engine.eval(body['expression'], body['data'])
return {"result": result}
return {"result": result, "expression": body['expression'], "key": body['key']}
except Exception as e:
raise ApiError("expression_error", f"Failed to evaluate the expression '%s'. %s" %
(body['expression'], str(e)),
task_data = body["data"])
return {"result": False, "expression": body['expression'], "key": body['key'], "error": str(e)}
def send_test_email(subject, address, message, data=None):
rendered, wrapped = EmailService().get_rendered_content(message, data)

View File

@@ -1,5 +1,7 @@
import hashlib
import pandas as pd
from pandas._libs.missing import NA
from crc import session, app
from crc.api.common import ApiError
from crc.models.file import FileModel, FileDataModel
@@ -288,7 +290,7 @@ def get_changed_files(remote,workflow_spec_id,as_df=False):
changedfiles['new'] = False
changedfiles.loc[changedfiles.index.isin(left['filename']), 'new'] = True
changedfiles.loc[changedfiles.index.isin(right['filename']),'new'] = True
changedfiles = changedfiles.replace({pd.np.nan: None})
changedfiles = changedfiles.replace({NA: None})
# return the list as a dict, let swagger convert it to json
if as_df:
return changedfiles

View File

@@ -1,7 +1,8 @@
from flask_marshmallow.sqla import SQLAlchemyAutoSchema
from sqlalchemy import func
from crc import db, ma
from crc import db
class DataStoreModel(db.Model):
__tablename__ = 'data_store'

View File

@@ -108,14 +108,9 @@ class File(object):
instance.irb_doc_code = model.irb_doc_code
instance.type = model.type
if model.irb_doc_code and model.irb_doc_code in doc_dictionary:
instance.category = "/".join(filter(None, [doc_dictionary[model.irb_doc_code]['category1'],
doc_dictionary[model.irb_doc_code]['category2'],
doc_dictionary[model.irb_doc_code]['category3']]))
instance.description = doc_dictionary[model.irb_doc_code]['description']
instance.download_name = "/".join([instance.category, model.name])
instance.document = doc_dictionary[model.irb_doc_code]
else:
instance.category = ""
instance.description = ""
instance.document = {}
if data_model:
instance.last_modified = data_model.date_created
instance.latest_version = data_model.version
@@ -146,9 +141,8 @@ class FileSchema(Schema):
model = File
fields = ["id", "name", "is_status", "is_reference", "content_type",
"primary", "primary_process_id", "workflow_spec_id", "workflow_id",
"irb_doc_code", "last_modified", "latest_version", "type", "categories",
"description", "category", "download_name", "size", "data_store"]
"irb_doc_code", "last_modified", "latest_version", "type", "size", "data_store",
"document"]
unknown = INCLUDE
type = EnumField(FileType)
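(A rough sketch of a serialized file under the new schema, based on the tests further down — the id and doc code are illustrative.)

file_json = {
    "id": 42,                              # illustrative
    "name": "anything.png",
    "irb_doc_code": "UVACompl_PRCAppr",    # hypothetical doc code
    "document": {                          # full entry from the reference document dictionary
        "category1": "UVA Compliance",
        "category2": "PRC Approval",
        "description": "Cancer Center's PRC Approval Form",
    },
}
# The old category / description / download_name fields are gone; clients now read them from "document".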

View File

@@ -0,0 +1,41 @@
from crc import session
from crc.api.common import ApiError
from crc.models.workflow import WorkflowModel, WorkflowSpecModel
from crc.scripts.script import Script
from crc.services.workflow_processor import WorkflowProcessor
class ResetWorkflow(Script):
def get_description(self):
return """Reset a workflow. Run by master workflow.
Designed for completed workflows where we need to force rerunning the workflow.
e.g., a new PI."""
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
return 'workflow_name' in kwargs
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
if 'workflow_name' in kwargs.keys():
workflow_name = kwargs['workflow_name']
workflow_spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(name=workflow_name).first()
if workflow_spec:
workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(
workflow_spec_id=workflow_spec.id,
study_id=study_id).first()
if workflow_model:
workflow_processor = WorkflowProcessor.reset(workflow_model, clear_data=False, delete_files=False)
return workflow_processor
else:
raise ApiError(code='missing_workflow_model',
message=f'No WorkflowModel returned. \
workflow_spec_id: {workflow_spec.id} \
study_id: {study_id}')
else:
raise ApiError(code='missing_workflow_spec',
message=f'No WorkflowSpecModel returned. \
name: {workflow_name}')
else:
raise ApiError(code='missing_workflow_name',
message='Reset workflow requires a workflow name')
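(A minimal usage sketch, mirroring the new test further down; the workflow name is illustrative.)

# From a master-workflow script task, as in the reset_workflow.bpmn added below:
#     value = reset_workflow(workflow_name='some_workflow')
# Or invoked directly, as the new test does:
ResetWorkflow().do_task(task, study_id, workflow_id, workflow_name='two_user_tasks')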

View File

@@ -1,6 +1,7 @@
import json
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.util.metrics import timeit
from crc import session
from crc.api.common import ApiError
@@ -9,7 +10,6 @@ from crc.models.protocol_builder import ProtocolBuilderInvestigatorType
from crc.models.study import StudyModel, StudySchema
from crc.api import workflow as workflow_api
from crc.scripts.script import Script
from crc.services.cache_service import timeit
from crc.services.file_service import FileService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.study_service import StudyService

View File

@@ -1,30 +1,11 @@
import time
from SpiffWorkflow import Task
cache_store = {}
import time
def firsttime():
return time.time()
def sincetime(txt,lasttime):
thistime=firsttime()
print('%2.4f sec | %s' % (thistime-lasttime, txt))
return thistime
def timeit(f):
def timed(*args, **kw):
ts = time.time()
result = f(*args, **kw)
te = time.time()
print('%2.4f sec | func:%r args:[%r, %r] ' % (te-ts, f.__name__, args, kw))
return result
return timed
# first pass - meant to be down and dirty
def purge_cache(now):
dellist = []

View File

@@ -2,6 +2,8 @@ import hashlib
import json
import os
from datetime import datetime
import pandas as pd
from github import Github, GithubObject, UnknownObjectException
from uuid import UUID
from lxml import etree
@@ -9,6 +11,7 @@ from lxml import etree
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from lxml.etree import XMLSyntaxError
from pandas import ExcelFile
from pandas._libs.missing import NA
from sqlalchemy import desc
from sqlalchemy.exc import IntegrityError
@@ -144,6 +147,10 @@ class FileService(object):
data_model = FileService.get_reference_file_data(reference_file_name)
xls = ExcelFile(data_model.data, engine='openpyxl')
df = xls.parse(xls.sheet_names[0])
df = df.convert_dtypes()
df = pd.DataFrame(df).dropna(how='all') # Drop null rows
df = pd.DataFrame(df).replace({NA: None}) # replace NA with None.
for c in int_columns:
df[c] = df[c].fillna(0)
df = df.astype({c: 'Int64'})
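(A tiny standalone sketch, not from the repo, of the pandas behavior this block relies on: convert_dtypes() promotes columns to nullable dtypes whose missing value is pd.NA, which the JSON layer can't serialize — hence the NA-to-None replacement.)

import pandas as pd

df = pd.DataFrame({"COMPANY_ID": [1, None]}).convert_dtypes()  # column becomes nullable Int64 holding pd.NA
df = df.replace({pd.NA: None})  # pd.NA is the same singleton as pandas._libs.missing.NA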

View File

@@ -3,7 +3,9 @@ import re
from collections import OrderedDict
import pandas as pd
from pandas import ExcelFile, np
import numpy
from pandas import ExcelFile
from pandas._libs.missing import NA
from sqlalchemy import desc
from sqlalchemy.sql.functions import GenericFunction
@@ -142,9 +144,12 @@ class LookupService(object):
in a way that can be searched and returned via an api call - rather than sending the full set of
options along with the form. It will only open the file and process the options if something has
changed. """
xls = ExcelFile(data_model.data)
xls = ExcelFile(data_model.data, engine='openpyxl')
df = xls.parse(xls.sheet_names[0]) # Currently we only look at the first sheet.
df = pd.DataFrame(df).replace({np.nan: None})
df = df.convert_dtypes()
df = pd.DataFrame(df).dropna(how='all') # Drop null rows
df = pd.DataFrame(df).replace({NA: None})
if value_column not in df:
raise ApiError("invalid_enum",
"The file %s does not contain a column named % s" % (data_model.file_model.name,

View File

@@ -26,8 +26,6 @@ from crc.services.file_service import FileService
from crc.services.ldap_service import LdapService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.workflow_processor import WorkflowProcessor
from SpiffWorkflow import Task as SpiffTask
from crc.services.cache_service import timeit
class StudyService(object):
"""Provides common tools for working with a Study"""

View File

@@ -1,6 +1,7 @@
import re
from SpiffWorkflow.serializer.exceptions import MissingSpecError
from SpiffWorkflow.util.metrics import timeit, firsttime, sincetime
from lxml import etree
import shlex
from datetime import datetime
@@ -28,7 +29,6 @@ from crc.scripts.script import Script
from crc.services.file_service import FileService
from crc import app
from crc.services.user_service import UserService
from crc.services.cache_service import timeit, firsttime, sincetime
class CustomBpmnScriptEngine(BpmnScriptEngine):
"""This is a custom script processor that can be easily injected into Spiff Workflow.

View File

@@ -7,6 +7,7 @@ from typing import List
import jinja2
from SpiffWorkflow import Task as SpiffTask, WorkflowException, NavItem
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask
from SpiffWorkflow.bpmn.specs.MultiInstanceTask import MultiInstanceTask
@@ -16,6 +17,7 @@ from SpiffWorkflow.bpmn.specs.UserTask import UserTask
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
from SpiffWorkflow.specs import CancelTask, StartTask, MultiChoice
from SpiffWorkflow.util.deep_merge import DeepMerge
from SpiffWorkflow.util.metrics import timeit
from jinja2 import Template
@@ -23,7 +25,7 @@ from crc import db, app
from crc.api.common import ApiError
from crc.models.api_models import Task, MultiInstanceType, WorkflowApi
from crc.models.data_store import DataStoreModel
from crc.models.file import LookupDataModel, FileModel
from crc.models.file import LookupDataModel, FileModel, File, FileSchema
from crc.models.study import StudyModel
from crc.models.task_event import TaskEventModel
from crc.models.user import UserModel, UserModelSchema
@@ -81,14 +83,15 @@ class WorkflowService(object):
return workflow_model
@staticmethod
def delete_test_data():
def delete_test_data(workflow: WorkflowModel):
db.session.delete(workflow)
# Also, delete any test study or user models that may have been created.
for study in db.session.query(StudyModel).filter(StudyModel.user_uid == "test"):
StudyService.delete_study(study.id)
db.session.commit()
user = db.session.query(UserModel).filter_by(uid="test").first()
if user:
db.session.delete(user)
db.session.commit()
@staticmethod
def do_waiting():
@@ -102,6 +105,7 @@ class WorkflowService(object):
@staticmethod
@timeit
def test_spec(spec_id, validate_study_id=None, required_only=False):
"""Runs a spec through its paces to see if it results in any errors.
Not fool-proof, but a good sanity check. Returns the final data
@@ -112,17 +116,11 @@
"""
workflow_model = WorkflowService.make_test_workflow(spec_id, validate_study_id)
try:
processor = WorkflowProcessor(workflow_model, validate_only=True)
except WorkflowException as we:
WorkflowService.delete_test_data()
raise ApiError.from_workflow_exception("workflow_validation_exception", str(we), we)
count = 0
while not processor.bpmn_workflow.is_completed():
if count < 100: # check for infinite loop
try:
processor.bpmn_workflow.get_deep_nav_list() # Assure no errors with navigation.
processor.bpmn_workflow.do_engine_steps()
tasks = processor.bpmn_workflow.get_tasks(SpiffTask.READY)
@@ -141,19 +139,19 @@
message='Forms must include a Form Key.',
task_id=task.id,
task_name=task.get_name())
WorkflowService._process_documentation(task)
WorkflowService.populate_form_with_random_data(task, task_api, required_only)
processor.complete_task(task)
count += 1
except WorkflowException as we:
WorkflowService.delete_test_data()
raise ApiError.from_workflow_exception("workflow_validation_exception", str(we), we)
else:
if count >= 100:
raise ApiError.from_task(code='validation_loop',
message=f'There appears to be an infinite loop in the validation. Task is {task.task_spec.description}',
task=task)
WorkflowService.delete_test_data()
WorkflowService._process_documentation(processor.bpmn_workflow.last_task.parent.parent)
except WorkflowException as we:
raise ApiError.from_workflow_exception("workflow_validation_exception", str(we), we)
finally:
WorkflowService.delete_test_data(workflow_model)
return processor.bpmn_workflow.last_task.data
@staticmethod
@@ -270,25 +268,47 @@ class WorkflowService(object):
"""Looks through the fields in a submitted form, acting on any properties."""
if not hasattr(task.task_spec, 'form'): return
for field in task.task_spec.form.fields:
if field.has_property(Task.FIELD_PROP_DOC_CODE) and \
field.type == Task.FIELD_TYPE_FILE:
file_id = task.data[field.id]
data = task.data
if field.has_property(Task.FIELD_PROP_REPEAT):
repeat_array = task.data[field.get_property(Task.FIELD_PROP_REPEAT)]
for repeat_data in repeat_array:
WorkflowService.__post_process_field(task, field, repeat_data)
else:
WorkflowService.__post_process_field(task, field, data)
@staticmethod
def __post_process_field(task, field, data):
if field.has_property(Task.FIELD_PROP_DOC_CODE) and field.id in data:
# This is generally handled by the front end, but it is possible that the file was uploaded BEFORE
# the doc_code was correctly set, so this is a stopgap measure to ensure we still set it correctly.
file_id = data[field.id]["id"]
doc_code = task.workflow.script_engine.eval(field.get_property(Task.FIELD_PROP_DOC_CODE), data)
file = db.session.query(FileModel).filter(FileModel.id == file_id).first()
doc_code = WorkflowService.evaluate_property(Task.FIELD_PROP_DOC_CODE, field, task)
if(file):
file.irb_doc_code = doc_code
db.session.commit()
# Set the doc code on the file.
else:
# We have a problem: the file was removed, but it is still referenced in the data.
# At least attempt to clear out the data.
data = {}
if field.has_property(Task.FIELD_PROP_FILE_DATA) and \
field.get_property(Task.FIELD_PROP_FILE_DATA) in task.data:
file_id = task.data[field.get_property(Task.FIELD_PROP_FILE_DATA)]
data_store = DataStoreModel(file_id=file_id, key=field.id, value=task.data[field.id])
field.get_property(Task.FIELD_PROP_FILE_DATA) in data and \
field.id in data:
file_id = data[field.get_property(Task.FIELD_PROP_FILE_DATA)]["id"]
data_store = DataStoreModel(file_id=file_id, key=field.id, value=data[field.id])
db.session.add(data_store)
@staticmethod
def evaluate_property(property_name, field, task):
expression = field.get_property(property_name)
data = task.data
if field.has_property(Task.FIELD_PROP_REPEAT):
# Then you must evaluate the expression based on the data within the group only.
group = field.get_property(Task.FIELD_PROP_REPEAT)
if group in task.data:
data = task.data[group][0]
try:
return task.workflow.script_engine.evaluate_expression(task, expression)
return task.workflow.script_engine.eval(expression, data)
except Exception as e:
message = f"The field {field.id} contains an invalid expression. {e}"
raise ApiError.from_task(f'invalid_{property_name}', message, task=task)
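(A sketch of the task-data shape these helpers assume for a field inside a repeat group; the group and field names are illustrative, borrowed from the new BPMN below.)

# task.data might look like:
# {
#     "PCRApproval": [
#         {"Some_File": {"id": 17}, "Language": "English"},
#         {"Some_File": {"id": 18}, "Language": "French"},
#     ]
# }
# evaluate_property() evaluates the expression against task.data["PCRApproval"][0];
# __post_process_field() runs once per entry in the repeat array.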
@@ -369,7 +389,7 @@ class WorkflowService(object):
if len(field.options) > 0:
random_choice = random.choice(field.options)
if isinstance(random_choice, dict):
return {'value': random_choice['id'], 'label': random_choice['name']}
return {'value': random_choice['id'], 'label': random_choice['name'], 'data': random_choice['data']}
else:
# fixme: why is it sometimes an EnumFormFieldOption, and other times not?
return {'value': random_choice.id, 'label': random_choice.name}
@@ -399,9 +419,14 @@
elif field.type == 'boolean':
return random.choice([True, False])
elif field.type == 'file':
# fixme: produce something sensible for files.
return random.randint(1, 100)
# fixme: produce something sensible for files.
doc_code = field.id
if field.has_property('doc_code'):
doc_code = WorkflowService.evaluate_property('doc_code', field, task)
file_model = FileModel(name="test.png",
irb_doc_code = field.id)
doc_dict = FileService.get_doc_dictionary()
file = File.from_models(file_model, None, doc_dict)
return FileSchema().dump(file)
elif field.type == 'files':
return random.randrange(1, 100)
else:
@@ -690,7 +715,7 @@ class WorkflowService(object):
raise ApiError.from_task("invalid_enum", f"The label column '{label_column}' does not exist for item {item}",
task=spiff_task)
options.append({"id": item[value_column], "name": item[label_column], "data": item})
options.append(Box({"id": item[value_column], "name": item[label_column], "data": item}))
return options
@staticmethod

View File

@@ -80,7 +80,7 @@ sphinxcontrib-serializinghtml==1.1.4
spiffworkflow
sqlalchemy==1.3.20
swagger-ui-bundle==0.0.8
urllib3==1.26.4
urllib3==1.26.5
waitress==1.4.4
webob==1.8.6
webtest==2.0.35

Binary file not shown.

View File

@@ -10,7 +10,7 @@
<camunda:formData>
<camunda:formField id="empty_select" label="Select One" type="enum">
<camunda:properties>
<camunda:property id="spreadsheet.name" value="empty_spreadsheet.xls" />
<camunda:property id="spreadsheet.name" value="empty_spreadsheet.xlsx" />
<camunda:property id="spreadsheet.value.column" value="COMPANY_ID" />
<camunda:property id="spreadsheet.label.column" value="COMPANY_NAME" />
</camunda:properties>

Binary file not shown.

View File

@@ -14,7 +14,7 @@
<camunda:formData>
<camunda:formField id="AllTheNames" label="Select a value" type="enum">
<camunda:properties>
<camunda:property id="spreadsheet.name" value="customer_list.xls" />
<camunda:property id="spreadsheet.name" value="customer_list.xlsx" />
<camunda:property id="spreadsheet.value.column" value="CUSTOMER_NUMBER" />
<camunda:property id="spreadsheet.label.column" value="CUSTOMER_NAME" />
</camunda:properties>

View File

@@ -14,7 +14,7 @@
<camunda:formData>
<camunda:formField id="sponsor" label="Select a value" type="autocomplete">
<camunda:properties>
<camunda:property id="spreadsheet.name" value="sponsors.xls" />
<camunda:property id="spreadsheet.name" value="sponsors.xlsx" />
<camunda:property id="spreadsheet.value.column" value="CUSTOMER_NUMBER" />
<camunda:property id="spreadsheet.label.column" value="CUSTOMER_NAME" />
</camunda:properties>

Binary file not shown.

View File

@@ -16,9 +16,9 @@
OGC will upload the Non-Funded Executed Agreement after it has been negotiated by OSP contract negotiator.</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="file_type" type="enum" defaultValue="FileType1">
<camunda:value id="FileType1" name="My First file type" />
<camunda:value id="FileType2" name="My second file type" />
<camunda:formField id="file_type" type="enum" defaultValue="AD_CoCApp">
<camunda:value id="AD_CoCApp" name="Ancillary Documents / Case Report Form" />
<camunda:value id="AD_CoCAppr" name="Ancillary Documents / CoC Approval" />
</camunda:formField>
<camunda:formField id="Some_File" label="Upload File" type="file">
<camunda:properties>
@@ -48,14 +48,14 @@ OGC will upload the Non-Funded Executed Agreement after it has been negotiated b
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Finance">
<bpmndi:BPMNEdge id="SequenceFlow_0ea9hvd_di" bpmnElement="SequenceFlow_0ea9hvd">
<di:waypoint x="148" y="117" />
<di:waypoint x="210" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0t55959_di" bpmnElement="Flow_0t55959">
<di:waypoint x="310" y="117" />
<di:waypoint x="392" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_0ea9hvd_di" bpmnElement="SequenceFlow_0ea9hvd">
<di:waypoint x="148" y="117" />
<di:waypoint x="210" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="StartEvent_1p6s47e_di" bpmnElement="StartEvent_1p6s47e">
<dc:Bounds x="112" y="99" width="36" height="36" />
</bpmndi:BPMNShape>

View File

@@ -0,0 +1,60 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1wrlvk8" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="Finance" isExecutable="true">
<bpmn:startEvent id="StartEvent_1p6s47e">
<bpmn:outgoing>SequenceFlow_0ea9hvd</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:endEvent id="EndEvent_14p904o">
<bpmn:incoming>Flow_0t55959</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_0ea9hvd" sourceRef="StartEvent_1p6s47e" targetRef="Activity_0neioh9" />
<bpmn:userTask id="Activity_0neioh9" name="Upload Executed Non-Funded" camunda:formKey="FormKey_ExecutedNonFunded">
<bpmn:documentation>#### Non-Funded Executed Agreement
#### Process:
OGC will upload the Non-Funded Executed Agreement after it has been negotiated by OSP contract negotiator.</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="AD_CoCApp" label="Upload File" type="file" />
<camunda:formField id="Language" label="Language" type="string" defaultValue="English">
<camunda:properties>
<camunda:property id="group" value="PCRApproval" />
<camunda:property id="file_data" value="AD_CoCApp" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="Date" label="Version Date" type="date">
<camunda:properties>
<camunda:property id="group" value="PCRApproval" />
<camunda:property id="file_data" value="AD_CoCApp" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0ea9hvd</bpmn:incoming>
<bpmn:outgoing>Flow_0t55959</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_0t55959" sourceRef="Activity_0neioh9" targetRef="EndEvent_14p904o" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Finance">
<bpmndi:BPMNEdge id="Flow_0t55959_di" bpmnElement="Flow_0t55959">
<di:waypoint x="310" y="117" />
<di:waypoint x="392" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_0ea9hvd_di" bpmnElement="SequenceFlow_0ea9hvd">
<di:waypoint x="148" y="117" />
<di:waypoint x="210" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="StartEvent_1p6s47e_di" bpmnElement="StartEvent_1p6s47e">
<dc:Bounds x="112" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_14p904o_di" bpmnElement="EndEvent_14p904o">
<dc:Bounds x="392" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0neioh9_di" bpmnElement="Activity_0neioh9">
<dc:Bounds x="210" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@@ -0,0 +1,82 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_0vny0hv" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_ResetWorkflow" name="Reset Workflow" isExecutable="true">
<bpmn:documentation>Use this process to reset a workflow for the current study. You must enter the name of the workflow. I.e., lower case with underscores.</bpmn:documentation>
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_0i872g2</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_0i872g2" sourceRef="StartEvent_1" targetRef="Task_GetWorkflow" />
<bpmn:sequenceFlow id="SequenceFlow_1q2ton3" sourceRef="Task_GetWorkflow" targetRef="Task_ResetWorkflow" />
<bpmn:sequenceFlow id="SequenceFlow_0x127gc" sourceRef="Task_ResetWorkflow" targetRef="Task_DisplayWorkflow" />
<bpmn:endEvent id="EndEvent_0fdym05">
<bpmn:incoming>SequenceFlow_0yy50p2</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_0yy50p2" sourceRef="Task_DisplayWorkflow" targetRef="EndEvent_0fdym05" />
<bpmn:userTask id="Task_GetWorkflow" name="Get Workflow" camunda:formKey="WorkflowForm">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="workflow_name" label="Workflow Name" type="string">
<camunda:validation>
<camunda:constraint name="required" config="True" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0i872g2</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1q2ton3</bpmn:outgoing>
</bpmn:userTask>
<bpmn:scriptTask id="Task_ResetWorkflow" name="Reset Workflow">
<bpmn:incoming>SequenceFlow_1q2ton3</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0x127gc</bpmn:outgoing>
<bpmn:script>value = reset_workflow(workflow_name=workflow_name)</bpmn:script>
</bpmn:scriptTask>
<bpmn:manualTask id="Task_DisplayWorkflow" name="Display Workflow">
<bpmn:documentation># Reset Workflow
&lt;div&gt;
{% if value %}
&lt;span&gt;Workflow {{workflow_name}} was reset.&lt;/span&gt;
{% else %}
&lt;span&gt;There was a problem resetting workflow {{workflow_name}}.&lt;/span&gt;
{% endif %}
&lt;/div&gt;
</bpmn:documentation>
<bpmn:incoming>SequenceFlow_0x127gc</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0yy50p2</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_ResetWorkflow">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0i872g2_di" bpmnElement="SequenceFlow_0i872g2">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1q2ton3_di" bpmnElement="SequenceFlow_1q2ton3">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_0x127gc_di" bpmnElement="SequenceFlow_0x127gc">
<di:waypoint x="530" y="117" />
<di:waypoint x="590" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="EndEvent_0fdym05_di" bpmnElement="EndEvent_0fdym05">
<dc:Bounds x="752" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0yy50p2_di" bpmnElement="SequenceFlow_0yy50p2">
<di:waypoint x="690" y="117" />
<di:waypoint x="752" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="UserTask_0li5ksb_di" bpmnElement="Task_GetWorkflow">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ScriptTask_07qq4pl_di" bpmnElement="Task_ResetWorkflow">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ManualTask_0ianu3f_di" bpmnElement="Task_DisplayWorkflow">
<dc:Bounds x="590" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@@ -0,0 +1,82 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_04zta39" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_NameAge" name="Name Age" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1oykjju</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1oykjju" sourceRef="StartEvent_1" targetRef="Task_GetName" />
<bpmn:sequenceFlow id="SequenceFlow_0z8c3ob" sourceRef="Task_GetName" targetRef="Task_GetAge" />
<bpmn:sequenceFlow id="SequenceFlow_1jfrd7w" sourceRef="Task_GetAge" targetRef="Task_PrintData" />
<bpmn:userTask id="Task_GetName" name="Get Name" camunda:formKey="NameForm">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="name" label="Name" type="string">
<camunda:validation>
<camunda:constraint name="required" config="True" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_1oykjju</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0z8c3ob</bpmn:outgoing>
</bpmn:userTask>
<bpmn:userTask id="Task_GetAge" name="Get Age" camunda:formKey="AgeForm">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="age" label="Age" type="long">
<camunda:validation>
<camunda:constraint name="required" config="True" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0z8c3ob</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1jfrd7w</bpmn:outgoing>
</bpmn:userTask>
<bpmn:manualTask id="Task_PrintData" name="Print Data">
<bpmn:documentation># Data
{{name}} is {{age}} years old.</bpmn:documentation>
<bpmn:incoming>SequenceFlow_1jfrd7w</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0yjk26l</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:endEvent id="EndEvent_125fqq9">
<bpmn:incoming>SequenceFlow_0yjk26l</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_0yjk26l" sourceRef="Task_PrintData" targetRef="EndEvent_125fqq9" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_NameAge">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1oykjju_di" bpmnElement="SequenceFlow_1oykjju">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_0z8c3ob_di" bpmnElement="SequenceFlow_0z8c3ob">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1jfrd7w_di" bpmnElement="SequenceFlow_1jfrd7w">
<di:waypoint x="530" y="117" />
<di:waypoint x="590" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="UserTask_1jrkk5z_di" bpmnElement="Task_GetName">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="UserTask_080wksg_di" bpmnElement="Task_GetAge">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ManualTask_1k7rizm_di" bpmnElement="Task_PrintData">
<dc:Bounds x="590" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_125fqq9_di" bpmnElement="EndEvent_125fqq9">
<dc:Bounds x="752" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0yjk26l_di" bpmnElement="SequenceFlow_0yjk26l">
<di:waypoint x="690" y="117" />
<di:waypoint x="752" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@@ -273,6 +273,22 @@ class TestFilesApi(BaseTest):
json_data = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(json_data), 1)
def test_add_file_returns_document_metadata(self):
self.create_reference_document()
workflow = self.create_workflow('file_upload_form_single')
processor = WorkflowProcessor(workflow)
processor.do_engine_steps()
task = processor.next_task()
correct_name = task.task_spec.form.fields[0].id
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_id=%i&form_field_key=%s' %
(workflow.study_id, workflow.id, task.id, correct_name), data=data, follow_redirects=True,
content_type='multipart/form-data', headers=self.logged_in_headers())
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
self.assertEqual('Ancillary Document', json_data['document']['category1'])
self.assertEqual('CRC', json_data['document']['Who Uploads?'])
def test_delete_file(self):
self.load_example_data()

View File

@@ -95,13 +95,11 @@ class TestStudyApi(BaseTest):
self.assert_success(api_response)
study = StudySchema().loads(api_response.get_data(as_text=True))
self.assertEqual(1, len(study.files))
self.assertEqual("UVA Compliance/PRC Approval", study.files[0]["category"])
self.assertEqual("Cancer Center's PRC Approval Form", study.files[0]["description"])
self.assertEqual("UVA Compliance/PRC Approval/anything.png", study.files[0]["download_name"])
self.assertEqual("UVA Compliance", study.files[0]["document"]["category1"])
self.assertEqual("Cancer Center's PRC Approval Form", study.files[0]["document"]["description"])
# TODO: WRITE A TEST FOR STUDY FILES
def test_add_study(self):
self.load_example_data()
study = self.add_test_study()
@@ -113,7 +111,7 @@ class TestStudyApi(BaseTest):
self.assertEqual(study["ind_number"], db_study.ind_number)
self.assertEqual(study["user_uid"], db_study.user_uid)
workflow_spec_count =session.query(WorkflowSpecModel).count()
workflow_spec_count = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.is_master_spec == False).count()
workflow_count = session.query(WorkflowModel).filter(WorkflowModel.study_id == study['id']).count()
self.assertEqual(workflow_spec_count, workflow_count)

View File

@@ -15,7 +15,7 @@ class TestLookupService(BaseTest):
def test_lookup_returns_good_error_on_bad_field(self):
spec = BaseTest.load_test_spec('enum_options_with_search')
workflow = self.create_workflow('enum_options_with_search')
file_model = session.query(FileModel).filter(FileModel.name == "customer_list.xls").first()
file_model = session.query(FileModel).filter(FileModel.name == "customer_list.xlsx").first()
file_data_model = session.query(FileDataModel).filter(FileDataModel.file_model == file_model).first()
with self.assertRaises(ApiError):
LookupService.lookup(workflow, "Task_Enum_Lookup", "not_the_right_field", "sam", limit=10)
@@ -36,7 +36,7 @@ class TestLookupService(BaseTest):
def test_updates_to_file_cause_lookup_rebuild(self):
spec = BaseTest.load_test_spec('enum_options_with_search')
workflow = self.create_workflow('enum_options_with_search')
file_model = session.query(FileModel).filter(FileModel.name == "sponsors.xls").first()
file_model = session.query(FileModel).filter(FileModel.name == "sponsors.xlsx").first()
LookupService.lookup(workflow, "Task_Enum_Lookup", "sponsor", "sam", limit=10)
lookup_records = session.query(LookupFileModel).all()
self.assertIsNotNone(lookup_records)
@@ -47,9 +47,9 @@ class TestLookupService(BaseTest):
# Update the workflow specification file.
file_path = os.path.join(app.root_path, '..', 'tests', 'data',
'enum_options_with_search', 'sponsors_modified.xls')
'enum_options_with_search', 'sponsors_modified.xlsx')
file = open(file_path, 'rb')
FileService.update_file(file_model, file.read(), CONTENT_TYPES['xls'])
FileService.update_file(file_model, file.read(), CONTENT_TYPES['xlsx'])
file.close()
# restart the workflow, so it can pick up the changes.

View File

@@ -5,7 +5,7 @@ from tests.base_test import BaseTest
from crc import app, mail
class TestStudyApi(BaseTest):
class TestToolsApi(BaseTest):
def test_render_markdown(self):
template = "My name is {{name}}"
@@ -41,19 +41,34 @@ class TestStudyApi(BaseTest):
def test_eval_hide_expression(self):
"""Assures we can use python to process a hide expression from the front end"""
rv = self.app.put('/v1.0/eval',
data='{"expression": "x.y==2", "data": {"x":{"y":2}}}', follow_redirects=True,
data='{"expression": "x.y==2", "data": {"x":{"y":2}}, "key": 1234}', follow_redirects=True,
content_type='application/json',
headers=self.logged_in_headers())
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual(True, response['result'])
def test_eval_returns_query(self):
"""Assures that along with the result, we get the key and expression.
This can be useful if the calling client is caching results and needs to hash the expression and data
when it gets returned."""
data = '{"expression": "x.y==2", "data": {"x":{"y":2}}, "key":1234}'
rv = self.app.put('/v1.0/eval',
data=data, follow_redirects=True,
content_type='application/json',
headers=self.logged_in_headers())
self.assert_success(rv)
response = json.loads(rv.get_data(as_text=True))
self.assertEqual("x.y==2", response['expression'])
self.assertEqual(1234, response['key'])
def test_eval_expression_with_strings(self):
"""Assures we can use python to process a value expression from the front end"""
rv = self.app.put('/v1.0/eval',
data='{"expression": "\'Hello, \' + user.first_name + \' \' + user.last_name + \'!!!\'", '
'"data": {"user":{"first_name": "Trillian", "last_name": "Astra"}}}',
'"data": {"user":{"first_name": "Trillian", "last_name": "Astra"}},'
'"key":1234}',
follow_redirects=True,
content_type='application/json',
headers=self.logged_in_headers())
@@ -86,7 +101,7 @@ CR Connect
def test_eval_to_boolean_expression_with_dot_notation(self):
"""Assures we can use python to process a value expression from the front end"""
rv = self.app.put('/v1.0/eval',
data='{"expression": "test.value", "data": {"test":{"value": true}}}',
data='{"expression": "test.value", "data": {"test":{"value": true}}, "key": 1234}',
follow_redirects=True,
content_type='application/json',
headers=self.logged_in_headers())

View File

@@ -0,0 +1,45 @@
from tests.base_test import BaseTest
from crc.scripts.reset_workflow import ResetWorkflow
from crc.api.common import ApiError
class TestWorkflowReset(BaseTest):
def test_workflow_reset_validation(self):
self.load_example_data()
spec_model = self.load_test_spec('reset_workflow')
rv = self.app.get('/v1.0/workflow-specification/%s/validate' % spec_model.id, headers=self.logged_in_headers())
self.assertEqual([], rv.json)
def test_workflow_reset(self):
workflow = self.create_workflow('two_user_tasks')
workflow_api = self.get_workflow_api(workflow)
first_task = workflow_api.next_task
self.assertEqual('Task_GetName', first_task.name)
self.complete_form(workflow, first_task, {'name': 'Mona'})
workflow_api = self.get_workflow_api(workflow)
second_task = workflow_api.next_task
self.assertEqual('Task_GetAge', second_task.name)
ResetWorkflow().do_task(second_task, workflow.study_id, workflow.id, workflow_name='two_user_tasks')
workflow_api = self.get_workflow_api(workflow)
task = workflow_api.next_task
self.assertEqual('Task_GetName', task.name)
def test_workflow_reset_missing_name(self):
workflow = self.create_workflow('two_user_tasks')
workflow_api = self.get_workflow_api(workflow)
first_task = workflow_api.next_task
with self.assertRaises(ApiError):
ResetWorkflow().do_task(first_task, workflow.study_id, workflow.id)
def test_workflow_reset_bad_name(self):
workflow = self.create_workflow('two_user_tasks')
workflow_api = self.get_workflow_api(workflow)
first_task = workflow_api.next_task
with self.assertRaises(ApiError):
ResetWorkflow().do_task(first_task, workflow.study_id, workflow.id, workflow_name='bad_workflow_name')

View File

@@ -2,12 +2,14 @@ import json
import unittest
from unittest.mock import patch
from sqlalchemy import func
from tests.base_test import BaseTest
from crc import session, app
from crc.api.common import ApiErrorSchema
from crc.models.protocol_builder import ProtocolBuilderStudySchema
from crc.models.workflow import WorkflowSpecModel
from crc.models.workflow import WorkflowSpecModel, WorkflowModel
from crc.services.workflow_service import WorkflowService
@@ -15,8 +17,11 @@ class TestWorkflowSpecValidation(BaseTest):
def validate_workflow(self, workflow_name):
spec_model = self.load_test_spec(workflow_name)
total_workflows = session.query(WorkflowModel).count()
rv = self.app.get('/v1.0/workflow-specification/%s/validate' % spec_model.id, headers=self.logged_in_headers())
self.assert_success(rv)
total_workflows_after = session.query(WorkflowModel).count()
self.assertEqual(total_workflows, total_workflows_after, "No rogue workflow exists after validation.")
json_data = json.loads(rv.get_data(as_text=True))
return ApiErrorSchema(many=True).load(json_data)
@@ -59,10 +64,7 @@ class TestWorkflowSpecValidation(BaseTest):
workflows = session.query(WorkflowSpecModel).all()
errors = []
for w in workflows:
rv = self.app.get('/v1.0/workflow-specification/%s/validate' % w.id,
headers=self.logged_in_headers())
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
json_data = self.validate_workflow(w.name)
errors.extend(ApiErrorSchema(many=True).load(json_data))
self.assertEqual(0, len(errors), json.dumps(errors))
@@ -87,6 +89,7 @@ class TestWorkflowSpecValidation(BaseTest):
self.assertEqual("StartEvent_1", errors[0]['task_id'])
self.assertEqual("invalid_spec.bpmn", errors[0]['file_name'])
def test_invalid_script(self):
self.load_example_data()
errors = self.validate_workflow("invalid_script")