Mirror of https://github.com/sartography/cr-connect-workflow.git (synced 2025-02-22 20:58:28 +00:00)

Commit 5def436602: Merge branch 'dev' into get-erroring-workflows-574
.gitignore (3 changed lines, vendored)

@@ -238,3 +238,6 @@ postgres/var/
.coverage
coverage.xml
.~lock.*

# Specification files
SPECS
Pipfile (1 changed line)

@@ -43,6 +43,7 @@ werkzeug = "*"
xlrd = "*"
xlsxwriter = "*"
pygithub = "*"
gitpython = "*"
apscheduler = "*"
connexion = {extras = [ "swagger-ui",], version = "*"}
sentry-sdk = {extras = [ "flask",], version = "==0.14.4"}
Pipfile.lock (674 changed lines, generated)
_meta hash: sha256 e57391a2dd61b002ccffb3a7b5d23dd4e363e105f70e5979ed5484e594b1410d changed to bf333e0e7aaaff0988808dab8191f9711f1100f3af3dc35123672086c290667c

Dependency changes shown in this diff (pinned sha256 hash lists updated to match the new versions):

default packages:
- alembic: 1.7.5 to 1.7.6
- charset-normalizer: 2.0.10 to 2.0.12
- connexion: 2.9.0 to 2.11.1
- coverage: 6.2 to 6.3.1
- docxtpl: 0.15.1 to 0.15.2
- flask-admin: 1.5.8 to 1.6.0
- gitdb: 4.0.9 added (markers: python_version >= '3.6')
- gitpython: 3.1.26 added (index: pypi)
- importlib-metadata: 4.10.0 to 4.11.1
- numpy: 1.22.0 to 1.22.2
- openapi-spec-validator: 0.3.1 to 0.3.3
- pandas: 1.3.5 to 1.4.1
- pygments: 2.11.1 to 2.11.2
- pynacl: 1.4.0 to 1.5.0 (markers now python_version >= '3.6')
- pyparsing: 3.0.6 to 3.0.7
- pyrsistent: 0.18.0 to 0.16.1 (markers now python_version >= '2.7')
- regex: 2021.11.10 to 2022.1.18
- smmap: 5.0.0 added (markers: python_version >= '3.6')
- sphinx: 4.3.2 to 4.4.0
- spiffworkflow: git ref 3d697f163eebfafac7c0629b955a91178b085623 to 747b0a9cafeb2900264dbc5235c01c2386c55bd1
- sqlalchemy: 1.4.29 to 1.4.31
- urllib3: 1.26.7 to 1.26.8

develop packages:
- coverage: 6.2 to 6.3.1
- pbr: 5.8.0 to 5.8.1
- pyparsing: 3.0.6 to 3.0.7
- pytest: 6.2.5 to 7.0.1
- toml 0.10.2 removed; tomli 2.0.1 added (markers: python_version >= '3.7')
README.md (62 changed lines)

@@ -95,9 +95,65 @@ and primary investigator to dhf8r - which is a user in the mock ldap service, an
|
||||
fire up the interface.
|
||||
|
||||
### Configuration
|
||||
1. `instance/config.py`: This will configure the application for your local instance, overriding the configuration
|
||||
in config/default
|
||||
|
||||
This covers both local configurations for development, and production settings.
|
||||
We will cover all the settings below, but perhaps the most important part of the configuration is setting
|
||||
the location of your workflow specifications. If you start up without setting a path, then it will
|
||||
use a few testing workflows as a default.
|
||||
|
||||
For CRConnect, there is a private repository that contains all the workflow specifications, so get this
|
||||
checked out, then in a file called /instance/config.py add the following setting:
|
||||
SYNC_FILE_ROOT = '/path/to/my/git/dir/crconnect-workflow-specs'
|
||||
|
||||
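For local development this usually lives in `instance/config.py`. A minimal sketch is below; `SYNC_FILE_ROOT` is the
setting described above, while the other two overrides are optional illustrations borrowed from config/default.py,
not required values.

```python
# instance/config.py -- a minimal local-override sketch (illustrative values only)
SYNC_FILE_ROOT = '/path/to/my/git/dir/crconnect-workflow-specs'  # where the spec repo is checked out

# Optional examples, named after settings in config/default.py:
GIT_MERGE_BRANCH = 'all'       # developers can merge from any branch (see config/default.py)
PROCESS_WAITING_TASKS = False  # skip registering the background scheduler jobs while developing
```
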
#### Local Configuration
`instance/config.py`: This will configure the application for your local instance, overriding the configuration
in config/default. Very handy for setting local development values.

#### Production Configuration
We use environment variables with identical names to the variables in the default configuration file to configure the
application for production deployment in a docker container.

#### Common Configuration Settings
While these can be set in instance/config.py or as environment settings in docker, I'll present this as how you would
define the Docker Container, as /config/default.py offers a good example of the former.
```yaml
cr-connect-backend-testing:
  container_name: cr-connect-backend-testing
  image: ghcr.io/sartography/cr-connect-workflow:master # We use GitHub's actions to publish images
  volumes:
    - /home/sartography/docker-volumes/testing:/var/lib/cr-connect/specs # where specs are located.
  ports:
  environment:
    - PRODUCTION=true # Should be set to true if you aren't running locally for development.
    - DEVELOPMENT=false # Should be the opposite of production.
    - PB_ENABLED=true # Generally true, we should connect to Protocol Builder
    - PREFERRED_URL_SCHEME=https # Generally you want to run on SSL, should be https
    - SERVER_NAME=testing.crconnect.uvadcos.io # The url used to access this app.
    - TOKEN_AUTH_SECRET_KEY=-0-0-0- TESTING SUPER SECURE -0-0-0- # Some random characters that seed our key gen.
    - APPLICATION_ROOT=/api # Appended to SERVER_NAME, is the full path to this service
    - ADMIN_UIDS=dhf8r,cah3us # A comma delimited list of people who can perform administrative tasks.
    - CORS_ALLOW_ORIGINS=testing.crconnect.uvadcos.io,shibidp.its.virginia.edu,sp.uvadcos.io # CORS stuff
    - FRONTEND=testing.crconnect.uvadcos.io # URL to reach the front end application.
    - BPMN=testing.crconnect.uvadcos.io/bpmn # URL to reach the configuration interface.
    - PB_BASE_URL=http://10.250.124.174:11022/pb/v2.0/ # URL for Protocol Builder
    - UPGRADE_DB=true # Will run all migrations on startup if set to true. Generally a good idea for production.
    - DB_USER=crc_user # Database user name
    - DB_NAME=crc_testing # Database name
    - DB_HOST=10.250.124.186 # Domain/IP of database server.
    - DB_PORT=15432 # Port of database server.
    - DB_PASSWORD=XXXXX # Password for the database
    - MAIL_PASSWORD=XXXX # Mail Password
    - MAIL_USERNAME=XXXXX # Mail username
    - LDAP_URL=privopenldap.its.virginia.edu # URL for the LDAP Server
    - LDAP_PASS=XXXX # Password for the ldap server
    - LDAP_USER=cn=crcconnect,ou=Special Users,o=University of Virginia,c=US # LDAP settings for search, likely these.
    - SENTRY_ENVIRONMENT=testing.crconnect.uvadcos.io # Configuration for Sentry
    - GITHUB_TOKEN=XXXX # A token for GitHub so we can push and pull changes.
    - PORT0=11021 # The port on the server where this system should be available, could be 80, but we are behind a proxy.
    - SYNC_FILE_ROOT=/var/lib/cr-connect/specs # This should be the same as the volumes above, a location where the specs are checked out in git.
```
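
Note that the boolean-style settings above are parsed in config/default.py as strings, so the value must be the
literal string `true` to count as enabled. A short sketch of the pattern used there:

```python
# Sketch of how config/default.py turns environment strings into booleans.
from os import environ

PRODUCTION = (environ.get('PRODUCTION', default="false") == "true")
TESTING = environ.get('TESTING', default="false") == "true"
```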


### Project Initialization
1. Clone this repository.

@ -25,7 +25,8 @@ CORS_ALLOW_ORIGINS = re.split(r',\s*', environ.get('CORS_ALLOW_ORIGINS', default
TESTING = environ.get('TESTING', default="false") == "true"
PRODUCTION = (environ.get('PRODUCTION', default="false") == "true")
TEST_UID = environ.get('TEST_UID', default="dhf8r")
ADMIN_UIDS = re.split(r',\s*', environ.get('ADMIN_UIDS', default="dhf8r,ajl2j,cah3us,cl3wf"))
ADMIN_UIDS = re.split(r',\s*', environ.get('ADMIN_UIDS', default="dhf8r,kcm4zc,cah3us"))
DEFAULT_UID = environ.get('DEFAULT_UID', default="dhf8r")

# Sentry flag
ENABLE_SENTRY = environ.get('ENABLE_SENTRY', default="false") == "true" # To be removed soon
@ -77,6 +78,13 @@ GITHUB_TOKEN = environ.get('GITHUB_TOKEN', None)
GITHUB_REPO = environ.get('GITHUB_REPO', None)
TARGET_BRANCH = environ.get('TARGET_BRANCH', None)

# Git settings, used by git_service
# The GitHub settings above are used in file_service and will likely be deprecated
# You can override these settings in instance/config
GIT_REMOTE_PATH = environ.get('GIT_REMOTE_PATH', None)
GIT_BRANCH = environ.get('GIT_BRANCH', None)
GIT_MERGE_BRANCH = environ.get('GIT_MERGE_BRANCH', None) # Developers can set this to 'all' in instance.config

# Email configuration
DEFAULT_SENDER = 'uvacrconnect@virginia.edu'
FALLBACK_EMAILS = ['askresearch@virginia.edu', 'sartographysupport@googlegroups.com']
@ -87,3 +95,9 @@ MAIL_USE_SSL = environ.get('MAIL_USE_SSL', default=False)
MAIL_USE_TLS = environ.get('MAIL_USE_TLS', default=False)
MAIL_USERNAME = environ.get('MAIL_USERNAME', default='')
MAIL_PASSWORD = environ.get('MAIL_PASSWORD', default='')

# Local file path
SYNC_FILE_ROOT = environ.get('SYNC_FILE_ROOT', default='tests/data/IMPORT_TEST')

# Turn on/off processing waiting tasks
PROCESS_WAITING_TASKS = environ.get('PROCESS_WAITING_TASKS', default='true')
@ -30,3 +30,7 @@ print('TESTING = ', TESTING)

#Use the mock ldap.
LDAP_URL = 'mock'

SYNC_FILE_ROOT = 'tests/SPECS'

GIT_BRANCH = 'my_testing_branch'
@ -18,6 +18,7 @@ from sentry_sdk.integrations.flask import FlaskIntegration
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from werkzeug.middleware.proxy_fix import ProxyFix
|
||||
|
||||
|
||||
connexion_app = connexion.FlaskApp(__name__)
|
||||
|
||||
app = connexion_app.app
|
||||
@ -43,6 +44,7 @@ session = db.session
|
||||
""":type: sqlalchemy.orm.Session"""
|
||||
scheduler = BackgroundScheduler()
|
||||
|
||||
|
||||
# Mail settings
|
||||
mail = Mail(app)
|
||||
|
||||
@ -52,11 +54,9 @@ ma = Marshmallow(app)
|
||||
from crc import models
|
||||
from crc import api
|
||||
from crc.api import admin
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.workflow_service import WorkflowService
|
||||
connexion_app.add_api('api.yml', base_path='/v1.0')
|
||||
|
||||
|
||||
# needed function to avoid circular import
|
||||
def process_waiting_tasks():
|
||||
with app.app_context():
|
||||
@ -65,10 +65,11 @@ def process_waiting_tasks():
|
||||
|
||||
@app.before_first_request
|
||||
def init_scheduler():
|
||||
scheduler.add_job(process_waiting_tasks, 'interval', minutes=1)
|
||||
scheduler.add_job(FileService.cleanup_file_data, 'interval', minutes=1440) # once a day
|
||||
scheduler.add_job(WorkflowService().process_erroring_workflows, 'interval', minutes=1440)
|
||||
scheduler.start()
|
||||
if app.config['PROCESS_WAITING_TASKS']:
|
||||
scheduler.add_job(process_waiting_tasks, 'interval', minutes=1)
|
||||
scheduler.add_job(FileService.cleanup_file_data, 'interval', minutes=1440) # once a day
|
||||
scheduler.add_job(WorkflowService().process_erroring_workflows, 'interval', minutes=1440)
|
||||
scheduler.start()
|
||||
|
||||
|
||||
# Convert list of allowed origins to list of regexes
|
||||
@ -88,7 +89,7 @@ if app.config['SENTRY_ENVIRONMENT']:
|
||||
def render_errors(exception):
|
||||
from crc.api.common import ApiError, ApiErrorSchema
|
||||
error = ApiError(code=exception.title, message=exception.detail, status_code=exception.status)
|
||||
return Response(ApiErrorSchema().dump(error), status=401, mimetype="application/json")
|
||||
return Response(ApiErrorSchema().dumps(error), status=500, mimetype="text/json")
|
||||
|
||||
|
||||
connexion_app.add_error_handler(ProblemException, render_errors)
|
||||
@ -107,28 +108,6 @@ print('TESTING = ', app.config['TESTING'])
|
||||
print('TEST_UID = ', app.config['TEST_UID'])
|
||||
print('ADMIN_UIDS = ', app.config['ADMIN_UIDS'])
|
||||
|
||||
@app.cli.command()
|
||||
def load_example_data():
|
||||
"""Load example data into the database."""
|
||||
from example_data import ExampleDataLoader
|
||||
ExampleDataLoader.clean_db()
|
||||
ExampleDataLoader().load_all()
|
||||
ExampleDataLoader().load_default_user()
|
||||
|
||||
|
||||
@app.cli.command()
|
||||
def load_example_rrt_data():
|
||||
"""Load example data into the database."""
|
||||
from example_data import ExampleDataLoader
|
||||
ExampleDataLoader.clean_db()
|
||||
ExampleDataLoader().load_rrt()
|
||||
|
||||
|
||||
@app.cli.command()
|
||||
def load_reference_files():
|
||||
"""Load example data into the database."""
|
||||
from example_data import ExampleDataLoader
|
||||
ExampleDataLoader().load_reference_documents()
|
||||
|
||||
@app.cli.command()
|
||||
def clear_db():
|
||||
@ -136,12 +115,6 @@ def clear_db():
|
||||
from example_data import ExampleDataLoader
|
||||
ExampleDataLoader.clean_db()
|
||||
|
||||
@app.cli.command()
|
||||
def sync_with_testing():
|
||||
"""Load all the workflows currently on testing into this system."""
|
||||
from crc.api import workflow_sync
|
||||
workflow_sync.sync_all_changed_workflows("https://testing.crconnect.uvadcos.io/api")
|
||||
|
||||
@app.cli.command()
|
||||
@click.argument("study_id")
|
||||
@click.argument("category", required=False)
|
||||
|
555 crc/api.yml
@ -30,6 +30,7 @@ paths:
|
||||
responses:
|
||||
'304':
|
||||
description: Redirection to the hosted frontend with an auth_token header.
|
||||
|
||||
/user:
|
||||
parameters:
|
||||
- name: admin_impersonate_uid
|
||||
@ -50,6 +51,7 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/User"
|
||||
|
||||
/list_users:
|
||||
get:
|
||||
operationId: crc.api.user.get_all_users
|
||||
@ -160,201 +162,6 @@ paths:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/Study"
|
||||
/workflow_sync/pullall:
|
||||
get:
|
||||
operationId: crc.api.workflow_sync.sync_all_changed_workflows
|
||||
summary: Sync all workflows that have changed on the remote side and provide a list of the results
|
||||
security:
|
||||
- ApiKeyAuth : []
|
||||
# in the endpoint
|
||||
parameters:
|
||||
- name: remote
|
||||
in: query
|
||||
required: true
|
||||
description: The remote endpoint
|
||||
schema:
|
||||
type: string
|
||||
example: https://testing.crconnect.uvadcos.io/api
|
||||
tags:
|
||||
- Workflow Sync API
|
||||
responses:
|
||||
'200':
|
||||
description: An array of workflow specs that were synced from remote.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
example : ['top_level_workflow','3b495037-f7d4-4509-bf58-cee41c0c6b0e']
|
||||
|
||||
|
||||
|
||||
|
||||
/workflow_sync/diff:
|
||||
get:
|
||||
operationId: crc.api.workflow_sync.get_changed_workflows
|
||||
summary: Provides a list of workflow that differ from remote and if it is new or not
|
||||
security :
|
||||
- ApiKeyAuth : []
|
||||
# in the endpoint
|
||||
parameters:
|
||||
- name: remote
|
||||
in: query
|
||||
required: true
|
||||
description: The remote endpoint
|
||||
schema:
|
||||
type: string
|
||||
tags:
|
||||
- Workflow Sync API
|
||||
responses:
|
||||
'200':
|
||||
description: An array of workflow specs, with last touched date and which one is most recent.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/WorkflowSpecDiffList"
|
||||
|
||||
/workflow_sync/{workflow_spec_id}/spec:
|
||||
parameters:
|
||||
- name: workflow_spec_id
|
||||
in: path
|
||||
required: true
|
||||
description: The unique id of an existing workflow specification to modify.
|
||||
schema:
|
||||
type: string
|
||||
get:
|
||||
operationId: crc.api.workflow_sync.get_sync_workflow_specification
|
||||
summary: Returns a single workflow specification
|
||||
security:
|
||||
- ApiKeyAuth: []
|
||||
tags:
|
||||
- Workflow Sync API
|
||||
responses:
|
||||
'200':
|
||||
description: Workflow specification.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/WorkflowSpec"
|
||||
|
||||
|
||||
/workflow_sync/{workflow_spec_id}/files:
|
||||
get:
|
||||
operationId: crc.api.workflow_sync.get_workflow_spec_files
|
||||
summary: Provides a list of files for a workflow spec on this machine.
|
||||
security :
|
||||
- ApiKeyAuth : []
|
||||
parameters:
|
||||
- name: workflow_spec_id
|
||||
in: path
|
||||
required: true
|
||||
description: The workflow_spec id
|
||||
schema:
|
||||
type: string
|
||||
|
||||
tags:
|
||||
- Workflow Sync API
|
||||
responses:
|
||||
'200':
|
||||
description: An array of files for a workflow spec on the local system, with details.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/WorkflowSpecFilesList"
|
||||
|
||||
/workflow_sync/{workflow_spec_id}/files/sync:
|
||||
get:
|
||||
operationId: crc.api.workflow_sync.sync_changed_files
|
||||
summary: Syncs files from a workflow on a remote system and provides a list of files that were updated
|
||||
security :
|
||||
- ApiKeyAuth : []
|
||||
parameters:
|
||||
- name: workflow_spec_id
|
||||
in: path
|
||||
required: true
|
||||
description: The workflow_spec id
|
||||
schema:
|
||||
type: string
|
||||
- name: remote
|
||||
in: query
|
||||
required: true
|
||||
description: The remote endpoint
|
||||
schema:
|
||||
type: string
|
||||
|
||||
tags:
|
||||
- Workflow Sync API
|
||||
responses:
|
||||
'200':
|
||||
description: A list of files that were synced for the workflow.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type : string
|
||||
example : ["data_security_plan.dmn",'some_other_file.xml']
|
||||
|
||||
|
||||
/workflow_sync/{workflow_spec_id}/files/diff:
|
||||
get:
|
||||
operationId: crc.api.workflow_sync.get_changed_files
|
||||
summary: Provides a list of files for a workflow specs that differ from remote and their signature.
|
||||
security :
|
||||
- ApiKeyAuth : []
|
||||
|
||||
parameters:
|
||||
- name: workflow_spec_id
|
||||
in: path
|
||||
required: true
|
||||
description: The workflow_spec id
|
||||
schema:
|
||||
type: string
|
||||
- name: remote
|
||||
in: query
|
||||
required: true
|
||||
description: The remote endpoint
|
||||
schema:
|
||||
type: string
|
||||
|
||||
tags:
|
||||
- Workflow Sync API
|
||||
responses:
|
||||
'200':
|
||||
description: An array of files that are different from remote, with last touched date and file signature.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/WorkflowSpecFilesDiff"
|
||||
|
||||
|
||||
/workflow_sync/all:
|
||||
get:
|
||||
operationId: crc.api.workflow_sync.get_all_spec_state
|
||||
summary: Provides a list of workflow specs, last update date and thumbprint
|
||||
security:
|
||||
- ApiKeyAuth : []
|
||||
|
||||
tags:
|
||||
- Workflow Sync API
|
||||
responses:
|
||||
'200':
|
||||
description: An array of workflow specs, with last touched date and file signature.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/WorkflowSpecAll"
|
||||
|
||||
|
||||
/study/all:
|
||||
get:
|
||||
operationId: crc.api.study.all_studies
|
||||
@ -523,7 +330,6 @@ paths:
|
||||
schema:
|
||||
$ref: "#/components/schemas/WorkflowSpec"
|
||||
|
||||
|
||||
/workflow-specification/{spec_id}/library/{library_id}:
|
||||
parameters:
|
||||
- name: spec_id
|
||||
@ -565,7 +371,6 @@ paths:
|
||||
schema:
|
||||
$ref: "#/components/schemas/WorkflowSpec"
|
||||
|
||||
|
||||
/workflow-specification/{spec_id}:
|
||||
parameters:
|
||||
- name: spec_id
|
||||
@ -691,6 +496,160 @@ paths:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/Error"
|
||||
/workflow-specification/{spec_id}/file:
|
||||
parameters:
|
||||
- name: spec_id
|
||||
in: path
|
||||
required: true
|
||||
description: The unique id of an existing workflow specification to validate.
|
||||
schema:
|
||||
type: string
|
||||
get:
|
||||
operationId: crc.api.spec_file.get_files
|
||||
summary: Provide a list of workflow spec files for the given workflow_spec_id. IMPORTANT, only includes metadata, not the file content.
|
||||
tags:
|
||||
- Spec Files
|
||||
responses:
|
||||
'200':
|
||||
description: An array of file descriptions (not the file content)
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/File"
|
||||
post:
|
||||
operationId: crc.api.spec_file.add_file
|
||||
summary: Add a new workflow spec file
|
||||
tags:
|
||||
- Spec Files
|
||||
requestBody:
|
||||
content:
|
||||
multipart/form-data:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
file:
|
||||
type: string
|
||||
format: binary
|
||||
responses:
|
||||
'200':
|
||||
description: Metadata about the uploaded file, but not the file content.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#components/schemas/File"
|
||||
/workflow-specification/{spec_id}/file/{file_name}:
|
||||
parameters:
|
||||
- name: spec_id
|
||||
in: path
|
||||
required: true
|
||||
description: The unique id of an existing workflow specification to validate.
|
||||
schema:
|
||||
type: string
|
||||
- name: file_name
|
||||
in: path
|
||||
required: true
|
||||
description: The id of the spec file
|
||||
schema:
|
||||
type: string
|
||||
get:
|
||||
operationId: crc.api.spec_file.get_file
|
||||
summary: Returns metadata about the file
|
||||
tags:
|
||||
- Spec Files
|
||||
responses:
|
||||
'200':
|
||||
description: Returns the file information requested.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#components/schemas/File"
|
||||
put:
|
||||
operationId: crc.api.spec_file.update
|
||||
summary: Updates the given file to be the primary file and process, if so specified.
|
||||
tags:
|
||||
- Spec Files
|
||||
parameters:
|
||||
- name: is_primary
|
||||
in: query
|
||||
required: true
|
||||
description: Whether to make this the primary file for the workflow.
|
||||
schema:
|
||||
type: boolean
|
||||
requestBody:
|
||||
description: The file model to update
|
||||
required: false
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/File'
|
||||
responses:
|
||||
'200':
|
||||
description: Returns the file information.
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#components/schemas/File"
|
||||
delete:
|
||||
operationId: crc.api.spec_file.delete
|
||||
summary: Removes an existing workflow spec file.
|
||||
tags:
|
||||
- Spec Files
|
||||
responses:
|
||||
'204':
|
||||
description: The file was removed.
|
||||
/workflow-specification/{spec_id}/file/{file_name}/data:
|
||||
parameters:
|
||||
- name: spec_id
|
||||
in: path
|
||||
required: true
|
||||
description: The unique id of an existing workflow specification to validate.
|
||||
schema:
|
||||
type: string
|
||||
- name: file_name
|
||||
in: path
|
||||
required: true
|
||||
description: The id of the requested file
|
||||
schema:
|
||||
type: string
|
||||
get:
|
||||
operationId: crc.api.spec_file.get_data
|
||||
summary: Returns only the spec file content
|
||||
tags:
|
||||
- Spec Files
|
||||
responses:
|
||||
'200':
|
||||
description: Returns the actual spec file
|
||||
content:
|
||||
application/octet-stream:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
put:
|
||||
operationId: crc.api.spec_file.update_data
|
||||
summary: Update the contents of a spec file
|
||||
tags:
|
||||
- Spec Files
|
||||
requestBody:
|
||||
content:
|
||||
multipart/form-data:
|
||||
schema:
|
||||
x-body-name: file
|
||||
type: object
|
||||
properties:
|
||||
file:
|
||||
type: string
|
||||
format: binary
|
||||
required:
|
||||
- file
|
||||
responses:
|
||||
'200':
|
||||
description: Returns the updated file model
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/File"
|
||||
/workflow-specification-category:
|
||||
get:
|
||||
operationId: crc.api.workflow.list_workflow_spec_categories
|
||||
@ -803,14 +762,9 @@ paths:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/WorkflowSpecCategory"
|
||||
|
||||
/file:
|
||||
parameters:
|
||||
- name: workflow_spec_id
|
||||
in: query
|
||||
required: false
|
||||
description: The unique id of a workflow specification
|
||||
schema:
|
||||
type: string
|
||||
- name: workflow_id
|
||||
in: query
|
||||
required: false
|
||||
@ -910,36 +864,12 @@ paths:
|
||||
$ref: "#/components/schemas/File"
|
||||
delete:
|
||||
operationId: crc.api.file.delete_file
|
||||
summary: Removes an existing file. In the event the file can not be deleted, it is marked as "archived" in the database and is no longer returned unless specifically requested by id.
|
||||
summary: Removes an existing file.
|
||||
tags:
|
||||
- Files
|
||||
responses:
|
||||
'204':
|
||||
description: The file has been removed.
|
||||
/file/{md5_hash}/hash_data:
|
||||
parameters:
|
||||
- name: md5_hash
|
||||
in: path
|
||||
required: true
|
||||
description: The md5 hash of the file requested
|
||||
schema:
|
||||
type: string
|
||||
get:
|
||||
operationId: crc.api.file.get_file_data_by_hash
|
||||
summary: Returns only the file contents
|
||||
security:
|
||||
- ApiKeyAuth: []
|
||||
tags:
|
||||
- Files
|
||||
responses:
|
||||
'200':
|
||||
description: Returns the actual file
|
||||
content:
|
||||
application/octet-stream:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
example: '<?xml version="1.0" encoding="UTF-8"?><bpmn:definitions></bpmn:definitions>'
|
||||
/file/{file_id}/download :
|
||||
parameters :
|
||||
- name : file_id
|
||||
@ -1027,12 +957,13 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/File"
|
||||
|
||||
/reference_file:
|
||||
get:
|
||||
operationId: crc.api.file.get_reference_files
|
||||
operationId: crc.api.reference_file.get_reference_files
|
||||
summary: Provides a list of existing reference files that are available in the system.
|
||||
tags:
|
||||
- Files
|
||||
- Reference Files
|
||||
responses:
|
||||
'200':
|
||||
description: An array of file descriptions (not the file content)
|
||||
@ -1043,10 +974,12 @@ paths:
|
||||
items:
|
||||
$ref: "#/components/schemas/File"
|
||||
post:
|
||||
operationId: crc.api.file.add_reference_file
|
||||
operationId: crc.api.reference_file.add_reference_file
|
||||
security:
|
||||
- auth_admin: [ 'secret' ]
|
||||
summary: Add a new reference file.
|
||||
tags:
|
||||
- Files
|
||||
- Reference Files
|
||||
requestBody:
|
||||
content:
|
||||
multipart/form-data:
|
||||
@ -1072,54 +1005,75 @@ paths:
|
||||
schema:
|
||||
type: string
|
||||
get:
|
||||
operationId: crc.api.file.get_reference_file
|
||||
summary: Reference files are called by name rather than by id.
|
||||
operationId: crc.api.reference_file.get_reference_file_info
|
||||
summary: Returns the file info for a reference file
|
||||
tags:
|
||||
- Files
|
||||
- Reference Files
|
||||
responses:
|
||||
'200':
|
||||
description: Returns the actual file
|
||||
description: Returns the info for a reference file
|
||||
content:
|
||||
application/octet-stream:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
example: '<?xml version="1.0" encoding="UTF-8"?><bpmn:definitions></bpmn:definitions>'
|
||||
delete:
|
||||
operationId: crc.api.reference_file.delete_reference_file
|
||||
summary: Remove an existing reference file.
|
||||
tags:
|
||||
- Reference Files
|
||||
responses:
|
||||
'204':
|
||||
description: The reference file was removed.
|
||||
/reference_file/{name}/data:
|
||||
parameters:
|
||||
- name: name
|
||||
in: path
|
||||
required: true
|
||||
description: The special name of the reference file.
|
||||
schema:
|
||||
type: string
|
||||
get:
|
||||
operationId: crc.api.reference_file.get_reference_file_data
|
||||
summary: Returns only the reference file content
|
||||
tags:
|
||||
- Reference Files
|
||||
responses:
|
||||
'200':
|
||||
description: Returns the actual reference file
|
||||
content:
|
||||
application/octet-stream:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
put:
|
||||
operationId: crc.api.file.set_reference_file
|
||||
operationId: crc.api.reference_file.update_reference_file_data
|
||||
security:
|
||||
- auth_admin: ['secret']
|
||||
summary: Update the contents of a named reference file.
|
||||
summary: Update the contents of a reference file
|
||||
tags:
|
||||
- Files
|
||||
- Reference Files
|
||||
requestBody:
|
||||
content:
|
||||
multipart/form-data:
|
||||
schema:
|
||||
x-body-name: file
|
||||
type: object
|
||||
properties:
|
||||
file:
|
||||
type: string
|
||||
format: binary
|
||||
required:
|
||||
- file
|
||||
responses:
|
||||
'200':
|
||||
description: Returns the actual file
|
||||
description: Returns the updated file model
|
||||
content:
|
||||
application/octet-stream:
|
||||
application/json:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
example: '<?xml version="1.0" encoding="UTF-8"?><bpmn:definitions></bpmn:definitions>'
|
||||
|
||||
|
||||
$ref: "#/components/schemas/File"
|
||||
/dmn_from_ss:
|
||||
# parameters:
|
||||
# - name: workflow_spec_id
|
||||
# in: query
|
||||
# required: true
|
||||
# description: The unique id of a workflow specification
|
||||
# schema:
|
||||
# type: string
|
||||
post:
|
||||
operationId: crc.api.file.dmn_from_ss
|
||||
summary: Create a DMN table from a spreadsheet
|
||||
@ -1537,6 +1491,7 @@ paths:
|
||||
text/plain:
|
||||
schema:
|
||||
type: string
|
||||
|
||||
/datastore:
|
||||
post:
|
||||
operationId: crc.api.data_store.add_datastore
|
||||
@ -1555,7 +1510,6 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DataStore"
|
||||
|
||||
/datastore/{id}:
|
||||
parameters:
|
||||
- name: id
|
||||
@ -1609,8 +1563,6 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DataStore"
|
||||
|
||||
|
||||
/datastore/study/{study_id}:
|
||||
parameters:
|
||||
- name: study_id
|
||||
@ -1674,6 +1626,67 @@ paths:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/DataStore"
|
||||
|
||||
/git_repo:
|
||||
get:
|
||||
operationId: crc.api.git_repo.get_repo
|
||||
summary: get current state of the git repo
|
||||
tags:
|
||||
- Git Repo
|
||||
responses:
|
||||
'200':
|
||||
description: Current state of the git repo
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/GitRepo"
|
||||
/git_repo/merge:
|
||||
parameters:
|
||||
- name: branch
|
||||
in: query
|
||||
required: true
|
||||
description: The branch to merge with
|
||||
schema:
|
||||
type: string
|
||||
example: staging
|
||||
get:
|
||||
operationId: crc.api.git_repo.merge_with_branch
|
||||
summary: merge with given branch
|
||||
tags:
|
||||
- Git Repo
|
||||
responses:
|
||||
'200':
|
||||
description: Current state of the git repo
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "#/components/schemas/GitRepo"
|
||||
/git_repo/push:
|
||||
parameters:
|
||||
- name: comment
|
||||
in: query
|
||||
required: false
|
||||
description: The comment we want to pass along with the push commit
|
||||
schema:
|
||||
type: string
|
||||
get:
|
||||
operationId: crc.api.git_repo.push_to_remote
|
||||
summary: Push local changes to remote
|
||||
tags:
|
||||
- Git Repo
|
||||
responses:
|
||||
'200':
|
||||
description: Local changes were pushed successfully
|
||||
/git_repo/pull:
|
||||
get:
|
||||
operationId: crc.api.git_repo.pull_from_remote
|
||||
summary: Pull current code from remote
|
||||
tags:
|
||||
- Git Repo
|
||||
responses:
|
||||
'200':
|
||||
description: Remote code was pulled successfully
|
||||
|
||||
components:
|
||||
securitySchemes:
|
||||
jwt:
|
||||
@ -1686,11 +1699,6 @@ components:
|
||||
scheme: bearer
|
||||
bearerFormat: JWT
|
||||
x-bearerInfoFunc: crc.api.user.verify_token_admin
|
||||
ApiKeyAuth :
|
||||
type : apiKey
|
||||
in : header
|
||||
name : X-CR-API-KEY
|
||||
x-apikeyInfoFunc: crc.api.workflow_sync.verify_token
|
||||
|
||||
schemas:
|
||||
User:
|
||||
@ -1912,7 +1920,7 @@ components:
|
||||
type: string
|
||||
nullable: true
|
||||
category_id:
|
||||
type: integer
|
||||
type: string
|
||||
nullable: true
|
||||
standalone:
|
||||
type: boolean
|
||||
@ -1926,7 +1934,7 @@ components:
|
||||
WorkflowSpecCategory:
|
||||
properties:
|
||||
id:
|
||||
type: integer
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
display_name:
|
||||
@ -2359,3 +2367,24 @@ components:
|
||||
type: number
|
||||
format: integer
|
||||
example: 5
|
||||
GitRepo:
|
||||
properties:
|
||||
# remote:
|
||||
# type: string
|
||||
# example: sartography/crconnect-workflow-specs
|
||||
directory:
|
||||
type: string
|
||||
example: /home/cr-connect/sync_files
|
||||
branch:
|
||||
type: string
|
||||
example: dev
|
||||
merge_branch:
|
||||
type: string
|
||||
example: staging
|
||||
# status:
|
||||
# type: string
|
||||
# example: staging
|
||||
changes:
|
||||
type: array
|
||||
untracked:
|
||||
type: array
|
||||
|
@ -11,13 +11,13 @@ from werkzeug.utils import redirect
|
||||
from jinja2 import Markup
|
||||
|
||||
from crc import db, app
|
||||
from crc.api.common import ApiError
|
||||
from crc.api.user import verify_token, verify_token_admin
|
||||
from crc.models.file import FileModel, FileDataModel
|
||||
from crc.models.task_event import TaskEventModel
|
||||
from crc.models.study import StudyModel
|
||||
from crc.models.user import UserModel
|
||||
from crc.models.workflow import WorkflowModel
|
||||
from crc.services.file_service import FileService
|
||||
|
||||
|
||||
class AdminModelView(sqla.ModelView):
|
||||
@ -59,11 +59,11 @@ class FileView(AdminModelView):
|
||||
|
||||
@action('publish', 'Publish', 'Are you sure you want to publish this file(s)?')
|
||||
def action_publish(self, ids):
|
||||
FileService.publish_to_github(ids)
|
||||
raise ApiError("not_implemented", "This method is not yet implemented.")
|
||||
|
||||
@action('update', 'Update', 'Are you sure you want to update this file(s)?')
|
||||
def action_update(self, ids):
|
||||
FileService.update_from_github(ids)
|
||||
raise ApiError("not_implemented", "This method is not yet implemented.")
|
||||
|
||||
|
||||
def json_formatter(view, context, model, name):
|
||||
|
@ -29,9 +29,9 @@ class ApiError(Exception):
|
||||
self.offset = offset
|
||||
self.error_type = error_type
|
||||
self.error_line = error_line
|
||||
if hasattr(g, 'user'):
|
||||
try:
|
||||
user = g.user.uid
|
||||
else:
|
||||
except Exception as e:
|
||||
user = 'Unknown'
|
||||
self.task_user = user
|
||||
# This is for sentry logging into Slack
|
||||
|
@ -1,8 +1,8 @@
|
||||
from crc.models.api_models import DocumentDirectorySchema
|
||||
from crc.models.file import File
|
||||
from crc.services.document_service import DocumentService
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.lookup_service import LookupService
|
||||
from crc.services.user_file_service import UserFileService
|
||||
|
||||
|
||||
def get_document_directory(study_id, workflow_id=None):
|
||||
@ -10,9 +10,9 @@ def get_document_directory(study_id, workflow_id=None):
|
||||
return a nested list of files arranged according to the category hierarchy
|
||||
defined in the doc dictionary
|
||||
"""
|
||||
file_models = FileService.get_files_for_study(study_id=study_id)
|
||||
file_models = UserFileService.get_files_for_study(study_id=study_id)
|
||||
doc_dict = DocumentService.get_dictionary()
|
||||
files = (File.from_models(model, FileService.get_file_data(model.id), doc_dict) for model in file_models)
|
||||
files = (File.from_models(model, UserFileService.get_file_data(model.id), doc_dict) for model in file_models)
|
||||
directory = DocumentService.get_directory(doc_dict, files, workflow_id)
|
||||
|
||||
return DocumentDirectorySchema(many=True).dump(directory)
|
||||
|
131 crc/api/file.py
@ -1,6 +1,5 @@
|
||||
import io
|
||||
from datetime import datetime
|
||||
from typing import List
|
||||
|
||||
import connexion
|
||||
from flask import send_file
|
||||
@ -8,41 +7,33 @@ from flask import send_file
|
||||
from crc import session
|
||||
from crc.api.common import ApiError
|
||||
from crc.api.user import verify_token
|
||||
from crc.models.file import FileSchema, FileModel, File, FileModelSchema, FileDataModel, FileType
|
||||
from crc.models.workflow import WorkflowSpecModel
|
||||
from crc.models.file import FileSchema, FileModel, File, FileModelSchema, FileDataModel
|
||||
from crc.services.document_service import DocumentService
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.user_file_service import UserFileService
|
||||
|
||||
|
||||
def to_file_api(file_model):
|
||||
"""Converts a FileModel object to something we can return via the api"""
|
||||
return File.from_models(file_model, FileService.get_file_data(file_model.id),
|
||||
file_data_model = UserFileService.get_file_data(file_model.id)
|
||||
return File.from_models(file_model, file_data_model,
|
||||
DocumentService.get_dictionary())
|
||||
|
||||
|
||||
def get_files(workflow_spec_id=None, workflow_id=None, form_field_key=None,study_id=None):
|
||||
if all(v is None for v in [workflow_spec_id, workflow_id, form_field_key,study_id]):
|
||||
def get_files(workflow_id=None, form_field_key=None, study_id=None):
|
||||
if workflow_id is None:
|
||||
raise ApiError('missing_parameter',
|
||||
'Please specify either a workflow_spec_id or a '
|
||||
'workflow_id with an optional form_field_key')
|
||||
'Please specify a workflow_id with an optional form_field_key')
|
||||
|
||||
if study_id is not None:
|
||||
file_models = FileService.get_files_for_study(study_id=study_id, irb_doc_code=form_field_key)
|
||||
file_models = UserFileService.get_files_for_study(study_id=study_id, irb_doc_code=form_field_key)
|
||||
else:
|
||||
file_models = FileService.get_files(workflow_spec_id=workflow_spec_id,
|
||||
workflow_id=workflow_id,
|
||||
irb_doc_code=form_field_key)
|
||||
file_models = UserFileService.get_files(workflow_id=workflow_id,
|
||||
irb_doc_code=form_field_key)
|
||||
|
||||
files = (to_file_api(model) for model in file_models)
|
||||
return FileSchema(many=True).dump(files)
|
||||
|
||||
|
||||
def get_reference_files():
|
||||
results = FileService.get_files(is_reference=True)
|
||||
files = (to_file_api(model) for model in results)
|
||||
return FileSchema(many=True).dump(files)
|
||||
|
||||
|
||||
def add_file(workflow_spec_id=None, workflow_id=None, task_spec_name=None, form_field_key=None):
|
||||
def add_file(workflow_id=None, task_spec_name=None, form_field_key=None):
|
||||
file = connexion.request.files['file']
|
||||
if workflow_id:
|
||||
if form_field_key is None:
|
||||
@ -51,107 +42,61 @@ def add_file(workflow_spec_id=None, workflow_id=None, task_spec_name=None, form_
|
||||
if task_spec_name is None:
|
||||
raise ApiError('invalid_workflow_file',
|
||||
'When adding a workflow related file, you must specify a task_spec_name')
|
||||
file_model = FileService.add_workflow_file(workflow_id=workflow_id, irb_doc_code=form_field_key,
|
||||
file_model = UserFileService.add_workflow_file(workflow_id=workflow_id, irb_doc_code=form_field_key,
|
||||
task_spec_name=task_spec_name,
|
||||
name=file.filename, content_type=file.content_type,
|
||||
binary_data=file.stream.read())
|
||||
elif workflow_spec_id:
|
||||
# check if we have a primary already
|
||||
have_primary = FileModel.query.filter(FileModel.workflow_spec_id==workflow_spec_id, FileModel.type==FileType.bpmn, FileModel.primary==True).all()
|
||||
# set this to primary if we don't already have one
|
||||
if not have_primary:
|
||||
primary = True
|
||||
else:
|
||||
primary = False
|
||||
workflow_spec = session.query(WorkflowSpecModel).filter_by(id=workflow_spec_id).first()
|
||||
file_model = FileService.add_workflow_spec_file(workflow_spec, file.filename, file.content_type,
|
||||
file.stream.read(), primary=primary)
|
||||
else:
|
||||
raise ApiError("invalid_file", "You must supply either a workflow spec id or a workflow_id and form_field_key.")
|
||||
|
||||
return FileSchema().dump(to_file_api(file_model))
|
||||
|
||||
|
||||
def get_reference_file(name):
|
||||
file_data = FileService.get_reference_file_data(name)
|
||||
return send_file(
|
||||
io.BytesIO(file_data.data),
|
||||
attachment_filename=file_data.file_model.name,
|
||||
mimetype=file_data.file_model.content_type,
|
||||
cache_timeout=-1 # Don't cache these files on the browser.
|
||||
)
|
||||
|
||||
|
||||
def set_reference_file(name):
|
||||
"""Uses the file service to manage reference-files. They will be used in script tasks to compute values."""
|
||||
if 'file' not in connexion.request.files:
|
||||
raise ApiError('invalid_file',
|
||||
'Expected a file named "file" in the multipart form request', status_code=400)
|
||||
|
||||
file = connexion.request.files['file']
|
||||
|
||||
name_extension = FileService.get_extension(name)
|
||||
file_extension = FileService.get_extension(file.filename)
|
||||
if name_extension != file_extension:
|
||||
raise ApiError('invalid_file_type',
|
||||
"The file you uploaded has an extension '%s', but it should have an extension of '%s' " %
|
||||
(file_extension, name_extension))
|
||||
|
||||
file_models = FileService.get_files(name=name, is_reference=True)
|
||||
if len(file_models) == 0:
|
||||
file_model = FileService.add_reference_file(name, file.content_type, file.stream.read())
|
||||
else:
|
||||
file_model = file_models[0]
|
||||
FileService.update_file(file_models[0], file.stream.read(), file.content_type)
|
||||
|
||||
return FileSchema().dump(to_file_api(file_model))
|
||||
|
||||
|
||||
def add_reference_file():
|
||||
file = connexion.request.files['file']
|
||||
file_model = FileService.add_reference_file(name=file.filename, content_type=file.content_type,
|
||||
binary_data=file.stream.read())
|
||||
return FileSchema().dump(to_file_api(file_model))
|
||||
|
||||
|
||||
def update_file_data(file_id):
|
||||
file_model = session.query(FileModel).filter_by(id=file_id).with_for_update().first()
|
||||
file = connexion.request.files['file']
|
||||
if file_model is None:
|
||||
raise ApiError('no_such_file', f'The file id you provided ({file_id}) does not exist')
|
||||
file_model = FileService.update_file(file_model, file.stream.read(), file.content_type)
|
||||
file_model = UserFileService.update_file(file_model, file.stream.read(), file.content_type)
|
||||
return FileSchema().dump(to_file_api(file_model))
|
||||
|
||||
|
||||
def get_file_data_by_hash(md5_hash):
|
||||
filedatamodel = session.query(FileDataModel).filter(FileDataModel.md5_hash == md5_hash).first()
|
||||
return get_file_data(filedatamodel.file_model_id,version=filedatamodel.version)
|
||||
return get_file_data(filedatamodel.file_model_id, version=filedatamodel.version)
|
||||
|
||||
|
||||
def get_file_data(file_id, version=None):
|
||||
file_data = FileService.get_file_data(file_id, version)
|
||||
if file_data is None:
|
||||
raise ApiError('no_such_file', f'The file id you provided ({file_id}) does not exist')
|
||||
return send_file(
|
||||
io.BytesIO(file_data.data),
|
||||
attachment_filename=file_data.file_model.name,
|
||||
mimetype=file_data.file_model.content_type,
|
||||
cache_timeout=-1, # Don't cache these files on the browser.
|
||||
last_modified=file_data.date_created
|
||||
)
|
||||
file_model = session.query(FileModel).filter(FileModel.id==file_id).first()
|
||||
if file_model is not None:
|
||||
file_data_model = UserFileService.get_file_data(file_id, version)
|
||||
if file_data_model is not None:
|
||||
return send_file(
|
||||
io.BytesIO(file_data_model.data),
|
||||
attachment_filename=file_model.name,
|
||||
mimetype=file_model.content_type,
|
||||
cache_timeout=-1 # Don't cache these files on the browser.
|
||||
)
|
||||
else:
|
||||
raise ApiError('missing_data_model', f'The data model for file ({file_id}) does not exist')
|
||||
else:
|
||||
raise ApiError('missing_file_model', f'The file id you provided ({file_id}) does not exist')
|
||||
|
||||
|
||||
def get_file_data_link(file_id, auth_token, version=None):
|
||||
if not verify_token(auth_token):
|
||||
raise ApiError('not_authenticated', 'You need to include an authorization token in the URL with this')
|
||||
file_data = FileService.get_file_data(file_id, version)
|
||||
file_model = session.query(FileModel).filter(FileModel.id==file_id).first()
|
||||
file_data = UserFileService.get_file_data(file_id, version)
|
||||
if file_data is None:
|
||||
raise ApiError('no_such_file', f'The file id you provided ({file_id}) does not exist')
|
||||
return send_file(
|
||||
io.BytesIO(file_data.data),
|
||||
attachment_filename=file_data.file_model.name,
|
||||
mimetype=file_data.file_model.content_type,
|
||||
attachment_filename=file_model.name,
|
||||
mimetype=file_model.content_type,
|
||||
cache_timeout=-1, # Don't cache these files on the browser.
|
||||
last_modified=file_data.date_created,
|
||||
as_attachment = True
|
||||
as_attachment=True
|
||||
)
|
||||
|
||||
|
||||
@ -178,12 +123,12 @@ def update_file_info(file_id, body):
|
||||
|
||||
|
||||
def delete_file(file_id):
|
||||
FileService.delete_file(file_id)
|
||||
UserFileService.delete_file(file_id)
|
||||
|
||||
|
||||
def dmn_from_ss():
|
||||
file = connexion.request.files['file']
|
||||
result = FileService.dmn_from_spreadsheet(file)
|
||||
result = UserFileService.dmn_from_spreadsheet(file)
|
||||
return send_file(
|
||||
io.BytesIO(result),
|
||||
attachment_filename='temp_dmn.dmn',
|
||||
|
25 crc/api/git_repo.py Normal file
@ -0,0 +1,25 @@
from crc.models.git_models import GitRepo, GitRepoSchema
from crc.services.git_service import GitService


def get_repo():
    repo_model = GitService().get_repo()
    return GitRepoSchema().dump(repo_model)


def pull_from_remote():
    repo = GitService().pull_from_remote()
    repo_model = GitRepo.from_repo(repo)
    return GitRepoSchema().dump(repo_model)


def push_to_remote(comment=None):
    repo = GitService().push_to_remote(comment)
    repo_model = GitRepo.from_repo(repo)
    return GitRepoSchema().dump(repo_model)


def merge_with_branch(branch):
    repo = GitService().merge_with_branch(branch)
    repo_model = GitRepo.from_repo(repo)
    return GitRepoSchema().dump(repo_model)
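
A quick, hypothetical smoke test for these new handlers (not part of this change set) would be to call them inside an
application context; the exact behavior depends on the GIT_* settings in config/default.py:

```python
# Hypothetical sketch: exercising the new git_repo handlers directly (not part of this diff).
from crc import app
from crc.api import git_repo

with app.app_context():
    print(git_repo.get_repo())               # serialized repo state: branch, merge_branch, changes, untracked
    git_repo.pull_from_remote()               # pull from the remote configured via the GIT_* settings
    git_repo.push_to_remote(comment="sync")   # commit local spec changes and push them
```
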
61 crc/api/reference_file.py Normal file
@ -0,0 +1,61 @@
|
||||
from crc import session
|
||||
from crc.api.common import ApiError
|
||||
from crc.api.file import to_file_api
|
||||
from crc.models.file import FileModel, FileSchema, CONTENT_TYPES
|
||||
from crc.services.reference_file_service import ReferenceFileService
|
||||
|
||||
from flask import send_file
|
||||
|
||||
import io
|
||||
import connexion
|
||||
|
||||
|
||||
def get_reference_files():
|
||||
"""Gets a list of all reference files"""
|
||||
files = ReferenceFileService.get_reference_files()
|
||||
return FileSchema(many=True).dump(files)
|
||||
|
||||
|
||||
def get_reference_file_data(name):
|
||||
file_extension = ReferenceFileService.get_extension(name)
|
||||
content_type = CONTENT_TYPES[file_extension]
|
||||
file_data = ReferenceFileService().get_data(name)
|
||||
return send_file(
|
||||
io.BytesIO(file_data),
|
||||
attachment_filename=name,
|
||||
mimetype=content_type,
|
||||
cache_timeout=-1 # Don't cache these files on the browser.
|
||||
)
|
||||
|
||||
def get_reference_file_info(name):
|
||||
"""Return metadata for a reference file"""
|
||||
return FileSchema().dump(ReferenceFileService.get_reference_file(name))
|
||||
|
||||
|
||||
def update_reference_file_data(name):
|
||||
"""Uses the file service to manage reference-files. They will be used in script tasks to compute values."""
|
||||
if 'file' not in connexion.request.files:
|
||||
raise ApiError('invalid_file',
|
||||
'Expected a file named "file" in the multipart form request', status_code=400)
|
||||
|
||||
file = connexion.request.files['file']
|
||||
name_extension = ReferenceFileService.get_extension(name)
|
||||
file_extension = ReferenceFileService.get_extension(file.filename)
|
||||
if name_extension != file_extension:
|
||||
raise ApiError('invalid_file_type',
|
||||
"The file you uploaded has an extension '%s', but it should have an extension of '%s' " %
|
||||
(file_extension, name_extension))
|
||||
|
||||
return_file = ReferenceFileService.update_reference_file(file_name=name, binary_data=file.stream.read())
|
||||
return FileSchema().dump(return_file)
|
||||
|
||||
def add_reference_file():
|
||||
file = connexion.request.files['file']
|
||||
file_model = ReferenceFileService.add_reference_file(file.filename, file.stream.read())
|
||||
return FileSchema().dump(file_model)
|
||||
|
||||
|
||||
def delete_reference_file(name):
|
||||
ReferenceFileService().delete(name)
|
||||
|
||||
|
96 crc/api/spec_file.py Normal file
@ -0,0 +1,96 @@
|
||||
from crc import session
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.file import FileSchema, FileType
|
||||
from crc.services.spec_file_service import SpecFileService
|
||||
|
||||
from flask import send_file
|
||||
|
||||
import io
|
||||
import connexion
|
||||
|
||||
from crc.services.workflow_spec_service import WorkflowSpecService
|
||||
|
||||
def get_files(spec_id, include_libraries=False):
|
||||
if spec_id is None:
|
||||
raise ApiError(code='missing_spec_id',
|
||||
message='Please specify the workflow_spec_id.')
|
||||
workflow_spec_service = WorkflowSpecService()
|
||||
workflow_spec = workflow_spec_service.get_spec(spec_id)
|
||||
if workflow_spec is None:
|
||||
raise ApiError(code='unknown_spec',
|
||||
message=f'Unknown Spec: {spec_id}')
|
||||
files = SpecFileService.get_files(workflow_spec,
|
||||
include_libraries=include_libraries)
|
||||
return FileSchema(many=True).dump(files)
|
||||
|
||||
|
||||
def get_file(spec_id, file_name):
|
||||
workflow_spec_service = WorkflowSpecService()
|
||||
workflow_spec = workflow_spec_service.get_spec(spec_id)
|
||||
files = SpecFileService.get_files(workflow_spec, file_name)
|
||||
if len(files) == 0:
|
||||
raise ApiError(code='unknown file',
|
||||
message=f'No information exists for file {file_name}'
|
||||
f' it does not exist in workflow {spec_id}.', status_code=404)
|
||||
return FileSchema().dump(files[0])
|
||||
|
||||
|
||||
def add_file(spec_id):
|
||||
workflow_spec_service = WorkflowSpecService()
|
||||
workflow_spec = workflow_spec_service.get_spec(spec_id)
|
||||
file = connexion.request.files['file']
|
||||
file = SpecFileService.add_file(workflow_spec, file.filename, file.stream.read())
|
||||
if not workflow_spec.primary_process_id and file.type == FileType.bpmn:
|
||||
SpecFileService.set_primary_bpmn(workflow_spec, file.name)
|
||||
workflow_spec_service.update_spec(workflow_spec)
|
||||
return FileSchema().dump(file)
|
||||
|
||||
|
||||
def update(spec_id, file_name, is_primary):
|
||||
workflow_spec_service = WorkflowSpecService()
|
||||
workflow_spec = workflow_spec_service.get_spec(spec_id)
|
||||
files = SpecFileService.get_files(workflow_spec, file_name)
|
||||
if len(files) < 1:
|
||||
raise ApiError(code='unknown file',
|
||||
message=f'No information exists for file {file_name}'
|
||||
f' it does not exist in workflow {spec_id}.')
|
||||
file = files[0]
|
||||
if is_primary:
|
||||
SpecFileService.set_primary_bpmn(workflow_spec, file_name)
|
||||
workflow_spec_service.update_spec(workflow_spec)
|
||||
return FileSchema().dump(file)
|
||||
|
||||
|
||||
def update_data(spec_id, file_name):
|
||||
workflow_spec_service = WorkflowSpecService()
|
||||
workflow_spec_model = workflow_spec_service.get_spec(spec_id)
|
||||
if workflow_spec_model is None:
|
||||
raise ApiError(code='missing_spec',
|
||||
message=f'The workflow spec for id {spec_id} does not exist.')
|
||||
file_data = connexion.request.files['file']
|
||||
file = SpecFileService.update_file(workflow_spec_model, file_name, file_data.stream.read())
|
||||
return FileSchema().dump(file)
|
||||
|
||||
|
||||
def get_data(spec_id, file_name):
|
||||
workflow_spec_service = WorkflowSpecService()
|
||||
workflow_spec = workflow_spec_service.get_spec(spec_id)
|
||||
file_data = SpecFileService.get_data(workflow_spec, file_name)
|
||||
if file_data is not None:
|
||||
file_info = SpecFileService.get_files(workflow_spec, file_name)[0]
|
||||
return send_file(
|
||||
io.BytesIO(file_data),
|
||||
attachment_filename=file_name,
|
||||
mimetype=file_info.content_type,
|
||||
cache_timeout=-1 # Don't cache these files on the browser.
|
||||
)
|
||||
else:
|
||||
raise ApiError(code='missing_data_model',
|
||||
message=f'The data model for file {file_name}'
|
||||
f' does not exist in workflow {spec_id}.')
|
||||
|
||||
|
||||
def delete(spec_id, file_name):
|
||||
workflow_spec_service = WorkflowSpecService()
|
||||
workflow_spec = workflow_spec_service.get_spec(spec_id)
|
||||
SpecFileService.delete_file(workflow_spec, file_name)
|
@ -12,7 +12,9 @@ from crc.models.task_log import TaskLogModelSchema, TaskLogQuery, TaskLogQuerySc
|
||||
from crc.services.study_service import StudyService
|
||||
from crc.services.task_logging_service import TaskLoggingService
|
||||
from crc.services.user_service import UserService
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
from crc.services.workflow_service import WorkflowService
|
||||
from crc.services.workflow_spec_service import WorkflowSpecService
|
||||
|
||||
|
||||
def add_study(body):
|
||||
@ -33,15 +35,33 @@ def add_study(body):
|
||||
event_type=StudyEventType.user,
|
||||
user_uid=g.user.uid)
|
||||
|
||||
errors = StudyService._add_all_workflow_specs_to_study(study_model)
|
||||
spec_service = WorkflowSpecService()
|
||||
specs = spec_service.get_specs()
|
||||
categories = spec_service.get_categories()
|
||||
errors = StudyService.add_all_workflow_specs_to_study(study_model, specs)
|
||||
session.commit()
|
||||
study = StudyService().get_study(study_model.id, do_status=True)
|
||||
|
||||
master_workflow_results = __run_master_spec(study_model, spec_service.master_spec)
|
||||
study = StudyService().get_study(study_model.id, categories, master_workflow_results=master_workflow_results)
|
||||
study_data = StudySchema().dump(study)
|
||||
study_data["errors"] = ApiErrorSchema(many=True).dump(errors)
|
||||
return study_data
|
||||
|
||||
|
||||
def __run_master_spec(study_model, master_spec):
|
||||
"""Runs the master workflow spec to get details on the status of each workflow.
|
||||
This is a fairly expensive call."""
|
||||
"""Uses the Top Level Workflow to calculate the status of the study, and it's
|
||||
workflow models."""
|
||||
if not master_spec:
|
||||
raise ApiError("missing_master_spec", "No specifications are currently marked as the master spec.")
|
||||
return WorkflowProcessor.run_master_spec(master_spec, study_model)
|
||||
|
||||
|
||||
def update_study(study_id, body):
|
||||
spec_service = WorkflowSpecService()
|
||||
categories = spec_service.get_categories()
|
||||
|
||||
"""Pretty limited, but allows manual modifications to the study status """
|
||||
if study_id is None:
|
||||
raise ApiError('unknown_study', 'Please provide a valid Study ID.')
|
||||
@ -72,12 +92,18 @@ def update_study(study_id, body):
|
||||
WorkflowService.process_workflows_for_cancels(study_id)
|
||||
|
||||
# Need to reload the full study to return it to the frontend
|
||||
study = StudyService.get_study(study_id)
|
||||
study = StudyService.get_study(study_id, categories)
|
||||
return StudySchema().dump(study)
|
||||
|
||||
|
||||
def get_study(study_id, update_status=False):
|
||||
study = StudyService.get_study(study_id, do_status=update_status)
|
||||
spec_service = WorkflowSpecService()
|
||||
categories = spec_service.get_categories()
|
||||
master_workflow_results = []
|
||||
if update_status:
|
||||
study_model = session.query(StudyModel).filter(StudyModel.id == study_id).first()
|
||||
master_workflow_results = __run_master_spec(study_model, spec_service.master_spec)
|
||||
study = StudyService().get_study(study_id, categories, master_workflow_results=master_workflow_results)
|
||||
if (study is None):
|
||||
raise ApiError("unknown_study", 'The study "' + study_id + '" is not recognized.', status_code=404)
|
||||
return StudySchema().dump(study)
|
||||
@ -105,10 +131,13 @@ def delete_study(study_id):
|
||||
def user_studies():
|
||||
"""Returns all the studies associated with the current user. """
|
||||
user = UserService.current_user(allow_admin_impersonate=True)
|
||||
StudyService.synch_with_protocol_builder_if_enabled(user)
|
||||
studies = StudyService().get_studies_for_user(user)
|
||||
spec_service = WorkflowSpecService()
|
||||
specs = spec_service.get_specs()
|
||||
cats = spec_service.get_categories()
|
||||
StudyService.synch_with_protocol_builder_if_enabled(user, specs)
|
||||
studies = StudyService().get_studies_for_user(user, categories=cats)
|
||||
if len(studies) == 0:
|
||||
studies = StudyService().get_studies_for_user(user, include_invalid=True)
|
||||
studies = StudyService().get_studies_for_user(user, categories=cats, include_invalid=True)
|
||||
if len(studies) > 0:
|
||||
message = f"All studies associated with User: {user.uid} failed study validation"
|
||||
raise ApiError(code="study_integrity_error", message=message)
|
||||
|
@ -171,6 +171,9 @@ def login(
|
||||
# If we're in production, override any uid with the uid from the SSO request headers
|
||||
if _is_production():
|
||||
uid = _get_request_uid(request)
|
||||
else:
|
||||
if not app.config['TESTING']:
|
||||
uid = app.config['DEFAULT_UID']
|
||||
|
||||
if uid:
|
||||
app.logger.info("SSO_LOGIN: Full URL: " + request.url)
|
||||
|
@ -1,3 +1,4 @@
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from flask import g
|
||||
@ -5,75 +6,75 @@ from flask import g
|
||||
from crc import session
|
||||
from crc.api.common import ApiError, ApiErrorSchema
|
||||
from crc.models.api_models import WorkflowApiSchema
|
||||
from crc.models.file import FileModel
|
||||
from crc.models.study import StudyModel, WorkflowMetadata, StudyStatus
|
||||
from crc.models.task_event import TaskEventModel, TaskEvent, TaskEventSchema
|
||||
from crc.models.task_log import TaskLogModelSchema, TaskLogQuery, TaskLogQuerySchema
|
||||
from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, WorkflowSpecModel, WorkflowSpecCategoryModel, \
|
||||
WorkflowSpecCategoryModelSchema, WorkflowLibraryModel, WorkflowLibraryModelSchema
|
||||
from crc.models.workflow import WorkflowModel, WorkflowSpecInfoSchema, WorkflowSpecCategorySchema
|
||||
from crc.services.error_service import ValidationErrorService
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.lookup_service import LookupService
|
||||
from crc.services.spec_file_service import SpecFileService
|
||||
from crc.services.study_service import StudyService
|
||||
from crc.services.task_logging_service import TaskLoggingService
|
||||
from crc.services.user_service import UserService
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
from crc.services.workflow_service import WorkflowService
|
||||
from crc.services.workflow_spec_service import WorkflowSpecService
|
||||
|
||||
|
||||
def all_specifications(libraries=False,standalone=False):
|
||||
spec_service = WorkflowSpecService()
|
||||
if libraries and standalone:
|
||||
raise ApiError('inconceivable!', 'You should specify libraries or standalone, but not both')
|
||||
schema = WorkflowSpecModelSchema(many=True)
|
||||
|
||||
if libraries:
|
||||
return schema.dump(session.query(WorkflowSpecModel)\
|
||||
.filter(WorkflowSpecModel.library==True).all())
|
||||
workflows = spec_service.get_libraries()
|
||||
return WorkflowSpecInfoSchema(many=True).dump(workflows)
|
||||
|
||||
if standalone:
|
||||
return schema.dump(session.query(WorkflowSpecModel)\
|
||||
.filter(WorkflowSpecModel.standalone==True).all())
|
||||
workflows = spec_service.get_standalones()
|
||||
return WorkflowSpecInfoSchema(many=True).dump(workflows)
|
||||
|
||||
# return standard workflows (not library, not standalone)
|
||||
return schema.dump(session.query(WorkflowSpecModel)
|
||||
.filter((WorkflowSpecModel.library==False) | (
|
||||
WorkflowSpecModel.library==None))
|
||||
.filter((WorkflowSpecModel.standalone==False) | (
|
||||
WorkflowSpecModel.standalone==None))
|
||||
.all())
|
||||
specs = spec_service.get_specs()
|
||||
master_spec = spec_service.get_master_spec()
|
||||
if master_spec:
|
||||
specs.append(spec_service.get_master_spec())
|
||||
return WorkflowSpecInfoSchema(many=True).dump(specs)
|
||||
|
||||
|
||||
def add_workflow_specification(body):
|
||||
category_id = body['category_id']
|
||||
WorkflowService.cleanup_workflow_spec_display_order(category_id)
|
||||
count = session.query(WorkflowSpecModel).filter_by(category_id=category_id).count()
|
||||
body['display_order'] = count
|
||||
# Libraries and standalone workflows don't get a category_id
|
||||
if body['library'] is True or body['standalone'] is True:
|
||||
body['category_id'] = None
|
||||
new_spec: WorkflowSpecModel = WorkflowSpecModelSchema().load(body, session=session)
|
||||
session.add(new_spec)
|
||||
session.commit()
|
||||
return WorkflowSpecModelSchema().dump(new_spec)
|
||||
spec = WorkflowSpecInfoSchema().load(body)
|
||||
spec_service = WorkflowSpecService()
|
||||
category = spec_service.get_category(spec.category_id)
|
||||
spec.category = category
|
||||
workflows = spec_service.cleanup_workflow_spec_display_order(category)
|
||||
size = len(workflows)
|
||||
spec.display_order = size
|
||||
spec_service.add_spec(spec)
|
||||
return WorkflowSpecInfoSchema().dump(spec)
|
||||
|
||||
|
||||
def get_workflow_specification(spec_id):
|
||||
spec_service = WorkflowSpecService()
|
||||
if spec_id is None:
|
||||
raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')
|
||||
|
||||
spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
|
||||
spec = spec_service.get_spec(spec_id)
|
||||
|
||||
if spec is None:
|
||||
raise ApiError('unknown_spec', 'The Workflow Specification "' + spec_id + '" is not recognized.')
|
||||
|
||||
return WorkflowSpecModelSchema().dump(spec)
|
||||
return WorkflowSpecInfoSchema().dump(spec)
|
||||
|
||||
def validate_spec_and_library(spec_id,library_id):
|
||||
spec_service = WorkflowSpecService()
|
||||
|
||||
if spec_id is None:
|
||||
raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')
|
||||
if library_id is None:
|
||||
raise ApiError('unknown_spec', 'Please provide a valid Library Specification ID.')
|
||||
spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
|
||||
library: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=library_id).first()
|
||||
|
||||
spec = spec_service.get_spec(spec_id)
|
||||
library = spec_service.get_spec(library_id)
|
||||
if spec is None:
|
||||
raise ApiError('unknown_spec', 'The Workflow Specification "' + spec_id + '" is not recognized.')
|
||||
if library is None:
|
||||
@ -82,33 +83,50 @@ def validate_spec_and_library(spec_id,library_id):
|
||||
raise ApiError('unknown_spec', 'Linked workflow spec is not a library.')
|
||||
|
||||
|
||||
def add_workflow_spec_library(spec_id,library_id):
|
||||
def add_workflow_spec_library(spec_id, library_id):
|
||||
validate_spec_and_library(spec_id, library_id)
|
||||
libraries: WorkflowLibraryModel = session.query(WorkflowLibraryModel).filter_by(workflow_spec_id=spec_id).all()
|
||||
libraryids = [x.library_spec_id for x in libraries]
|
||||
if library_id in libraryids:
|
||||
raise ApiError('unknown_spec', 'The Library Specification "' + spec_id + '" is already attached.')
|
||||
newlib = WorkflowLibraryModel()
|
||||
newlib.workflow_spec_id = spec_id
|
||||
newlib.library_spec_id = library_id
|
||||
session.add(newlib)
|
||||
session.commit()
|
||||
libraries: WorkflowLibraryModel = session.query(WorkflowLibraryModel).filter_by(workflow_spec_id=spec_id).all()
|
||||
return WorkflowLibraryModelSchema(many=True).dump(libraries)
|
||||
spec_service = WorkflowSpecService()
|
||||
spec = spec_service.get_spec(spec_id)
|
||||
if library_id in spec.libraries:
|
||||
raise ApiError('invalid_request', 'The Library Specification "' + library_id + '" is already attached.')
|
||||
|
||||
def drop_workflow_spec_library(spec_id,library_id):
|
||||
spec.libraries.append(library_id)
|
||||
spec_service.update_spec(spec)
|
||||
return WorkflowSpecInfoSchema().dump(spec)
|
||||
|
||||
|
||||
def drop_workflow_spec_library(spec_id, library_id):
|
||||
validate_spec_and_library(spec_id, library_id)
|
||||
session.query(WorkflowLibraryModel).filter_by(workflow_spec_id=spec_id,library_spec_id=library_id).delete()
|
||||
session.commit()
|
||||
libraries: WorkflowLibraryModel = session.query(WorkflowLibraryModel).filter_by(workflow_spec_id=spec_id).all()
|
||||
return WorkflowLibraryModelSchema(many=True).dump(libraries)
|
||||
spec_service = WorkflowSpecService()
|
||||
|
||||
spec = spec_service.get_spec(spec_id)
|
||||
|
||||
if library_id in spec.libraries:
|
||||
spec.libraries.remove(library_id)
|
||||
spec_service.update_spec(spec)
|
||||
return WorkflowSpecInfoSchema().dump(spec)
|
||||
|
||||
|
||||
def validate_workflow_specification(spec_id, study_id=None, test_until=None):
|
||||
|
||||
try:
|
||||
WorkflowService.raise_if_disabled(spec_id, study_id)
|
||||
master_spec = WorkflowSpecService().master_spec
|
||||
if study_id is not None:
|
||||
study_model = session.query(StudyModel).filter(StudyModel.id == study_id).first()
|
||||
statuses = WorkflowProcessor.run_master_spec(master_spec, study_model)
|
||||
if spec_id in statuses and statuses[spec_id]['status'] == 'disabled':
|
||||
raise ApiError(code='disabled_workflow',
|
||||
message=f"This workflow is disabled. {statuses[spec_id]['message']}")
|
||||
ts = time.time()
|
||||
WorkflowService.test_spec(spec_id, study_id, test_until)
|
||||
te = time.time()
|
||||
print('| %2.4f | % s ' % (te - ts, 'validate and complete all fields'))
|
||||
|
||||
ts = time.time()
|
||||
WorkflowService.test_spec(spec_id, study_id, test_until, required_only=True)
|
||||
te = time.time()
|
||||
print('| %2.4f | % s ' % (te - ts, 'validate only with required fields'))
|
||||
|
||||
except ApiError as ae:
|
||||
error = ae
|
||||
error = ValidationErrorService.interpret_validation_error(error)
|
||||
@ -117,9 +135,11 @@ def validate_workflow_specification(spec_id, study_id=None, test_until=None):
|
||||
|
||||
|
||||
def update_workflow_specification(spec_id, body):
|
||||
spec_service = WorkflowSpecService()
|
||||
|
||||
if spec_id is None:
|
||||
raise ApiError('unknown_spec', 'Please provide a valid Workflow Spec ID.')
|
||||
spec = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
|
||||
spec = spec_service.get_spec(spec_id)
|
||||
|
||||
if spec is None:
|
||||
raise ApiError('unknown_study', 'The spec "' + spec_id + '" is not recognized.')
|
||||
@ -131,56 +151,36 @@ def update_workflow_specification(spec_id, body):
|
||||
# Libraries and standalone workflows don't get a category_id
|
||||
if body['library'] is True or body['standalone'] is True:
|
||||
body['category_id'] = None
|
||||
|
||||
schema = WorkflowSpecModelSchema()
|
||||
spec = schema.load(body, session=session, instance=spec, partial=True)
|
||||
session.add(spec)
|
||||
session.commit()
|
||||
return schema.dump(spec)
|
||||
spec = WorkflowSpecInfoSchema().load(body)
|
||||
spec_service.update_spec(spec)
|
||||
return WorkflowSpecInfoSchema().dump(spec)
|
||||
|
||||
|
||||
def delete_workflow_specification(spec_id):
|
||||
if spec_id is None:
|
||||
raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')
|
||||
|
||||
spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
|
||||
category_id = spec.category_id
|
||||
|
||||
spec_service = WorkflowSpecService()
|
||||
spec = spec_service.get_spec(spec_id)
|
||||
if spec is None:
|
||||
raise ApiError('unknown_spec', 'The Workflow Specification "' + spec_id + '" is not recognized.')
|
||||
|
||||
# Delete all workflow models related to this specification
|
||||
WorkflowService.delete_workflow_spec_workflow_models(spec_id)
|
||||
|
||||
# Delete all files related to this specification
|
||||
WorkflowService.delete_workflow_spec_files(spec_id)
|
||||
|
||||
# Delete all events related to this specification
|
||||
WorkflowService.delete_workflow_spec_task_events(spec_id)
|
||||
|
||||
|
||||
# .delete() doesn't work when we need a cascade. Must grab the record, and explicitly delete
|
||||
workflow_spec = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
|
||||
session.delete(workflow_spec)
|
||||
session.commit()
|
||||
|
||||
# Reorder the remaining specs
|
||||
WorkflowService.cleanup_workflow_spec_display_order(category_id)
|
||||
spec_service.delete_spec(spec_id)
|
||||
category = spec_service.get_category(spec.category_id) # Reload the category, or cleanup may re-create the spec.
|
||||
spec_service.cleanup_workflow_spec_display_order(category)
|
||||
|
||||
|
||||
def reorder_workflow_specification(spec_id, direction):
|
||||
if direction not in ('up', 'down'):
|
||||
raise ApiError(code='bad_direction',
|
||||
message='The direction must be `up` or `down`.')
|
||||
spec = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.id == spec_id).first()
|
||||
spec_service = WorkflowSpecService()
|
||||
|
||||
spec = spec_service.get_spec(spec_id)
|
||||
if spec:
|
||||
WorkflowService.cleanup_workflow_spec_display_order(spec.category_id)
|
||||
ordered_specs = WorkflowService.reorder_workflow_spec(spec, direction)
|
||||
ordered_specs = spec_service.reorder_spec(spec, direction)
|
||||
else:
|
||||
raise ApiError(code='bad_spec_id',
|
||||
message=f'The spec_id {spec_id} did not return a specification. Please check that it is valid.')
|
||||
schema = WorkflowSpecModelSchema(many=True)
|
||||
return schema.dump(ordered_specs)
|
||||
return WorkflowSpecInfoSchema(many=True).dump(ordered_specs)
|
||||
|
||||
|
||||
def get_workflow_from_spec(spec_id):
|
||||
@ -238,7 +238,8 @@ def get_task_events(action = None, workflow = None, study = None):
|
||||
for event in events:
|
||||
study = session.query(StudyModel).filter(StudyModel.id == event.study_id).first()
|
||||
workflow = session.query(WorkflowModel).filter(WorkflowModel.id == event.workflow_id).first()
|
||||
workflow_meta = WorkflowMetadata.from_workflow(workflow)
|
||||
spec = WorkflowSpecService().get_spec(workflow.workflow_spec_id)
|
||||
workflow_meta = WorkflowMetadata.from_workflow(workflow, spec)
|
||||
if study and study.status in [StudyStatus.open_for_enrollment, StudyStatus.in_progress]:
|
||||
task_events.append(TaskEvent(event, study, workflow_meta))
|
||||
return TaskEventSchema(many=True).dump(task_events)
|
||||
@ -253,6 +254,9 @@ def set_current_task(workflow_id, task_id):
|
||||
processor = WorkflowProcessor(workflow_model)
|
||||
task_id = uuid.UUID(task_id)
|
||||
spiff_task = processor.bpmn_workflow.get_task(task_id)
|
||||
if not spiff_task:
|
||||
# An invalid task_id was requested.
|
||||
raise ApiError("invalid_task", "The Task you requested no longer exists as a part of this workflow.")
|
||||
_verify_user_and_role(processor, spiff_task)
|
||||
user_uid = UserService.current_user(allow_admin_impersonate=True).uid
|
||||
if spiff_task.state != spiff_task.COMPLETED and spiff_task.state != spiff_task.READY:
|
||||
@ -331,64 +335,55 @@ def __update_task(processor, task, data, user):
|
||||
|
||||
|
||||
def list_workflow_spec_categories():
|
||||
schema = WorkflowSpecCategoryModelSchema(many=True)
|
||||
return schema.dump(session.query(WorkflowSpecCategoryModel).order_by(WorkflowSpecCategoryModel.display_order).all())
|
||||
spec_service = WorkflowSpecService()
|
||||
categories = spec_service.get_categories()
|
||||
return WorkflowSpecCategorySchema(many=True).dump(categories)
|
||||
|
||||
|
||||
|
||||
def get_workflow_spec_category(cat_id):
|
||||
schema = WorkflowSpecCategoryModelSchema()
|
||||
return schema.dump(session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).first())
|
||||
spec_service = WorkflowSpecService()
|
||||
category = spec_service.get_category(cat_id)
|
||||
return WorkflowSpecCategorySchema().dump(category)
|
||||
|
||||
|
||||
def add_workflow_spec_category(body):
|
||||
WorkflowService.cleanup_workflow_spec_category_display_order()
|
||||
count = session.query(WorkflowSpecCategoryModel).count()
|
||||
body['display_order'] = count
|
||||
schema = WorkflowSpecCategoryModelSchema()
|
||||
new_cat: WorkflowSpecCategoryModel = schema.load(body, session=session)
|
||||
session.add(new_cat)
|
||||
session.commit()
|
||||
return schema.dump(new_cat)
|
||||
|
||||
spec_service = WorkflowSpecService()
|
||||
category = WorkflowSpecCategorySchema().load(body)
|
||||
spec_service.add_category(category)
|
||||
return WorkflowSpecCategorySchema().dump(category)
|
||||
|
||||
def update_workflow_spec_category(cat_id, body):
|
||||
if cat_id is None:
|
||||
raise ApiError('unknown_category', 'Please provide a valid Workflow Spec Category ID.')
|
||||
|
||||
category = session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).first()
|
||||
|
||||
spec_service = WorkflowSpecService()
|
||||
category = spec_service.get_category(cat_id)
|
||||
if category is None:
|
||||
raise ApiError('unknown_category', 'The category "' + cat_id + '" is not recognized.')
|
||||
|
||||
# Make sure they don't try to change the display_order
|
||||
# There is a separate endpoint for that
|
||||
body['display_order'] = category.display_order
|
||||
|
||||
schema = WorkflowSpecCategoryModelSchema()
|
||||
category = schema.load(body, session=session, instance=category, partial=True)
|
||||
session.add(category)
|
||||
session.commit()
|
||||
return schema.dump(category)
|
||||
category = WorkflowSpecCategorySchema().load(body)
|
||||
spec_service.update_category(category)
|
||||
return WorkflowSpecCategorySchema().dump(category)
|
||||
|
||||
|
||||
def delete_workflow_spec_category(cat_id):
|
||||
session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).delete()
|
||||
session.commit()
|
||||
# Reorder the remaining categories
|
||||
WorkflowService.cleanup_workflow_spec_category_display_order()
|
||||
spec_service = WorkflowSpecService()
|
||||
spec_service.delete_category(cat_id)
|
||||
|
||||
|
||||
def reorder_workflow_spec_category(cat_id, direction):
|
||||
if direction not in ('up', 'down'):
|
||||
raise ApiError(code='bad_direction',
|
||||
message='The direction must be `up` or `down`.')
|
||||
WorkflowService.cleanup_workflow_spec_category_display_order()
|
||||
category = session.query(WorkflowSpecCategoryModel).\
|
||||
filter(WorkflowSpecCategoryModel.id == cat_id).first()
|
||||
spec_service = WorkflowSpecService()
|
||||
spec_service.cleanup_category_display_order()
|
||||
category = spec_service.get_category(cat_id)
|
||||
if category:
|
||||
ordered_categories = WorkflowService.reorder_workflow_spec_category(category, direction)
|
||||
schema = WorkflowSpecCategoryModelSchema(many=True)
|
||||
return schema.dump(ordered_categories)
|
||||
ordered_categories = spec_service.reorder_workflow_spec_category(category, direction)
|
||||
return WorkflowSpecCategorySchema(many=True).dump(ordered_categories)
|
||||
else:
|
||||
raise ApiError(code='bad_category_id',
|
||||
message=f'The category id {cat_id} did not return a Workflow Spec Category. Make sure it is a valid ID.')
|
||||
|
@ -1,405 +0,0 @@
|
||||
import hashlib
|
||||
import pandas as pd
|
||||
from pandas._libs.missing import NA
|
||||
|
||||
from crc import session, app
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.file import FileModel, FileDataModel
|
||||
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecCategoryModel, WorkflowSpecCategoryModelSchema, \
|
||||
WorkflowSpecModelSchema, WorkflowLibraryModel, WorkflowLibraryModelSchema
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.workflow_sync import WorkflowSyncService
|
||||
from crc.api.workflow import get_workflow_specification
|
||||
|
||||
|
||||
def get_sync_workflow_specification(workflow_spec_id):
|
||||
return get_workflow_specification(workflow_spec_id)
|
||||
|
||||
def join_uuids(uuids):
|
||||
"""Joins a pandas Series of uuids and combines them in one hash"""
|
||||
combined_uuids = ''.join([str(uuid) for uuid in uuids.sort_values()]) # ensure that values are always
|
||||
# in the same order
|
||||
return hashlib.md5(combined_uuids.encode('utf8')).hexdigest() # make a hash of the hashes
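A quick illustration of the thumbprint behaviour, assuming pandas is installed and the join_uuids above is in scope; because the series is sorted before joining, the order of the file hashes does not matter. The hash values below are made up.

import pandas as pd

# relies on the join_uuids defined above; hash values are hypothetical
a = pd.Series(['1a2b', '3c4d', '5e6f'])
b = pd.Series(['5e6f', '1a2b', '3c4d'])
assert join_uuids(a) == join_uuids(b)  # same files in any order -> same thumbprint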
|
||||
|
||||
def verify_token(token, required_scopes):
|
||||
"""
|
||||
Part of the Swagger API permissions for the syncing API
|
||||
The env variable for this is defined in config/default.py
|
||||
|
||||
If you are 'playing' with the swagger interface, you will want to copy the
|
||||
token that is defined there and use it to authenticate the API if you are
|
||||
emulating copying files between systems.
|
||||
"""
|
||||
if token == app.config['API_TOKEN']:
|
||||
return {'scope':['any']}
|
||||
else:
|
||||
raise ApiError("permission_denied", "API Token information is not correct")
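A minimal sketch of how this check behaves, assuming app.config['API_TOKEN'] is set to the hypothetical value below and that ApiError exposes the code it was constructed with.

# 'secret-sync-token' is a hypothetical configured value
assert verify_token('secret-sync-token', required_scopes=None) == {'scope': ['any']}
try:
    verify_token('wrong-token', required_scopes=None)
except ApiError as error:
    print(error.code)  # permission_denied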
|
||||
|
||||
|
||||
def get_changed_workflows(remote,as_df=False):
|
||||
"""
|
||||
Takes a remote endpoint, gets its workflows, and then
|
||||
determines which workflows differ between the local and remote systems
|
||||
"""
|
||||
|
||||
remote_workflows_list = WorkflowSyncService.get_all_remote_workflows(remote)
|
||||
remote_workflows = pd.DataFrame(remote_workflows_list)
|
||||
|
||||
# get the local thumbprints & make sure that 'workflow_spec_id' is a column, not an index
|
||||
local = get_all_spec_state_dataframe().reset_index()
|
||||
|
||||
if local.empty:
|
||||
# return the list as a dict, let swagger convert it to json
|
||||
remote_workflows['new'] = True
|
||||
if as_df:
|
||||
return remote_workflows
|
||||
else:
|
||||
return remote_workflows.reset_index().to_dict(orient='records')
|
||||
|
||||
# merge these on workflow spec id and hash - this will
|
||||
# make two different date columns date_x and date_y
|
||||
different = remote_workflows.merge(local,
|
||||
right_on=['workflow_spec_id','md5_hash'],
|
||||
left_on=['workflow_spec_id','md5_hash'],
|
||||
how = 'outer' ,
|
||||
indicator=True).loc[lambda x : x['_merge']!='both']
|
||||
|
||||
# If there are no differences, then we can just return.
|
||||
if not different.empty:
|
||||
|
||||
# each line has a tag on it - if was in the left or the right,
|
||||
# label it so we know if that was on the remote or local machine
|
||||
different.loc[different['_merge']=='left_only','location'] = 'remote'
|
||||
different.loc[different['_merge']=='right_only','location'] = 'local'
|
||||
|
||||
# this takes the different date_created_x and date-created_y columns and
|
||||
# combines them back into one date_created column
|
||||
index = different['date_created_x'].isnull()
|
||||
different.loc[index,'date_created_x'] = different[index]['date_created_y']
|
||||
different = different[['workflow_spec_id','date_created_x','location']].copy()
|
||||
different.columns=['workflow_spec_id','date_created','location']
|
||||
|
||||
# our different list will have multiple entries for a workflow if there is a version on either side
|
||||
# we want to grab the most recent one, so we sort and grab the most recent one for each workflow
|
||||
changedfiles = different.sort_values('date_created',ascending=False).groupby('workflow_spec_id').first()
|
||||
|
||||
# get an exclusive or list of workflow ids - that is we want lists of files that are
|
||||
# on one machine or the other, but not both
|
||||
remote_spec_ids = remote_workflows[['workflow_spec_id']]
|
||||
local_spec_ids = local[['workflow_spec_id']]
|
||||
left = remote_spec_ids[~remote_spec_ids['workflow_spec_id'].isin(local_spec_ids['workflow_spec_id'])]
|
||||
right = local_spec_ids[~local_spec_ids['workflow_spec_id'].isin(remote_spec_ids['workflow_spec_id'])]
|
||||
|
||||
# flag files as new that are only on the remote box and remove the files that are only on the local box
|
||||
changedfiles['new'] = False
|
||||
changedfiles.loc[changedfiles.index.isin(left['workflow_spec_id']), 'new'] = True
|
||||
output = changedfiles[~changedfiles.index.isin(right['workflow_spec_id'])]
|
||||
|
||||
else:
|
||||
output = different
|
||||
|
||||
# return the list as a dict, let swagger convert it to json
|
||||
if as_df:
|
||||
return output
|
||||
else:
|
||||
return output.reset_index().to_dict(orient='records')
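The heart of the comparison above is a pandas outer merge with indicator=True; a small self-contained sketch (hypothetical spec ids and hashes) of how that tags rows as remote-only or local-only.

import pandas as pd

remote = pd.DataFrame({'workflow_spec_id': ['a', 'b'], 'md5_hash': ['h1', 'h2']})
local = pd.DataFrame({'workflow_spec_id': ['b', 'c'], 'md5_hash': ['h2', 'h3']})

# outer merge keeps rows from both sides; '_merge' records which side a row came from
different = remote.merge(local, on=['workflow_spec_id', 'md5_hash'],
                         how='outer', indicator=True)
different = different.loc[different['_merge'] != 'both'].copy()
different['location'] = different['_merge'].map({'left_only': 'remote',
                                                 'right_only': 'local'})
print(different[['workflow_spec_id', 'location']])  # 'a' is remote-only, 'c' is local-only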
|
||||
|
||||
|
||||
def sync_all_changed_workflows(remote):
|
||||
"""
|
||||
Does what it says, gets a list of all workflows that are different between
|
||||
two systems and pulls all of the workflows and files that are different on the
|
||||
remote system. The idea is that we can make the local system 'look' like the remote
|
||||
system for deployment or testing.
|
||||
"""
|
||||
workflowsdf = get_changed_workflows(remote,as_df=True)
|
||||
if len(workflowsdf) ==0:
|
||||
return []
|
||||
workflows = workflowsdf.reset_index().to_dict(orient='records')
|
||||
for workflow in workflows:
|
||||
sync_changed_files(remote,workflow['workflow_spec_id'])
|
||||
sync_changed_files(remote,'REFERENCE_FILES')
|
||||
return [x['workflow_spec_id'] for x in workflows]
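A hedged usage sketch; the remote argument is whatever host the sync API accepts, and the value below is made up.

# hypothetical remote host; pulls every changed spec plus the reference files
synced = sync_all_changed_workflows('remote.example.org')
print(synced)  # list of workflow_spec_ids that were updated locally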
|
||||
|
||||
|
||||
def file_get(workflow_spec_id,filename):
|
||||
"""
|
||||
Helper function to take care of the special case where we
|
||||
are looking for files that are marked is_reference
|
||||
"""
|
||||
if workflow_spec_id == 'REFERENCE_FILES':
|
||||
currentfile = session.query(FileModel).filter(FileModel.is_reference == True,
|
||||
FileModel.name == filename).first()
|
||||
else:
|
||||
currentfile = session.query(FileModel).filter(FileModel.workflow_spec_id==workflow_spec_id,
|
||||
FileModel.name == filename).first()
|
||||
return currentfile
|
||||
|
||||
|
||||
def create_or_update_local_spec(remote,workflow_spec_id):
|
||||
specdict = WorkflowSyncService.get_remote_workflow_spec(remote, workflow_spec_id)
|
||||
# if we are updating from a master spec, then we want to make sure it is the only
|
||||
# master spec in our local system, turn all other master_specs off
|
||||
if specdict['is_master_spec']:
|
||||
master_specs = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.is_master_spec == True).all()
|
||||
for master_spec in master_specs:
|
||||
master_spec.is_master_spec = False
|
||||
session.add(master_spec)
|
||||
|
||||
# Update local_spec, or create a new one if one does not exist.
|
||||
local_spec = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.id == workflow_spec_id).first()
|
||||
local_spec = WorkflowSpecModelSchema().load(specdict, session=session, instance=local_spec)
|
||||
|
||||
# Set the category
|
||||
if specdict['category'] is not None:
|
||||
local_category = session.query(WorkflowSpecCategoryModel).\
|
||||
filter(WorkflowSpecCategoryModel.id == specdict['category']['id']).first()
|
||||
local_category = WorkflowSpecCategoryModelSchema().load(specdict['category'], session=session,
|
||||
instance=local_category)
|
||||
session.add(local_category)
|
||||
local_spec.category = local_category
|
||||
|
||||
# Add the local spec to the database, then we can link the libraries.
|
||||
session.add(local_spec)
|
||||
|
||||
# Set the libraries
|
||||
session.query(WorkflowLibraryModel).filter(WorkflowLibraryModel.workflow_spec_id == local_spec.id).delete()
|
||||
for library in specdict['libraries']:
|
||||
# Ensure referenced libraries are local, and link them.
|
||||
create_or_update_local_spec(remote, library['id'])
|
||||
local_lib = WorkflowLibraryModel(workflow_spec_id=local_spec.id,
|
||||
library_spec_id=library['id'])
|
||||
session.add(local_lib)
|
||||
session.commit()
|
||||
|
||||
def update_or_create_current_file(remote,workflow_spec_id,updatefile):
|
||||
currentfile = file_get(workflow_spec_id, updatefile['filename'])
|
||||
if not currentfile:
|
||||
currentfile = FileModel()
|
||||
currentfile.name = updatefile['filename']
|
||||
if workflow_spec_id == 'REFERENCE_FILES':
|
||||
currentfile.workflow_spec_id = None
|
||||
currentfile.is_reference = True
|
||||
else:
|
||||
currentfile.workflow_spec_id = workflow_spec_id
|
||||
|
||||
currentfile.date_created = updatefile['date_created']
|
||||
currentfile.type = updatefile['type']
|
||||
currentfile.primary = updatefile['primary']
|
||||
currentfile.content_type = updatefile['content_type']
|
||||
currentfile.primary_process_id = updatefile['primary_process_id']
|
||||
session.add(currentfile)
|
||||
try:
|
||||
content = WorkflowSyncService.get_remote_file_by_hash(remote, updatefile['md5_hash'])
|
||||
FileService.update_file(currentfile, content, updatefile['type'])
|
||||
except ApiError:
|
||||
# Remote file doesn't exist, don't update it.
|
||||
print("Remote file " + currentfile.name + " does not exist, so not syncing.")
|
||||
|
||||
def sync_changed_files(remote,workflow_spec_id):
|
||||
"""
|
||||
This grabs a list of all files for a workflow_spec that are different between systems,
|
||||
and gets the remote copy of any file that has changed
|
||||
|
||||
We also have a special case for "REFERENCE_FILES" where there is no workflow_spec_id,
|
||||
but all of the files are marked in the database as is_reference - and they need to be
|
||||
handled slightly differently.
|
||||
"""
|
||||
# make sure that spec is local before syncing files
|
||||
if workflow_spec_id != 'REFERENCE_FILES':
|
||||
create_or_update_local_spec(remote,workflow_spec_id)
|
||||
|
||||
|
||||
changedfiles = get_changed_files(remote,workflow_spec_id,as_df=True)
|
||||
if len(changedfiles)==0:
|
||||
return []
|
||||
updatefiles = changedfiles[~((changedfiles['new']==True) & (changedfiles['location']=='local'))]
|
||||
updatefiles = updatefiles.reset_index().to_dict(orient='records')
|
||||
|
||||
deletefiles = changedfiles[((changedfiles['new']==True) & (changedfiles['location']=='local'))]
|
||||
deletefiles = deletefiles.reset_index().to_dict(orient='records')
|
||||
|
||||
for delfile in deletefiles:
|
||||
currentfile = file_get(workflow_spec_id,delfile['filename'])
|
||||
|
||||
# it is more appropriate to archive the file than to delete it,
|
||||
# because we might have workflows that are still using the
|
||||
# file data
|
||||
currentfile.archived = True
|
||||
session.add(currentfile)
|
||||
|
||||
for updatefile in updatefiles:
|
||||
update_or_create_current_file(remote,workflow_spec_id,updatefile)
|
||||
session.commit()
|
||||
return [x['filename'] for x in updatefiles]
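A small, self-contained sketch (made-up file names) of the boolean split used above: files that exist only locally are archived, everything else is pulled from the remote.

import pandas as pd

changed = pd.DataFrame({'filename': ['a.bpmn', 'b.dmn', 'c.xlsx'],
                        'new': [True, False, True],
                        'location': ['local', 'remote', 'remote']})
local_only = (changed['new'] == True) & (changed['location'] == 'local')
to_update = changed[~local_only]   # fetch these from the remote
to_archive = changed[local_only]   # exists only locally: archive rather than delete
print(to_update['filename'].tolist(), to_archive['filename'].tolist())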
|
||||
|
||||
|
||||
def get_changed_files(remote,workflow_spec_id,as_df=False):
|
||||
"""
|
||||
gets a remote endpoint - gets the files for a workflow_spec on both
|
||||
local and remote, determines what files have changed, and returns a list of those
|
||||
files
|
||||
"""
|
||||
remote_file_list = WorkflowSyncService.get_remote_workflow_spec_files(remote,workflow_spec_id)
|
||||
remote_files = pd.DataFrame(remote_file_list)
|
||||
# get the local thumbprints & make sure that 'workflow_spec_id' is a column, not an index
|
||||
local = get_workflow_spec_files_dataframe(workflow_spec_id).reset_index()
|
||||
local['md5_hash'] = local['md5_hash'].astype('str')
|
||||
remote_files['md5_hash'] = remote_files['md5_hash'].astype('str')
|
||||
if len(local) == 0:
|
||||
remote_files['new'] = True
|
||||
remote_files['location'] = 'remote'
|
||||
if as_df:
|
||||
return remote_files
|
||||
else:
|
||||
return remote_files.reset_index().to_dict(orient='records')
|
||||
|
||||
different = remote_files.merge(local,
|
||||
right_on=['filename','md5_hash'],
|
||||
left_on=['filename','md5_hash'],
|
||||
how = 'outer' ,
|
||||
indicator=True).loc[lambda x : x['_merge']!='both']
|
||||
if len(different) == 0:
|
||||
if as_df:
|
||||
return different
|
||||
else:
|
||||
return []
|
||||
# each line has a tag on it - if was in the left or the right,
|
||||
# label it so we know if that was on the remote or local machine
|
||||
different.loc[different['_merge']=='left_only','location'] = 'remote'
|
||||
different.loc[different['_merge']=='right_only','location'] = 'local'
|
||||
|
||||
# this takes the different date_created_x and date-created_y columns and
|
||||
# combines them back into one date_created column
|
||||
dualfields = ['date_created','type','primary','content_type','primary_process_id']
|
||||
for merge in dualfields:
|
||||
index = different[merge+'_x'].isnull()
|
||||
different.loc[index,merge+'_x'] = different[index][merge+'_y']
|
||||
|
||||
fieldlist = [fld+'_x' for fld in dualfields]
|
||||
different = different[ fieldlist + ['md5_hash','filename','location']].copy()
|
||||
|
||||
different.columns=dualfields+['md5_hash','filename','location']
|
||||
# our different list will have multiple entries for a workflow if there is a version on either side
|
||||
# we want to grab the most recent one, so we sort and grab the most recent one for each workflow
|
||||
changedfiles = different.sort_values('date_created',ascending=False).groupby('filename').first()
|
||||
|
||||
# get an exclusive or list of workflow ids - that is we want lists of files that are
|
||||
# on one machine or the other, but not both
|
||||
remote_spec_ids = remote_files[['filename']]
|
||||
local_spec_ids = local[['filename']]
|
||||
left = remote_spec_ids[~remote_spec_ids['filename'].isin(local_spec_ids['filename'])]
|
||||
right = local_spec_ids[~local_spec_ids['filename'].isin(remote_spec_ids['filename'])]
|
||||
changedfiles['new'] = False
|
||||
changedfiles.loc[changedfiles.index.isin(left['filename']), 'new'] = True
|
||||
changedfiles.loc[changedfiles.index.isin(right['filename']),'new'] = True
|
||||
changedfiles = changedfiles.replace({NA: None})
|
||||
# return the list as a dict, let swagger convert it to json
|
||||
if as_df:
|
||||
return changedfiles
|
||||
else:
|
||||
return changedfiles.reset_index().to_dict(orient='records')
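The _x/_y coalescing above is the usual pandas pattern for collapsing a pair of merged columns back into one; a tiny sketch with made-up dates.

import pandas as pd

df = pd.DataFrame({'date_created_x': [None, '2021-01-02'],
                   'date_created_y': ['2021-01-01', None]})
# wherever the _x value is missing, fall back to the _y value
index = df['date_created_x'].isnull()
df.loc[index, 'date_created_x'] = df[index]['date_created_y']
print(df['date_created_x'].tolist())  # ['2021-01-01', '2021-01-02']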
|
||||
|
||||
|
||||
|
||||
def get_all_spec_state():
|
||||
"""
|
||||
Return a list of all workflow specs along with last updated date and a
|
||||
thumbprint of all of the files that are used for that workflow_spec
|
||||
Convert into a dict list from a dataframe
|
||||
"""
|
||||
df = get_all_spec_state_dataframe()
|
||||
return df.reset_index().to_dict(orient='records')
|
||||
|
||||
|
||||
def get_workflow_spec_files(workflow_spec_id):
|
||||
"""
|
||||
Return a list of all files for a workflow_spec along with last updated date and a
|
||||
hash so we can determine file differences for a changed workflow on a box.
|
||||
Convert into a dict list from a dataframe
|
||||
"""
|
||||
df = get_workflow_spec_files_dataframe(workflow_spec_id)
|
||||
return df.reset_index().to_dict(orient='records')
|
||||
|
||||
|
||||
def get_workflow_spec_files_dataframe(workflowid):
|
||||
"""
|
||||
Return a list of all files for a workflow_spec along with last updated date and a
|
||||
hash so we can determine file differences for a changed workflow on a box.
|
||||
Return a dataframe
|
||||
|
||||
In the special case of "REFERENCE_FILES" we get all of the files that are
|
||||
marked as is_reference
|
||||
"""
|
||||
if workflowid == 'REFERENCE_FILES':
|
||||
x = session.query(FileDataModel).join(FileModel).filter(FileModel.is_reference == True)
|
||||
else:
|
||||
x = session.query(FileDataModel).join(FileModel).filter(FileModel.workflow_spec_id == workflowid)
|
||||
# there might be a cleaner way of getting a data frame from some of the
|
||||
# fields in the ORM - but this works OK
|
||||
filelist = []
|
||||
for file in x:
|
||||
filelist.append({'file_model_id':file.file_model_id,
|
||||
'workflow_spec_id': file.file_model.workflow_spec_id,
|
||||
'md5_hash':file.md5_hash,
|
||||
'filename':file.file_model.name,
|
||||
'type':file.file_model.type.name,
|
||||
'primary':file.file_model.primary,
|
||||
'content_type':file.file_model.content_type,
|
||||
'primary_process_id':file.file_model.primary_process_id,
|
||||
'date_created':file.date_created})
|
||||
if len(filelist) == 0:
|
||||
return pd.DataFrame(columns=['file_model_id',
|
||||
'workflow_spec_id',
|
||||
'md5_hash',
|
||||
'filename',
|
||||
'type',
|
||||
'primary',
|
||||
'content_type',
|
||||
'primary_process_id',
|
||||
'date_created'])
|
||||
df = pd.DataFrame(filelist).sort_values('date_created').groupby('file_model_id').last()
|
||||
df['date_created'] = df['date_created'].astype('str')
|
||||
return df
|
||||
|
||||
|
||||
|
||||
def get_all_spec_state_dataframe():
|
||||
"""
|
||||
Return a list of all workflow specs along with last updated date and a
|
||||
thumbprint of all of the files that are used for that workflow_spec
|
||||
Return a dataframe
|
||||
"""
|
||||
x = session.query(FileDataModel).join(FileModel)
|
||||
# there might be a cleaner way of getting a data frame from some of the
|
||||
# fields in the ORM - but this works OK
|
||||
filelist = []
|
||||
for file in x:
|
||||
filelist.append({'file_model_id':file.file_model_id,
|
||||
'workflow_spec_id': file.file_model.workflow_spec_id,
|
||||
'md5_hash':file.md5_hash,
|
||||
'filename':file.file_model.name,
|
||||
'date_created':file.date_created})
|
||||
if len(filelist) == 0:
|
||||
df = pd.DataFrame(columns=['file_model_id','workflow_spec_id','md5_hash','filename','date_created'])
|
||||
else:
|
||||
df = pd.DataFrame(filelist)
|
||||
|
||||
# If the file list is empty, return an empty data frame
|
||||
if df.empty:
|
||||
return df
|
||||
|
||||
# get a distinct list of file_model_id's with the most recent file_data retained
|
||||
df = df.sort_values('date_created').drop_duplicates(['file_model_id'],keep='last').copy()
|
||||
|
||||
# take that list and then group by workflow_spec and retain the most recently touched file
|
||||
# and make a consolidated hash of the md5_checksums - this acts as a 'thumbprint' for each
|
||||
# workflow spec
|
||||
df = df.groupby('workflow_spec_id').agg({'date_created':'max',
|
||||
'md5_hash':join_uuids}).copy()
|
||||
# get only the columns we are really interested in returning
|
||||
df = df[['date_created','md5_hash']].copy()
|
||||
# convert dates to string
|
||||
df['date_created'] = df['date_created'].astype('str')
|
||||
return df
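A sketch of the thumbprint aggregation, assuming the join_uuids defined earlier in this module is in scope; the spec ids, hashes and dates are made up.

import pandas as pd

files = pd.DataFrame({'workflow_spec_id': ['spec_a', 'spec_a', 'spec_b'],
                      'md5_hash': ['h1', 'h2', 'h3'],
                      'date_created': ['2021-01-01', '2021-02-01', '2021-03-01']})
# one row per spec: newest file date plus a combined hash of all of its file hashes
thumbprints = files.groupby('workflow_spec_id').agg({'date_created': 'max',
                                                     'md5_hash': join_uuids})
print(thumbprints)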
|
||||
|
@ -212,15 +212,13 @@ class DocumentDirectory(object):
|
||||
|
||||
class WorkflowApi(object):
|
||||
def __init__(self, id, status, next_task, navigation,
|
||||
spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks,
|
||||
workflow_spec_id, total_tasks, completed_tasks,
|
||||
last_updated, is_review, title, study_id):
|
||||
self.id = id
|
||||
self.status = status
|
||||
self.next_task = next_task # The next task that requires user input.
|
||||
self.navigation = navigation
|
||||
self.workflow_spec_id = workflow_spec_id
|
||||
self.spec_version = spec_version
|
||||
self.is_latest_spec = is_latest_spec
|
||||
self.total_tasks = total_tasks
|
||||
self.completed_tasks = completed_tasks
|
||||
self.last_updated = last_updated
|
||||
@ -232,7 +230,7 @@ class WorkflowApiSchema(ma.Schema):
|
||||
class Meta:
|
||||
model = WorkflowApi
|
||||
fields = ["id", "status", "next_task", "navigation",
|
||||
"workflow_spec_id", "spec_version", "is_latest_spec", "total_tasks", "completed_tasks",
|
||||
"workflow_spec_id", "total_tasks", "completed_tasks",
|
||||
"last_updated", "is_review", "title", "study_id"]
|
||||
unknown = INCLUDE
|
||||
|
||||
@ -243,7 +241,7 @@ class WorkflowApiSchema(ma.Schema):
|
||||
@marshmallow.post_load
|
||||
def make_workflow(self, data, **kwargs):
|
||||
keys = ['id', 'status', 'next_task', 'navigation',
|
||||
'workflow_spec_id', 'spec_version', 'is_latest_spec', "total_tasks", "completed_tasks",
|
||||
'workflow_spec_id', "total_tasks", "completed_tasks",
|
||||
"last_updated", "is_review", "title", "study_id"]
|
||||
filtered_fields = {key: data[key] for key in keys}
|
||||
filtered_fields['next_task'] = TaskSchema().make_task(data['next_task'])
|
||||
|
@ -1,7 +1,6 @@
|
||||
import enum
|
||||
import urllib
|
||||
|
||||
import connexion
|
||||
import flask
|
||||
from flask import url_for
|
||||
from marshmallow import INCLUDE, EXCLUDE, Schema
|
||||
@ -12,7 +11,7 @@ from sqlalchemy import func, Index
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
from sqlalchemy.orm import deferred, relationship
|
||||
|
||||
from crc import db, ma, app
|
||||
from crc import db, ma
|
||||
from crc.models.data_store import DataStoreModel
|
||||
|
||||
|
||||
@ -81,35 +80,32 @@ class FileModel(db.Model):
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
name = db.Column(db.String)
|
||||
type = db.Column(db.Enum(FileType))
|
||||
is_status = db.Column(db.Boolean)
|
||||
content_type = db.Column(db.String)
|
||||
is_reference = db.Column(db.Boolean, nullable=False, default=False) # A global reference file.
|
||||
primary = db.Column(db.Boolean, nullable=False, default=False) # Is this the primary BPMN in a workflow?
|
||||
primary_process_id = db.Column(db.String, nullable=True) # An id in the xml of BPMN documents, for primary BPMN.
|
||||
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'), nullable=True)
|
||||
workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=True)
|
||||
task_spec = db.Column(db.String, nullable=True)
|
||||
irb_doc_code = db.Column(db.String, nullable=True) # Code reference to the irb_documents.xlsx reference file.
|
||||
# A request was made to delete the file, but we can't because there are
|
||||
# active approvals or running workflows that depend on it. So we archive
|
||||
# it instead and hide it in the interface.
|
||||
is_review = db.Column(db.Boolean, default=False, nullable=True)
|
||||
archived = db.Column(db.Boolean, default=False, nullable=False)
|
||||
irb_doc_code = db.Column(db.String, nullable=True) # Code reference to the documents.xlsx reference file.
|
||||
data_stores = relationship(DataStoreModel, cascade="all,delete", backref="file")
|
||||
|
||||
|
||||
class File(object):
|
||||
def __init__(self):
|
||||
self.content_type = None
|
||||
self.name = None
|
||||
self.content_type = None
|
||||
self.workflow_id = None
|
||||
self.irb_doc_code = None
|
||||
self.type = None
|
||||
self.document = {}
|
||||
self.last_modified = None
|
||||
self.size = None
|
||||
self.data_store = {}
|
||||
|
||||
@classmethod
|
||||
def from_models(cls, model: FileModel, data_model: FileDataModel, doc_dictionary):
|
||||
def from_models(cls, model: FileModel, data_model, doc_dictionary):
|
||||
instance = cls()
|
||||
instance.id = model.id
|
||||
instance.name = model.name
|
||||
instance.is_status = model.is_status
|
||||
instance.is_reference = model.is_reference
|
||||
instance.content_type = model.content_type
|
||||
instance.primary = model.primary
|
||||
instance.primary_process_id = model.primary_process_id
|
||||
instance.workflow_spec_id = model.workflow_spec_id
|
||||
instance.workflow_id = model.workflow_id
|
||||
instance.irb_doc_code = model.irb_doc_code
|
||||
instance.type = model.type
|
||||
@ -119,7 +115,6 @@ class File(object):
|
||||
instance.document = {}
|
||||
if data_model:
|
||||
instance.last_modified = data_model.date_created
|
||||
instance.latest_version = data_model.version
|
||||
instance.size = data_model.size
|
||||
instance.user_uid = data_model.user_uid
|
||||
else:
|
||||
@ -132,6 +127,21 @@ class File(object):
|
||||
|
||||
return instance
|
||||
|
||||
@classmethod
|
||||
def from_file_system(cls, file_name, file_type, content_type,
|
||||
last_modified, file_size):
|
||||
|
||||
instance = cls()
|
||||
instance.name = file_name
|
||||
instance.content_type = content_type
|
||||
instance.type = file_type
|
||||
instance.document = {}
|
||||
instance.last_modified = last_modified
|
||||
instance.size = file_size
|
||||
#fixme: How to track the user id?
|
||||
instance.data_store = {}
|
||||
return instance
|
||||
|
||||
|
||||
class FileModelSchema(SQLAlchemyAutoSchema):
|
||||
class Meta:
|
||||
@ -146,8 +156,7 @@ class FileModelSchema(SQLAlchemyAutoSchema):
|
||||
class FileSchema(Schema):
|
||||
class Meta:
|
||||
model = File
|
||||
fields = ["id", "name", "is_status", "is_reference", "content_type",
|
||||
"primary", "primary_process_id", "workflow_spec_id", "workflow_id",
|
||||
fields = ["id", "name", "content_type", "workflow_spec_id", "workflow_id",
|
||||
"irb_doc_code", "last_modified", "latest_version", "type", "size", "data_store",
|
||||
"document", "user_uid", "url"]
|
||||
unknown = INCLUDE
|
||||
@ -156,30 +165,28 @@ class FileSchema(Schema):
|
||||
|
||||
def get_url(self, obj):
|
||||
token = 'not_available'
|
||||
if obj.id is None:
|
||||
return "" # We can't return a url for a file that isn't stored yet.
|
||||
file_url = url_for("/v1_0.crc_api_file_get_file_data_link", file_id=obj.id, _external=True)
|
||||
if hasattr(flask.g, 'user'):
|
||||
token = flask.g.user.encode_auth_token()
|
||||
url = file_url + '?auth_token=' + urllib.parse.quote_plus(token)
|
||||
return url
|
||||
|
||||
if hasattr(obj, 'id') and obj.id is not None:
|
||||
file_url = url_for("/v1_0.crc_api_file_get_file_data_link", file_id=obj.id, _external=True)
|
||||
if hasattr(flask.g, 'user'):
|
||||
token = flask.g.user.encode_auth_token()
|
||||
url = file_url + '?auth_token=' + urllib.parse.quote_plus(token)
|
||||
return url
|
||||
else:
|
||||
return ""
|
||||
|
||||
class LookupFileModel(db.Model):
|
||||
"""Gives us a quick way to tell what kind of lookup is set on a form field.
|
||||
Connected to the file data model, so that if a new version of the same file is
|
||||
created, we can update the listing."""
|
||||
"""Gives us a quick way to tell what kind of lookup is set on a form field."""
|
||||
__tablename__ = 'lookup_file'
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
workflow_spec_id = db.Column(db.String)
|
||||
task_spec_id = db.Column(db.String)
|
||||
field_id = db.Column(db.String)
|
||||
file_name = db.Column(db.String)
|
||||
is_ldap = db.Column(db.Boolean) # Allows us to run an ldap query instead of a db lookup.
|
||||
file_data_model_id = db.Column(db.Integer, db.ForeignKey('file_data.id'))
|
||||
last_updated = db.Column(db.DateTime(timezone=True))
|
||||
dependencies = db.relationship("LookupDataModel", lazy="select", backref="lookup_file_model",
|
||||
cascade="all, delete, delete-orphan")
|
||||
|
||||
|
||||
class LookupDataModel(db.Model):
|
||||
__tablename__ = 'lookup_data'
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
|
31
crc/models/git_models.py
Normal file
@ -0,0 +1,31 @@
|
||||
from crc import app, ma
|
||||
|
||||
|
||||
class GitRepo(object):
|
||||
|
||||
@classmethod
|
||||
def from_repo(cls, repo):
|
||||
instance = cls()
|
||||
instance.directory = repo.working_dir
|
||||
instance.branch = repo.active_branch.name
|
||||
instance.merge_branch = app.config['GIT_MERGE_BRANCH']
|
||||
instance.changes = [item.a_path for item in repo.index.diff(None)]
|
||||
instance.untracked = repo.untracked_files
|
||||
|
||||
return instance
|
||||
|
||||
|
||||
class GitRepoSchema(ma.Schema):
|
||||
class Meta:
|
||||
model = GitRepo
|
||||
fields = ["directory", "branch", "merge_branch", "changes", "untracked"]
|
||||
|
||||
|
||||
class GitCommit(object):
|
||||
pass
|
||||
|
||||
|
||||
class GitCommitSchema(ma.Schema):
|
||||
class Meta:
|
||||
model = GitCommit
|
||||
fields = ["message", "files"]
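A hedged sketch of how these models might be used with the newly added gitpython dependency; the repository path is made up and GIT_MERGE_BRANCH is assumed to be set in the app config.

from git import Repo  # provided by the gitpython package added to the Pipfile

repo = Repo('/tmp/crc-specs')       # hypothetical local working directory
snapshot = GitRepo.from_repo(repo)  # captures branch, merge branch, changes, untracked files
print(GitRepoSchema().dump(snapshot))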
|
@ -10,8 +10,7 @@ from crc.api.common import ApiErrorSchema, ApiError
|
||||
from crc.models.file import FileSchema
|
||||
from crc.models.ldap import LdapModel, LdapSchema
|
||||
from crc.models.protocol_builder import ProtocolBuilderCreatorStudy
|
||||
from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowState, WorkflowStatus, WorkflowModel
|
||||
from crc.services.file_service import FileService
|
||||
from crc.models.workflow import WorkflowSpecCategory, WorkflowState, WorkflowStatus, WorkflowModel, WorkflowSpecInfo
|
||||
|
||||
|
||||
class StudyStatus(enum.Enum):
|
||||
@ -134,21 +133,19 @@ class WorkflowMetadata(object):
|
||||
|
||||
|
||||
@classmethod
|
||||
def from_workflow(cls, workflow: WorkflowModel):
|
||||
is_review = FileService.is_workflow_review(workflow.workflow_spec_id)
|
||||
def from_workflow(cls, workflow: WorkflowModel, spec: WorkflowSpecInfo):
|
||||
instance = cls(
|
||||
id=workflow.id,
|
||||
display_name=workflow.workflow_spec.display_name,
|
||||
description=workflow.workflow_spec.description,
|
||||
spec_version=workflow.spec_version(),
|
||||
category_id=workflow.workflow_spec.category_id,
|
||||
category_display_name=workflow.workflow_spec.category.display_name,
|
||||
display_name=spec.display_name,
|
||||
description=spec.description,
|
||||
category_id=spec.category_id,
|
||||
category_display_name=spec.category.display_name,
|
||||
state=WorkflowState.optional,
|
||||
status=workflow.status,
|
||||
total_tasks=workflow.total_tasks,
|
||||
completed_tasks=workflow.completed_tasks,
|
||||
is_review=is_review,
|
||||
display_order=workflow.workflow_spec.display_order,
|
||||
is_review=spec.is_review,
|
||||
display_order=spec.display_order,
|
||||
workflow_spec_id=workflow.workflow_spec_id
|
||||
)
|
||||
return instance
|
||||
@ -160,13 +157,13 @@ class WorkflowMetadataSchema(ma.Schema):
|
||||
class Meta:
|
||||
model = WorkflowMetadata
|
||||
additional = ["id", "display_name", "description",
|
||||
"total_tasks", "completed_tasks", "display_order",
|
||||
"total_tasks", "completed_tasks", "display_order",
|
||||
"category_id", "is_review", "category_display_name", "state_message"]
|
||||
unknown = INCLUDE
|
||||
|
||||
|
||||
class Category(object):
|
||||
def __init__(self, model: WorkflowSpecCategoryModel):
|
||||
def __init__(self, model: WorkflowSpecCategory):
|
||||
self.id = model.id
|
||||
self.display_name = model.display_name
|
||||
self.display_order = model.display_order
|
||||
|
@ -14,7 +14,7 @@ class TaskEventModel(db.Model):
|
||||
study_id = db.Column(db.Integer, db.ForeignKey('study.id'))
|
||||
user_uid = db.Column(db.String, nullable=False) # In some cases the unique user id may not exist in the db yet.
|
||||
workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=False)
|
||||
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
|
||||
workflow_spec_id = db.Column(db.String)
|
||||
spec_version = db.Column(db.String)
|
||||
action = db.Column(db.String)
|
||||
task_id = db.Column(db.String)
|
||||
@ -40,9 +40,10 @@ class TaskEventModelSchema(SQLAlchemyAutoSchema):
|
||||
|
||||
|
||||
class TaskEvent(object):
|
||||
def __init__(self, model: TaskEventModel, study: StudyModel, workflow: WorkflowMetadata):
|
||||
def __init__(self, model: TaskEventModel, study: StudyModel, workflow: WorkflowModel):
|
||||
self.id = model.id
|
||||
self.study = study
|
||||
# Fixme: this was workflowMetaData - but it is the only place it is used.
|
||||
self.workflow = workflow
|
||||
self.user_uid = model.user_uid
|
||||
self.user_display = LdapService.user_info(model.user_uid).display_name
|
||||
|
@ -1,70 +1,82 @@
|
||||
import enum
|
||||
|
||||
import marshmallow
|
||||
from marshmallow import EXCLUDE
|
||||
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
|
||||
from marshmallow import EXCLUDE, post_load, fields, INCLUDE
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy.orm import backref
|
||||
|
||||
from crc import db
|
||||
from crc.models.file import FileDataModel
|
||||
from crc import db, ma
|
||||
|
||||
|
||||
class WorkflowSpecCategoryModel(db.Model):
|
||||
__tablename__ = 'workflow_spec_category'
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
display_name = db.Column(db.String)
|
||||
display_order = db.Column(db.Integer)
|
||||
admin = db.Column(db.Boolean)
|
||||
class WorkflowSpecCategory(object):
|
||||
def __init__(self, id, display_name, display_order=0, admin=False):
|
||||
self.id = id  # A unique string name, lower case, underscores (i.e., 'my_category')
|
||||
self.display_name = display_name
|
||||
self.display_order = display_order
|
||||
self.admin = admin
|
||||
self.workflows = [] # For storing Workflow Metadata
|
||||
self.specs = [] # For the list of specifications associated with a category
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, WorkflowSpecCategory):
|
||||
return False
|
||||
if other.id == self.id:
|
||||
return True
|
||||
return False
|
||||
|
||||
class WorkflowSpecCategoryModelSchema(SQLAlchemyAutoSchema):
|
||||
class WorkflowSpecCategorySchema(ma.Schema):
|
||||
class Meta:
|
||||
model = WorkflowSpecCategoryModel
|
||||
load_instance = True
|
||||
include_relationships = True
|
||||
model = WorkflowSpecCategory
|
||||
fields = ["id", "display_name", "display_order", "admin"]
|
||||
|
||||
@post_load
|
||||
def make_cat(self, data, **kwargs):
|
||||
return WorkflowSpecCategory(**data)
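A minimal round-trip sketch (hypothetical category values) showing that the post_load hook turns a plain dict back into a WorkflowSpecCategory.

payload = {'id': 'irb_review', 'display_name': 'IRB Review',
           'display_order': 1, 'admin': False}
category = WorkflowSpecCategorySchema().load(payload)
assert isinstance(category, WorkflowSpecCategory)
print(WorkflowSpecCategorySchema().dump(category))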
|
||||
|
||||
|
||||
class WorkflowSpecModel(db.Model):
|
||||
__tablename__ = 'workflow_spec'
|
||||
id = db.Column(db.String, primary_key=True)
|
||||
display_name = db.Column(db.String)
|
||||
display_order = db.Column(db.Integer, nullable=True)
|
||||
description = db.Column(db.Text)
|
||||
category_id = db.Column(db.Integer, db.ForeignKey('workflow_spec_category.id'), nullable=True)
|
||||
category = db.relationship("WorkflowSpecCategoryModel")
|
||||
is_master_spec = db.Column(db.Boolean, default=False)
|
||||
standalone = db.Column(db.Boolean, default=False)
|
||||
library = db.Column(db.Boolean, default=False)
|
||||
class WorkflowSpecInfo(object):
|
||||
def __init__(self, id, display_name, description, is_master_spec=False,
|
||||
standalone=False, library=False, primary_file_name='', primary_process_id='',
|
||||
libraries=[], category_id="", display_order=0, is_review=False):
|
||||
self.id = id  # String unique id
|
||||
self.display_name = display_name
|
||||
self.description = description
|
||||
self.display_order = display_order
|
||||
self.is_master_spec = is_master_spec
|
||||
self.standalone = standalone
|
||||
self.library = library
|
||||
self.primary_file_name = primary_file_name
|
||||
self.primary_process_id = primary_process_id
|
||||
self.is_review = is_review
|
||||
self.libraries = libraries
|
||||
self.category_id = category_id
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, WorkflowSpecInfo):
|
||||
return False
|
||||
if other.id == self.id:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class WorkflowLibraryModel(db.Model):
|
||||
__tablename__ = 'workflow_library'
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'), nullable=True)
|
||||
library_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'), nullable=True)
|
||||
parent = db.relationship(WorkflowSpecModel,
|
||||
primaryjoin=workflow_spec_id==WorkflowSpecModel.id,
|
||||
backref=backref('libraries',cascade='all, delete'))
|
||||
library = db.relationship(WorkflowSpecModel,primaryjoin=library_spec_id==WorkflowSpecModel.id,
|
||||
backref=backref('parents',cascade='all, delete'))
|
||||
|
||||
|
||||
class WorkflowSpecModelSchema(SQLAlchemyAutoSchema):
|
||||
class WorkflowSpecInfoSchema(ma.Schema):
|
||||
class Meta:
|
||||
model = WorkflowSpecModel
|
||||
load_instance = True
|
||||
include_relationships = True
|
||||
include_fk = True # Includes foreign keys
|
||||
unknown = EXCLUDE
|
||||
model = WorkflowSpecInfo
|
||||
id = marshmallow.fields.String(required=True)
|
||||
display_name = marshmallow.fields.String(required=True)
|
||||
description = marshmallow.fields.String()
|
||||
is_master_spec = marshmallow.fields.Boolean(required=True)
|
||||
standalone = marshmallow.fields.Boolean(required=True)
|
||||
library = marshmallow.fields.Boolean(required=True)
|
||||
display_order = marshmallow.fields.Integer(allow_none=True)
|
||||
primary_file_name = marshmallow.fields.String(allow_none=True)
|
||||
primary_process_id = marshmallow.fields.String(allow_none=True)
|
||||
is_review = marshmallow.fields.Boolean(allow_none=True)
|
||||
category_id = marshmallow.fields.String(allow_none=True)
|
||||
libraries = marshmallow.fields.List(marshmallow.fields.String(), allow_none=True)
|
||||
|
||||
category = marshmallow.fields.Nested(WorkflowSpecCategoryModelSchema, dump_only=True)
|
||||
libraries = marshmallow.fields.Function(lambda obj: [{'id':x.library.id,
|
||||
'display_name':x.library.display_name} for x in
|
||||
obj.libraries] )
|
||||
parents = marshmallow.fields.Function(lambda obj: [{'id':x.parent.id,
|
||||
'display_name':x.parent.display_name} for x in
|
||||
obj.parents] )
|
||||
@post_load
|
||||
def make_spec(self, data, **kwargs):
|
||||
return WorkflowSpecInfo(**data)
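Likewise for the spec schema: a hedged load example with made-up values, covering the fields marked required above.

payload = {'id': 'example_spec', 'display_name': 'Example Spec',
           'description': 'A made-up specification', 'is_master_spec': False,
           'standalone': False, 'library': False}
spec = WorkflowSpecInfoSchema().load(payload)
assert isinstance(spec, WorkflowSpecInfo)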
|
||||
|
||||
class WorkflowState(enum.Enum):
|
||||
hidden = "hidden"
|
||||
@ -89,23 +101,6 @@ class WorkflowStatus(enum.Enum):
|
||||
erroring = "erroring"
|
||||
|
||||
|
||||
class WorkflowSpecDependencyFile(db.Model):
|
||||
"""Connects to a workflow to test the version of the specification files it depends on to execute"""
|
||||
file_data_id = db.Column(db.Integer, db.ForeignKey(FileDataModel.id), primary_key=True)
|
||||
workflow_id = db.Column(db.Integer, db.ForeignKey("workflow.id"), primary_key=True)
|
||||
|
||||
file_data = db.relationship(FileDataModel)
|
||||
|
||||
|
||||
|
||||
class WorkflowLibraryModelSchema(SQLAlchemyAutoSchema):
|
||||
class Meta:
|
||||
model = WorkflowLibraryModel
|
||||
load_instance = True
|
||||
include_relationships = True
|
||||
|
||||
library = marshmallow.fields.Nested('WorkflowSpecModelSchema')
|
||||
|
||||
class WorkflowModel(db.Model):
|
||||
__tablename__ = 'workflow'
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
@ -113,16 +108,8 @@ class WorkflowModel(db.Model):
|
||||
status = db.Column(db.Enum(WorkflowStatus))
|
||||
study_id = db.Column(db.Integer, db.ForeignKey('study.id'))
|
||||
study = db.relationship("StudyModel", backref='workflow')
|
||||
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
|
||||
workflow_spec = db.relationship("WorkflowSpecModel")
|
||||
workflow_spec_id = db.Column(db.String)
|
||||
total_tasks = db.Column(db.Integer, default=0)
|
||||
completed_tasks = db.Column(db.Integer, default=0)
|
||||
last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now())
|
||||
user_id = db.Column(db.String, default=None)
|
||||
# Order By is important for generating hashes on reviews.
|
||||
dependencies = db.relationship(WorkflowSpecDependencyFile, cascade="all, delete, delete-orphan",
|
||||
order_by="WorkflowSpecDependencyFile.file_data_id")
|
||||
|
||||
def spec_version(self):
|
||||
dep_ids = list(dep.file_data_id for dep in self.dependencies)
|
||||
return "-".join(str(dep_ids))
|
||||
|
@ -2,6 +2,7 @@ from crc.scripts.script import Script
|
||||
from crc.api.common import ApiError
|
||||
from crc.services.protocol_builder import ProtocolBuilderService
|
||||
from crc.services.study_service import StudyService
|
||||
from crc.services.workflow_spec_service import WorkflowSpecService
|
||||
|
||||
|
||||
class CheckStudy(Script):
|
||||
@ -12,7 +13,9 @@ class CheckStudy(Script):
|
||||
return """Returns the Check Study data for a Study"""
|
||||
|
||||
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
|
||||
study = StudyService.get_study(study_id)
|
||||
spec_service = WorkflowSpecService()
|
||||
categories = spec_service.get_categories()
|
||||
study = StudyService.get_study(study_id, categories)
|
||||
if study:
|
||||
return {"DETAIL": "Passed validation.", "STATUS": "No Error"}
|
||||
else:
|
||||
|
@ -8,9 +8,11 @@ from crc.api.common import ApiError
|
||||
from crc.models.file import CONTENT_TYPES, FileModel
|
||||
from crc.models.workflow import WorkflowModel
|
||||
from crc.scripts.script import Script
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.jinja_service import JinjaService
|
||||
from crc.services.spec_file_service import SpecFileService
|
||||
from crc.services.user_file_service import UserFileService
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
from crc.services.workflow_spec_service import WorkflowSpecService
|
||||
|
||||
|
||||
class CompleteTemplate(Script):
|
||||
@ -21,7 +23,7 @@ a word document that contains Jinja markup. Please see https://docxtpl.readthed
|
||||
for more information on exact syntax.
|
||||
Takes two arguments:
|
||||
1. The name of a MS Word docx file to use as a template.
|
||||
2. The 'code' of the IRB Document as set in the irb_documents.xlsx file."
|
||||
2. The 'code' of the IRB Document as set in the documents.xlsx file."
|
||||
"""
|
||||
|
||||
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
|
||||
@ -30,16 +32,17 @@ Takes two arguments:
|
||||
self.process_template(task, study_id, workflow, *args, **kwargs)
|
||||
|
||||
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
|
||||
workflow_spec_service = WorkflowSpecService()
|
||||
workflow = session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first()
|
||||
final_document_stream = self.process_template(task, study_id, workflow, *args, **kwargs)
|
||||
file_name = args[0]
|
||||
irb_doc_code = args[1]
|
||||
FileService.add_workflow_file(workflow_id=workflow_id,
|
||||
task_spec_name=task.get_name(),
|
||||
name=file_name,
|
||||
content_type=CONTENT_TYPES['docx'],
|
||||
binary_data=final_document_stream.read(),
|
||||
irb_doc_code=irb_doc_code)
|
||||
UserFileService.add_workflow_file(workflow_id=workflow_id,
|
||||
task_spec_name=task.get_name(),
|
||||
name=file_name,
|
||||
content_type=CONTENT_TYPES['docx'],
|
||||
binary_data=final_document_stream.read(),
|
||||
irb_doc_code=irb_doc_code)
|
||||
|
||||
def process_template(self, task, study_id, workflow=None, *args, **kwargs):
|
||||
"""Entry point, mostly worried about wiring it all up."""
|
||||
@ -48,7 +51,7 @@ Takes two arguments:
|
||||
message="The CompleteTemplate script requires 2 arguments. The first argument is "
|
||||
"the name of the docx template to use. The second "
|
||||
"argument is a code for the document, as "
|
||||
"set in the reference document %s. " % FileService.DOCUMENT_LIST)
|
||||
"set in the reference document.")
|
||||
task_study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
|
||||
file_name = args[0]
|
||||
|
||||
@ -56,18 +59,11 @@ Takes two arguments:
|
||||
raise ApiError(code="invalid_argument",
|
||||
message="The given task does not match the given study.")
|
||||
|
||||
file_data_model = None
|
||||
file_data = None
|
||||
if workflow is not None:
|
||||
# Get the workflow specification file with the given name.
|
||||
file_data_models = FileService.get_spec_data_files(
|
||||
workflow_spec_id=workflow.workflow_spec_id,
|
||||
workflow_id=workflow.id,
|
||||
name=file_name)
|
||||
if len(file_data_models) > 0:
|
||||
file_data_model = file_data_models[0]
|
||||
else:
|
||||
raise ApiError(code="invalid_argument",
|
||||
message="Uable to locate a file with the given name.")
|
||||
workflow_spec_service = WorkflowSpecService()
|
||||
spec = workflow_spec_service.get_spec(workflow.workflow_spec_id)
|
||||
file_data = SpecFileService().get_data(spec, file_name)
|
||||
|
||||
# Get images from file/files fields
|
||||
if len(args) == 3:
|
||||
@ -76,7 +72,7 @@ Takes two arguments:
|
||||
image_file_data = None
|
||||
|
||||
try:
|
||||
return JinjaService().make_template(BytesIO(file_data_model.data), task.data, image_file_data)
|
||||
return JinjaService().make_template(BytesIO(file_data), task.data, image_file_data)
|
||||
except ApiError as ae:
|
||||
# In some cases we want to provide a very specific error, that does not get obscured when going
|
||||
# through the python expression engine. We can do that by throwing a WorkflowTaskExecException,
|
||||
@ -101,7 +97,7 @@ Takes two arguments:
|
||||
if not task.workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY]:
|
||||
# Get the actual image data
|
||||
image_file_model = session.query(FileModel).filter_by(id=file_id).first()
|
||||
image_file_data_model = FileService.get_file_data(file_id, image_file_model)
|
||||
image_file_data_model = UserFileService.get_file_data(file_id, image_file_model)
|
||||
if image_file_data_model is not None:
|
||||
image_file_data.append(image_file_data_model)
|
||||
|
||||
|
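A minimal, standalone sketch of the docxtpl rendering step that CompleteTemplate wraps, as described in the docstring above (the template name, output name, and context keys below are made-up examples, not names from this repository):

# Sketch only: docxtpl renders Jinja markup embedded in a .docx file.
from docxtpl import DocxTemplate

doc = DocxTemplate("letter_template.docx")   # hypothetical template file
context = {"study_title": "Example Study"}   # hypothetical task data
doc.render(context)
doc.save("letter_out.docx")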
@ -3,7 +3,7 @@ from crc.api.common import ApiError
|
||||
from crc.models.file import FileModel
|
||||
from crc.scripts.script import Script
|
||||
from crc.services.document_service import DocumentService
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.user_file_service import UserFileService
|
||||
|
||||
|
||||
class DeleteFile(Script):
|
||||
@ -15,7 +15,7 @@ class DeleteFile(Script):
|
||||
FileModel.workflow_id == workflow_id, FileModel.irb_doc_code == doc_code).all()
|
||||
if isinstance(result, list) and len(result) > 0 and isinstance(result[0], FileModel):
|
||||
for file in result:
|
||||
FileService.delete_file(file.id)
|
||||
UserFileService.delete_file(file.id)
|
||||
else:
|
||||
raise ApiError.from_task(code='no_document_found',
|
||||
message=f'No document of type {doc_code} was found for this workflow.',
|
||||
|
@ -5,7 +5,7 @@ from crc.models.file import FileModel
|
||||
from crc.models.task_event import TaskEventModel
|
||||
from crc.scripts.script import Script
|
||||
from crc.services.document_service import DocumentService
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.user_file_service import UserFileService
|
||||
from crc.services.workflow_service import WorkflowService
|
||||
|
||||
|
||||
@ -44,7 +44,7 @@ class DeleteTaskData(Script):
|
||||
|
||||
# delete files
|
||||
for file in files_to_delete:
|
||||
FileService().delete_file(file.id)
|
||||
UserFileService().delete_file(file.id)
|
||||
|
||||
# delete the data store
|
||||
session.query(DataStoreModel). \
|
||||
|
@ -4,7 +4,7 @@ from crc.api.common import ApiError
|
||||
from crc.services.data_store_service import DataStoreBase
|
||||
from crc.scripts.script import Script
|
||||
from crc.services.document_service import DocumentService
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.user_file_service import UserFileService
|
||||
|
||||
|
||||
class FileDataSet(Script, DataStoreBase):
|
||||
@ -51,7 +51,7 @@ class FileDataSet(Script, DataStoreBase):
|
||||
del(kwargs['file_id'])
|
||||
if kwargs['key'] == 'irb_code':
|
||||
irb_doc_code = kwargs['value']
|
||||
FileService.update_irb_code(fileid,irb_doc_code)
|
||||
UserFileService.update_irb_code(fileid, irb_doc_code)
|
||||
|
||||
|
||||
return self.set_data_common(task.id,
|
||||
|
24
crc/scripts/get_spec_from_workflow_id.py
Normal file
@ -0,0 +1,24 @@
|
||||
from crc import session
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.workflow import WorkflowModel, WorkflowSpecInfo, WorkflowSpecInfoSchema # WorkflowSpecModel, WorkflowSpecModelSchema
|
||||
from crc.scripts.script import Script
|
||||
from crc.services.workflow_spec_service import WorkflowSpecService
|
||||
|
||||
|
||||
class ScriptTemplate(Script):
|
||||
|
||||
def get_description(self):
|
||||
return """Get a workflow spec, from a workflow id. You must pass in a workflow id."""
|
||||
|
||||
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
|
||||
return self.do_task(task, study_id, workflow_id, *args, **kwargs)
|
||||
|
||||
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
|
||||
if len(args) < 1:
|
||||
raise ApiError(code='missing_parameter',
|
||||
message='Please pass in a workflow_id to use in the search.')
|
||||
passed_workflow_id = args[0]
|
||||
workflow = session.query(WorkflowModel).filter(WorkflowModel.id == passed_workflow_id).first()
|
||||
workflow_spec = WorkflowSpecService().get_spec(workflow.workflow_spec_id)
|
||||
if workflow_spec:
|
||||
return WorkflowSpecInfoSchema().dump(workflow_spec)
|
@ -3,12 +3,13 @@ from crc.api.common import ApiError
|
||||
from crc.api.file import to_file_api
|
||||
from crc.models.file import FileModel, FileDataModel, FileSchema
|
||||
from crc.scripts.script import Script
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.study_service import StudyService
|
||||
|
||||
import tempfile
|
||||
import zipfile
|
||||
|
||||
from crc.services.user_file_service import UserFileService
|
||||
|
||||
|
||||
class GetZippedFiles(Script):
|
||||
|
||||
@ -47,8 +48,8 @@ class GetZippedFiles(Script):
|
||||
zfw.writestr(file_name, file_data.data)
|
||||
|
||||
with open(temp_file.name, mode='rb') as handle:
|
||||
file_model = FileService().add_workflow_file(workflow_id, None, task.get_name(), zip_filename,
|
||||
'application/zip', handle.read())
|
||||
file_model = UserFileService().add_workflow_file(workflow_id, None, task.get_name(),
|
||||
zip_filename, 'application/zip', handle.read())
|
||||
# return file_model
|
||||
return FileSchema().dump(to_file_api(file_model))
|
||||
else:
|
||||
|
@ -1,5 +1,5 @@
|
||||
from crc.scripts.script import Script
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.user_file_service import UserFileService
|
||||
|
||||
|
||||
class IsFileUploaded(Script):
|
||||
@ -10,11 +10,11 @@ class IsFileUploaded(Script):
|
||||
|
||||
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
|
||||
doc_code = args[0]
|
||||
files = FileService.get_files_for_study(study_id)
|
||||
files = UserFileService.get_files_for_study(study_id)
|
||||
|
||||
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
|
||||
|
||||
files = FileService.get_files_for_study(study_id)
|
||||
files = UserFileService.get_files_for_study(study_id)
|
||||
if len(files) > 0:
|
||||
doc_code = args[0]
|
||||
for file in files:
|
||||
|
@ -1,8 +1,9 @@
|
||||
from crc import session
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.workflow import WorkflowModel, WorkflowSpecModel
|
||||
from crc.models.workflow import WorkflowModel, WorkflowSpecInfo
|
||||
from crc.scripts.script import Script
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
from crc.services.workflow_spec_service import WorkflowSpecService
|
||||
|
||||
|
||||
class ResetWorkflow(Script):
|
||||
@ -19,7 +20,7 @@ class ResetWorkflow(Script):
|
||||
|
||||
if 'reset_id' in kwargs.keys():
|
||||
reset_id = kwargs['reset_id']
|
||||
workflow_spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=reset_id).first()
|
||||
workflow_spec = WorkflowSpecService().get_spec(reset_id)
|
||||
if workflow_spec:
|
||||
workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(
|
||||
workflow_spec_id=workflow_spec.id,
|
||||
|
@ -5,13 +5,9 @@ from SpiffWorkflow.util.metrics import timeit
|
||||
|
||||
from crc import session
|
||||
from crc.api.common import ApiError
|
||||
from crc.api.workflow import get_workflow
|
||||
from crc.models.protocol_builder import ProtocolBuilderInvestigatorType
|
||||
from crc.models.study import StudyModel, StudySchema
|
||||
from crc.api import workflow as workflow_api
|
||||
from crc.scripts.script import Script
|
||||
from crc.services.document_service import DocumentService
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.protocol_builder import ProtocolBuilderService
|
||||
from crc.services.study_service import StudyService
|
||||
|
||||
|
0
crc/services/cr_connect.log
Normal file
@ -1,7 +1,11 @@
|
||||
import time
|
||||
|
||||
from crc import session
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.api_models import DocumentDirectory
|
||||
from crc.services.file_service import FileService
|
||||
from crc.models.file import FileModel
|
||||
from crc.services.lookup_service import LookupService
|
||||
from crc.services.reference_file_service import ReferenceFileService
|
||||
|
||||
|
||||
class DocumentService(object):
|
||||
@ -37,8 +41,8 @@ class DocumentService(object):
|
||||
@staticmethod
|
||||
def get_dictionary():
|
||||
"""Returns a dictionary of document details keyed on the doc_code."""
|
||||
file_data = FileService.get_reference_file_data(DocumentService.DOCUMENT_LIST)
|
||||
lookup_model = LookupService.get_lookup_model_for_file_data(file_data, 'code', 'description')
|
||||
lookup_model = LookupService.get_lookup_model_for_reference(DocumentService.DOCUMENT_LIST,
|
||||
'code', 'description')
|
||||
doc_dict = {}
|
||||
for lookup_data in lookup_model.dependencies:
|
||||
doc_dict[lookup_data.value] = lookup_data.data
|
||||
@ -59,7 +63,6 @@ class DocumentService(object):
|
||||
expand = file.workflow_id == int(workflow_id)
|
||||
else:
|
||||
expand = False
|
||||
print(expand)
|
||||
categories = [x for x in [doc_code['category1'], doc_code['category2'], doc_code['category3'], file] if x]
|
||||
DocumentService.ensure_exists(directory, categories, expanded=expand)
|
||||
return directory
|
||||
@ -90,8 +93,6 @@ class DocumentService(object):
|
||||
new_level.expanded = expanded
|
||||
output.append(new_level)
|
||||
DocumentService.ensure_exists(new_level.children, categories[1:], expanded)
|
||||
else:
|
||||
print("Found it")
|
||||
else:
|
||||
new_level = DocumentDirectory(file=current_item)
|
||||
new_level.expanded = expanded
|
||||
|
@ -22,7 +22,10 @@ known_errors = {'Non-default exclusive outgoing sequence flow without condition
|
||||
'for the property.'},
|
||||
'Error opening excel file .*, with file_model_id:':
|
||||
{'hint': 'It looks like you are trying to use an older xls file. '
|
||||
'Try uploading a newer xlsx file.'}}
|
||||
'Try uploading a newer xlsx file.'},
|
||||
'Failed to parse the Workflow Specification. Error is \'The process \'(.+)\' was not found. Did you mean one of the following: .*':
|
||||
{'hint': 'The workflow spec could not be parsed. If you are loading a library, check whether the name is correct.'}
|
||||
}
|
||||
|
||||
|
||||
class ValidationErrorService(object):
|
||||
|
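A small, self-contained sketch of how a hint table keyed on regular expressions, like known_errors above, can be matched against an error message (the message text below is an invented example, and the matching strategy is an assumption, not necessarily what ValidationErrorService does):

import re

known_errors = {
    r'Error opening excel file .*, with file_model_id:':
        {'hint': 'It looks like you are trying to use an older xls file. '
                 'Try uploading a newer xlsx file.'},
}

def hint_for(message):
    # Return the first hint whose pattern matches the error message, if any.
    for pattern, details in known_errors.items():
        if re.search(pattern, message):
            return details['hint']
    return None

print(hint_for('Error opening excel file budget.xls, with file_model_id: 12'))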
136
crc/services/file_system_service.py
Normal file
@ -0,0 +1,136 @@
|
||||
import datetime
|
||||
import os
|
||||
from typing import List
|
||||
|
||||
import pytz
|
||||
|
||||
from crc import app
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.file import FileType, CONTENT_TYPES, File
|
||||
from crc.models.workflow import WorkflowSpecInfo
|
||||
|
||||
|
||||
class FileSystemService(object):
|
||||
|
||||
""" Simple Service meant for extension that provides some useful
|
||||
methods for dealing with the file system.
|
||||
"""
|
||||
LIBRARY_SPECS = "Library Specs"
|
||||
STAND_ALONE_SPECS = "Stand Alone"
|
||||
MASTER_SPECIFICATION = "Master Specification"
|
||||
REFERENCE_FILES = "Reference Files"
|
||||
SPECIAL_FOLDERS = [LIBRARY_SPECS, MASTER_SPECIFICATION, REFERENCE_FILES]
|
||||
CAT_JSON_FILE = "category.json"
|
||||
WF_JSON_FILE = "workflow.json"
|
||||
|
||||
@staticmethod
|
||||
def root_path():
|
||||
# fixme: allow absolute files
|
||||
dir_name = app.config['SYNC_FILE_ROOT']
|
||||
app_root = app.root_path
|
||||
return os.path.join(app_root, '..', dir_name)
|
||||
|
||||
@staticmethod
|
||||
def category_path(name: str):
|
||||
return os.path.join(FileSystemService.root_path(), name)
|
||||
|
||||
@staticmethod
|
||||
def library_path(name: str):
|
||||
return os.path.join(FileSystemService.root_path(), FileSystemService.LIBRARY_SPECS, name)
|
||||
|
||||
@staticmethod
|
||||
def category_path_for_spec(spec):
|
||||
if spec.is_master_spec:
|
||||
return os.path.join(FileSystemService.root_path())
|
||||
elif spec.library:
|
||||
category_path = FileSystemService.category_path(FileSystemService.LIBRARY_SPECS)
|
||||
elif spec.standalone:
|
||||
category_path = FileSystemService.category_path(FileSystemService.STAND_ALONE_SPECS)
|
||||
else:
|
||||
category_path = FileSystemService.category_path(spec.category_id)
|
||||
return category_path
|
||||
|
||||
@staticmethod
|
||||
def workflow_path(spec: WorkflowSpecInfo):
|
||||
if spec.is_master_spec:
|
||||
return os.path.join(FileSystemService.root_path(), FileSystemService.MASTER_SPECIFICATION)
|
||||
else:
|
||||
category_path = FileSystemService.category_path_for_spec(spec)
|
||||
return os.path.join(category_path, spec.id)
|
||||
|
||||
def next_display_order(self, spec):
|
||||
path = self.category_path_for_spec(spec)
|
||||
if os.path.exists(path):
|
||||
return len(next(os.walk(path))[1])
|
||||
else:
|
||||
return 0
|
||||
|
||||
@staticmethod
|
||||
def write_file_data_to_system(file_path, file_data):
|
||||
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
||||
with open(file_path, 'wb') as f_handle:
|
||||
f_handle.write(file_data)
|
||||
|
||||
@staticmethod
|
||||
def get_extension(file_name):
|
||||
basename, file_extension = os.path.splitext(file_name)
|
||||
return file_extension.lower().strip()[1:]
|
||||
|
||||
@staticmethod
|
||||
def assert_valid_file_name(file_name):
|
||||
file_extension = FileSystemService.get_extension(file_name)
|
||||
if file_extension not in FileType._member_names_:
|
||||
raise ApiError('unknown_extension',
|
||||
'The file you provided does not have an accepted extension:' +
|
||||
file_extension, status_code=404)
|
||||
|
||||
@staticmethod
|
||||
def _last_modified(file_path: str):
|
||||
# Returns the last modified date of the given file.
|
||||
timestamp = os.path.getmtime(file_path)
|
||||
utc_dt = datetime.datetime.utcfromtimestamp(timestamp)
|
||||
aware_utc_dt = utc_dt.replace(tzinfo=pytz.utc)
|
||||
return aware_utc_dt
|
||||
|
||||
@staticmethod
|
||||
def file_type(file_name):
|
||||
extension = FileSystemService.get_extension(file_name)
|
||||
return FileType[extension]
|
||||
|
||||
@staticmethod
|
||||
def _get_files(file_path: str, file_name=None) -> List[File]:
|
||||
"""Returns an array of File objects at the given path, can be restricted to just one file"""
|
||||
files = []
|
||||
items = os.scandir(file_path)
|
||||
for item in items:
|
||||
if item.is_file():
|
||||
if item.name == FileSystemService.WF_JSON_FILE:
|
||||
continue # Ignore the json files.
|
||||
if file_name is not None and item.name != file_name:
|
||||
continue
|
||||
file = FileSystemService.to_file_object_from_dir_entry(item)
|
||||
files.append(file)
|
||||
return files
|
||||
|
||||
@staticmethod
|
||||
def to_file_object(file_name: str, file_path: str) -> File:
|
||||
file_type = FileSystemService.file_type(file_name)
|
||||
content_type = CONTENT_TYPES[file_type.name]
|
||||
last_modified = FileSystemService._last_modified(file_path)
|
||||
size = os.path.getsize(file_path)
|
||||
file = File.from_file_system(file_name, file_type, content_type, last_modified, size)
|
||||
return file
|
||||
|
||||
@staticmethod
|
||||
def to_file_object_from_dir_entry(item: os.DirEntry):
|
||||
extension = FileSystemService.get_extension(item.name)
|
||||
try:
|
||||
file_type = FileType[extension]
|
||||
content_type = CONTENT_TYPES[file_type.name]
|
||||
except KeyError:
|
||||
raise ApiError("invalid_type", "Invalid File Type: %s, for file %s" % (extension, item.name))
|
||||
stats = item.stat()
|
||||
file_size = stats.st_size
|
||||
last_modified = FileSystemService._last_modified(item.path)
|
||||
return File.from_file_system(item.name, file_type, content_type, last_modified, file_size)
|
||||
|
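A hedged sketch of the directory layout the path helpers above produce under SYNC_FILE_ROOT (the root value, category id, spec id, and library name below are invented examples):

import os

# Sketch: mirror the path logic above with plain os.path calls.
sync_file_root = "SPECS"        # hypothetical SYNC_FILE_ROOT value
category_id = "irb_review"      # hypothetical category id
spec_id = "protocol_upload"     # hypothetical spec id

workflow_dir = os.path.join(sync_file_root, category_id, spec_id)
library_dir = os.path.join(sync_file_root, "Library Specs", "shared_scripts")
master_dir = os.path.join(sync_file_root, "Master Specification")

print(workflow_dir, library_dir, master_dir, sep="\n")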
134
crc/services/git_service.py
Normal file
@ -0,0 +1,134 @@
|
||||
import os
|
||||
|
||||
from crc import app
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.git_models import GitRepo
|
||||
from git import Repo, InvalidGitRepositoryError, NoSuchPathError, GitCommandError
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class GitService(object):
|
||||
|
||||
"""This is a wrapper around GitPython to manage versioning and syncing
|
||||
for Workflow Spec files that reside on the filesystem.
|
||||
|
||||
This is not a full-service git tool. It has many limitations.
|
||||
|
||||
This service requires environment variables:
|
||||
|
||||
SYNC_FILE_ROOT - An absolute path to the local Workflow Spec files. This is our repository.
|
||||
GIT_REMOTE_PATH - Location of spec files on GitHub. Currently, this is "sartography/crconnect-workflow-specs"
|
||||
GIT_BRANCH - The name of your local development branch. We force-load this branch.
|
||||
GIT_MERGE_BRANCH - The branch that can be merged into GIT_BRANCH. E.g., for a production machine, this would be set to 'staging' or something similar.
|
||||
GIT_USER_NAME - The GitHub account to use
|
||||
GIT_USER_PASS - The GitHub token to use for account GIT_USER_NAME
|
||||
"""
|
||||
|
||||
# TODO: Implement the GIT_MERGE_BRANCH feature
|
||||
|
||||
@staticmethod
|
||||
def get_remote_url(remote_path):
|
||||
# we use github
|
||||
# Note that the 'password' is a token generated by github, not the site password
|
||||
username = app.config["GIT_USER_NAME"]
|
||||
password = app.config["GIT_USER_PASS"]
|
||||
remote_url = f"https://{username}:{password}@github.com/{remote_path}.git"
|
||||
return remote_url
|
||||
|
||||
@staticmethod
|
||||
def setup_repo(remote_path, directory):
|
||||
remote_url = GitService.get_remote_url(remote_path)
|
||||
repo = Repo.clone_from(remote_url, directory)
|
||||
return repo
|
||||
|
||||
def __get_repo(self):
|
||||
remote_path = app.config['GIT_REMOTE_PATH']
|
||||
git_branch = app.config['GIT_BRANCH']
|
||||
directory = app.config['SYNC_FILE_ROOT']
|
||||
try:
|
||||
repo = Repo(directory)
|
||||
|
||||
except InvalidGitRepositoryError:
|
||||
# Thrown if the given repository appears to have an invalid format.
|
||||
# I believe this means there is no .git directory
|
||||
if os.listdir(directory):
|
||||
# If the directory is not empty, we let them decide how to fix it
|
||||
raise ApiError(code='invalid_git_repo',
|
||||
message=f'The directory {directory} is not empty, and is not a valid git repository. Please fix this before continuing.')
|
||||
else:
|
||||
# The directory is empty, so we setup the repo
|
||||
repo = self.setup_repo(remote_path, directory)
|
||||
|
||||
except NoSuchPathError:
|
||||
# The directory does not exist, so setup
|
||||
repo = self.setup_repo(remote_path, directory)
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
app.logger.error(e)
|
||||
raise ApiError(code='unknown_exception',
|
||||
message=f'There was an unknown exception. Original message is: {e}')
|
||||
try:
|
||||
repo.git.checkout(git_branch)
|
||||
except GitCommandError:
|
||||
# The branch might not exist yet, so we create it and its remote ref
|
||||
repo.git.branch(git_branch)
|
||||
repo.git.checkout(git_branch)
|
||||
repo.remotes.origin.push(refspec='{}:{}'.format(git_branch, f'{git_branch}'))
|
||||
repo.remotes.origin.fetch()
|
||||
|
||||
remote_ref = repo.remotes.origin.refs[f'{git_branch}']
|
||||
repo.active_branch.set_tracking_branch(remote_ref)
|
||||
return repo
|
||||
|
||||
def _get_repo(self):
|
||||
# This returns a gitpython Repo object
|
||||
return self.__get_repo()
|
||||
|
||||
def get_repo(self):
|
||||
# This returns an instance of crc.models.git_models.GitRepo,
|
||||
# built from a gitpython Repo object
|
||||
repo = self._get_repo()
|
||||
repo_model = GitRepo().from_repo(repo)
|
||||
return repo_model
|
||||
|
||||
def push_to_remote(self, comment=None):
|
||||
if comment is None:
|
||||
comment = f"Git commit: {datetime.now()}"
|
||||
repo = self._get_repo()
|
||||
# get list of changed files
|
||||
changes = [item.a_path for item in repo.index.diff(None)]
|
||||
# get list of untracked files
|
||||
untracked_files = repo.untracked_files
|
||||
|
||||
repo.index.add(changes)
|
||||
repo.index.add(untracked_files)
|
||||
repo.index.commit(comment)
|
||||
repo.remotes.origin.push()
|
||||
|
||||
return repo
|
||||
|
||||
def pull_from_remote(self):
|
||||
repo = self._get_repo()
|
||||
if not repo.is_dirty():
|
||||
try:
|
||||
repo.remotes.origin.pull()
|
||||
except GitCommandError as ce:
|
||||
print(ce)
|
||||
else:
|
||||
raise ApiError(code='dirty_repo',
|
||||
message='You have modified or untracked files. Please fix this before attempting to pull.')
|
||||
print(repo)
|
||||
return repo
|
||||
|
||||
def merge_with_branch(self, branch):
|
||||
# https://stackoverflow.com/questions/36799362/how-do-you-merge-the-master-branch-into-a-feature-branch-with-gitpython#36802900
|
||||
repo = self._get_repo()
|
||||
repo.remotes.origin.fetch()
|
||||
merge_branch = repo.remotes.origin.refs[branch]
|
||||
base = repo.merge_base(repo.active_branch, merge_branch)
|
||||
repo.index.merge_tree(merge_branch, base=base)
|
||||
repo.index.commit(f'Merge {branch} into working branch', parent_commits=(repo.active_branch.commit, merge_branch.commit))
|
||||
repo.active_branch.checkout(force=True)
|
||||
return repo
|
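A hedged sketch of the configuration GitService expects, expressed as Flask-style config values; the branch names, account, and token are placeholders, and only GIT_REMOTE_PATH echoes the value named in the docstring:

# Sketch only: these keys mirror the settings listed in the GitService docstring.
config = {
    "SYNC_FILE_ROOT": "SPECS",                                   # local spec repository
    "GIT_REMOTE_PATH": "sartography/crconnect-workflow-specs",   # GitHub "owner/repo"
    "GIT_BRANCH": "my_dev_branch",                               # placeholder branch name
    "GIT_MERGE_BRANCH": "staging",                               # branch allowed to merge in
    "GIT_USER_NAME": "example-bot",                              # placeholder account
    "GIT_USER_PASS": "ghp_xxx",                                  # placeholder token
}

# The remote URL is then built as in get_remote_url():
remote_url = (f"https://{config['GIT_USER_NAME']}:{config['GIT_USER_PASS']}"
              f"@github.com/{config['GIT_REMOTE_PATH']}.git")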
@ -4,7 +4,6 @@ from collections import OrderedDict
|
||||
from zipfile import BadZipFile
|
||||
|
||||
import pandas as pd
|
||||
import numpy
|
||||
from pandas import ExcelFile
|
||||
from pandas._libs.missing import NA
|
||||
from sqlalchemy import desc
|
||||
@ -13,12 +12,14 @@ from sqlalchemy.sql.functions import GenericFunction
|
||||
from crc import db
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.api_models import Task
|
||||
from crc.models.file import FileModel, FileDataModel, LookupFileModel, LookupDataModel
|
||||
from crc.models.file import LookupFileModel, LookupDataModel
|
||||
from crc.models.ldap import LdapSchema
|
||||
from crc.models.workflow import WorkflowModel, WorkflowSpecDependencyFile
|
||||
from crc.services.file_service import FileService
|
||||
from crc.models.workflow import WorkflowModel
|
||||
from crc.services.spec_file_service import SpecFileService
|
||||
from crc.services.reference_file_service import ReferenceFileService
|
||||
from crc.services.ldap_service import LdapService
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
from crc.services.workflow_spec_service import WorkflowSpecService
|
||||
|
||||
|
||||
class TSRank(GenericFunction):
|
||||
@ -50,11 +51,16 @@ class LookupService(object):
|
||||
return LookupService.__get_lookup_model(workflow, spiff_task.task_spec.name, field.id)
|
||||
|
||||
@staticmethod
|
||||
def get_lookup_model_for_file_data(file_data: FileDataModel, value_column, label_column):
|
||||
lookup_model = db.session.query(LookupFileModel).filter(LookupFileModel.file_data_model_id == file_data.id).first()
|
||||
def get_lookup_model_for_reference(file_name, value_column, label_column):
|
||||
lookup_model = db.session.query(LookupFileModel).\
|
||||
filter(LookupFileModel.file_name == file_name). \
|
||||
filter(LookupFileModel.workflow_spec_id == None).\
|
||||
first() # use "==" not "is none" which does NOT work, and makes this constantly expensive.
|
||||
if not lookup_model:
|
||||
logging.warning("!!!! Making a very expensive call to update the lookup model.")
|
||||
lookup_model = LookupService.build_lookup_table(file_data, value_column, label_column)
|
||||
file_data = ReferenceFileService().get_data(file_name)
|
||||
file_date = ReferenceFileService().last_modified(file_name)
|
||||
lookup_model = LookupService.build_lookup_table(file_name, file_data, file_date, value_column, label_column)
|
||||
return lookup_model
|
||||
|
||||
@staticmethod
|
||||
@ -65,17 +71,18 @@ class LookupService(object):
|
||||
.filter(LookupFileModel.task_spec_id == task_spec_id) \
|
||||
.order_by(desc(LookupFileModel.id)).first()
|
||||
|
||||
# one more quick query, to see if the lookup file is still related to this workflow.
|
||||
# if not, we need to rebuild the lookup table.
|
||||
# The above may return a model; if it does, it might still be out of date.
|
||||
# We need to check the file date to assure we have the most recent file.
|
||||
is_current = False
|
||||
if lookup_model:
|
||||
if lookup_model.is_ldap: # LDAP is always current
|
||||
is_current = True
|
||||
else:
|
||||
is_current = db.session.query(WorkflowSpecDependencyFile). \
|
||||
filter(WorkflowSpecDependencyFile.file_data_id == lookup_model.file_data_model_id).\
|
||||
filter(WorkflowSpecDependencyFile.workflow_id == workflow.id).count()
|
||||
|
||||
elif lookup_model.file_name is not None and lookup_model.last_updated is not None:
|
||||
# In some legacy cases, the lookup model might exist, but not have a file name, in which case we need
|
||||
# to rebuild.
|
||||
workflow_spec = WorkflowSpecService().get_spec(workflow.workflow_spec_id)
|
||||
current_date = SpecFileService.last_modified(workflow_spec, lookup_model.file_name)
|
||||
is_current = current_date == lookup_model.last_updated
|
||||
|
||||
if not is_current:
|
||||
# Very, very expensive, but we don't need this until we do.
|
||||
@ -131,15 +138,18 @@ class LookupService(object):
|
||||
file_name = field.get_property(Task.FIELD_PROP_SPREADSHEET_NAME)
|
||||
value_column = field.get_property(Task.FIELD_PROP_VALUE_COLUMN)
|
||||
label_column = field.get_property(Task.FIELD_PROP_LABEL_COLUMN)
|
||||
latest_files = FileService.get_spec_data_files(workflow_spec_id=workflow_model.workflow_spec_id,
|
||||
workflow_id=workflow_model.id,
|
||||
name=file_name)
|
||||
# TODO: workflow_model does not have a workflow_spec. It has a workflow_spec_id
|
||||
workflow_spec = WorkflowSpecService().get_spec(workflow_model.workflow_spec_id)
|
||||
latest_files = SpecFileService().get_files(workflow_spec, file_name=file_name)
|
||||
if len(latest_files) < 1:
|
||||
raise ApiError("invalid_enum", "Unable to locate the lookup data file '%s'" % file_name)
|
||||
else:
|
||||
data_model = latest_files[0]
|
||||
file = latest_files[0]
|
||||
|
||||
lookup_model = LookupService.build_lookup_table(data_model, value_column, label_column,
|
||||
file_data = SpecFileService().get_data(workflow_spec, file_name)
|
||||
file_date = SpecFileService.last_modified(workflow_spec, file_name)
|
||||
|
||||
lookup_model = LookupService.build_lookup_table(file_name, file_data, file_date, value_column, label_column,
|
||||
workflow_model.workflow_spec_id, task_spec_id, field_id)
|
||||
|
||||
# Use the results of an LDAP request to populate enum field options
|
||||
@ -158,40 +168,46 @@ class LookupService(object):
|
||||
return lookup_model
|
||||
|
||||
@staticmethod
|
||||
def build_lookup_table(data_model: FileDataModel, value_column, label_column,
|
||||
def build_lookup_table(file_name, file_data, file_date, value_column, label_column,
|
||||
workflow_spec_id=None, task_spec_id=None, field_id=None):
|
||||
""" In some cases the lookup table can be very large. This method will add all values to the database
|
||||
in a way that can be searched and returned via an api call - rather than sending the full set of
|
||||
options along with the form. It will only open the file and process the options if something has
|
||||
changed. """
|
||||
try:
|
||||
xlsx = ExcelFile(data_model.data, engine='openpyxl')
|
||||
xlsx = ExcelFile(file_data, engine='openpyxl')
|
||||
# Pandas--or at least openpyxl, cannot read old xls files.
|
||||
# The error comes back as zipfile.BadZipFile because xlsx files are zipped xml files
|
||||
except BadZipFile:
|
||||
raise ApiError(code='excel_error',
|
||||
message=f'Error opening excel file {data_model.file_model.name}. You may have an older .xls spreadsheet. (file_model_id: {data_model.file_model_id} workflow_spec_id: {workflow_spec_id}, task_spec_id: {task_spec_id}, and field_id: {field_id})')
|
||||
message=f"Error opening excel file {file_name}. You may have an older .xls spreadsheet. (workflow_spec_id: {workflow_spec_id}, task_spec_id: {task_spec_id}, and field_id: {field_id})")
|
||||
df = xlsx.parse(xlsx.sheet_names[0])  # Currently we only look at the first sheet.
|
||||
df = df.convert_dtypes()
|
||||
df = df.loc[:, ~df.columns.str.contains('^Unnamed')] # Drop unnamed columns.
|
||||
df = df.loc[:, ~df.columns.str.contains('^Unnamed')] # Drop unnamed columns.
|
||||
df = pd.DataFrame(df).dropna(how='all') # Drop null rows
|
||||
df = pd.DataFrame(df).replace({NA: ''})
|
||||
|
||||
for (column_name, column_data) in df.iteritems():
|
||||
data_type = df.dtypes[column_name].name
|
||||
if data_type == 'string':
|
||||
df[column_name] = df[column_name].fillna('')
|
||||
else:
|
||||
df[column_name] = df[column_name].fillna(0)
|
||||
if value_column not in df:
|
||||
raise ApiError("invalid_enum",
|
||||
"The file %s does not contain a column named % s" % (data_model.file_model.name,
|
||||
"The file %s does not contain a column named % s" % (file_name,
|
||||
value_column))
|
||||
if label_column not in df:
|
||||
raise ApiError("invalid_enum",
|
||||
"The file %s does not contain a column named % s" % (data_model.file_model.name,
|
||||
"The file %s does not contain a column named % s" % (file_name,
|
||||
label_column))
|
||||
|
||||
lookup_model = LookupFileModel(workflow_spec_id=workflow_spec_id,
|
||||
field_id=field_id,
|
||||
task_spec_id=task_spec_id,
|
||||
file_data_model_id=data_model.id,
|
||||
file_name=file_name,
|
||||
last_updated=file_date,
|
||||
is_ldap=False)
|
||||
|
||||
|
||||
db.session.add(lookup_model)
|
||||
for index, row in df.iterrows():
|
||||
lookup_data = LookupDataModel(lookup_file_model=lookup_model,
|
||||
|
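A self-contained sketch of the spreadsheet clean-up build_lookup_table performs before rows are stored, using an in-memory DataFrame instead of an uploaded xlsx (the column names and values are invented):

import pandas as pd

# Sketch: the same dtype conversion, unnamed-column drop, and NA handling as above.
df = pd.DataFrame({"CODE": ["A", "B", None],
                   "LABEL": ["Alpha", "Beta", None],
                   "Unnamed: 2": [None, None, None]})
df = df.convert_dtypes()
df = df.loc[:, ~df.columns.str.contains('^Unnamed')]  # Drop unnamed columns.
df = df.dropna(how='all')                             # Drop fully empty rows.
df = df.replace({pd.NA: ''})
print(df)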
79
crc/services/reference_file_service.py
Normal file
@ -0,0 +1,79 @@
|
||||
import datetime
|
||||
import hashlib
|
||||
import os
|
||||
|
||||
from crc import app, session
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.file import FileModel, FileModelSchema, FileDataModel, FileType, File
|
||||
from crc.services.file_system_service import FileSystemService
|
||||
|
||||
from uuid import UUID
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
|
||||
class ReferenceFileService(FileSystemService):
|
||||
|
||||
|
||||
@staticmethod
|
||||
def root_path():
|
||||
# fixme: allow absolute directory names (but support relative)
|
||||
dir_name = app.config['SYNC_FILE_ROOT']
|
||||
app_root = app.root_path
|
||||
return os.path.join(app_root, '..', dir_name, ReferenceFileService.REFERENCE_FILES)
|
||||
|
||||
@staticmethod
|
||||
def file_path(file_name: str):
|
||||
sync_file_root = ReferenceFileService().root_path()
|
||||
file_path = os.path.join(sync_file_root, file_name)
|
||||
return file_path
|
||||
|
||||
@staticmethod
|
||||
def add_reference_file(file_name: str, binary_data: bytes) -> File:
|
||||
return ReferenceFileService.update_reference_file(file_name, binary_data)
|
||||
|
||||
@staticmethod
|
||||
def update_reference_file(file_name: str, binary_data: bytes) -> File:
|
||||
ReferenceFileService.assert_valid_file_name(file_name)
|
||||
file_path = ReferenceFileService.file_path(file_name)
|
||||
ReferenceFileService.write_to_file_system(file_name, binary_data)
|
||||
return ReferenceFileService.to_file_object(file_name, file_path)
|
||||
|
||||
@staticmethod
|
||||
def get_data(file_name):
|
||||
file_path = ReferenceFileService.file_path(file_name)
|
||||
if os.path.exists(file_path):
|
||||
with open(file_path, 'rb') as f_handle:
|
||||
spec_file_data = f_handle.read()
|
||||
return spec_file_data
|
||||
else:
|
||||
raise ApiError('file_not_found',
|
||||
f"There is not a reference file named '{file_name}'")
|
||||
|
||||
@staticmethod
|
||||
def write_to_file_system(file_name, file_data):
|
||||
file_path = ReferenceFileService.file_path(file_name)
|
||||
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
||||
with open(file_path, 'wb') as f_handle:
|
||||
f_handle.write(file_data)
|
||||
return file_path
|
||||
|
||||
@staticmethod
|
||||
def get_reference_files():
|
||||
return FileSystemService._get_files(ReferenceFileService.root_path())
|
||||
|
||||
@staticmethod
|
||||
def get_reference_file(name: str):
|
||||
files = FileSystemService._get_files(ReferenceFileService.root_path(), file_name=name)
|
||||
if len(files) < 1:
|
||||
raise ApiError('unknown_file', f"No reference file found with the name {name}", 404)
|
||||
return FileSystemService._get_files(ReferenceFileService.root_path(), file_name=name)[0]
|
||||
|
||||
|
||||
@staticmethod
|
||||
def delete(file_name):
|
||||
file_path = ReferenceFileService.file_path(file_name)
|
||||
os.remove(file_path)
|
||||
|
||||
@staticmethod
|
||||
def last_modified(file_name):
|
||||
return FileSystemService._last_modified(ReferenceFileService.file_path(file_name))
|
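A minimal, standalone sketch of the write-then-read round trip that update_reference_file and get_data perform, using a temporary directory in place of the configured SYNC_FILE_ROOT (the file contents are a stand-in; "documents.xlsx" is the reference list named elsewhere in this commit):

import os
import tempfile

# Sketch: same mechanics as write_to_file_system() / get_data(), without the Flask app config.
root = os.path.join(tempfile.mkdtemp(), "Reference Files")
file_path = os.path.join(root, "documents.xlsx")

os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, 'wb') as f_handle:
    f_handle.write(b"example bytes")   # stand-in for real spreadsheet data

with open(file_path, 'rb') as f_handle:
    assert f_handle.read() == b"example bytes"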
149
crc/services/spec_file_service.py
Normal file
@ -0,0 +1,149 @@
|
||||
import datetime
|
||||
import os
|
||||
import shutil
|
||||
from typing import List
|
||||
|
||||
from crc import app, session
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.file import FileType, CONTENT_TYPES, File
|
||||
|
||||
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from crc.models.workflow import WorkflowSpecInfo
|
||||
from crc.services.file_system_service import FileSystemService
|
||||
|
||||
|
||||
class SpecFileService(FileSystemService):
|
||||
|
||||
"""We store spec files on the file system. This allows us to take advantage of Git for
|
||||
syncing and versioning.
|
||||
The files are stored in a directory whose path is determined by the category and spec names.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def get_files(workflow_spec: WorkflowSpecInfo, file_name=None, include_libraries=False) -> List[File]:
|
||||
""" Returns all files associated with a workflow specification """
|
||||
path = SpecFileService.workflow_path(workflow_spec)
|
||||
files = SpecFileService._get_files(path, file_name)
|
||||
if include_libraries:
|
||||
for lib_name in workflow_spec.libraries:
|
||||
lib_path = SpecFileService.library_path(lib_name)
|
||||
files.extend(SpecFileService._get_files(lib_path, file_name))
|
||||
return files
|
||||
|
||||
@staticmethod
|
||||
def add_file(workflow_spec: WorkflowSpecInfo, file_name: str, binary_data: bytearray) -> File:
|
||||
# Same as update
|
||||
return SpecFileService.update_file(workflow_spec, file_name, binary_data)
|
||||
|
||||
@staticmethod
|
||||
def update_file(workflow_spec: WorkflowSpecInfo, file_name: str, binary_data) -> File:
|
||||
SpecFileService.assert_valid_file_name(file_name)
|
||||
file_path = SpecFileService.file_path(workflow_spec, file_name)
|
||||
SpecFileService.write_file_data_to_system(file_path, binary_data)
|
||||
file = SpecFileService.to_file_object(file_name, file_path)
|
||||
if file_name == workflow_spec.primary_file_name:
|
||||
SpecFileService.set_primary_bpmn(workflow_spec, file_name, binary_data)
|
||||
elif workflow_spec.primary_file_name is None and file.type == FileType.bpmn:
|
||||
# If no primary process exists, make this the primary process.
|
||||
SpecFileService.set_primary_bpmn(workflow_spec, file_name, binary_data)
|
||||
return file
|
||||
|
||||
@staticmethod
|
||||
def get_data(workflow_spec: WorkflowSpecInfo, file_name: str):
|
||||
file_path = SpecFileService.file_path(workflow_spec, file_name)
|
||||
if not os.path.exists(file_path):
|
||||
# If the file isn't here, it may be in a library
|
||||
for lib in workflow_spec.libraries:
|
||||
file_path = SpecFileService.library_path(lib)
|
||||
file_path = os.path.join(file_path, file_name)
|
||||
if os.path.exists(file_path):
|
||||
break
|
||||
if not os.path.exists(file_path):
|
||||
raise ApiError("unknown_file", f"No file found with name {file_name} in {workflow_spec.display_name}")
|
||||
with open(file_path, 'rb') as f_handle:
|
||||
spec_file_data = f_handle.read()
|
||||
return spec_file_data
|
||||
|
||||
@staticmethod
|
||||
def file_path(spec: WorkflowSpecInfo, file_name: str):
|
||||
return os.path.join(SpecFileService.workflow_path(spec), file_name)
|
||||
|
||||
@staticmethod
|
||||
def last_modified(spec: WorkflowSpecInfo, file_name: str):
|
||||
path = SpecFileService.file_path(spec, file_name)
|
||||
return FileSystemService._last_modified(path)
|
||||
|
||||
@staticmethod
|
||||
def delete_file(spec, file_name):
|
||||
# Fixme: Remember to remove the lookup files when the spec file is removed.
|
||||
# lookup_files = session.query(LookupFileModel).filter_by(file_model_id=file_id).all()
|
||||
# for lf in lookup_files:
|
||||
# session.query(LookupDataModel).filter_by(lookup_file_model_id=lf.id).delete()
|
||||
# session.query(LookupFileModel).filter_by(id=lf.id).delete()
|
||||
file_path = SpecFileService.file_path(spec, file_name)
|
||||
os.remove(file_path)
|
||||
|
||||
@staticmethod
|
||||
def delete_all_files(spec):
|
||||
dir_path = SpecFileService.workflow_path(spec)
|
||||
if os.path.exists(dir_path):
|
||||
shutil.rmtree(dir_path)
|
||||
|
||||
@staticmethod
|
||||
def set_primary_bpmn(workflow_spec: WorkflowSpecInfo, file_name: str, binary_data=None):
|
||||
# If this is a BPMN, extract the process id and determine if it contains swim lanes.
|
||||
extension = SpecFileService.get_extension(file_name)
|
||||
file_type = FileType[extension]
|
||||
if file_type == FileType.bpmn:
|
||||
if not binary_data:
|
||||
binary_data = SpecFileService.get_data(workflow_spec, file_name)
|
||||
try:
|
||||
bpmn: etree.Element = etree.fromstring(binary_data)
|
||||
workflow_spec.primary_process_id = SpecFileService.get_process_id(bpmn)
|
||||
workflow_spec.primary_file_name = file_name
|
||||
workflow_spec.is_review = SpecFileService.has_swimlane(bpmn)
|
||||
|
||||
except etree.XMLSyntaxError as xse:
|
||||
raise ApiError("invalid_xml", "Failed to parse xml: " + str(xse), file_name=file_name)
|
||||
else:
|
||||
raise ApiError("invalid_xml", "Only a BPMN can be the primary file.", file_name=file_name)
|
||||
|
||||
@staticmethod
|
||||
def has_swimlane(et_root: etree.Element):
|
||||
"""
|
||||
Look through XML and determine if there are any lanes present that have a label.
|
||||
"""
|
||||
elements = et_root.xpath('//bpmn:lane',
|
||||
namespaces={'bpmn': 'http://www.omg.org/spec/BPMN/20100524/MODEL'})
|
||||
retval = False
|
||||
for el in elements:
|
||||
if el.get('name'):
|
||||
retval = True
|
||||
return retval
|
||||
|
||||
@staticmethod
|
||||
def get_process_id(et_root: etree.Element):
|
||||
process_elements = []
|
||||
for child in et_root:
|
||||
if child.tag.endswith('process') and child.attrib.get('isExecutable', False):
|
||||
process_elements.append(child)
|
||||
|
||||
if len(process_elements) == 0:
|
||||
raise ValidationException('No executable process tag found')
|
||||
|
||||
# There are multiple root elements
|
||||
if len(process_elements) > 1:
|
||||
|
||||
# Look for the element that has the startEvent in it
|
||||
for e in process_elements:
|
||||
this_element: etree.Element = e
|
||||
for child_element in list(this_element):
|
||||
if child_element.tag.endswith('startEvent'):
|
||||
return this_element.attrib['id']
|
||||
|
||||
raise ValidationException('No start event found in %s' % et_root.attrib['id'])
|
||||
|
||||
return process_elements[0].attrib['id']
|
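A self-contained sketch of the process-id extraction that set_primary_bpmn relies on, run against a minimal hand-written BPMN string (the process id "example_process" and the element ids are invented):

from lxml import etree

bpmn_xml = b"""<?xml version="1.0"?>
<definitions xmlns="http://www.omg.org/spec/BPMN/20100524/MODEL" id="defs_1">
  <process id="example_process" isExecutable="true">
    <startEvent id="start_1"/>
  </process>
</definitions>"""

root = etree.fromstring(bpmn_xml)
# Same idea as get_process_id(): find the executable process element and read its id.
processes = [child for child in root
             if child.tag.endswith('process') and child.attrib.get('isExecutable', False)]
print(processes[0].attrib['id'])   # -> example_process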
@ -11,7 +11,7 @@ from ldap3.core.exceptions import LDAPSocketOpenError
|
||||
from crc import db, session, app
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.email import EmailModel
|
||||
from crc.models.file import FileModel, File, FileSchema
|
||||
from crc.models.file import FileModel, File, FileSchema, FileDataModel
|
||||
from crc.models.ldap import LdapSchema
|
||||
|
||||
from crc.models.protocol_builder import ProtocolBuilderCreatorStudy
|
||||
@ -19,13 +19,13 @@ from crc.models.study import StudyModel, Study, StudyStatus, Category, WorkflowM
|
||||
StudyAssociated, ProgressStatus
|
||||
from crc.models.task_event import TaskEventModel
|
||||
from crc.models.task_log import TaskLogModel
|
||||
from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowModel, WorkflowSpecModel, WorkflowState, \
|
||||
WorkflowStatus, WorkflowSpecDependencyFile
|
||||
from crc.models.workflow import WorkflowSpecCategory, WorkflowModel, WorkflowSpecInfo, WorkflowState, \
|
||||
WorkflowStatus
|
||||
from crc.services.document_service import DocumentService
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.ldap_service import LdapService
|
||||
from crc.services.lookup_service import LookupService
|
||||
from crc.services.protocol_builder import ProtocolBuilderService
|
||||
from crc.services.user_file_service import UserFileService
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
|
||||
|
||||
@ -41,11 +41,13 @@ class StudyService(object):
|
||||
study_info = study_details[0]
|
||||
# The review types 2, 3, 23, 24 correspond to review type names
|
||||
# `Full Committee`, `Expedited`, `Non-UVA IRB Full Board`, and `Non-UVA IRB Expedited`
|
||||
if isinstance(study_info, dict) and 'REVIEW_TYPE' in study_info.keys() and study_info['REVIEW_TYPE'] in [2, 3, 23, 24]:
|
||||
if isinstance(study_info, dict) and 'REVIEW_TYPE' in study_info.keys() and study_info['REVIEW_TYPE'] in [2, 3,
|
||||
23,
|
||||
24]:
|
||||
return True
|
||||
return False
|
||||
|
||||
def get_studies_for_user(self, user, include_invalid=False):
|
||||
def get_studies_for_user(self, user, categories, include_invalid=False):
|
||||
"""Returns a list of all studies for the given user."""
|
||||
associated = session.query(StudyAssociated).filter_by(uid=user.uid, access=True).all()
|
||||
associated_studies = [x.study_id for x in associated]
|
||||
@ -55,7 +57,7 @@ class StudyService(object):
|
||||
studies = []
|
||||
for study_model in db_studies:
|
||||
if include_invalid or self._is_valid_study(study_model.id):
|
||||
studies.append(StudyService.get_study(study_model.id, study_model, do_status=False))
|
||||
studies.append(StudyService.get_study(study_model.id, categories, study_model=study_model))
|
||||
return studies
|
||||
|
||||
@staticmethod
|
||||
@ -65,18 +67,18 @@ class StudyService(object):
|
||||
studies = []
|
||||
for s in db_studies:
|
||||
study = Study.from_model(s)
|
||||
study.files = FileService.get_files_for_study(study.id)
|
||||
study.files = UserFileService.get_files_for_study(study.id)
|
||||
studies.append(study)
|
||||
return studies
|
||||
|
||||
@staticmethod
|
||||
def get_study(study_id, study_model: StudyModel = None, do_status=False):
|
||||
def get_study(study_id, categories: List[WorkflowSpecCategory], study_model: StudyModel = None,
|
||||
master_workflow_results=None):
|
||||
"""Returns a study model that contains all the workflows organized by category.
|
||||
IMPORTANT: This is intended to be a lightweight call, it should never involve
|
||||
loading up and executing all the workflows in a study to calculate information."""
|
||||
Pass in the results of the master workflow spec, and the status of other workflows will be updated."""
|
||||
|
||||
if not study_model:
|
||||
study_model = session.query(StudyModel).filter_by(id=study_id).first()
|
||||
|
||||
study = Study.from_model(study_model)
|
||||
study.create_user_display = LdapService.user_info(study.user_uid).display_name
|
||||
last_event: TaskEventModel = session.query(TaskEventModel) \
|
||||
@ -88,29 +90,33 @@ class StudyService(object):
|
||||
else:
|
||||
study.last_activity_user = LdapService.user_info(last_event.user_uid).display_name
|
||||
study.last_activity_date = last_event.date
|
||||
study.categories = StudyService.get_categories()
|
||||
workflow_metas = StudyService._get_workflow_metas(study_id)
|
||||
files = FileService.get_files_for_study(study.id)
|
||||
files = (File.from_models(model, FileService.get_file_data(model.id),
|
||||
study.categories = categories
|
||||
files = UserFileService.get_files_for_study(study.id)
|
||||
files = (File.from_models(model, UserFileService.get_file_data(model.id),
|
||||
DocumentService.get_dictionary()) for model in files)
|
||||
study.files = list(files)
|
||||
# Calling this line repeatedly is very very slow. It creates the
|
||||
# master spec and runs it. Don't execute this for Abandoned studies, as
|
||||
# we don't have the information to process them.
|
||||
if study.status != StudyStatus.abandoned:
|
||||
# this line is taking 99% of the time that is used in get_study.
|
||||
# see ticket #196
|
||||
if do_status:
|
||||
# __get_study_status() runs the master workflow to generate the status dictionary
|
||||
status = StudyService._get_study_status(study_model)
|
||||
study.warnings = StudyService._update_status_of_workflow_meta(workflow_metas, status)
|
||||
|
||||
# Group the workflows into their categories.
|
||||
for category in study.categories:
|
||||
category.workflows = {w for w in workflow_metas if w.category_id == category.id}
|
||||
|
||||
workflow_metas = StudyService._get_workflow_metas(study_id, category)
|
||||
if master_workflow_results:
|
||||
study.warnings = StudyService._update_status_of_workflow_meta(workflow_metas,
|
||||
master_workflow_results)
|
||||
category.workflows = workflow_metas
|
||||
return study
|
||||
|
||||
@staticmethod
|
||||
def _get_workflow_metas(study_id, category):
|
||||
# Add in the Workflows for each category
|
||||
workflow_metas = []
|
||||
for spec in category.specs:
|
||||
workflow_models = db.session.query(WorkflowModel).\
|
||||
filter(WorkflowModel.study_id == study_id).\
|
||||
filter(WorkflowModel.workflow_spec_id == spec.id).\
|
||||
all()
|
||||
for workflow in workflow_models:
|
||||
workflow_metas.append(WorkflowMetadata.from_workflow(workflow, spec))
|
||||
return workflow_metas
|
||||
|
||||
@staticmethod
|
||||
def get_study_associate(study_id=None, uid=None):
|
||||
"""
|
||||
@ -236,22 +242,14 @@ class StudyService(object):
|
||||
return
|
||||
|
||||
session.query(TaskEventModel).filter_by(workflow_id=workflow.id).delete()
|
||||
session.query(WorkflowSpecDependencyFile).filter_by(workflow_id=workflow_id).delete(synchronize_session='fetch')
|
||||
session.query(FileModel).filter_by(workflow_id=workflow_id).update({'archived': True, 'workflow_id': None})
|
||||
files = session.query(FileModel).filter_by(workflow_id=workflow_id).all()
|
||||
for file in files:
|
||||
session.query(FileDataModel).filter(FileDataModel.file_model_id == file.id).delete()
|
||||
session.delete(file)
|
||||
|
||||
session.delete(workflow)
|
||||
session.commit()
|
||||
|
||||
@staticmethod
|
||||
def get_categories():
|
||||
"""Returns a list of category objects, in the correct order."""
|
||||
cat_models = db.session.query(WorkflowSpecCategoryModel) \
|
||||
.order_by(WorkflowSpecCategoryModel.display_order).all()
|
||||
categories = []
|
||||
for cat_model in cat_models:
|
||||
categories.append(Category(cat_model))
|
||||
return categories
|
||||
|
||||
@staticmethod
|
||||
def get_documents_status(study_id):
|
||||
"""Returns a list of documents related to the study, and any file information
|
||||
@ -276,7 +274,9 @@ class StudyService(object):
|
||||
|
||||
doc['required'] = False
|
||||
if ProtocolBuilderService.is_enabled() and doc['id'] != '':
|
||||
pb_data = next((item for item in pb_docs['AUXDOCS'] if int(item['SS_AUXILIARY_DOC_TYPE_ID']) == int(doc['id'])), None)
|
||||
pb_data = next(
|
||||
(item for item in pb_docs['AUXDOCS'] if int(item['SS_AUXILIARY_DOC_TYPE_ID']) == int(doc['id'])),
|
||||
None)
|
||||
if pb_data:
|
||||
doc['required'] = True
|
||||
|
||||
@ -291,12 +291,12 @@ class StudyService(object):
|
||||
doc['display_name'] = ' / '.join(name_list)
|
||||
|
||||
# For each file, get associated workflow status
|
||||
doc_files = FileService.get_files_for_study(study_id=study_id, irb_doc_code=code)
|
||||
doc_files = UserFileService.get_files_for_study(study_id=study_id, irb_doc_code=code)
|
||||
doc['count'] = len(doc_files)
|
||||
doc['files'] = []
|
||||
|
||||
for file_model in doc_files:
|
||||
file = File.from_models(file_model, FileService.get_file_data(file_model.id), [])
|
||||
file = File.from_models(file_model, UserFileService.get_file_data(file_model.id), [])
|
||||
file_data = FileSchema().dump(file)
|
||||
del file_data['document']
|
||||
doc['files'].append(Box(file_data))
|
||||
@ -310,16 +310,13 @@ class StudyService(object):
|
||||
|
||||
@staticmethod
|
||||
def get_investigator_dictionary():
|
||||
"""Returns a dictionary of document details keyed on the doc_code."""
|
||||
file_data = FileService.get_reference_file_data(StudyService.INVESTIGATOR_LIST)
|
||||
lookup_model = LookupService.get_lookup_model_for_file_data(file_data, 'code', 'label')
|
||||
lookup_model = LookupService.get_lookup_model_for_reference(StudyService.INVESTIGATOR_LIST, 'code', 'label')
|
||||
doc_dict = {}
|
||||
for lookup_data in lookup_model.dependencies:
|
||||
doc_dict[lookup_data.value] = lookup_data.data
|
||||
return doc_dict
|
||||
|
||||
@staticmethod
|
||||
|
||||
def get_investigators(study_id, all=False):
|
||||
"""Convert array of investigators from protocol builder into a dictionary keyed on the type. """
|
||||
|
||||
@ -362,7 +359,7 @@ class StudyService(object):
|
||||
return {}
|
||||
|
||||
@staticmethod
|
||||
def synch_with_protocol_builder_if_enabled(user):
|
||||
def synch_with_protocol_builder_if_enabled(user, specs):
|
||||
"""Assures that the studies we have locally for the given user are
|
||||
in sync with the studies available in protocol builder. """
|
||||
|
||||
@ -383,7 +380,9 @@ class StudyService(object):
|
||||
for pb_study in pb_studies:
|
||||
new_status = None
|
||||
new_progress_status = None
|
||||
db_study = next((s for s in db_studies if s.id == pb_study.STUDYID), None)
|
||||
db_study = session.query(StudyModel).filter(StudyModel.id == pb_study.STUDYID).first()
|
||||
#db_study = next((s for s in db_studies if s.id == pb_study.STUDYID), None)
|
||||
|
||||
if not db_study:
|
||||
db_study = StudyModel(id=pb_study.STUDYID)
|
||||
db_study.status = None # Force a new sa
|
||||
@ -394,7 +393,7 @@ class StudyService(object):
|
||||
db_studies.append(db_study)
|
||||
|
||||
db_study.update_from_protocol_builder(pb_study, user.uid)
|
||||
StudyService._add_all_workflow_specs_to_study(db_study)
|
||||
StudyService.add_all_workflow_specs_to_study(db_study, specs)
|
||||
|
||||
# If there is a new automatic status change and there isn't a manual change in place, record it.
|
||||
if new_status and db_study.status != StudyStatus.hold:
|
||||
@ -431,13 +430,14 @@ class StudyService(object):
|
||||
def _update_status_of_workflow_meta(workflow_metas, status):
|
||||
# Update the status on each workflow
|
||||
warnings = []
|
||||
unused_statuses = status.copy() # A list of all the statuses that are not used.
|
||||
unused_statuses = status.copy() # A list of all the statuses that are not used.
|
||||
for wfm in workflow_metas:
|
||||
unused_statuses.pop(wfm.workflow_spec_id, None)
|
||||
wfm.state_message = ''
|
||||
# do we have a status for you
|
||||
if wfm.workflow_spec_id not in status.keys():
|
||||
warnings.append(ApiError("missing_status", "No status information provided about workflow %s" % wfm.workflow_spec_id))
|
||||
warnings.append(ApiError("missing_status",
|
||||
"No status information provided about workflow %s" % wfm.workflow_spec_id))
|
||||
continue
|
||||
if not isinstance(status[wfm.workflow_spec_id], dict):
|
||||
warnings.append(ApiError(code='invalid_status',
|
||||
@ -452,7 +452,8 @@ class StudyService(object):
|
||||
if not WorkflowState.has_value(status[wfm.workflow_spec_id]['status']):
|
||||
warnings.append(ApiError("invalid_state",
|
||||
"Workflow '%s' can not be set to '%s', should be one of %s" % (
|
||||
wfm.workflow_spec_id, status[wfm.workflow_spec_id]['status'], ",".join(WorkflowState.list())
|
||||
wfm.workflow_spec_id, status[wfm.workflow_spec_id]['status'],
|
||||
",".join(WorkflowState.list())
|
||||
)))
|
||||
continue
|
||||
|
||||
@ -461,50 +462,19 @@ class StudyService(object):
|
||||
for status in unused_statuses:
|
||||
if isinstance(unused_statuses[status], dict) and 'status' in unused_statuses[status]:
|
||||
warnings.append(ApiError("unmatched_status", "The master workflow provided a status for '%s' a "
|
||||
"workflow that doesn't seem to exist." %
|
||||
status))
|
||||
"workflow that doesn't seem to exist." %
|
||||
status))
|
||||
|
||||
return warnings
|
||||
|
||||
@staticmethod
|
||||
def _get_workflow_metas(study_id):
|
||||
# Add in the Workflows for each category
|
||||
workflow_models = db.session.query(WorkflowModel). \
|
||||
join(WorkflowSpecModel). \
|
||||
filter(WorkflowSpecModel.is_master_spec == False). \
|
||||
filter((WorkflowSpecModel.library == False) | \
|
||||
(WorkflowSpecModel.library == None)). \
|
||||
filter(WorkflowModel.study_id == study_id). \
|
||||
all()
|
||||
workflow_metas = []
|
||||
for workflow in workflow_models:
|
||||
workflow_metas.append(WorkflowMetadata.from_workflow(workflow))
|
||||
return workflow_metas
|
||||
|
||||
@staticmethod
|
||||
def _get_study_status(study_model):
|
||||
"""Uses the Top Level Workflow to calculate the status of the study, and it's
|
||||
workflow models."""
|
||||
master_specs = db.session.query(WorkflowSpecModel). \
|
||||
filter_by(is_master_spec=True).all()
|
||||
if len(master_specs) < 1:
|
||||
raise ApiError("missing_master_spec", "No specifications are currently marked as the master spec.")
|
||||
if len(master_specs) > 1:
|
||||
raise ApiError("multiple_master_specs",
|
||||
"There is more than one master specification, and I don't know what to do.")
|
||||
|
||||
return WorkflowProcessor.run_master_spec(master_specs[0], study_model)
|
||||
|
||||
@staticmethod
|
||||
def _add_all_workflow_specs_to_study(study_model: StudyModel):
|
||||
def add_all_workflow_specs_to_study(study_model: StudyModel, specs: List[WorkflowSpecInfo]):
|
||||
existing_models = session.query(WorkflowModel).filter(WorkflowModel.study == study_model).all()
|
||||
existing_specs = list(m.workflow_spec_id for m in existing_models)
|
||||
new_specs = session.query(WorkflowSpecModel). \
|
||||
filter(WorkflowSpecModel.is_master_spec == False). \
|
||||
filter(WorkflowSpecModel.id.notin_(existing_specs)). \
|
||||
all()
|
||||
existing_spec_ids = list(map(lambda x: x.workflow_spec_id, existing_models))
|
||||
errors = []
|
||||
for workflow_spec in new_specs:
|
||||
for workflow_spec in specs:
|
||||
if workflow_spec.id in existing_spec_ids:
|
||||
continue
|
||||
try:
|
||||
StudyService._create_workflow_model(study_model, workflow_spec)
|
||||
except WorkflowTaskExecException as wtee:
|
||||
|
122
crc/services/update_service.py
Normal file
@ -0,0 +1,122 @@

# loop over all the categories in the database
# assure we have a directory with the correct name
# assure it contains a valid json file called categories.json
import json
import os
import pathlib
import re
import shutil

from crc import db, app
from crc.models.file import FileModel
from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowSpecCategoryModelSchema, WorkflowSpecModelSchema, \
WorkflowSpecModel

LIBRARY_SPECS = "Library Specs"
STAND_ALONE_SPECS = "Stand Alone"
MASTER_SPECIFICATION = "Master Specification"
REFERENCE_FILES = "Reference Files"
SPECIAL_FOLDERS = [LIBRARY_SPECS, MASTER_SPECIFICATION, REFERENCE_FILES]
CAT_JSON_FILE = "category.json"
WF_JSON_FILE = "workflow.json"


base_dir = '../SPECS'
app_root = app.root_path
path = os.path.join(app_root, '..', 'SPECS')
CAT_SCHEMA = WorkflowSpecCategoryModelSchema()
SPEC_SCHEMA = WorkflowSpecModelSchema()
|
||||
|
||||
|
||||
def remove_all_json_files(path):
|
||||
for json_file in pathlib.Path(path).glob('*.json'):
|
||||
print("removing ", json_file)
|
||||
os.remove(json_file)
|
||||
|
||||
|
||||
def update_workflows_for_category(path, schemas, category_id):
|
||||
for schema in schemas:
|
||||
orig_path = os.path.join(path, schema.display_name)
|
||||
new_path = os.path.join(path, schema.id)
|
||||
if (os.path.exists(orig_path)):
|
||||
os.rename(orig_path, new_path)
|
||||
print(new_path)
|
||||
update_spec(new_path, schema, category_id)
|
||||
|
||||
|
||||
def update_spec(path, schema, category_id):
|
||||
json_data = SPEC_SCHEMA.dump(schema)
|
||||
remove_all_json_files(path)
|
||||
|
||||
# Fixup the libraries
|
||||
lib_ids = list(map(lambda x: x['id'], json_data['libraries']))
|
||||
|
||||
# calculate the primary process id, and primary file name
|
||||
file = db.session.query(FileModel).\
|
||||
filter(FileModel.workflow_spec_id == schema.id).\
|
||||
filter(FileModel.primary == True).first()
|
||||
if(file):
|
||||
json_data['primary_file_name'] = file.name
|
||||
json_data['primary_process_id'] = file.primary_process_id
|
||||
else:
|
||||
print("This workflow doesn't have a primary process:", json_data)
|
||||
|
||||
json_data['category_id'] = category_id
|
||||
json_data.pop('category', None)
|
||||
json_data.pop('parents', None)
|
||||
if json_data['library'] is None:
|
||||
json_data['library'] = False
|
||||
|
||||
json_data['libraries'] = lib_ids
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
json_file_name = os.path.join(path, 'workflow.json')
|
||||
with open(json_file_name, 'w') as wf_handle:
|
||||
json.dump(json_data, wf_handle, indent=4)
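For orientation, a sketch of the workflow.json file that update_spec writes for one specification, limited to the fields handled above; the spec name and values are hypothetical:

{
    "id": "data_security_plan",
    "display_name": "Data Security Plan",
    "category_id": "core_info",
    "library": false,
    "libraries": [],
    "primary_file_name": "data_security_plan.bpmn",
    "primary_process_id": "Process_DataSecurityPlan"
}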
|
||||
|
||||
# Clean up json files
|
||||
remove_all_json_files(path)
|
||||
|
||||
# Clean up libraries
|
||||
lib_path = os.path.join(path, LIBRARY_SPECS)
|
||||
remove_all_json_files(lib_path)
|
||||
workflows = db.session.query(WorkflowSpecModel).filter(WorkflowSpecModel.library == True)
|
||||
update_workflows_for_category(lib_path, workflows, "")
|
||||
|
||||
# Clean up reference Files
|
||||
ref_path = os.path.join(path, REFERENCE_FILES)
|
||||
old_ref_path = os.path.join(path,'Reference')
|
||||
if os.path.exists(old_ref_path):
|
||||
os.rename(old_ref_path, ref_path)
|
||||
remove_all_json_files(ref_path)
|
||||
|
||||
# Clean up the master spec
|
||||
tlw = os.path.join(path, MASTER_SPECIFICATION, "Top Level Workflow")
|
||||
master_path = os.path.join(path, MASTER_SPECIFICATION)
|
||||
if os.path.exists(tlw):
|
||||
for src_file in pathlib.Path(tlw).glob('*.*'):
|
||||
shutil.copy(src_file, master_path)
|
||||
remove_all_json_files(master_path)
|
||||
schema = db.session.query(WorkflowSpecModel).filter(WorkflowSpecModel.is_master_spec == True).first()
|
||||
update_spec(master_path, schema, "")
|
||||
|
||||
|
||||
# Fix all the categories
|
||||
categories = db.session.query(WorkflowSpecCategoryModel).all()
|
||||
for category in categories:
|
||||
json_data = CAT_SCHEMA.dump(category)
|
||||
orig_path = os.path.join(path, category.display_name)
|
||||
new_name = re.sub(r'[^A-Za-z0-9]', '_', category.display_name).lower()
|
||||
new_path = os.path.join(path, new_name)
|
||||
json_data['id'] = new_name
|
||||
if (os.path.exists(orig_path)):
|
||||
os.rename(orig_path, new_path)
|
||||
|
||||
remove_all_json_files(new_path)
|
||||
json_file_name = os.path.join(new_path, 'category.json')
|
||||
with open(json_file_name, 'w') as f_handle:
|
||||
json.dump(json_data, f_handle, indent=4)
|
||||
|
||||
workflows = db.session.query(WorkflowSpecModel).filter(WorkflowSpecModel.category_id == category.id)
|
||||
update_workflows_for_category(new_path, workflows, new_name)
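A note on running this migration, offered only as a sketch: the path constants above expect a SPECS directory next to the application root, and because the module has no main guard, importing it performs the whole cleanup as a side effect.

from crc.services import update_service  # importing runs the category/spec cleanup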
|
@ -1,8 +1,6 @@
|
||||
import hashlib
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime
|
||||
import random
|
||||
import string
|
||||
|
||||
@ -11,8 +9,6 @@ from github import Github, GithubObject, UnknownObjectException
|
||||
from uuid import UUID
|
||||
from lxml import etree
|
||||
|
||||
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
|
||||
from lxml.etree import XMLSyntaxError
|
||||
from sqlalchemy import desc
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
@ -20,7 +16,7 @@ from crc import session, app
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.data_store import DataStoreModel
|
||||
from crc.models.file import FileType, FileDataModel, FileModel, LookupFileModel, LookupDataModel
|
||||
from crc.models.workflow import WorkflowSpecModel, WorkflowModel, WorkflowSpecDependencyFile, WorkflowLibraryModel
|
||||
from crc.models.workflow import WorkflowModel
|
||||
from crc.services.cache_service import cache
|
||||
from crc.services.user_service import UserService
|
||||
import re
|
||||
@ -39,32 +35,7 @@ def camel_to_snake(camel):
|
||||
return re.sub(r'(?<!^)(?=[A-Z])', '_', camel).lower()
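A quick worked example of the helper above, with an input chosen purely for illustration:

camel_to_snake("WorkflowSpecModel")  # -> "workflow_spec_model"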
|
||||
|
||||
|
||||
class FileService(object):
|
||||
|
||||
@staticmethod
|
||||
def add_workflow_spec_file(workflow_spec: WorkflowSpecModel,
|
||||
name, content_type, binary_data, primary=False, is_status=False):
|
||||
"""Create a new file and associate it with a workflow spec."""
|
||||
file_model = session.query(FileModel)\
|
||||
.filter(FileModel.workflow_spec_id == workflow_spec.id)\
|
||||
.filter(FileModel.name == name).first()
|
||||
|
||||
if file_model:
|
||||
if not file_model.archived:
|
||||
# Raise ApiError if the file already exists and is not archived
|
||||
raise ApiError(code="duplicate_file",
|
||||
message='If you want to replace the file, use the update mechanism.')
|
||||
else:
|
||||
file_model = FileModel(
|
||||
workflow_spec_id=workflow_spec.id,
|
||||
name=name,
|
||||
primary=primary,
|
||||
is_status=is_status,
|
||||
)
|
||||
|
||||
return FileService.update_file(file_model, binary_data, content_type)
|
||||
|
||||
|
||||
class UserFileService(object):
|
||||
|
||||
@staticmethod
|
||||
@cache
|
||||
@ -104,29 +75,14 @@ class FileService(object):
|
||||
task_spec=task_spec_name,
|
||||
irb_doc_code=irb_doc_code
|
||||
)
|
||||
return FileService.update_file(file_model, binary_data, content_type)
|
||||
return UserFileService.update_file(file_model, binary_data, content_type)
|
||||
|
||||
@staticmethod
|
||||
def get_workflow_files(workflow_id):
|
||||
"""Returns all the file models associated with a running workflow."""
|
||||
return session.query(FileModel).filter(FileModel.workflow_id == workflow_id).\
|
||||
filter(FileModel.archived == False).\
|
||||
order_by(FileModel.id).all()
|
||||
|
||||
@staticmethod
|
||||
def add_reference_file(name, content_type, binary_data):
|
||||
"""Create a file with the given name, but not associated with a spec or workflow.
|
||||
Only one file with the given reference name can exist."""
|
||||
file_model = session.query(FileModel). \
|
||||
filter(FileModel.is_reference == True). \
|
||||
filter(FileModel.name == name).first()
|
||||
if not file_model:
|
||||
file_model = FileModel(
|
||||
name=name,
|
||||
is_reference=True
|
||||
)
|
||||
return FileService.update_file(file_model, binary_data, content_type)
|
||||
|
||||
@staticmethod
|
||||
def get_extension(file_name):
|
||||
basename, file_extension = os.path.splitext(file_name)
|
||||
@ -146,13 +102,12 @@ class FileService(object):
|
||||
if (latest_data_model is not None) and (md5_checksum == latest_data_model.md5_hash):
|
||||
# This file does not need to be updated, it's the same file. If it is archived,
|
||||
# then de-archive it.
|
||||
file_model.archived = False
|
||||
session.add(file_model)
|
||||
session.commit()
|
||||
return file_model
|
||||
|
||||
# Verify the extension
|
||||
file_extension = FileService.get_extension(file_model.name)
|
||||
file_extension = UserFileService.get_extension(file_model.name)
|
||||
if file_extension not in FileType._member_names_:
|
||||
raise ApiError('unknown_extension',
|
||||
'The file you provided does not have an accepted extension:' +
|
||||
@ -160,22 +115,12 @@ class FileService(object):
|
||||
else:
|
||||
file_model.type = FileType[file_extension]
|
||||
file_model.content_type = content_type
|
||||
file_model.archived = False # Unarchive the file if it is archived.
|
||||
|
||||
if latest_data_model is None:
|
||||
version = 1
|
||||
else:
|
||||
version = latest_data_model.version + 1
|
||||
|
||||
# If this is a BPMN, extract the process id.
|
||||
if file_model.type == FileType.bpmn:
|
||||
try:
|
||||
bpmn: etree.Element = etree.fromstring(binary_data)
|
||||
file_model.primary_process_id = FileService.get_process_id(bpmn)
|
||||
file_model.is_review = FileService.has_swimlane(bpmn)
|
||||
except XMLSyntaxError as xse:
|
||||
raise ApiError("invalid_xml", "Failed to parse xml: " + str(xse), file_name=file_model.name)
|
||||
|
||||
try:
|
||||
user_uid = UserService.current_user().uid
|
||||
except ApiError as ae:
|
||||
@ -191,117 +136,39 @@ class FileService(object):
|
||||
|
||||
return file_model
|
||||
|
||||
@staticmethod
|
||||
def has_swimlane(et_root: etree.Element):
|
||||
"""
|
||||
Look through XML and determine if there are any swimlanes present that have a label.
|
||||
"""
|
||||
elements = et_root.xpath('//bpmn:lane',
|
||||
namespaces={'bpmn':'http://www.omg.org/spec/BPMN/20100524/MODEL'})
|
||||
retval = False
|
||||
for el in elements:
|
||||
if el.get('name'):
|
||||
retval = True
|
||||
return retval
|
||||
|
||||
@staticmethod
|
||||
def get_process_id(et_root: etree.Element):
|
||||
process_elements = []
|
||||
for child in et_root:
|
||||
if child.tag.endswith('process') and child.attrib.get('isExecutable', False):
|
||||
process_elements.append(child)
|
||||
|
||||
if len(process_elements) == 0:
|
||||
raise ValidationException('No executable process tag found')
|
||||
|
||||
# There are multiple root elements
|
||||
if len(process_elements) > 1:
|
||||
|
||||
# Look for the element that has the startEvent in it
|
||||
for e in process_elements:
|
||||
this_element: etree.Element = e
|
||||
for child_element in list(this_element):
|
||||
if child_element.tag.endswith('startEvent'):
|
||||
return this_element.attrib['id']
|
||||
|
||||
raise ValidationException('No start event found in %s' % et_root.attrib['id'])
|
||||
|
||||
return process_elements[0].attrib['id']
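A small illustration of the two BPMN helpers above; the XML is a hypothetical minimal document, not one of the project's specs, and the calls assume the methods stay reachable as the static methods shown in this hunk:

xml = b'''<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL">
  <bpmn:process id="Process_Example" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1"/>
  </bpmn:process>
</bpmn:definitions>'''
root = etree.fromstring(xml)
FileService.get_process_id(root)  # -> "Process_Example"
FileService.has_swimlane(root)    # -> False, since no labelled bpmn:lane is present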
|
||||
|
||||
@staticmethod
|
||||
def get_files_for_study(study_id, irb_doc_code=None):
|
||||
query = session.query(FileModel).\
|
||||
join(WorkflowModel).\
|
||||
filter(WorkflowModel.study_id == study_id).\
|
||||
filter(FileModel.archived == False)
|
||||
filter(WorkflowModel.study_id == study_id)
|
||||
if irb_doc_code:
|
||||
query = query.filter(FileModel.irb_doc_code == irb_doc_code)
|
||||
return query.all()
|
||||
|
||||
@staticmethod
|
||||
def get_files(workflow_spec_id=None, workflow_id=None,
|
||||
name=None, is_reference=False, irb_doc_code=None, include_libraries=False):
|
||||
query = session.query(FileModel).filter_by(is_reference=is_reference)
|
||||
if workflow_spec_id:
|
||||
if include_libraries:
|
||||
libraries = session.query(WorkflowLibraryModel).filter(
|
||||
WorkflowLibraryModel.workflow_spec_id==workflow_spec_id).all()
|
||||
library_workflow_specs = [x.library_spec_id for x in libraries]
|
||||
library_workflow_specs.append(workflow_spec_id)
|
||||
query = query.filter(FileModel.workflow_spec_id.in_(library_workflow_specs))
|
||||
else:
|
||||
query = query.filter(FileModel.workflow_spec_id == workflow_spec_id)
|
||||
|
||||
elif workflow_id:
|
||||
query = query.filter_by(workflow_id=workflow_id)
|
||||
def get_files(workflow_id=None, name=None, irb_doc_code=None):
|
||||
if workflow_id is not None:
|
||||
query = session.query(FileModel).filter_by(workflow_id=workflow_id)
|
||||
if irb_doc_code:
|
||||
query = query.filter_by(irb_doc_code=irb_doc_code)
|
||||
elif is_reference:
|
||||
query = query.filter_by(is_reference=True)
|
||||
|
||||
if name:
|
||||
query = query.filter_by(name=name)
|
||||
|
||||
query = query.filter(FileModel.archived == False)
|
||||
|
||||
query = query.order_by(FileModel.id)
|
||||
|
||||
results = query.all()
|
||||
return results
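A hedged usage sketch of the narrowed query above; the workflow id and document code are made up:

files = UserFileService.get_files(workflow_id=42, irb_doc_code="Study_Protocol_Document")
# Returns only unarchived files attached to workflow 42 with that IRB document code, ordered by id.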
|
||||
|
||||
@staticmethod
|
||||
def get_spec_data_files(workflow_spec_id, workflow_id=None, name=None, include_libraries=False):
|
||||
"""Returns all the FileDataModels related to a workflow specification.
|
||||
If a workflow is specified, returns the version of the spec related
|
||||
to that workflow, otherwise, returns the latest files."""
|
||||
if workflow_id:
|
||||
query = session.query(FileDataModel) \
|
||||
.join(WorkflowSpecDependencyFile) \
|
||||
.filter(WorkflowSpecDependencyFile.workflow_id == workflow_id) \
|
||||
.order_by(FileDataModel.id)
|
||||
if name:
|
||||
query = query.join(FileModel).filter(FileModel.name == name)
|
||||
return query.all()
|
||||
else:
|
||||
"""Returns all the latest files related to a workflow specification"""
|
||||
file_models = FileService.get_files(workflow_spec_id=workflow_spec_id,include_libraries=include_libraries)
|
||||
latest_data_files = []
|
||||
for file_model in file_models:
|
||||
if name and file_model.name == name:
|
||||
latest_data_files.append(FileService.get_file_data(file_model.id))
|
||||
elif not name:
|
||||
latest_data_files.append(FileService.get_file_data(file_model.id))
|
||||
return latest_data_files
|
||||
query = query.filter_by(name=name)
|
||||
|
||||
query = query.order_by(FileModel.id)
|
||||
|
||||
results = query.all()
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def get_workflow_data_files(workflow_id=None):
|
||||
"""Returns all the FileDataModels related to a running workflow -
|
||||
So these are the latest data files that were uploaded or generated
|
||||
that go along with this workflow. Not related to the spec in any way"""
|
||||
file_models = FileService.get_files(workflow_id=workflow_id)
|
||||
file_models = UserFileService.get_files(workflow_id=workflow_id)
|
||||
latest_data_files = []
|
||||
for file_model in file_models:
|
||||
latest_data_files.append(FileService.get_file_data(file_model.id))
|
||||
latest_data_files.append(UserFileService.get_file_data(file_model.id))
|
||||
return latest_data_files
|
||||
|
||||
@staticmethod
|
||||
@ -315,72 +182,18 @@ class FileService(object):
|
||||
query = query.order_by(desc(FileDataModel.date_created))
|
||||
return query.first()
|
||||
|
||||
@staticmethod
|
||||
def get_reference_file_data(file_name):
|
||||
file_model = session.query(FileModel). \
|
||||
filter(FileModel.is_reference == True). \
|
||||
filter(FileModel.name == file_name).first()
|
||||
if not file_model:
|
||||
raise ApiError("file_not_found", "There is no reference file with the name '%s'" % file_name)
|
||||
return FileService.get_file_data(file_model.id)
|
||||
|
||||
@staticmethod
|
||||
def get_workflow_file_data(workflow, file_name):
|
||||
"""This method should be deleted, find where it is used, and remove this method.
|
||||
Given a SPIFF Workflow Model, tracks down a file with the given name in the database and returns its data"""
|
||||
workflow_spec_model = FileService.find_spec_model_in_db(workflow)
|
||||
|
||||
if workflow_spec_model is None:
|
||||
raise ApiError(code="unknown_workflow",
|
||||
message="Something is wrong. I can't find the workflow you are using.")
|
||||
|
||||
file_data_model = session.query(FileDataModel) \
|
||||
.join(FileModel) \
|
||||
.filter(FileModel.name == file_name) \
|
||||
.filter(FileModel.workflow_spec_id == workflow_spec_model.id).first()
|
||||
|
||||
if file_data_model is None:
|
||||
raise ApiError(code="file_missing",
|
||||
message="Can not find a file called '%s' within workflow specification '%s'"
|
||||
% (file_name, workflow_spec_model.id))
|
||||
|
||||
return file_data_model
|
||||
|
||||
@staticmethod
|
||||
def find_spec_model_in_db(workflow):
|
||||
""" Search for the workflow """
|
||||
# When the workflow spec model is created, we record the primary process id,
|
||||
# then we can look it up. As there is the potential for sub-workflows, we
|
||||
# may need to travel up to locate the primary process.
|
||||
spec = workflow.spec
|
||||
workflow_model = session.query(WorkflowSpecModel).join(FileModel). \
|
||||
filter(FileModel.primary_process_id == spec.name).first()
|
||||
if workflow_model is None and workflow != workflow.outer_workflow:
|
||||
return FileService.find_spec_model_in_db(workflow.outer_workflow)
|
||||
|
||||
return workflow_model
|
||||
|
||||
@staticmethod
|
||||
def delete_file(file_id):
|
||||
try:
|
||||
data_models = session.query(FileDataModel).filter_by(file_model_id=file_id).all()
|
||||
for dm in data_models:
|
||||
lookup_files = session.query(LookupFileModel).filter_by(file_data_model_id=dm.id).all()
|
||||
for lf in lookup_files:
|
||||
session.query(LookupDataModel).filter_by(lookup_file_model_id=lf.id).delete()
|
||||
session.query(LookupFileModel).filter_by(id=lf.id).delete()
|
||||
session.query(FileDataModel).filter_by(file_model_id=file_id).delete()
|
||||
session.query(DataStoreModel).filter_by(file_id=file_id).delete()
|
||||
session.query(FileModel).filter_by(id=file_id).delete()
|
||||
session.commit()
|
||||
except IntegrityError as ie:
|
||||
# We can't delete the file or file data, because it is referenced elsewhere,
|
||||
# but we can at least mark it as deleted on the table.
|
||||
session.rollback()
|
||||
file_model = session.query(FileModel).filter_by(id=file_id).first()
|
||||
file_model.archived = True
|
||||
session.commit()
|
||||
app.logger.info("Failed to delete file, so archiving it instead. %i, due to %s" % (file_id, str(ie)))
|
||||
raise ApiError('Delete Failed', "Unable to delete file. ")
|
||||
|
||||
@staticmethod
|
||||
def get_repo_branches():
|
||||
@ -547,49 +360,3 @@ class FileService(object):
|
||||
dmn_file = prefix + etree.tostring(root)
|
||||
|
||||
return dmn_file
|
||||
|
||||
@staticmethod
|
||||
def cleanup_file_data(copies_to_keep=1):
|
||||
if isinstance(copies_to_keep, int) and copies_to_keep > 0:
|
||||
|
||||
deleted_models = []
|
||||
saved_models = []
|
||||
current_models = []
|
||||
|
||||
session.flush()
|
||||
|
||||
workflow_spec_models = session.query(WorkflowSpecModel).all()
|
||||
|
||||
for wf_spec_model in workflow_spec_models:
|
||||
file_models = session.query(FileModel)\
|
||||
.filter(FileModel.workflow_spec_id == wf_spec_model.id)\
|
||||
.all()
|
||||
|
||||
for file_model in file_models:
|
||||
file_data_models = session.query(FileDataModel)\
|
||||
.filter(FileDataModel.file_model_id == file_model.id)\
|
||||
.order_by(desc(FileDataModel.date_created))\
|
||||
.all()
|
||||
current_models.append(file_data_models[:copies_to_keep])
|
||||
for fd_model in file_data_models[copies_to_keep:]:
|
||||
dependencies = session.query(WorkflowSpecDependencyFile)\
|
||||
.filter(WorkflowSpecDependencyFile.file_data_id == fd_model.id)\
|
||||
.all()
|
||||
if len(dependencies) > 0:
|
||||
saved_models.append(fd_model)
|
||||
continue
|
||||
lookups = session.query(LookupFileModel)\
|
||||
.filter(LookupFileModel.file_data_model_id == fd_model.id)\
|
||||
.all()
|
||||
if len(lookups) > 0:
|
||||
saved_models.append(fd_model)
|
||||
continue
|
||||
deleted_models.append(fd_model)
|
||||
session.delete(fd_model)
|
||||
|
||||
session.commit()
|
||||
return current_models, saved_models, deleted_models
|
||||
|
||||
else:
|
||||
raise ApiError(code='bad_keep',
|
||||
message='You must keep at least 1 version')
|
@ -1,35 +1,33 @@
|
||||
import re
|
||||
from typing import List
|
||||
|
||||
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
|
||||
from SpiffWorkflow.bpmn.specs.UserTask import UserTask
|
||||
from SpiffWorkflow.bpmn.specs.events import EndEvent, CancelEventDefinition
|
||||
from SpiffWorkflow.serializer.exceptions import MissingSpecError
|
||||
from SpiffWorkflow.util.metrics import timeit, firsttime, sincetime
|
||||
from lxml import etree
|
||||
import shlex
|
||||
from datetime import datetime
|
||||
from typing import List
|
||||
|
||||
from SpiffWorkflow import Task as SpiffTask, WorkflowException, Task
|
||||
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
|
||||
from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
|
||||
from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
|
||||
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
|
||||
from SpiffWorkflow.exceptions import WorkflowTaskExecException
|
||||
from SpiffWorkflow.specs import WorkflowSpec
|
||||
|
||||
import crc
|
||||
from crc import session, app
|
||||
from crc import session
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.file import FileDataModel, FileModel, FileType
|
||||
from crc.models.file import FileModel, FileType, File
|
||||
from crc.models.task_event import TaskEventModel
|
||||
from crc.models.user import UserModelSchema
|
||||
from crc.models.workflow import WorkflowStatus, WorkflowModel, WorkflowSpecDependencyFile
|
||||
from crc.models.workflow import WorkflowStatus, WorkflowModel, WorkflowSpecInfo
|
||||
from crc.scripts.script import Script
|
||||
from crc.services.file_service import FileService
|
||||
from crc import app
|
||||
from crc.services.spec_file_service import SpecFileService
|
||||
from crc.services.user_file_service import UserFileService
|
||||
from crc.services.user_service import UserService
|
||||
from crc.services.workflow_spec_service import WorkflowSpecService
|
||||
|
||||
|
||||
class CustomBpmnScriptEngine(PythonScriptEngine):
|
||||
@ -61,10 +59,8 @@ class CustomBpmnScriptEngine(PythonScriptEngine):
|
||||
"Error evaluating expression "
|
||||
"'%s', %s" % (expression, str(e)))
|
||||
|
||||
|
||||
@timeit
|
||||
def execute(self, task: SpiffTask, script, data):
|
||||
|
||||
study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
|
||||
if WorkflowProcessor.WORKFLOW_ID_KEY in task.workflow.data:
|
||||
workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
|
||||
@ -106,16 +102,14 @@ class WorkflowProcessor(object):
|
||||
"""Create a Workflow Processor based on the serialized information available in the workflow model."""
|
||||
|
||||
self.workflow_model = workflow_model
|
||||
|
||||
if workflow_model.bpmn_workflow_json is None: # The workflow was never started.
|
||||
self.spec_data_files = FileService.get_spec_data_files(
|
||||
workflow_spec_id=workflow_model.workflow_spec_id,include_libraries=True)
|
||||
spec = self.get_spec(self.spec_data_files, workflow_model.workflow_spec_id)
|
||||
else:
|
||||
self.spec_data_files = FileService.get_spec_data_files(
|
||||
workflow_spec_id=workflow_model.workflow_spec_id,
|
||||
workflow_id=workflow_model.id)
|
||||
spec = None
|
||||
self.workflow_spec_service = WorkflowSpecService()
|
||||
spec = None
|
||||
if workflow_model.bpmn_workflow_json is None:
|
||||
spec_info = self.workflow_spec_service.get_spec(workflow_model.workflow_spec_id)
|
||||
if spec_info is None:
|
||||
raise (ApiError("missing_spec", "The spec this workflow references does not currently exist."))
|
||||
self.spec_files = SpecFileService.get_files(spec_info, include_libraries=True)
|
||||
spec = self.get_spec(self.spec_files, spec_info)
|
||||
|
||||
self.workflow_spec_id = workflow_model.workflow_spec_id
|
||||
|
||||
@ -146,23 +140,16 @@ class WorkflowProcessor(object):
|
||||
except MissingSpecError as ke:
|
||||
raise ApiError(code="unexpected_workflow_structure",
|
||||
message="Failed to deserialize workflow"
|
||||
" '%s' version %s, due to a mis-placed or missing task '%s'" %
|
||||
(self.workflow_spec_id, self.get_version_string(), str(ke)))
|
||||
|
||||
# set whether this is the latest spec file.
|
||||
if self.spec_data_files == FileService.get_spec_data_files(workflow_spec_id=workflow_model.workflow_spec_id):
|
||||
self.is_latest_spec = True
|
||||
else:
|
||||
self.is_latest_spec = False
|
||||
" '%s' due to a mis-placed or missing task '%s'" %
|
||||
(self.workflow_spec_id, str(ke)))
|
||||
|
||||
@staticmethod
|
||||
def reset(workflow_model, clear_data=False, delete_files=False):
|
||||
print('WorkflowProcessor: reset: ')
|
||||
|
||||
# Try to execute a cancel notify
|
||||
try:
|
||||
wp = WorkflowProcessor(workflow_model)
|
||||
wp.cancel_notify() # This executes a notification to all endpoints that
|
||||
wp.cancel_notify() # This executes a notification to all endpoints that
|
||||
except Exception as e:
|
||||
app.logger.error(f"Unable to send a cancel notify for workflow %s during a reset."
|
||||
f" Continuing with the reset anyway so we don't get in an unresolvable"
|
||||
@ -179,7 +166,7 @@ class WorkflowProcessor(object):
|
||||
if delete_files:
|
||||
files = FileModel.query.filter(FileModel.workflow_id == workflow_model.id).all()
|
||||
for file in files:
|
||||
FileService.delete_file(file.id)
|
||||
UserFileService.delete_file(file.id)
|
||||
session.commit()
|
||||
return WorkflowProcessor(workflow_model)
|
||||
|
||||
@ -191,10 +178,6 @@ class WorkflowProcessor(object):
|
||||
bpmn_workflow = BpmnWorkflow(spec, script_engine=self._script_engine)
|
||||
bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = workflow_model.study_id
|
||||
bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = validate_only
|
||||
# try:
|
||||
# bpmn_workflow.do_engine_steps()
|
||||
# except WorkflowException as we:
|
||||
# raise ApiError.from_task_spec("error_loading_workflow", str(we), we.sender)
|
||||
return bpmn_workflow
|
||||
|
||||
def save(self):
|
||||
@ -206,71 +189,18 @@ class WorkflowProcessor(object):
|
||||
self.workflow_model.total_tasks = len(tasks)
|
||||
self.workflow_model.completed_tasks = sum(1 for t in tasks if t.state in complete_states)
|
||||
self.workflow_model.last_updated = datetime.utcnow()
|
||||
self.update_dependencies(self.spec_data_files)
|
||||
session.add(self.workflow_model)
|
||||
session.commit()
|
||||
|
||||
def get_version_string(self):
|
||||
# this could potentially become expensive to load all the data in the data models.
|
||||
# in which case we might consider using a deferred loader for the actual data, but
|
||||
# trying not to pre-optimize.
|
||||
file_data_models = FileService.get_spec_data_files(self.workflow_model.workflow_spec_id,
|
||||
self.workflow_model.id)
|
||||
return WorkflowProcessor.__get_version_string_for_data_models(file_data_models)
|
||||
|
||||
@staticmethod
|
||||
def get_latest_version_string_for_spec(spec_id):
|
||||
file_data_models = FileService.get_spec_data_files(spec_id)
|
||||
return WorkflowProcessor.__get_version_string_for_data_models(file_data_models)
|
||||
|
||||
@staticmethod
|
||||
def __get_version_string_for_data_models(file_data_models):
|
||||
"""Version is in the format v[VERSION] (FILE_ID_LIST)
|
||||
For example, a single bpmn file with only one version would be
|
||||
v1 (12) Where 12 is the id of the file data model that is used to create the
|
||||
specification. If multiple files exist, they are added on in
|
||||
dot notation to both the version number and the file list. So
|
||||
a Spec that includes a BPMN, DMN, an a Word file all on the first
|
||||
version would be v1.1.1 (12.45.21)"""
|
||||
|
||||
major_version = 0 # The version of the primary file.
|
||||
minor_version = [] # The versions of the minor files if any.
|
||||
file_ids = []
|
||||
for file_data in file_data_models:
|
||||
file_ids.append(file_data.id)
|
||||
if file_data.file_model.primary:
|
||||
major_version = file_data.version
|
||||
else:
|
||||
minor_version.append(file_data.version)
|
||||
minor_version.insert(0, major_version) # Add major version to beginning.
|
||||
version = ".".join(str(x) for x in minor_version)
|
||||
files = ".".join(str(x) for x in file_ids)
|
||||
full_version = "v%s (%s)" % (version, files)
|
||||
return full_version
|
||||
|
||||
def update_dependencies(self, spec_data_files):
|
||||
existing_dependencies = FileService.get_spec_data_files(
|
||||
workflow_spec_id=self.workflow_model.workflow_spec_id,
|
||||
workflow_id=self.workflow_model.id)
|
||||
|
||||
# Don't save the dependencies if they haven't changed.
|
||||
if existing_dependencies == spec_data_files:
|
||||
return
|
||||
|
||||
# Remove all existing dependencies, and replace them.
|
||||
self.workflow_model.dependencies = []
|
||||
for file_data in spec_data_files:
|
||||
self.workflow_model.dependencies.append(WorkflowSpecDependencyFile(file_data_id=file_data.id))
|
||||
|
||||
@staticmethod
|
||||
@timeit
|
||||
def run_master_spec(spec_model, study):
|
||||
"""Executes a BPMN specification for the given study, without recording any information to the database
|
||||
Useful for running the master specification, which should not persist. """
|
||||
lasttime = firsttime()
|
||||
spec_data_files = FileService.get_spec_data_files(spec_model.id)
|
||||
spec_files = SpecFileService().get_files(spec_model, include_libraries=True)
|
||||
lasttime = sincetime('load Files', lasttime)
|
||||
spec = WorkflowProcessor.get_spec(spec_data_files, spec_model.id)
|
||||
spec = WorkflowProcessor.get_spec(spec_files, spec_model)
|
||||
lasttime = sincetime('get spec', lasttime)
|
||||
try:
|
||||
bpmn_workflow = BpmnWorkflow(spec, script_engine=WorkflowProcessor._script_engine)
|
||||
@ -294,27 +224,24 @@ class WorkflowProcessor(object):
|
||||
return parser
|
||||
|
||||
@staticmethod
|
||||
def get_spec(file_data_models: List[FileDataModel], workflow_spec_id):
|
||||
def get_spec(files: List[File], workflow_spec_info: WorkflowSpecInfo):
|
||||
"""Returns a SpiffWorkflow specification for the given workflow spec,
|
||||
using the files provided. The Workflow_spec_id is only used to generate
|
||||
better error messages."""
|
||||
using the files provided. """
|
||||
parser = WorkflowProcessor.get_parser()
|
||||
process_id = None
|
||||
|
||||
for file_data in file_data_models:
|
||||
if file_data.file_model.type == FileType.bpmn:
|
||||
bpmn: etree.Element = etree.fromstring(file_data.data)
|
||||
if file_data.file_model.primary and file_data.file_model.workflow_spec_id == workflow_spec_id:
|
||||
process_id = FileService.get_process_id(bpmn)
|
||||
parser.add_bpmn_xml(bpmn, filename=file_data.file_model.name)
|
||||
elif file_data.file_model.type == FileType.dmn:
|
||||
dmn: etree.Element = etree.fromstring(file_data.data)
|
||||
parser.add_dmn_xml(dmn, filename=file_data.file_model.name)
|
||||
if process_id is None:
|
||||
for file in files:
|
||||
data = SpecFileService.get_data(workflow_spec_info, file.name)
|
||||
if file.type == FileType.bpmn:
|
||||
bpmn: etree.Element = etree.fromstring(data)
|
||||
parser.add_bpmn_xml(bpmn, filename=file.name)
|
||||
elif file.type == FileType.dmn:
|
||||
dmn: etree.Element = etree.fromstring(data)
|
||||
parser.add_dmn_xml(dmn, filename=file.name)
|
||||
if workflow_spec_info.primary_process_id is None:
|
||||
raise (ApiError(code="no_primary_bpmn_error",
|
||||
message="There is no primary BPMN model defined for workflow %s" % workflow_spec_id))
|
||||
message="There is no primary BPMN model defined for workflow %s" % workflow_spec_info.id))
|
||||
try:
|
||||
spec = parser.get_spec(process_id)
|
||||
spec = parser.get_spec(workflow_spec_info.primary_process_id)
|
||||
except ValidationException as ve:
|
||||
raise ApiError(code="workflow_validation_error",
|
||||
message="Failed to parse the Workflow Specification. " +
|
||||
@ -337,19 +264,6 @@ class WorkflowProcessor(object):
|
||||
else:
|
||||
return WorkflowStatus.waiting
|
||||
|
||||
# def hard_reset(self):
|
||||
# """Recreate this workflow. This will be useful when a workflow specification changes.
|
||||
# """
|
||||
# self.spec_data_files = FileService.get_spec_data_files(workflow_spec_id=self.workflow_spec_id)
|
||||
# new_spec = WorkflowProcessor.get_spec(self.spec_data_files, self.workflow_spec_id)
|
||||
# new_bpmn_workflow = BpmnWorkflow(new_spec, script_engine=self._script_engine)
|
||||
# new_bpmn_workflow.data = self.bpmn_workflow.data
|
||||
# try:
|
||||
# new_bpmn_workflow.do_engine_steps()
|
||||
# except WorkflowException as we:
|
||||
# raise ApiError.from_task_spec("hard_reset_engine_steps_error", str(we), we.sender)
|
||||
# self.bpmn_workflow = new_bpmn_workflow
|
||||
|
||||
def get_status(self):
|
||||
return self.status_of(self.bpmn_workflow)
|
||||
|
||||
@ -362,8 +276,10 @@ class WorkflowProcessor(object):
|
||||
|
||||
def cancel_notify(self):
|
||||
try:
|
||||
self.bpmn_workflow.signal('cancel') # generate a cancel signal.
|
||||
self.bpmn_workflow.cancel_notify() # call cancel_notify in
|
||||
# A little hacky, but make the bpmn_workflow catch a cancel event.
|
||||
self.bpmn_workflow.signal('cancel') # generate a cancel signal.
|
||||
self.bpmn_workflow.catch(CancelEventDefinition())
|
||||
self.bpmn_workflow.do_engine_steps()
|
||||
except WorkflowTaskExecException as we:
|
||||
raise ApiError.from_workflow_exception("task_error", str(we), we)
|
||||
|
||||
|
@ -1,6 +1,7 @@
|
||||
import copy
|
||||
import json
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
import random
|
||||
import string
|
||||
@ -10,15 +11,15 @@ from typing import List
|
||||
import jinja2
|
||||
from SpiffWorkflow import Task as SpiffTask, WorkflowException, NavItem
|
||||
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
|
||||
from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
|
||||
from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask
|
||||
from SpiffWorkflow.bpmn.specs.ScriptTask import ScriptTask
|
||||
from SpiffWorkflow.bpmn.specs.StartEvent import StartEvent
|
||||
from SpiffWorkflow.bpmn.specs.UserTask import UserTask
|
||||
from SpiffWorkflow.bpmn.specs.events import EndEvent, StartEvent
|
||||
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
|
||||
from SpiffWorkflow.specs import CancelTask, StartTask
|
||||
from SpiffWorkflow.util.deep_merge import DeepMerge
|
||||
from SpiffWorkflow.util.metrics import timeit
|
||||
from sqlalchemy.exc import InvalidRequestError
|
||||
|
||||
from crc import db, app, session
|
||||
from crc.api.common import ApiError
|
||||
@ -28,16 +29,17 @@ from crc.models.ldap import LdapModel
|
||||
from crc.models.study import StudyModel
|
||||
from crc.models.task_event import TaskEventModel
|
||||
from crc.models.user import UserModel
|
||||
from crc.models.workflow import WorkflowModel, WorkflowStatus, WorkflowSpecModel, WorkflowSpecCategoryModel
|
||||
from crc.models.workflow import WorkflowModel, WorkflowStatus
|
||||
from crc.services.data_store_service import DataStoreBase
|
||||
|
||||
from crc.services.document_service import DocumentService
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.jinja_service import JinjaService
|
||||
from crc.services.lookup_service import LookupService
|
||||
from crc.services.spec_file_service import SpecFileService
|
||||
from crc.services.study_service import StudyService
|
||||
from crc.services.user_service import UserService
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
from crc.services.workflow_spec_service import WorkflowSpecService
|
||||
|
||||
from flask import request
|
||||
from sentry_sdk import capture_message, push_scope
|
||||
@ -50,7 +52,7 @@ class WorkflowService(object):
|
||||
TASK_ACTION_SOFT_RESET = "SOFT_RESET"
|
||||
TASK_ACTION_ASSIGNMENT = "ASSIGNMENT" # Whenever the lane changes between tasks we assign the task to specifc user.
|
||||
|
||||
TASK_STATE_LOCKED = "LOCKED" # When the task belongs to a different user.
|
||||
TASK_STATE_LOCKED = "LOCKED" # When the task belongs to a different user.
|
||||
|
||||
"""Provides tools for processing workflows and tasks. This
|
||||
should at some point, be the only way to work with Workflows, and
|
||||
@ -61,12 +63,11 @@ class WorkflowService(object):
|
||||
handles the testing of a workflow specification by completing it with
|
||||
random selections, attempting to mimic a front end as much as possible. """
|
||||
|
||||
from crc.services.user_service import UserService
|
||||
@staticmethod
|
||||
def make_test_workflow(spec_id, validate_study_id=None):
|
||||
try:
|
||||
user = UserService.current_user()
|
||||
except ApiError as e:
|
||||
except ApiError:
|
||||
user = None
|
||||
if not user:
|
||||
user = db.session.query(UserModel).filter_by(uid="test").first()
|
||||
@ -87,11 +88,16 @@ class WorkflowService(object):
|
||||
workflow_spec_id=spec_id,
|
||||
last_updated=datetime.utcnow(),
|
||||
study=study)
|
||||
db.session.add(workflow_model)
|
||||
db.session.commit()
|
||||
return workflow_model
|
||||
|
||||
@staticmethod
|
||||
def delete_test_data(workflow: WorkflowModel):
|
||||
db.session.delete(workflow)
|
||||
try:
|
||||
db.session.delete(workflow)
|
||||
except InvalidRequestError:
|
||||
pass
|
||||
# Also, delete any test study or user models that may have been created.
|
||||
for study in db.session.query(StudyModel).filter(StudyModel.user_uid == "test"):
|
||||
StudyService.delete_study(study.id)
|
||||
@ -105,7 +111,7 @@ class WorkflowService(object):
|
||||
|
||||
@staticmethod
|
||||
def do_waiting():
|
||||
records = db.session.query(WorkflowModel).filter(WorkflowModel.status==WorkflowStatus.waiting).all()
|
||||
records = db.session.query(WorkflowModel).filter(WorkflowModel.status == WorkflowStatus.waiting).all()
|
||||
for workflow_model in records:
|
||||
try:
|
||||
app.logger.info('Processing workflow %s' % workflow_model.id)
|
||||
@ -117,7 +123,7 @@ class WorkflowService(object):
|
||||
workflow_model.status = WorkflowStatus.erroring
|
||||
app.logger.error(f"Error running waiting task for workflow #%i (%s) for study #%i. %s" %
|
||||
(workflow_model.id,
|
||||
workflow_model.workflow_spec.id,
|
||||
workflow_model.workflow_spec_id,
|
||||
workflow_model.study_id,
|
||||
str(e)))
|
||||
|
||||
@ -187,41 +193,43 @@ class WorkflowService(object):
|
||||
count = 0
|
||||
|
||||
while not processor.bpmn_workflow.is_completed():
|
||||
processor.bpmn_workflow.get_deep_nav_list() # Assure no errors with navigation.
|
||||
exit_task = processor.bpmn_workflow.do_engine_steps(exit_at=test_until)
|
||||
if (exit_task != None):
|
||||
raise ApiError.from_task("validation_break",
|
||||
f"The validation has been exited early on task '{exit_task.task_spec.id}' and was parented by ",
|
||||
exit_task.parent)
|
||||
tasks = processor.bpmn_workflow.get_tasks(SpiffTask.READY)
|
||||
for task in tasks:
|
||||
if task.task_spec.lane is not None and task.task_spec.lane not in task.data:
|
||||
raise ApiError.from_task("invalid_role",
|
||||
f"This task is in a lane called '{task.task_spec.lane}', The "
|
||||
f" current task data must have information mapping this role to "
|
||||
f" a unique user id.", task)
|
||||
task_api = WorkflowService.spiff_task_to_api_task(
|
||||
task,
|
||||
add_docs_and_forms=True) # Assure we try to process the documentation, and raise those errors.
|
||||
# make sure forms have a form key
|
||||
if hasattr(task_api, 'form') and task_api.form is not None and task_api.form.key == '':
|
||||
raise ApiError(code='missing_form_key',
|
||||
message='Forms must include a Form Key.',
|
||||
task_id=task.id,
|
||||
task_name=task.get_name())
|
||||
WorkflowService.populate_form_with_random_data(task, task_api, required_only)
|
||||
processor.complete_task(task)
|
||||
if test_until == task.task_spec.name:
|
||||
raise ApiError.from_task(
|
||||
"validation_break",
|
||||
f"The validation has been exited early on task '{task.task_spec.name}' "
|
||||
f"and was parented by ",
|
||||
task.parent)
|
||||
count += 1
|
||||
if count >= 100:
|
||||
raise ApiError(code='unending_validation',
|
||||
message=f'There appears to be no way to complete this workflow,'
|
||||
f' halting validation.')
|
||||
processor.bpmn_workflow.get_deep_nav_list() # Assure no errors with navigation.
|
||||
|
||||
exit_task = processor.bpmn_workflow.do_engine_steps(exit_at=test_until)
|
||||
if (exit_task != None):
|
||||
raise ApiError.from_task("validation_break",
|
||||
f"The validation has been exited early on task '{exit_task.task_spec.id}' "
|
||||
f"and was parented by ",
|
||||
exit_task.parent)
|
||||
tasks = processor.bpmn_workflow.get_tasks(SpiffTask.READY)
|
||||
for task in tasks:
|
||||
if task.task_spec.lane is not None and task.task_spec.lane not in task.data:
|
||||
raise ApiError.from_task("invalid_role",
|
||||
f"This task is in a lane called '{task.task_spec.lane}', The "
|
||||
f" current task data must have information mapping this role to "
|
||||
f" a unique user id.", task)
|
||||
task_api = WorkflowService.spiff_task_to_api_task(
|
||||
task,
|
||||
add_docs_and_forms=True) # Assure we try to process the documentation, and raise those errors.
|
||||
# make sure forms have a form key
|
||||
if hasattr(task_api, 'form') and task_api.form is not None and task_api.form.key == '':
|
||||
raise ApiError(code='missing_form_key',
|
||||
message='Forms must include a Form Key.',
|
||||
task_id=task.id,
|
||||
task_name=task.get_name())
|
||||
WorkflowService.populate_form_with_random_data(task, task_api, required_only)
|
||||
processor.complete_task(task)
|
||||
if test_until == task.task_spec.name:
|
||||
raise ApiError.from_task(
|
||||
"validation_break",
|
||||
f"The validation has been exited early on task '{task.task_spec.name}' "
|
||||
f"and was parented by ",
|
||||
task.parent)
|
||||
count += 1
|
||||
if count >= 100:
|
||||
raise ApiError(code='unending_validation',
|
||||
message=f'There appears to be no way to complete this workflow,'
|
||||
f' halting validation.')
|
||||
|
||||
WorkflowService._process_documentation(processor.bpmn_workflow.last_task.parent.parent)
|
||||
|
||||
@ -244,20 +252,21 @@ class WorkflowService(object):
|
||||
form_data = data
|
||||
|
||||
hide_groups = []
|
||||
|
||||
for field in task_api.form.fields:
|
||||
# Assure we have a field type
|
||||
if field.type is None:
|
||||
raise ApiError(code='invalid_form_data',
|
||||
message = f'Type is missing for field "{field.id}". A field type must be provided.',
|
||||
task_id = task.id,
|
||||
task_name = task.get_name())
|
||||
message=f'Type is missing for field "{field.id}". A field type must be provided.',
|
||||
task_id=task.id,
|
||||
task_name=task.get_name())
|
||||
# Assure we have valid ids
|
||||
if not WorkflowService.check_field_id(field.id):
|
||||
raise ApiError(code='invalid_form_id',
|
||||
message=f'Invalid Field name: "{field.id}". A field ID must begin with a letter, '
|
||||
f'and can only contain letters, numbers, and "_"',
|
||||
task_id = task.id,
|
||||
task_name = task.get_name())
|
||||
task_id=task.id,
|
||||
task_name=task.get_name())
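The naming rule in that error message amounts to something like the following; this is an assumption about check_field_id, whose implementation is not part of this hunk:

re.match(r'^[a-zA-Z][a-zA-Z0-9_]*$', field.id) is not None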
|
||||
# Assure field has valid properties
|
||||
WorkflowService.check_field_properties(field, task)
|
||||
WorkflowService.check_field_type(field, task)
|
||||
@ -268,7 +277,8 @@ class WorkflowService(object):
|
||||
field.label = result
|
||||
|
||||
# If a field is hidden and required, it must have a default value or value_expression
|
||||
if field.has_property(Task.FIELD_PROP_HIDE_EXPRESSION) and field.has_validation(Task.FIELD_CONSTRAINT_REQUIRED):
|
||||
if field.has_property(Task.FIELD_PROP_HIDE_EXPRESSION) and field.has_validation(
|
||||
Task.FIELD_CONSTRAINT_REQUIRED):
|
||||
if not field.has_property(Task.FIELD_PROP_VALUE_EXPRESSION) and \
|
||||
(not (hasattr(field, 'default_value')) or field.default_value is None):
|
||||
raise ApiError(code='hidden and required field missing default',
|
||||
@ -277,17 +287,20 @@ class WorkflowService(object):
|
||||
task_name=task.get_name())
|
||||
|
||||
# If the field is hidden and not required, it should not produce a value.
|
||||
if field.has_property(Task.FIELD_PROP_HIDE_EXPRESSION) and not field.has_validation(Task.FIELD_CONSTRAINT_REQUIRED):
|
||||
if field.has_property(Task.FIELD_PROP_HIDE_EXPRESSION) and not field.has_validation(
|
||||
Task.FIELD_CONSTRAINT_REQUIRED):
|
||||
if WorkflowService.evaluate_property(Task.FIELD_PROP_HIDE_EXPRESSION, field, task):
|
||||
continue
|
||||
|
||||
# A task should only have default_value **or** value expression, not both.
|
||||
if field.has_property(Task.FIELD_PROP_VALUE_EXPRESSION) and (hasattr(field, 'default_value') and field.default_value):
|
||||
if field.has_property(Task.FIELD_PROP_VALUE_EXPRESSION) and (
|
||||
hasattr(field, 'default_value') and field.default_value):
|
||||
raise ApiError.from_task(code='default value and value_expression',
|
||||
message=f'This task ({task.get_name()}) has both a default_value and value_expression. Please fix this to only have one or the other.',
|
||||
task=task)
|
||||
# If we have a default_value or value_expression, try to set the default
|
||||
if field.has_property(Task.FIELD_PROP_VALUE_EXPRESSION) or (hasattr(field, 'default_value') and field.default_value):
|
||||
if field.has_property(Task.FIELD_PROP_VALUE_EXPRESSION) or (
|
||||
hasattr(field, 'default_value') and field.default_value):
|
||||
form_data[field.id] = WorkflowService.get_default_value(field, task)
|
||||
if not field.has_property(Task.FIELD_PROP_REPEAT):
|
||||
continue
|
||||
@ -296,15 +309,16 @@ class WorkflowService(object):
|
||||
if required_only:
|
||||
if (not field.has_validation(Task.FIELD_CONSTRAINT_REQUIRED) or
|
||||
field.get_validation(Task.FIELD_CONSTRAINT_REQUIRED).lower().strip() != "true"):
|
||||
continue # Don't include any fields that aren't specifically marked as required.
|
||||
continue # Don't include any fields that aren't specifically marked as required.
|
||||
if field.has_property(Task.FIELD_PROP_REQUIRED_EXPRESSION):
|
||||
result = WorkflowService.evaluate_property(Task.FIELD_PROP_REQUIRED_EXPRESSION, field, task)
|
||||
if not result and required_only:
|
||||
continue # Don't complete fields that are not required.
|
||||
continue # Don't complete fields that are not required.
|
||||
|
||||
# If it is read only, stop here.
|
||||
if field.has_property("read_only") and field.get_property(Task.FIELD_PROP_READ_ONLY).lower().strip() == "true":
|
||||
continue # Don't mess about with read only fields.
|
||||
if field.has_property("read_only") and field.get_property(
|
||||
Task.FIELD_PROP_READ_ONLY).lower().strip() == "true":
|
||||
continue # Don't mess about with read only fields.
|
||||
|
||||
if field.has_property(Task.FIELD_PROP_REPEAT) and field.has_property(Task.FIELD_PROP_GROUP):
|
||||
raise ApiError.from_task("group_repeat", f'Fields cannot have both group and repeat properties. '
|
||||
@ -313,7 +327,7 @@ class WorkflowService(object):
|
||||
|
||||
if field.has_property(Task.FIELD_PROP_REPEAT):
|
||||
group = field.get_property(Task.FIELD_PROP_REPEAT)
|
||||
if group in form_data and not(isinstance(form_data[group], list)):
|
||||
if group in form_data and not (isinstance(form_data[group], list)):
|
||||
raise ApiError.from_task("invalid_group",
|
||||
f'You are grouping form fields inside a variable that is defined '
|
||||
f'elsewhere: {group}. Be sure that you use a unique name for the '
|
||||
@ -324,12 +338,13 @@ class WorkflowService(object):
|
||||
if not result:
|
||||
hide_groups.append(group)
|
||||
if group not in form_data and group not in hide_groups:
|
||||
form_data[group] = [{},{},{}]
|
||||
form_data[group] = [{}, {}, {}]
|
||||
if group in form_data and group not in hide_groups:
|
||||
for i in range(3):
|
||||
form_data[group][i][field.id] = WorkflowService.get_random_data_for_field(field, task)
|
||||
else:
|
||||
form_data[field.id] = WorkflowService.get_random_data_for_field(field, task)
|
||||
|
||||
if task.data is None:
|
||||
task.data = {}
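For a repeating group, the random data generated above ends up shaped roughly like this; the group name, field name and values are hypothetical:

task.data = {
    "reviewers": [
        {"reviewer_name": "kxq2zpfwd"},
        {"reviewer_name": "pa81mcyte"},
        {"reviewer_name": "q0d4rnshl"},
    ]
}
# Three entries, because the group is seeded with [{}, {}, {}] before each entry is filled in.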
|
||||
|
||||
@ -383,7 +398,8 @@ class WorkflowService(object):
|
||||
for field in task.task_spec.form.fields:
|
||||
data = task.data
|
||||
# If we have a repeat field, make sure it is used before processing it
|
||||
if field.has_property(Task.FIELD_PROP_REPEAT) and field.get_property(Task.FIELD_PROP_REPEAT) in task.data.keys():
|
||||
if field.has_property(Task.FIELD_PROP_REPEAT) and field.get_property(
|
||||
Task.FIELD_PROP_REPEAT) in task.data.keys():
|
||||
repeat_array = task.data[field.get_property(Task.FIELD_PROP_REPEAT)]
|
||||
for repeat_data in repeat_array:
|
||||
WorkflowService.__post_process_field(task, field, repeat_data)
|
||||
@ -398,7 +414,7 @@ class WorkflowService(object):
|
||||
file_id = data[field.id]["id"]
|
||||
doc_code = task.workflow.script_engine._evaluate(field.get_property(Task.FIELD_PROP_DOC_CODE), **data)
|
||||
file = db.session.query(FileModel).filter(FileModel.id == file_id).first()
|
||||
if(file):
|
||||
if (file):
|
||||
file.irb_doc_code = doc_code
|
||||
db.session.commit()
|
||||
else:
|
||||
@ -430,7 +446,7 @@ class WorkflowService(object):
|
||||
# Here we must make the current group data top level (as it would be in a repeat section) but
|
||||
# make all other top level task data available as well.
|
||||
new_data = copy.deepcopy(task.data)
|
||||
del(new_data[group])
|
||||
del (new_data[group])
|
||||
data = task.data[group][0]
|
||||
data.update(new_data)
|
||||
else:
|
||||
@ -454,7 +470,6 @@ class WorkflowService(object):
|
||||
has_file_lookup = field.has_property(Task.FIELD_PROP_SPREADSHEET_NAME)
|
||||
return has_ldap_lookup or has_file_lookup
|
||||
|
||||
|
||||
@staticmethod
|
||||
def get_default_value(field, task):
|
||||
has_lookup = WorkflowService.has_lookup(field)
|
||||
@ -487,11 +502,11 @@ class WorkflowService(object):
|
||||
elif field.type == "autocomplete" or field.type == "enum":
|
||||
lookup_model = LookupService.get_lookup_model(task, field)
|
||||
if field.has_property(Task.FIELD_PROP_LDAP_LOOKUP): # All ldap records get the same person.
|
||||
return None # There is no default value for ldap.
|
||||
return None # There is no default value for ldap.
|
||||
elif lookup_model:
|
||||
data = db.session.query(LookupDataModel).\
|
||||
data = db.session.query(LookupDataModel). \
|
||||
filter(LookupDataModel.lookup_file_model == lookup_model). \
|
||||
filter(LookupDataModel.value == str(default)).\
|
||||
filter(LookupDataModel.value == str(default)). \
|
||||
first()
|
||||
if not data:
|
||||
raise ApiError.from_task("invalid_default", "You specified a default value that does not exist in "
|
||||
@ -499,7 +514,7 @@ class WorkflowService(object):
|
||||
return default
|
||||
else:
|
||||
raise ApiError.from_task("unknown_lookup_option", "The settings for this auto complete field "
|
||||
"are incorrect: %s " % field.id, task)
|
||||
"are incorrect: %s " % field.id, task)
|
||||
elif field.type == "long":
|
||||
return int(default)
|
||||
elif field.type == 'boolean':
|
||||
@ -528,7 +543,8 @@ class WorkflowService(object):
|
||||
else:
|
||||
# fixme: why it is sometimes an EnumFormFieldOption, and other times not?
|
||||
random_value = random_choice.id
|
||||
if field.has_property(Task.FIELD_PROP_ENUM_TYPE) and field.get_property(Task.FIELD_PROP_ENUM_TYPE) == 'checkbox':
|
||||
if field.has_property(Task.FIELD_PROP_ENUM_TYPE) and field.get_property(
|
||||
Task.FIELD_PROP_ENUM_TYPE) == 'checkbox':
|
||||
return [random_value]
|
||||
else:
|
||||
return random_value
|
||||
@ -554,8 +570,9 @@ class WorkflowService(object):
|
||||
" with no options" % field.id, task)
|
||||
else:
|
||||
raise ApiError.from_task("unknown_lookup_option", "The settings for this auto complete field "
|
||||
"are incorrect: %s " % field.id, task)
|
||||
if field.has_property(Task.FIELD_PROP_ENUM_TYPE) and field.get_property(Task.FIELD_PROP_ENUM_TYPE) == 'checkbox':
|
||||
"are incorrect: %s " % field.id, task)
|
||||
if field.has_property(Task.FIELD_PROP_ENUM_TYPE) and field.get_property(
|
||||
Task.FIELD_PROP_ENUM_TYPE) == 'checkbox':
|
||||
return [random_value]
|
||||
else:
|
||||
return random_value
|
||||
@ -568,7 +585,7 @@ class WorkflowService(object):
|
||||
if field.has_property('doc_code'):
|
||||
doc_code = WorkflowService.evaluate_property('doc_code', field, task)
|
||||
file_model = FileModel(name="test.png",
|
||||
irb_doc_code = field.id)
|
||||
irb_doc_code=field.id)
|
||||
doc_dict = DocumentService.get_dictionary()
|
||||
file = File.from_models(file_model, None, doc_dict)
|
||||
return FileSchema().dump(file)
|
||||
@ -592,7 +609,6 @@ class WorkflowService(object):
|
||||
"sponsor_type": "Staff"}
|
||||
}
|
||||
|
||||
|
||||
@staticmethod
|
||||
def _random_string(string_length=10):
|
||||
"""Generate a random string of fixed length """
|
||||
@ -606,22 +622,18 @@ class WorkflowService(object):
|
||||
|
||||
navigation = processor.bpmn_workflow.get_deep_nav_list()
|
||||
WorkflowService.update_navigation(navigation, processor)
|
||||
|
||||
|
||||
spec = db.session.query(WorkflowSpecModel).filter_by(id=processor.workflow_spec_id).first()
|
||||
is_review = FileService.is_workflow_review(processor.workflow_spec_id)
|
||||
spec_service = WorkflowSpecService()
|
||||
spec = spec_service.get_spec(processor.workflow_spec_id)
|
||||
workflow_api = WorkflowApi(
|
||||
id=processor.get_workflow_id(),
|
||||
status=processor.get_status(),
|
||||
next_task=None,
|
||||
navigation=navigation,
|
||||
workflow_spec_id=processor.workflow_spec_id,
|
||||
spec_version=processor.get_version_string(),
|
||||
is_latest_spec=processor.is_latest_spec,
|
||||
total_tasks=len(navigation),
|
||||
completed_tasks=processor.workflow_model.completed_tasks,
|
||||
last_updated=processor.workflow_model.last_updated,
|
||||
is_review=is_review,
|
||||
is_review=spec.is_review,
|
||||
title=spec.display_name,
|
||||
study_id=processor.workflow_model.study_id or None
|
||||
)
|
||||
@ -630,7 +642,7 @@ class WorkflowService(object):
|
||||
next_task = processor.next_task()
|
||||
if next_task:
|
||||
previous_form_data = WorkflowService.get_previously_submitted_data(processor.workflow_model.id, next_task)
|
||||
# DeepMerge.merge(next_task.data, previous_form_data)
|
||||
# DeepMerge.merge(next_task.data, previous_form_data)
|
||||
next_task.data = DeepMerge.merge(previous_form_data, next_task.data)
|
||||
|
||||
workflow_api.next_task = WorkflowService.spiff_task_to_api_task(next_task, add_docs_and_forms=True)
|
||||
@ -662,7 +674,6 @@ class WorkflowService(object):
|
||||
# Recurse here
|
||||
WorkflowService.update_navigation(nav_item.children, processor)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def get_previously_submitted_data(workflow_id, spiff_task):
|
||||
""" If the user has completed this task previously, find the form data for the last submission."""
|
||||
@ -689,8 +700,6 @@ class WorkflowService(object):
|
||||
else:
|
||||
return {}
|
||||
|
||||
|
||||
|
||||
@staticmethod
|
||||
def spiff_task_to_api_task(spiff_task, add_docs_and_forms=False):
|
||||
task_type = spiff_task.task_spec.__class__.__name__
|
||||
@ -765,14 +774,17 @@ class WorkflowService(object):
|
||||
# a BPMN standard, and should not be included in the display.
|
||||
if task.properties and "display_name" in task.properties:
|
||||
try:
|
||||
task.title = spiff_task.workflow.script_engine.evaluate(spiff_task, task.properties[Task.PROP_EXTENSIONS_TITLE])
|
||||
task.title = spiff_task.workflow.script_engine.evaluate(spiff_task,
|
||||
task.properties[Task.PROP_EXTENSIONS_TITLE])
|
||||
except Exception as e:
|
||||
# if the task is ready, we should raise an error, but if it is in the future or the past, we may not
|
||||
# have the information we need to properly set the title, so don't error out, and just use what is
|
||||
# provided.
|
||||
if spiff_task.state == spiff_task.READY:
|
||||
raise ApiError.from_task(code="task_title_error", message="Could not set task title on task %s with '%s' property because %s" %
|
||||
(spiff_task.task_spec.name, Task.PROP_EXTENSIONS_TITLE, str(e)), task=spiff_task)
|
||||
raise ApiError.from_task(code="task_title_error",
|
||||
message="Could not set task title on task %s with '%s' property because %s" %
|
||||
(spiff_task.task_spec.name, Task.PROP_EXTENSIONS_TITLE, str(e)),
|
||||
task=spiff_task)
|
||||
# Otherwise, just use the current title.
|
||||
elif task.title and ' ' in task.title:
|
||||
task.title = task.title.partition(' ')[2]
|
||||
@ -804,8 +816,12 @@ class WorkflowService(object):
|
||||
|
||||
try:
|
||||
doc_file_name = spiff_task.task_spec.name + ".md"
|
||||
data_model = FileService.get_workflow_file_data(spiff_task.workflow, doc_file_name)
|
||||
raw_doc = data_model.data.decode("utf-8")
|
||||
workflow_id = spiff_task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
|
||||
workflow = db.session.query(WorkflowModel). \
|
||||
filter(WorkflowModel.id == spiff_task.workflow.data['workflow_id']).first()
|
||||
spec_service = WorkflowSpecService()
|
||||
data = SpecFileService.get_data(spec_service.get_spec(workflow.workflow_spec_id), doc_file_name)
|
||||
raw_doc = data.decode("utf-8")
|
||||
except ApiError:
|
||||
raw_doc = documentation
|
||||
|
||||
@ -846,7 +862,7 @@ class WorkflowService(object):
|
||||
if hasattr(field, 'options') and len(field.options) > 1:
|
||||
return field
|
||||
elif not (field.has_property(Task.FIELD_PROP_VALUE_COLUMN) or
|
||||
field.has_property(Task.FIELD_PROP_LABEL_COLUMN)):
|
||||
field.has_property(Task.FIELD_PROP_LABEL_COLUMN)):
|
||||
raise ApiError.from_task("invalid_enum",
|
||||
f"For enumerations, you must include options, or a way to generate options from"
|
||||
f" a spreadsheet or data set. Please set either a spreadsheet name or data name,"
|
||||
@ -865,7 +881,6 @@ class WorkflowService(object):
|
||||
elif field.has_property(Task.FIELD_PROP_DATA_NAME):
|
||||
field.options = WorkflowService.get_options_from_task_data(spiff_task, field)
|
||||
|
||||
|
||||
return field
|
||||
|
||||
@staticmethod
|
||||
@ -882,10 +897,12 @@ class WorkflowService(object):
|
||||
options = []
|
||||
for item in items:
|
||||
if value_column not in item:
|
||||
raise ApiError.from_task("invalid_enum", f"The value column '{value_column}' does not exist for item {item}",
|
||||
raise ApiError.from_task("invalid_enum",
|
||||
f"The value column '{value_column}' does not exist for item {item}",
|
||||
task=spiff_task)
|
||||
if label_column not in item:
|
||||
raise ApiError.from_task("invalid_enum", f"The label column '{label_column}' does not exist for item {item}",
|
||||
raise ApiError.from_task("invalid_enum",
|
||||
f"The label column '{label_column}' does not exist for item {item}",
|
||||
task=spiff_task)
|
||||
|
||||
options.append(Box({"id": item[value_column], "name": item[label_column], "data": item}))
|
||||
@ -935,13 +952,15 @@ class WorkflowService(object):
|
||||
if user.get("value"):
|
||||
lane_uids.append(user['value'])
|
||||
else:
|
||||
raise ApiError.from_task(code="task_lane_user_error", message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it." %
|
||||
spiff_task.task_spec.name, task=spiff_task)
|
||||
raise ApiError.from_task(code="task_lane_user_error",
|
||||
message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it." %
|
||||
spiff_task.task_spec.name, task=spiff_task)
|
||||
elif isinstance(user, str):
|
||||
lane_uids.append(user)
|
||||
else:
|
||||
raise ApiError.from_task(code="task_lane_user_error", message="Spiff Task %s lane user is not a string or dict" %
|
||||
spiff_task.task_spec.name, task=spiff_task)
|
||||
raise ApiError.from_task(code="task_lane_user_error",
|
||||
message="Spiff Task %s lane user is not a string or dict" %
|
||||
spiff_task.task_spec.name, task=spiff_task)
|
||||
|
||||
return lane_uids
|
||||
|
||||
@ -954,7 +973,6 @@ class WorkflowService(object):
|
||||
user_uid=user_uid,
|
||||
workflow_id=processor.workflow_model.id,
|
||||
workflow_spec_id=processor.workflow_model.workflow_spec_id,
|
||||
spec_version=processor.get_version_string(),
|
||||
action=action,
|
||||
task_id=task.id,
|
||||
task_name=task.name,
|
||||
@ -1008,7 +1026,6 @@ class WorkflowService(object):
|
||||
return source[path]
|
||||
return None
|
||||
|
||||
|
||||
@staticmethod
|
||||
def set_dot_value(path, value, target):
|
||||
### Given a path in dot notation, such as "fruit.type", and a value "apple", will
|
||||
@ -1026,7 +1043,6 @@ class WorkflowService(object):
|
||||
destination = destination[p]
|
||||
return target
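# Illustrative usage (not part of the diff): per the docstring above,
#     WorkflowService.set_dot_value("fruit.type", "apple", {})
# would be expected to return {"fruit": {"type": "apple"}}, creating the nested
# dictionaries along the dotted path as it goes.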
|
||||
|
||||
|
||||
@staticmethod
|
||||
def process_workflows_for_cancels(study_id):
|
||||
workflows = db.session.query(WorkflowModel).filter_by(study_id=study_id).all()
|
||||
@ -1047,109 +1063,11 @@ class WorkflowService(object):
|
||||
|
||||
@staticmethod
|
||||
def get_standalone_workflow_specs():
|
||||
specs = db.session.query(WorkflowSpecModel).filter_by(standalone=True).all()
|
||||
return specs
|
||||
return spec_service.standalone.values()
|
||||
|
||||
@staticmethod
|
||||
def get_library_workflow_specs():
|
||||
specs = db.session.query(WorkflowSpecModel).filter_by(library=True).all()
|
||||
return specs
|
||||
|
||||
@staticmethod
|
||||
def get_primary_workflow(workflow_spec_id):
|
||||
# Returns the FileModel of the primary workflow for a workflow_spec
|
||||
primary = None
|
||||
file = db.session.query(FileModel).filter(FileModel.workflow_spec_id==workflow_spec_id, FileModel.primary==True).first()
|
||||
if file:
|
||||
primary = file
|
||||
return primary
|
||||
|
||||
@staticmethod
|
||||
def reorder_workflow_spec(spec, direction):
|
||||
category_id = spec.category_id
|
||||
# Direction is either `up` or `down`
|
||||
# This is checked in api.workflow.reorder_workflow_spec
|
||||
if direction == 'up':
|
||||
neighbor = session.query(WorkflowSpecModel). \
|
||||
filter(WorkflowSpecModel.category_id == category_id). \
|
||||
filter(WorkflowSpecModel.display_order == spec.display_order - 1). \
|
||||
first()
|
||||
if neighbor:
|
||||
neighbor.display_order += 1
|
||||
spec.display_order -= 1
|
||||
if direction == 'down':
|
||||
neighbor = session.query(WorkflowSpecModel). \
|
||||
filter(WorkflowSpecModel.category_id == category_id). \
|
||||
filter(WorkflowSpecModel.display_order == spec.display_order + 1). \
|
||||
first()
|
||||
if neighbor:
|
||||
neighbor.display_order -= 1
|
||||
spec.display_order += 1
|
||||
if neighbor:
|
||||
session.add(spec)
|
||||
session.add(neighbor)
|
||||
session.commit()
|
||||
ordered_specs = session.query(WorkflowSpecModel). \
|
||||
filter(WorkflowSpecModel.category_id == category_id). \
|
||||
order_by(WorkflowSpecModel.display_order).all()
|
||||
return ordered_specs
|
||||
|
||||
@staticmethod
|
||||
def reorder_workflow_spec_category(category, direction):
|
||||
# Direction is either `up` or `down`
|
||||
# This is checked in api.workflow.reorder_workflow_spec_category
|
||||
if direction == 'up':
|
||||
neighbor = session.query(WorkflowSpecCategoryModel).\
|
||||
filter(WorkflowSpecCategoryModel.display_order == category.display_order - 1).\
|
||||
first()
|
||||
if neighbor:
|
||||
neighbor.display_order += 1
|
||||
category.display_order -= 1
|
||||
if direction == 'down':
|
||||
neighbor = session.query(WorkflowSpecCategoryModel).\
|
||||
filter(WorkflowSpecCategoryModel.display_order == category.display_order + 1).\
|
||||
first()
|
||||
if neighbor:
|
||||
neighbor.display_order -= 1
|
||||
category.display_order += 1
|
||||
if neighbor:
|
||||
session.add(neighbor)
|
||||
session.add(category)
|
||||
session.commit()
|
||||
ordered_categories = session.query(WorkflowSpecCategoryModel).\
|
||||
order_by(WorkflowSpecCategoryModel.display_order).all()
|
||||
return ordered_categories
|
||||
|
||||
@staticmethod
|
||||
def cleanup_workflow_spec_display_order(category_id):
|
||||
# make sure we don't have gaps in display_order
|
||||
new_order = 0
|
||||
specs = session.query(WorkflowSpecModel).\
|
||||
filter(WorkflowSpecModel.category_id == category_id).\
|
||||
order_by(WorkflowSpecModel.display_order).all()
|
||||
for spec in specs:
|
||||
spec.display_order = new_order
|
||||
session.add(spec)
|
||||
new_order += 1
|
||||
session.commit()
|
||||
|
||||
@staticmethod
|
||||
def cleanup_workflow_spec_category_display_order():
|
||||
# make sure we don't have gaps in display_order
|
||||
new_order = 0
|
||||
categories = session.query(WorkflowSpecCategoryModel).\
|
||||
order_by(WorkflowSpecCategoryModel.display_order).all()
|
||||
for category in categories:
|
||||
category.display_order = new_order
|
||||
session.add(category)
|
||||
new_order += 1
|
||||
session.commit()
|
||||
|
||||
@staticmethod
|
||||
def delete_workflow_spec_files(spec_id):
|
||||
files = session.query(FileModel).filter_by(workflow_spec_id=spec_id).all()
|
||||
for file in files:
|
||||
FileService.delete_file(file.id)
|
||||
return spec_service.libraries.values()
|
||||
|
||||
@staticmethod
|
||||
def delete_workflow_spec_task_events(spec_id):
|
||||
@ -1160,3 +1078,4 @@ class WorkflowService(object):
|
||||
def delete_workflow_spec_workflow_models(spec_id):
|
||||
for workflow in session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id):
|
||||
StudyService.delete_workflow(workflow.id)
|
||||
|
||||
|
240
crc/services/workflow_spec_service.py
Normal file
@ -0,0 +1,240 @@
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
from typing import List
|
||||
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.workflow import WorkflowSpecCategory, WorkflowSpecCategorySchema, WorkflowSpecInfo, \
|
||||
WorkflowSpecInfoSchema
|
||||
from crc.services.file_system_service import FileSystemService
|
||||
|
||||
|
||||
class WorkflowSpecService(FileSystemService):
|
||||
|
||||
"""This is a way of persisting json files to the file system in a way that mimics the data
|
||||
as it would have been stored in the database. This is specific to Workflow Specifications, and
|
||||
Workflow Specification categories.
|
||||
We do this, so we can easily drop in a new configuration on the file system, and change all
|
||||
the workflow specs at once, or manage those file in a git repository. """
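# Illustrative note (not part of the commit): judging from the scan methods below,
# the service expects an on-disk layout roughly like the following, where the JSON
# file names come from WF_JSON_FILE / CAT_JSON_FILE on FileSystemService and the
# special folder names are assumed from the constants referenced elsewhere in this
# changeset:
#
#   <root_path>/
#       <category id>/
#           category.json          (assumed value of CAT_JSON_FILE)
#           <spec id>/
#               workflow.json      (assumed value of WF_JSON_FILE)
#               *.bpmn, *.dmn, and other spec files
#       Master Specification/
#       Library Specs/
#       Reference Files/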
|
||||
|
||||
CAT_SCHEMA = WorkflowSpecCategorySchema()
|
||||
WF_SCHEMA = WorkflowSpecInfoSchema()
|
||||
|
||||
def add_spec(self, spec: WorkflowSpecInfo):
|
||||
display_order = self.next_display_order(spec)
|
||||
spec.display_order = display_order
|
||||
self.update_spec(spec)
|
||||
|
||||
def update_spec(self, spec:WorkflowSpecInfo):
|
||||
spec_path = self.workflow_path(spec)
|
||||
if spec.is_master_spec or spec.library or spec.standalone:
|
||||
spec.category_id = ""
|
||||
os.makedirs(spec_path, exist_ok=True)
|
||||
json_path = os.path.join(spec_path, self.WF_JSON_FILE)
|
||||
with open(json_path, "w") as wf_json:
|
||||
json.dump(self.WF_SCHEMA.dump(spec), wf_json, indent=4)
|
||||
|
||||
def delete_spec(self, spec_id: str):
|
||||
spec = self.get_spec(spec_id)
|
||||
if not spec:
|
||||
return
|
||||
if spec.library:
|
||||
self.__remove_library_references(spec.id)
|
||||
path = self.workflow_path(spec)
|
||||
shutil.rmtree(path)
|
||||
|
||||
def __remove_library_references(self, spec_id):
|
||||
for spec in self.get_specs():
|
||||
if spec_id in spec.libraries:
|
||||
spec.libraries.remove(spec_id)
|
||||
self.update_spec(spec)
|
||||
|
||||
@property
|
||||
def master_spec(self):
|
||||
return self.get_master_spec()
|
||||
|
||||
def get_master_spec(self):
|
||||
path = os.path.join(FileSystemService.root_path(), FileSystemService.MASTER_SPECIFICATION)
|
||||
if os.path.exists(path):
|
||||
return self.__scan_spec(path, FileSystemService.MASTER_SPECIFICATION)
|
||||
|
||||
def get_spec(self, spec_id):
|
||||
if not os.path.exists(FileSystemService.root_path()):
|
||||
return # Nothing to scan yet. There are no files.
|
||||
|
||||
master_spec = self.get_master_spec()
|
||||
if master_spec and master_spec.id == spec_id:
|
||||
return master_spec
|
||||
with os.scandir(FileSystemService.root_path()) as category_dirs:
|
||||
for item in category_dirs:
|
||||
category_dir = item
|
||||
if item.is_dir():
|
||||
with os.scandir(item.path) as spec_dirs:
|
||||
for sd in spec_dirs:
|
||||
if sd.name == spec_id:
|
||||
# Now we have the category directory, and spec directory
|
||||
category = self.__scan_category(category_dir)
|
||||
return self.__scan_spec(sd.path, sd.name, category)
|
||||
|
||||
def get_specs(self):
|
||||
categories = self.get_categories()
|
||||
specs = []
|
||||
for cat in categories:
|
||||
specs.extend(cat.specs)
|
||||
return specs
|
||||
|
||||
def reorder_spec(self, spec:WorkflowSpecInfo, direction):
|
||||
specs = spec.category.specs
|
||||
specs.sort(key=lambda w: w.display_order)
|
||||
index = specs.index(spec)
|
||||
if direction == 'up' and index > 0:
|
||||
specs[index-1], specs[index] = specs[index], specs[index-1]
|
||||
if direction == 'down' and index < len(specs)-1:
|
||||
specs[index+1], specs[index] = specs[index], specs[index+1]
|
||||
return self.cleanup_workflow_spec_display_order(spec.category)
|
||||
|
||||
def cleanup_workflow_spec_display_order(self, category):
|
||||
index = 0
|
||||
if not category:
|
||||
return []
|
||||
for workflow in category.specs:
|
||||
workflow.display_order = index
|
||||
self.update_spec(workflow)
|
||||
index += 1
|
||||
return category.specs
|
||||
|
||||
def get_categories(self) -> List[WorkflowSpecCategory]:
|
||||
"""Returns the categories as a list in display order"""
|
||||
cat_list = self.__scan_categories()
|
||||
cat_list.sort(key=lambda w: w.display_order)
|
||||
return cat_list
|
||||
|
||||
def get_libraries(self) -> List[WorkflowSpecInfo]:
|
||||
cat = self.get_category(self.LIBRARY_SPECS)
|
||||
if not cat:
|
||||
return []
|
||||
return cat.specs
|
||||
|
||||
def get_standalones(self) -> List[WorkflowSpecInfo]:
|
||||
cat = self.get_category(self.STAND_ALONE_SPECS)
|
||||
if not cat:
|
||||
return []
|
||||
return cat.specs
|
||||
|
||||
def get_category(self, category_id):
|
||||
"""Look for a given category, and return it."""
|
||||
if not os.path.exists(FileSystemService.root_path()):
|
||||
return # Nothing to scan yet. There are no files.
|
||||
with os.scandir(FileSystemService.root_path()) as directory_items:
|
||||
for item in directory_items:
|
||||
if item.is_dir() and item.name == category_id:
|
||||
return self.__scan_category(item)
|
||||
|
||||
def add_category(self, category: WorkflowSpecCategory):
|
||||
display_order = len(self.get_categories())
|
||||
category.display_order = display_order
|
||||
return self.update_category(category)
|
||||
|
||||
def update_category(self, category: WorkflowSpecCategory):
|
||||
cat_path = self.category_path(category.id)
|
||||
os.makedirs(cat_path, exist_ok=True)
|
||||
json_path = os.path.join(cat_path, self.CAT_JSON_FILE)
|
||||
with open(json_path, "w") as cat_json:
|
||||
json.dump(self.CAT_SCHEMA.dump(category), cat_json, indent=4)
|
||||
return category
|
||||
|
||||
def delete_category(self, category_id: str):
|
||||
path = self.category_path(category_id)
|
||||
if os.path.exists(path):
|
||||
shutil.rmtree(path)
|
||||
self.cleanup_category_display_order()
|
||||
|
||||
def reorder_workflow_spec_category(self, cat: WorkflowSpecCategory, direction):
|
||||
cats = self.get_categories() # Returns an ordered list
|
||||
index = cats.index(cat)
|
||||
if direction == 'up' and index > 0:
|
||||
cats[index-1], cats[index] = cats[index], cats[index-1]
|
||||
if direction == 'down' and index < len(cats)-1:
|
||||
cats[index+1], cats[index] = cats[index], cats[index+1]
|
||||
index = 0
|
||||
for category in cats:
|
||||
category.display_order = index
|
||||
self.update_category(category)
|
||||
index += 1
|
||||
return cats
|
||||
|
||||
def cleanup_category_display_order(self):
|
||||
cats = self.get_categories() # Returns an ordered list
|
||||
index = 0
|
||||
for category in cats:
|
||||
category.display_order = index
|
||||
self.update_category(category)
|
||||
index += 1
|
||||
return cats
|
||||
|
||||
def __scan_categories(self):
|
||||
if not os.path.exists(FileSystemService.root_path()):
|
||||
return [] # Nothing to scan yet. There are no files.
|
||||
|
||||
with os.scandir(FileSystemService.root_path()) as directory_items:
|
||||
categories = []
|
||||
for item in directory_items:
|
||||
if item.is_dir() and not item.name[0] == '.':
|
||||
if item.name == self.REFERENCE_FILES:
|
||||
continue
|
||||
elif item.name == self.MASTER_SPECIFICATION:
|
||||
continue
|
||||
elif item.name == self.LIBRARY_SPECS:
|
||||
continue
|
||||
elif item.name == self.STAND_ALONE_SPECS:
|
||||
continue
|
||||
categories.append(self.__scan_category(item))
|
||||
return categories
|
||||
|
||||
def __scan_category(self, dir_item: os.DirEntry):
|
||||
"""Reads the category.json file, and any workflow directories """
|
||||
cat_path = os.path.join(dir_item.path, self.CAT_JSON_FILE)
|
||||
if os.path.exists(cat_path):
|
||||
with open(cat_path) as cat_json:
|
||||
data = json.load(cat_json)
|
||||
cat = self.CAT_SCHEMA.load(data)
|
||||
else:
|
||||
cat = WorkflowSpecCategory(id=dir_item.name, display_name=dir_item.name, display_order=10000, admin=False)
|
||||
with open(cat_path, "w") as wf_json:
|
||||
json.dump(self.CAT_SCHEMA.dump(cat), wf_json, indent=4)
|
||||
with os.scandir(dir_item.path) as workflow_dirs:
|
||||
cat.specs = []
|
||||
for item in workflow_dirs:
|
||||
if item.is_dir():
|
||||
cat.specs.append(self.__scan_spec(item.path, item.name, category=cat))
|
||||
cat.specs.sort(key=lambda w: w.display_order)
|
||||
return cat
|
||||
|
||||
@staticmethod
|
||||
def _get_workflow_metas(study_id):
|
||||
# Add in the Workflows for each category
|
||||
# Fixme: moved from the Study Service
|
||||
workflow_metas = []
|
||||
# for workflow in workflow_models:
|
||||
# workflow_metas.append(WorkflowMetadata.from_workflow(workflow))
|
||||
return workflow_metas
|
||||
|
||||
def __scan_spec(self, path, name, category=None):
|
||||
spec_path = os.path.join(path, self.WF_JSON_FILE)
|
||||
is_master = FileSystemService.MASTER_SPECIFICATION in spec_path
|
||||
|
||||
if os.path.exists(spec_path):
|
||||
with open(spec_path) as wf_json:
|
||||
data = json.load(wf_json)
|
||||
spec = self.WF_SCHEMA.load(data)
|
||||
else:
|
||||
spec = WorkflowSpecInfo(id=name, library=False, standalone=False, is_master_spec=is_master,
|
||||
display_name=name, description="", primary_process_id="",
|
||||
primary_file_name="", display_order=0, is_review=False,
|
||||
libraries=[])
|
||||
with open(spec_path, "w") as wf_json:
|
||||
json.dump(self.WF_SCHEMA.dump(spec), wf_json, indent=4)
|
||||
if category:
|
||||
spec.category = category
|
||||
spec.category_id = category.id
|
||||
return spec
|
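Not part of the commit: a minimal sketch of how the new WorkflowSpecService might be exercised, assuming FileSystemService.root_path() points at the configured SPECS directory and that 'core_info' stands in for a real spec id.

from crc.services.workflow_spec_service import WorkflowSpecService

spec_service = WorkflowSpecService()

# Categories come back sorted by display_order, each with its specs attached.
for category in spec_service.get_categories():
    print(category.display_name, [s.id for s in category.specs])

# Individual specs are found by scanning the category directories on disk.
spec = spec_service.get_spec('core_info')  # 'core_info' is a hypothetical spec id
if spec is not None:
    print(spec.display_name, spec.category_id)

# Library and standalone specs live in their own special folders.
print([s.id for s in spec_service.get_libraries()])
print([s.id for s in spec_service.get_standalones()])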
@ -1,52 +1,123 @@
|
||||
import json
|
||||
import os
|
||||
from json import JSONDecodeError
|
||||
from typing import List, Optional
|
||||
|
||||
import marshmallow
|
||||
import requests
|
||||
|
||||
from crc import app
|
||||
from crc import app, db, ma
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.file import FileModel
|
||||
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecCategoryModel, WorkflowSpecCategoryModelSchema, \
|
||||
WorkflowSpecModelSchema, WorkflowLibraryModel
|
||||
from crc.services.file_system_service import FileSystemService
|
||||
from crc.services.spec_file_service import SpecFileService
|
||||
|
||||
|
||||
class WorkflowSyncService(object):
|
||||
"""
|
||||
There are some files on the File System that should be used to determine what Categories and Workflow
|
||||
Specifications are available. The FileSyncService and WorkflowSyncService both look to the filesytem for
|
||||
everything, but we still track our workflow spec metadata and categories in the database. This will
|
||||
allow us to write that information to disk, and update our database from disk as needed.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def get_remote_file_by_hash(remote,md5_hash):
|
||||
url = remote+'/v1.0/file/'+md5_hash+'/hash_data'
|
||||
return WorkflowSyncService.__make_request(url,return_contents=True)
|
||||
LIBRARY_SPECS = "Library Specs"
|
||||
MASTER_SPECIFICATION = "Master Specification"
|
||||
REFERENCE_FILES = "Reference Files"
|
||||
SPECIAL_FOLDERS = [LIBRARY_SPECS, MASTER_SPECIFICATION, REFERENCE_FILES]
|
||||
JSON_FILE = "categories.json"
|
||||
|
||||
@staticmethod
|
||||
def get_remote_workflow_spec_files(remote,workflow_spec_id):
|
||||
url = remote+'/v1.0/workflow_sync/'+workflow_spec_id+'/files'
|
||||
return WorkflowSyncService.__make_request(url)
|
||||
def from_file_system(self):
|
||||
"""Assure the database is in sync with the workflow specifications on the file system. """
|
||||
if not os.path.exists(FileSystemService.root_path()):
|
||||
raise ApiError('missing_specs', 'The path for workflow specifications does not exist.')
|
||||
json_path = os.path.join(FileSystemService.root_path(), self.JSON_FILE)
|
||||
if not os.path.exists(json_path):
|
||||
raise ApiError('missing_category_file', 'The path for workflow specifications must contain a json'
|
||||
' file that describes the categories.')
|
||||
|
||||
@staticmethod
|
||||
def get_remote_workflow_spec(remote, workflow_spec_id):
|
||||
"""
|
||||
this just gets the details of a workflow spec from the
|
||||
remote side.
|
||||
"""
|
||||
url = remote+'/v1.0/workflow_sync/'+workflow_spec_id+'/spec'
|
||||
return WorkflowSyncService.__make_request(url)
|
||||
directory_items = os.scandir(FileSystemService.root_path())
|
||||
# Load the categories.
|
||||
with open(json_path) as json_file:
|
||||
data = json.load(json_file)
|
||||
existing_cats = db.session.query(WorkflowSpecCategoryModel).all()
|
||||
# SqlAlchemy will attempt to update existing models if it can find them.
|
||||
categories = WorkflowSpecCategoryModelSchema(many=True).load(data['categories'], session=db.session)
|
||||
db.session.add_all(categories)
|
||||
|
||||
@staticmethod
|
||||
def get_all_remote_workflows(remote):
|
||||
url = remote + '/v1.0/workflow_sync/all'
|
||||
return WorkflowSyncService.__make_request(url)
|
||||
|
||||
@staticmethod
|
||||
def __make_request(url,return_contents=False):
|
||||
try:
|
||||
response = requests.get(url,headers={'X-CR-API-KEY':app.config['API_TOKEN']})
|
||||
except:
|
||||
raise ApiError("workflow_sync_error",url)
|
||||
if response.ok and response.text:
|
||||
if return_contents:
|
||||
return response.content
|
||||
# For each category, load up the workflow files
|
||||
# also Load the master workflow, and library workflows
|
||||
for cat in categories:
|
||||
path = SpecFileService.category_path(cat.display_name)
|
||||
if os.path.exists(path):
|
||||
self.__load_workflows(cat.display_name, cat)
|
||||
else:
|
||||
return json.loads(response.text)
|
||||
else:
|
||||
raise ApiError("workflow_sync_error",
|
||||
"Received an invalid response from the remote CR-Connect API (status %s): %s when calling "
|
||||
"url '%s'." %
|
||||
(response.status_code, response.text, url))
|
||||
# Fixme: What if there are running workflows? Do those relationships cause this to fail?
|
||||
db.session.query(WorkflowSpecModel).filter(WorkflowSpecModel.category_id == cat.id).delete()
|
||||
db.session.delete(cat)
|
||||
self.__load_workflows(self.LIBRARY_SPECS)
|
||||
self.__load_workflows(self.MASTER_SPECIFICATION)
|
||||
db.session.commit()
|
||||
|
||||
@staticmethod
|
||||
def __load_workflows(directory, category=None):
|
||||
"""Creates workflow models for all directories in the given directory"""
|
||||
path = SpecFileService.category_path(directory)
|
||||
for wd in os.listdir(path):
|
||||
wf_json_path = os.path.join(path, wd, 'workflow.json')
|
||||
if not os.path.exists(wf_json_path):
|
||||
raise ApiError('missing_workflow_meta_file',
|
||||
'Each directory containing a workflow must contain a '
|
||||
'workflow.json file.')
|
||||
with open(wf_json_path) as wf_json_file:
|
||||
data = json.load(wf_json_file)
|
||||
workflow = WorkflowSpecModelSchema().load(data, session=db.session)
|
||||
if category:
|
||||
workflow.category = category
|
||||
db.session.add(workflow)
|
||||
# Connect Libraries
|
||||
for lib in data['libraries']:
|
||||
lib = WorkflowLibraryModel(workflow_spec_id=workflow.id,
|
||||
library_spec_id=lib['id'])
|
||||
db.session.add(lib)
|
||||
|
||||
def to_file_system(self):
|
||||
"""Writes metadata about the specifications to json files, and assures
|
||||
directory structures are correct. """
|
||||
categories = db.session.query(WorkflowSpecCategoryModel).all()
|
||||
data = ExportData(categories, None, None)
|
||||
my_data = ExportDataSchema().dump(data)
|
||||
json_file = os.path.join(FileSystemService.root_path(), self.JSON_FILE)
|
||||
os.makedirs(os.path.dirname(json_file), exist_ok=True)
|
||||
with open(json_file, 'w') as f:
|
||||
json.dump(my_data, f, indent=4)
|
||||
|
||||
for wf in db.session.query(WorkflowSpecModel).all():
|
||||
self.workflow_to_file_system(wf)
|
||||
|
||||
def workflow_to_file_system(self, wf: WorkflowSpecModel):
|
||||
path = SpecFileService.workflow_path(wf)
|
||||
json_data = WorkflowSpecModelSchema().dump(wf)
|
||||
file = os.path.join(path, 'workflow.json')
|
||||
with open(file, 'w') as f:
|
||||
json.dump(json_data, f, indent=4)
|
||||
|
||||
|
||||
class ExportData(object):
|
||||
def __init__(self, categories: List[WorkflowSpecCategoryModel],
|
||||
master_spec: WorkflowSpecModel,
|
||||
libraries: List[WorkflowSpecModel]):
|
||||
self.categories = categories
|
||||
self.master_spec = master_spec
|
||||
self.libraries = libraries
|
||||
|
||||
|
||||
class ExportDataSchema(ma.Schema):
|
||||
class Meta:
|
||||
model = ExportData
|
||||
fields = ["categories"]
|
||||
categories = marshmallow.fields.List(marshmallow.fields.Nested(WorkflowSpecCategoryModelSchema))
|
||||
|
||||
|
||||
|
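Not part of the commit: a rough sketch of the root categories.json file that from_file_system() reads and to_file_system() writes, assuming WorkflowSpecCategoryModelSchema serializes roughly the id, display_name and display_order attributes referenced above; the real field list may differ.

import json

# Hypothetical contents of <root_path>/categories.json. ExportDataSchema dumps only
# its "categories" field, so the file is a single object with a "categories" list.
example = {
    "categories": [
        {"id": 1, "display_name": "Core Workflows", "display_order": 0},
        {"id": 2, "display_name": "Data Security Plan", "display_order": 1},
    ]
}

with open("categories.json", "w") as f:
    json.dump(example, f, indent=4)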
@ -1,53 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_65ee385" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
|
||||
<bpmn:process id="Process_dc2efbd" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0tp54zs</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0tp54zs" sourceRef="StartEvent_1" targetRef="Activity_0wo6016" />
|
||||
<bpmn:endEvent id="Event_0y9rgmj">
|
||||
<bpmn:incoming>Flow_1bzyf90</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1bzyf90" sourceRef="Activity_0wo6016" targetRef="Event_0y9rgmj" />
|
||||
<bpmn:userTask id="Activity_0wo6016" name="Abandoned" camunda:formKey="Abondones">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="Abandon" label="Abandon this study?" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
</camunda:properties>
|
||||
<camunda:value id="yes" name="Yes" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="AbandonNotes" label="Why was this study moved to Abandoned status?" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="rows" value="5" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_0tp54zs</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1bzyf90</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_dc2efbd">
|
||||
<bpmndi:BPMNEdge id="Flow_1bzyf90_di" bpmnElement="Flow_1bzyf90">
|
||||
<di:waypoint x="370" y="177" />
|
||||
<di:waypoint x="432" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0tp54zs_di" bpmnElement="Flow_0tp54zs">
|
||||
<di:waypoint x="215" y="177" />
|
||||
<di:waypoint x="270" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0y9rgmj_di" bpmnElement="Event_0y9rgmj">
|
||||
<dc:Bounds x="432" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_00f54me_di" bpmnElement="Activity_0wo6016">
|
||||
<dc:Bounds x="270" y="137" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
Binary file not shown.
@ -1,341 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_0be39yr" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
|
||||
<bpmn:process id="Process_1cme33c" isExecutable="false">
|
||||
<bpmn:parallelGateway id="ParallelGateway_0ecwf3g">
|
||||
<bpmn:incoming>Flow_1wqp7vf</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0f61fxp</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_1idbomg</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_0xj8i4c</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_1d4dncx</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:parallelGateway id="ParallelGateway_01234ff">
|
||||
<bpmn:incoming>SequenceFlow_02nbqkn</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_1o39rt4</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_0gsy7mo</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_0rw17h2</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1v7oplk</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:endEvent id="EndEvent_16uwhzg">
|
||||
<bpmn:incoming>SequenceFlow_1v7oplk</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:startEvent id="StartEvent_1mhzkcr">
|
||||
<bpmn:outgoing>SequenceFlow_1r3yrhy</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:userTask id="UserTask_1y1qon7" name="Enter Protocol Owner Info" camunda:formKey="Protocol Owner Info">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_ProtocolOwnerInfo" label="What is the Protocol Owner?" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="dropdown" />
|
||||
</camunda:properties>
|
||||
<camunda:value id="Ind" name="Industry" />
|
||||
<camunda:value id="IntPI" name="UVA Primary Investigator - Investigator Initiated" />
|
||||
<camunda:value id="OutPI" name="Outside Primary Investigator - Investigator Initiated" />
|
||||
<camunda:value id="CoopGrp" name="Cooperative Group" />
|
||||
<camunda:value id="OthColUni" name="Other Colleges and Universities" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="ProtocolOwnerName" label="Protocol Owner Name" type="autocomplete">
|
||||
<camunda:properties>
|
||||
<camunda:property id="spreadsheet.name" value="SponsorList.xls" />
|
||||
<camunda:property id="spreadsheet.value.column" value="CUSTOMER_NUMBER" />
|
||||
<camunda:property id="spreadsheet.label.column" value="CUSTOMER_NAME" />
|
||||
<camunda:property id="help" value="#### How To:\nYou can find the name by typing any part (at least 3 characters) of the name.\n\nNote: This source of this list is in the Integration System (Oracle) and the information is owned by and managed by the OSP team.\n\nIf you are not finding the name or need to make any changes.\n1. Email 'Information Team listserve' osp-infoteam@virginia.edu with the Subject Line "Requesting New Sponsor Setup" and provide the following information:\n - Sponsor Legal Name, Address, Sponsor Classification (Federal Government, Foreign Entity, Foundation, Industry, Local Government, Other Colleges & Universities or State Government) as stated in the agreement/notification.\n - Copies of the agreement from the sponsor (contract, award letter, email, etc.).\n2. Once all the required information is received, OSP will add the name to the list.\n3. The updated list should be available for your selection in the workflow within 2 business days." />
|
||||
<camunda:property id="description" value="The protocol owner name is always an entity. For example, if this is a UVA Primary Investigator - Investigator initiated study, the Protocol Owner Name will be "University of Virginia"" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FormFieldPOHigherEd" label="Is the owner part of an institution of higher education?" type="boolean">
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_1idbomg</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0gsy7mo</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="UserTask_01zzzg9" name="Enter Clinical Trials.gov Info" camunda:formKey="Clinicalrials.gov Info">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="CTG_NA" label="Not Applicable" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
</camunda:properties>
|
||||
<camunda:value id="enumNoRegister" name="Study does not need to be registered at ClinicalTrials.gov" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="CTG_NCT" label="Clinicaltrials.gov NCT #" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="description" value="It is the responsibility of Study Sponsor to register the study and obtain the #." />
|
||||
<camunda:property id="help" value="#### How to\n\nThe format is NCT########. (11 characters total)" />
|
||||
<camunda:property id="placeholder" value="Limit Length: 11" />
|
||||
<camunda:property id="hide_expression" value="model.CTG_NA && model.CTG_NA.enumNoRegister" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="max_length" config="11" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="CTG_StudyType" label="ClinicalTrials.gov Study Type" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="model.CTG_NA && model.CTG_NA.enumNoRegister" />
|
||||
</camunda:properties>
|
||||
<camunda:value id="Interv" name="Interventional" />
|
||||
<camunda:value id="Obs" name="Observational" />
|
||||
<camunda:value id="Obs_PR" name="Observational - Patient Registry" />
|
||||
<camunda:value id="Exp_Acc" name="Expanded Access" />
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0xj8i4c</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0rw17h2</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="UserTask_EnterMultiSiteInfo" name="Enter Multi-Site Info" camunda:formKey="Multi-Site Info" camunda:priority="1">
|
||||
<bpmn:documentation>### From Protocol Builder
|
||||
{{ ElementDoc_MultiSite_Q12 }}
|
||||
|
||||
{{ ElementDoc_MultiSite_Q14 }}
|
||||
|
||||
{{ ElementDoc_MultiSite_Q28 }}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="Scope" label="Scope" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="radio" />
|
||||
<camunda:property id="help" value="Local- only at UVA\n\nNational- sites across the United States\n\nInternational- sites in one or more countries outside US" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
<camunda:value id="local" name="Local" />
|
||||
<camunda:value id="national" name="National" />
|
||||
<camunda:value id="international" name="International" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="AffiliatesOnly" label="Open for Affiliates Only?" type="boolean" />
|
||||
<camunda:formField id="Q12_MultikSite" label="Is this a multi-site study?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="model.StudyInfo.details.IS_MULTI_SITE !== null" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="Q14_UVaPI" label="Will the PI at UVA be the overall PI for all sites in the study?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="model.StudyInfo.details.IS_UVA_PI_MULTI !== null" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_1d4dncx</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1o39rt4</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="UserTask_0ebxkp7" name="Enter Study Info" camunda:formKey="StudyInfo">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="StudyShortName" label="Study Short Name" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="- This title is primarily used for Office of Grants and Contracts / Office of Sponsored Programs (OGC/OSP) related processes.\n- Length limit: 30 characters. It is automatically populated on Save based on School, Award Owning Organization, PI's computing ID, and study short name provided earlier and used for the workflow." />
|
||||
<camunda:property id="description" value="Important: Limit is 17 characters. Until the system integration with ResearchUVa is in place, for consistency and alignment, please remember to enter this same title when you complete ePRF information in ResearchUVa." />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="max_length" config="17" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FullProtocolTitle" label="Full Protocol Title" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="### Important\nThis title is included in auto-generated signature documents and email communications with stakeholders throughout study start-up. It is **VERY** important that you use the same title here and in IRB Protocol Builder. If the titles do not match, it is possible the IRB-HSR will ask you to redo all your submissions." />
|
||||
<camunda:property id="rows" value="2" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="StudyType" label="Study Type" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="- Basic Science (BAS): Protocol designed to examine the basic mechanisms of action (e.g., physiology, biomechanics) of an intervention.\n- Diagnostic (DIA): Protocol designed to evaluate one or more interventions aimed at identifying a disease or health condition.\n- Health Services Research (HSR): Protocol designed to evaluate the delivery, processes, management, organization, or financing of health care.\n- Prevention (PRE): Protocol designed to assess one or more interventions aimed at preventing the development of a specific disease or health condition.\n- Screening (SCR): Protocol designed to assess or examine methods of identifying a condition (or risk factor for a condition) in people who are not yet known to have the condition (or risk factor).\n- Supportive Care (SUP): Protocol designed to evaluate one or more interventions where the primary intent is to maximize comfort, minimize side effects, or mitigate against a decline in the participant’s health or function. In general, supportive care interventions are not intended to cure a disease.\n- Treatment (TRE): Protocol designed to evaluate one or more interventions for treating a disease, syndrome, or condition.\n- Other (OTH): Not in other categories\n- Retrospective ONLY (sub-category of HSR)" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
<camunda:value id="BAS" name="Basic Science / Biospecimen" />
|
||||
<camunda:value id="DIA" name="Diagnostic" />
|
||||
<camunda:value id="HSR" name="Health Services Research / Observational / Registry / Outcome / Econ" />
|
||||
<camunda:value id="PRE" name="Prevention" />
|
||||
<camunda:value id="SCR" name="Screening" />
|
||||
<camunda:value id="SUP" name="Supportive Care" />
|
||||
<camunda:value id="TRE" name="Treatment" />
|
||||
<camunda:value id="HSR-Retro" name="Retrospective ONLY (sub-category of HSR)" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="StudyPhase" label="Study Phase" type="enum">
|
||||
<camunda:value id="Zero" name="0" />
|
||||
<camunda:value id="I" name="I" />
|
||||
<camunda:value id="I-II" name="I/II" />
|
||||
<camunda:value id="I-III" name="I/III" />
|
||||
<camunda:value id="II" name="II" />
|
||||
<camunda:value id="II-III" name="II/III" />
|
||||
<camunda:value id="III" name="III" />
|
||||
<camunda:value id="III-IV" name="III/IV" />
|
||||
<camunda:value id="IV" name="IV" />
|
||||
<camunda:value id="NA" name="NA" />
|
||||
<camunda:value id="Pilot" name="Pilot" />
|
||||
<camunda:value id="Pivotal" name="Pivotal" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="SubjectAge" label="Age of Subjects to be Enrolled in Study" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="Additional Information: [NIH Age Definition](https://grants.nih.gov/grants/guide/notice-files/NOT-OD-16-010.html)" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
<camunda:value id="child" name="Child (Under 18 Years Old)" />
|
||||
<camunda:value id="adult" name="Adult (18 Years and Older)" />
|
||||
<camunda:value id="both" name="Both" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="UVaMedResFell" label="Is the study targeting UVA Medical Students, Residents, and/or Fellows?" type="boolean">
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0f61fxp</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_02nbqkn</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1r3yrhy" sourceRef="StartEvent_1mhzkcr" targetRef="Activity_10nxpt2" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0f61fxp" sourceRef="ParallelGateway_0ecwf3g" targetRef="UserTask_0ebxkp7" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1idbomg" sourceRef="ParallelGateway_0ecwf3g" targetRef="UserTask_1y1qon7" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0xj8i4c" sourceRef="ParallelGateway_0ecwf3g" targetRef="UserTask_01zzzg9" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_02nbqkn" sourceRef="UserTask_0ebxkp7" targetRef="ParallelGateway_01234ff" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1o39rt4" sourceRef="UserTask_EnterMultiSiteInfo" targetRef="ParallelGateway_01234ff" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0gsy7mo" sourceRef="UserTask_1y1qon7" targetRef="ParallelGateway_01234ff" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0rw17h2" sourceRef="UserTask_01zzzg9" targetRef="ParallelGateway_01234ff" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1v7oplk" sourceRef="ParallelGateway_01234ff" targetRef="EndEvent_16uwhzg" />
|
||||
<bpmn:sequenceFlow id="Flow_09h1imz" sourceRef="Activity_10nxpt2" targetRef="Activity_PBMultiSiteCheckQ12" />
|
||||
<bpmn:scriptTask id="Activity_10nxpt2" name="Load Study Details">
|
||||
<bpmn:incoming>SequenceFlow_1r3yrhy</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_09h1imz</bpmn:outgoing>
|
||||
<bpmn:script>details = study_info('details')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:businessRuleTask id="Activity_PBMultiSiteCheckQ12" name="PB Multi-Site Check Q12" camunda:decisionRef="Decision_core_info_multi_site_q12">
|
||||
<bpmn:incoming>Flow_09h1imz</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_16v64sg</bpmn:outgoing>
|
||||
</bpmn:businessRuleTask>
|
||||
<bpmn:sequenceFlow id="Flow_16v64sg" sourceRef="Activity_PBMultiSiteCheckQ12" targetRef="Activity_PBMultiSiteCheckQ14" />
|
||||
<bpmn:sequenceFlow id="Flow_1d4dncx" sourceRef="ParallelGateway_0ecwf3g" targetRef="UserTask_EnterMultiSiteInfo" />
|
||||
<bpmn:sequenceFlow id="Flow_1tfyk5m" sourceRef="Activity_PBMultiSiteCheckQ14" targetRef="Activity_PBMultiSiteCheckQ28" />
|
||||
<bpmn:businessRuleTask id="Activity_PBMultiSiteCheckQ14" name="PB Multi-Site Check Q14" camunda:decisionRef="Decision_core_info_multi_site_q14">
|
||||
<bpmn:incoming>Flow_16v64sg</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1tfyk5m</bpmn:outgoing>
|
||||
</bpmn:businessRuleTask>
|
||||
<bpmn:sequenceFlow id="Flow_1wqp7vf" sourceRef="Activity_PBMultiSiteCheckQ28" targetRef="ParallelGateway_0ecwf3g" />
|
||||
<bpmn:businessRuleTask id="Activity_PBMultiSiteCheckQ28" name="PB Multi-Site Check Q28" camunda:decisionRef="Decision_core_info_multi_site_q28">
|
||||
<bpmn:incoming>Flow_1tfyk5m</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1wqp7vf</bpmn:outgoing>
|
||||
</bpmn:businessRuleTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1cme33c">
|
||||
<bpmndi:BPMNEdge id="Flow_1wqp7vf_di" bpmnElement="Flow_1wqp7vf">
|
||||
<di:waypoint x="820" y="325" />
|
||||
<di:waypoint x="865" y="325" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1tfyk5m_di" bpmnElement="Flow_1tfyk5m">
|
||||
<di:waypoint x="670" y="325" />
|
||||
<di:waypoint x="720" y="325" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1d4dncx_di" bpmnElement="Flow_1d4dncx">
|
||||
<di:waypoint x="890" y="300" />
|
||||
<di:waypoint x="890" y="250" />
|
||||
<di:waypoint x="990" y="250" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_16v64sg_di" bpmnElement="Flow_16v64sg">
|
||||
<di:waypoint x="510" y="325" />
|
||||
<di:waypoint x="570" y="325" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_09h1imz_di" bpmnElement="Flow_09h1imz">
|
||||
<di:waypoint x="350" y="325" />
|
||||
<di:waypoint x="410" y="325" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1v7oplk_di" bpmnElement="SequenceFlow_1v7oplk">
|
||||
<di:waypoint x="1215" y="325" />
|
||||
<di:waypoint x="1282" y="325" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0rw17h2_di" bpmnElement="SequenceFlow_0rw17h2">
|
||||
<di:waypoint x="1090" y="500" />
|
||||
<di:waypoint x="1190" y="500" />
|
||||
<di:waypoint x="1190" y="350" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0gsy7mo_di" bpmnElement="SequenceFlow_0gsy7mo">
|
||||
<di:waypoint x="1090" y="380" />
|
||||
<di:waypoint x="1190" y="380" />
|
||||
<di:waypoint x="1190" y="350" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1o39rt4_di" bpmnElement="SequenceFlow_1o39rt4">
|
||||
<di:waypoint x="1090" y="250" />
|
||||
<di:waypoint x="1190" y="250" />
|
||||
<di:waypoint x="1190" y="300" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_02nbqkn_di" bpmnElement="SequenceFlow_02nbqkn">
|
||||
<di:waypoint x="1090" y="130" />
|
||||
<di:waypoint x="1190" y="130" />
|
||||
<di:waypoint x="1190" y="300" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0xj8i4c_di" bpmnElement="SequenceFlow_0xj8i4c">
|
||||
<di:waypoint x="890" y="350" />
|
||||
<di:waypoint x="890" y="500" />
|
||||
<di:waypoint x="990" y="500" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1idbomg_di" bpmnElement="SequenceFlow_1idbomg">
|
||||
<di:waypoint x="890" y="350" />
|
||||
<di:waypoint x="890" y="380" />
|
||||
<di:waypoint x="990" y="380" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0f61fxp_di" bpmnElement="SequenceFlow_0f61fxp">
|
||||
<di:waypoint x="890" y="300" />
|
||||
<di:waypoint x="890" y="130" />
|
||||
<di:waypoint x="990" y="130" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1r3yrhy_di" bpmnElement="SequenceFlow_1r3yrhy">
|
||||
<di:waypoint x="188" y="325" />
|
||||
<di:waypoint x="250" y="325" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="ParallelGateway_0ecwf3g_di" bpmnElement="ParallelGateway_0ecwf3g">
|
||||
<dc:Bounds x="865" y="300" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ParallelGateway_01234ff_di" bpmnElement="ParallelGateway_01234ff">
|
||||
<dc:Bounds x="1165" y="300" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="EndEvent_16uwhzg_di" bpmnElement="EndEvent_16uwhzg">
|
||||
<dc:Bounds x="1282" y="307" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="StartEvent_1mhzkcr_di" bpmnElement="StartEvent_1mhzkcr">
|
||||
<dc:Bounds x="152" y="307" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_1y1qon7_di" bpmnElement="UserTask_1y1qon7">
|
||||
<dc:Bounds x="990" y="340" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_01zzzg9_di" bpmnElement="UserTask_01zzzg9">
|
||||
<dc:Bounds x="990" y="460" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0gtuk1e_di" bpmnElement="UserTask_EnterMultiSiteInfo">
|
||||
<dc:Bounds x="990" y="210" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0ebxkp7_di" bpmnElement="UserTask_0ebxkp7">
|
||||
<dc:Bounds x="990" y="90" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0vthni9_di" bpmnElement="Activity_10nxpt2">
|
||||
<dc:Bounds x="250" y="285" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0spxv8q_di" bpmnElement="Activity_PBMultiSiteCheckQ12">
|
||||
<dc:Bounds x="410" y="285" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0ah6heg_di" bpmnElement="Activity_PBMultiSiteCheckQ14">
|
||||
<dc:Bounds x="570" y="285" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0x7b58m_di" bpmnElement="Activity_PBMultiSiteCheckQ28">
|
||||
<dc:Bounds x="720" y="285" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
@ -1,40 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_488cd39" name="DRD" namespace="http://camunda.org/schema/1.0/dmn">
|
||||
<decision id="Decision_core_info_multi_site_q12" name="Core Info - Multi-Site - Q12">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="170" y="130" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="PB - Is Multi-Site">
|
||||
<inputExpression id="inputExpression_1" typeRef="string">
|
||||
<text>details.IS_MULTI_SITE</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="Task Header" name="ElementDoc_MultiSite_Q12" typeRef="string" />
|
||||
<rule id="DecisionRule_0irvsnq">
|
||||
<inputEntry id="UnaryTests_1s583o6">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0sbyzzq">
|
||||
<text>"Q12-Is this a multi-site study: Yes"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0cv40pc">
|
||||
<inputEntry id="UnaryTests_1gq1mc2">
|
||||
<text>0</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1bqfytt">
|
||||
<text>"Q12-Is this a multi-site study: No"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0j0uc7w">
|
||||
<inputEntry id="UnaryTests_1vjdw44">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0kqx95n">
|
||||
<text>"Q12-Is this a multi-site study: Question was not answered"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
@ -1,40 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_dc32393" name="DRD" namespace="http://camunda.org/schema/1.0/dmn">
|
||||
<decision id="Decision_core_info_multi_site_q14" name="Core Info - Multi-Site - Q14">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="230" y="160" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="PB - MultiSite UVA PI">
|
||||
<inputExpression id="inputExpression_1" typeRef="string">
|
||||
<text>details.IS_UVA_PI_MULTI</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="Task Header" name="ElementDoc_MultiSite_Q14" typeRef="string" />
|
||||
<rule id="DecisionRule_0ewhyrs">
|
||||
<inputEntry id="UnaryTests_0hnjckp">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_14q7lmq">
|
||||
<text>"Q14-Will the PI at UVA be the overall PI for all sites in the study: Yes"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1oewm3u">
|
||||
<inputEntry id="UnaryTests_09jmcfo">
|
||||
<text>0</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1yiqs5b">
|
||||
<text>"Q14-Will the PI at UVA be the overall PI for all sites in the study: No"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_01wesg0">
|
||||
<inputEntry id="UnaryTests_11lb1rb">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_07m1v2u">
|
||||
<text>"Q14-Will the PI at UVA be the overall PI for all sites in the study: Question was not presented"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
@ -1,24 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_dfddbbb" name="DRD" namespace="http://camunda.org/schema/1.0/dmn">
  <decision id="Decision_core_info_multi_site_q28" name="Core Info - Multi-Site - Q28">
    <extensionElements>
      <biodi:bounds x="250" y="130" width="180" height="80" />
    </extensionElements>
    <decisionTable id="decisionTable_1">
      <input id="input_1" label="PB - Collaborative Analysis">
        <inputExpression id="inputExpression_1" typeRef="string">
          <text></text>
        </inputExpression>
      </input>
      <output id="output_1" label="Task Header" name="ElementDoc_MultiSite_Q28" typeRef="string" />
      <rule id="DecisionRule_1lrzp3u">
        <inputEntry id="UnaryTests_0d0mlct">
          <text></text>
        </inputEntry>
        <outputEntry id="LiteralExpression_1v69x8b">
          <text>"Q28-Is this a single site Collaborative Analysis study in which data from this study is being done by UVA personnel will be combined…: Question was not presented"</text>
        </outputEntry>
      </rule>
    </decisionTable>
  </decision>
</definitions>
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,753 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_1wv9t3c" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
|
||||
<bpmn:process id="Process_19ej1y2" name="Data Securty Plan" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1co48s3">
|
||||
<bpmn:outgoing>SequenceFlow_100w7co</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:userTask id="Task_EnterHIPAAIdentifiers" name="Enter HIPAA Identifiers" camunda:formKey="Enter_HIPAA_Identifiers">
|
||||
<bpmn:documentation>Add a box for each of the HIPAA Identifiers that you will receive, collect, record, store long term, or send outside of UVA during the course of your research.</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="HIPAA_Ids" label="HIPAA Identifiers" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="repeat" value="hipaa" />
|
||||
<camunda:property id="spreadsheet.name" value="HIPAA_Ids.xls" />
|
||||
<camunda:property id="spreadsheet.value.column" value="Value" />
|
||||
<camunda:property id="spreadsheet.label.column" value="Label" />
|
||||
<camunda:property id="repeat_button_label" value="Add HIPAA Id" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="HIPAA_IdsDataQualifiers" label="Check each that apply:" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
<camunda:property id="repeat" value="hipaa" />
|
||||
<camunda:property id="hide_expression" value="model.HIPAA_Ids === 'HIPAA_Ids0'" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="!(model.HIPAA_Ids === 'HIPAA_Ids0') || model.HIPAA_Ids == null" />
|
||||
</camunda:validation>
|
||||
<camunda:value id="OrigSource" name="Original source data collection (receive, collect, or record at UVa)" />
|
||||
<camunda:value id="LongTerm" name="Store long term at UVa" />
|
||||
<camunda:value id="SendTransOutside" name="Send or transmit outside of UVA" />
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0jyty9m</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0blyor8</bpmn:outgoing>
|
||||
</bpmn:userTask>
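The HIPAA Identifiers field above is a spreadsheet-backed enum: the spreadsheet.name, spreadsheet.value.column, and spreadsheet.label.column properties point the form tooling at HIPAA_Ids.xls and its Value/Label columns. A minimal sketch of how such options could be loaded with xlrd, assuming a plain .xls file with a header row (the function name is hypothetical; the real option loading lives in the application's form service):

import xlrd  # legacy .xls reader

def load_spreadsheet_options(path="HIPAA_Ids.xls", value_col="Value", label_col="Label"):
    """Return [{'id': ..., 'name': ...}] option pairs from the first sheet (sketch only)."""
    sheet = xlrd.open_workbook(path).sheet_by_index(0)
    header = [str(cell.value) for cell in sheet.row(0)]
    v_idx, l_idx = header.index(value_col), header.index(label_col)
    return [
        {"id": sheet.cell_value(r, v_idx), "name": sheet.cell_value(r, l_idx)}
        for r in range(1, sheet.nrows)
    ]

The repeat="hipaa" property then lets the user add one such enum (plus its qualifier checkboxes) per identifier they collect.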
|
||||
<bpmn:parallelGateway id="ExclusiveGateway_0b16kmf">
|
||||
<bpmn:incoming>SequenceFlow_1oq4w2h</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1i6ac9a</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_084dyht</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_10g92nf</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_0pw57x9</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_0ng3fm8</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_0obqjjx</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1i6ac9a" sourceRef="ExclusiveGateway_0b16kmf" targetRef="Task_EnterPaperDocuments" />
|
||||
<bpmn:userTask id="Task_EnterPaperDocuments" name="Enter Paper Documents" camunda:formKey="Enter_Paper_Documents">
|
||||
<bpmn:documentation>Paper documents:</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="CollStorUVaLocPaperTypes" label="Check all that apply" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
<camunda:property id="help" value="### Appropriate UVA locations include one or more of the following:\r\n\r\n- Kept in a locked office in a building with 24 hour swipe locks when unattended\r\n- Kept in a locked file cabinet in a locked room when unattended\n- Kept in an office where study are personnel present in room at all times located in a building with 24 hour swipe locks or a room with a lock when unattended\n -Behind two locked doors when unattended" />
|
||||
</camunda:properties>
|
||||
<camunda:value id="Appropriate" name="Appropriate UVa location (Click Help for list)" />
|
||||
<camunda:value id="Approved" name="UVa approved storage facility" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="CollStorUVaLocPaperOther" label="Other: (Please describe)" type="textarea" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_1i6ac9a</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1q6gf6w</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_084dyht" sourceRef="ExclusiveGateway_0b16kmf" targetRef="Task_EnterEmailedUVAPersonnel" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_10g92nf" sourceRef="ExclusiveGateway_0b16kmf" targetRef="Task_EnterEMR" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0pw57x9" sourceRef="ExclusiveGateway_0b16kmf" targetRef="Task_EnterUVAApprovedECRF" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0ng3fm8" sourceRef="ExclusiveGateway_0b16kmf" targetRef="Task_EnterUVaServersWebsites" />
|
||||
<bpmn:userTask id="Task_EnterEmailedUVAPersonnel" name="Enter Emailed to UVA Personnel" camunda:formKey="EnterEmailedUVAPersonnel">
|
||||
<bpmn:documentation>Emailed to other UVA personnel:</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="CollStorUVaLocEmailUVaTypes" label="Check all that apply:" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
</camunda:properties>
|
||||
<camunda:value id="ResearchNoHIPPAIds" name="Research data emailed to UVA personnel, but with no HIPAA identifiers except dates." />
|
||||
<camunda:value id="EmailUVa" name="Email only to and from UVA personnel with *HS in the Global Address List" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="CollStorUVaLocEmailOther" label="Other Email Characteristics (Please describe)" type="textarea" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_084dyht</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_12cos7w</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="Task_EnterEMR" name="Enter EMR (EPIC)" camunda:formKey="EnterEMR">
|
||||
<bpmn:documentation>Electronic Medical Record (EPIC):</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="CollStorUVaLocEMR" label="Data will be collected in EPIC as part of routine care or as part of medical center encounters during the research study" type="enum" defaultValue="Not Applicable">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="radio" />
|
||||
<camunda:property id="description" value="Use of EPIC in this context encludes data managed entirely in the EPIC environment." />
|
||||
</camunda:properties>
|
||||
<camunda:value id="NA" name="Not Applicable" />
|
||||
<camunda:value id="Yes" name="Yes" />
|
||||
<camunda:value id="No" name="No" />
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_10g92nf</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0z10m1d</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="Task_EnterUVAApprovedECRF" name="Enter UVA Approved eCRF" camunda:formKey="EnterUVaApprovedECRF">
|
||||
<bpmn:documentation>UVA approved eCRF or clinical trials management system:</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="CollStorUVaLocCTMSTypes" label="Check all that apply:" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
<camunda:property id="description" value="If no device, please choose "Yes" below" />
|
||||
</camunda:properties>
|
||||
<camunda:value id="hstsdatalab" name="hstsdatalab.hscs.virginia.edu" />
|
||||
<camunda:value id="hstsdsmpogapp" name="hstsdsmpogapp.hscs.virginia.edu" />
|
||||
<camunda:value id="musicvpn01" name="musicvpn01.med.virginia.edu" />
|
||||
<camunda:value id="OnCore" name="OnCore (oncore.med.virginia.edu)" />
|
||||
<camunda:value id="RedCap" name="Redcap-int.hscs.virginia.edu" />
|
||||
<camunda:value id="reveal" name="https://reveal.studymanager.com/" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="CollStorUVaLocCTMSAck" label="I acknowledge that ANY electronic individual use devices used to >>connect<< to any servers/websites checked above are supported by UVA Health System IT." type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="[Definition of electronic individual user devices](https://security.virginia.edu/individual%E2%80%93use-electronic-devices)" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="CollStorUVaLocCTMSAckNo" label="If you answered "No" to the acknowledgment above, for each device list who provides support. Include their contact information (Device Name: Contact Name, Email & Phone):" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="model.CollStorUVaLocCTMSAck || model.CollStorUVaLocCTMSAck == null" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0pw57x9</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1agmshr</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="Task_EnterUVaServersWebsites" name="Enter UVA Servers & Websites" camunda:formKey="EnterUVaServersWebsites">
|
||||
<bpmn:documentation>UVA servers & websites:</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="CollStorUVA_LocUVA_ServersWebsites" label="Check all that apply:" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
<camunda:property id="help" value="You may locate the server/drive name and path by taking the following steps:\n\n- Windows: Click your “computer icon”, right click on the Drive icon (e.g., F). Then click on ”properties”. The server/drive name and path will appear at the very top of the box.\n- If you need additional assistance, contact your department computer support or system administrator for assistance." />
|
||||
<camunda:property id="spreadsheet.name" value="UVA_ServersWebsitesList.xls" />
|
||||
<camunda:property id="spreadsheet.value.column" value="Value" />
|
||||
<camunda:property id="spreadsheet.label.column" value="Label" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0ng3fm8</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1xp62py</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0obqjjx" sourceRef="ExclusiveGateway_0b16kmf" targetRef="Task_EnterWebCloudServer" />
|
||||
<bpmn:userTask id="Task_EnterWebCloudServer" name="Enter Web or Cloud Server" camunda:formKey="EnterWebCloudServer">
|
||||
<bpmn:documentation>Web-based server, cloud server, or any non-centrally managed server (not entered previously):</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="CollStorUVaLocWebCloud" label="Will data be collected and/or stored on a web-based server, cloud server or any non-centrally UVA managed server?" type="boolean" />
|
||||
<camunda:formField id="CollStorUVaLocWebCloudDescribe" label="Please describe:" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="rows" value="5" />
|
||||
<camunda:property id="hide_expression" value="!(model.CollStorUVaLocWebCloud)" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="CollStorUVaLocWebCloudHIPAAIds" label="Check all HIPAA Identifiers stored on a web based server, cloud server, and/or any non-centrally managed UVA server." type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
<camunda:property id="hide_expression" value="!(model.CollStorUVaLocWebCloud)" />
|
||||
<camunda:property id="spreadsheet.name" value="HIPAA_Ids.xls" />
|
||||
<camunda:property id="spreadsheet.value.column" value="Value" />
|
||||
<camunda:property id="spreadsheet.label.column" value="Label" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0obqjjx</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_10fsxk4</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0z10m1d" sourceRef="Task_EnterEMR" targetRef="ExclusiveGateway_06kvl84" />
|
||||
<bpmn:parallelGateway id="ExclusiveGateway_06kvl84">
|
||||
<bpmn:incoming>SequenceFlow_0z10m1d</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_1q6gf6w</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_12cos7w</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_1agmshr</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_1xp62py</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_10fsxk4</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0nc6lcs</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1q6gf6w" sourceRef="Task_EnterPaperDocuments" targetRef="ExclusiveGateway_06kvl84" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_12cos7w" sourceRef="Task_EnterEmailedUVAPersonnel" targetRef="ExclusiveGateway_06kvl84" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1agmshr" sourceRef="Task_EnterUVAApprovedECRF" targetRef="ExclusiveGateway_06kvl84" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1xp62py" sourceRef="Task_EnterUVaServersWebsites" targetRef="ExclusiveGateway_06kvl84" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_10fsxk4" sourceRef="Task_EnterWebCloudServer" targetRef="ExclusiveGateway_06kvl84" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0nc6lcs" sourceRef="ExclusiveGateway_06kvl84" targetRef="Task_EnterIndividualUseDevices" />
|
||||
<bpmn:userTask id="Task_EnterIndividualUseDevices" name="Enter Individual Use Devices" camunda:formKey="EnterIndividualUseDevices">
|
||||
<bpmn:documentation>Answer the questions for each of the Individual Use Devices that you use to collect or store your data onto your individual use device during the course of your research. Do not select these items if they are only to be used to connect elsewhere (to the items you identified in Electronic Medical Record, UVA approved eCRF or clinical trials management system, UVA servers & websites, and Web-based server, cloud server, or any non-centrally managed server):</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="IUD" label="Individual use devices:" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="repeat" value="devices" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
<camunda:value id="None" name="No Individual Use Devices will be used" />
|
||||
<camunda:value id="FlashDrive" name="Flash (thumb) drive" />
|
||||
<camunda:value id="ExternalDrive" name="External drive" />
|
||||
<camunda:value id="CDorDVD" name="CD or DVD" />
|
||||
<camunda:value id="Desktop" name="Desktop Computer" />
|
||||
<camunda:value id="Laptop" name="Laptop" />
|
||||
<camunda:value id="Tablet" name="Tablet" />
|
||||
<camunda:value id="SmartPhone" name="Smart phone" />
|
||||
<camunda:value id="Camera" name="Camera" />
|
||||
<camunda:value id="VideoRecorder" name="Video recorder" />
|
||||
<camunda:value id="AudioRecorder" name="Audio recorder" />
|
||||
<camunda:value id="Biometric" name="Biometric recording device" />
|
||||
<camunda:value id="FitnessTrackers" name="Fitness Trackers" />
|
||||
<camunda:value id="Other" name="Other" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IUD_Other" label="Since you selected "Other" above, please identify the device type:" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="rows" value="5" />
|
||||
<camunda:property id="repeat" value="devices" />
|
||||
<camunda:property id="hide_expression" value="model.IUD !== 'Other'" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IUD_Process" label="Please describe your process for collecting, storing and/or transmitting data on the Individual Use Devices you selected in earlier steps (phones, flash drives, CDs, etc.):" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="rows" value="5" />
|
||||
<camunda:property id="repeat" value="devices" />
|
||||
<camunda:property id="hide_expression" value="model.IUD_Devices == 'None'" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IUD_HIPPA_Ids" label="Check the HIPAA Identifiers stored with the data on this device (e.g. such as full-face picture or video):" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
<camunda:property id="repeat" value="devices" />
|
||||
<camunda:property id="hide_expression" value="model.IUD == 'None'" />
|
||||
<camunda:property id="spreadsheet.name" value="HIPAA_Ids.xls" />
|
||||
<camunda:property id="spreadsheet.value.column" value="Value" />
|
||||
<camunda:property id="spreadsheet.label.column" value="Label" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IUD_Backups" label="Describe any backups made of the data stored on the device. Please include the location & method of data transfer:" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="rows" value="5" />
|
||||
<camunda:property id="repeat" value="devices" />
|
||||
<camunda:property id="hide_expression" value="model.IUD == 'None'" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IUD_HowLong" label="How long will the data remain on the individual-use device before being transferred?" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="rows" value="5" />
|
||||
<camunda:property id="repeat" value="devices" />
|
||||
<camunda:property id="hide_expression" value="model.IUD == 'None'" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IUD_DeleteData" label="After the information is transferred elsewhere, will you securely delete all the data from this device?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="repeat" value="devices" />
|
||||
<camunda:property id="hide_expression" value="model.IUD == 'None'" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IUD_Access" label="Will anyone other than the study team or sponsor/CRO have access to data on this device?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="repeat" value="devices" />
|
||||
<camunda:property id="hide_expression" value="model.IUD == 'None'" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IUD_AccessYes" label="If yes, describe:" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="repeat" value="devices" />
|
||||
<camunda:property id="rows" value="5" />
|
||||
<camunda:property id="hide_expression" value="model.IUD === 'None' || !model.IUD_Access" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IUD_Alternatives" label="Other storage alternatives that were considered and the reasons they are unworkable:" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="repeat" value="devices" />
|
||||
<camunda:property id="rows" value="5" />
|
||||
<camunda:property id="hide_expression" value="model.IUD == 'None'" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IUD_Justification" label="The justification for storage of these data on this individual use device is:" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="repeat" value="devices" />
|
||||
<camunda:property id="rows" value="5" />
|
||||
<camunda:property id="hide_expression" value="model.IUD == 'None'" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0nc6lcs</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0gp2pjm</bpmn:outgoing>
|
||||
</bpmn:userTask>
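Several of the repeating fields above are driven by hide_expression properties such as model.IUD !== 'Other' and model.IUD == 'None', which the form front end evaluates against the current answers. A hedged Python sketch of the same visibility rules, only to make the intent explicit (the actual evaluation happens client side, and the function below is illustrative, not part of this codebase):

def visible_device_fields(model: dict) -> dict:
    """Sketch of which Individual Use Device follow-up fields would be shown."""
    device = model.get("IUD")
    has_device = device is not None and device != "None"
    return {
        "IUD_Other": device == "Other",                     # hidden while model.IUD !== 'Other'
        "IUD_HIPPA_Ids": has_device,                        # hidden when model.IUD == 'None'
        "IUD_AccessYes": has_device and bool(model.get("IUD_Access")),
    }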
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0gp2pjm" sourceRef="Task_EnterIndividualUseDevices" targetRef="Task_EnterOutsideUVA" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0mgwas4" sourceRef="Task_EnterOutsideUVA" targetRef="ExclusiveGateway_0pi0c2d" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1i8e52t" sourceRef="ExclusiveGateway_0x3t2vl" targetRef="Task_EnterEmailMethods" />
|
||||
<bpmn:userTask id="Task_EnterOutsideUVA" name="Enter Outside of UVA" camunda:formKey="EnterOutsideUVa">
|
||||
<bpmn:documentation>Transmission to and/or storage of the human subject research data OUTSIDE of UVA, such as to a sponsor or a colleague at another institution
|
||||
|
||||
##### Important
|
||||
Indicate all the possible formats in which you will transmit your data outside of UVA. Options should be selected even if the data is only temporarily stored in that format. Also remember to select any location where a subject’s SSN, hospital billing or account number, or certificate/license number might be recorded either with the data or separately.</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="DateTransmittedOutside" label="Will data be transmitted to a sponsor or a colleague at another institution?" type="boolean">
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0gp2pjm</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0mgwas4</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="Task_EnterEmailMethods" name="Enter Email Methods" camunda:formKey="EnterEmailMethods">
|
||||
<bpmn:documentation>Since you answered &#34;Yes&#34; to the question above, please complete the questions below:</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="DateTransmittedOutsideViaHSC" label="Data will be emailed to non-UVA personnel via HSC secure email:" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="For information regarding how to email data to non-UVA personnel via HSC secure email: http://hit.healthsystem.virginia.edu/index.cfm/departments/security/how-tos/how-to-encrypt-a-medical-center-email/" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="DateTransmittedOutsideRegEmail" label="Data will be emailed to non-UVA personnel via regular email" type="boolean" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_1i8e52t</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0uewki3</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_12bv2i4" sourceRef="ExclusiveGateway_0x3t2vl" targetRef="Task_EnterDataManagement" />
|
||||
<bpmn:userTask id="Task_EnterDataManagement" name="Enter Data Management" camunda:formKey="EnterDataManagement">
|
||||
<bpmn:documentation>Data will be sent to website/server/drive managed by the sponsor or CRO</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="EnterDataManagementWebwsiteServerDriveAck" label="I acknowledge that ANY electronic individual use devices used to connect to any servers/websites listed below are supported by UVA Health System IT." type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="[Definition of electronic individual user devices](https://security.virginia.edu/individual%E2%80%93use-electronic-devices)" />
|
||||
<camunda:property id="repeat" value="DataSentSponsorCRO" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="EnterDataManagementHIPAA_Ids" label="HIPAA Identifiers" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="repeat" value="DataSentSponsorCRO" />
|
||||
<camunda:property id="spreadsheet.name" value="HIPAA_Ids.xls" />
|
||||
<camunda:property id="spreadsheet.value.column" value="Value" />
|
||||
<camunda:property id="spreadsheet.label.column" value="Label" />
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="EnterDataManagementSharingContract" label="If sharing data with anyone outside of UVA, do you confirm that you will obtain a contract with them via the School of Medicine Office of Grants and Contracts (OGC) or the Office of Sponsored Programs (OSP)?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="repeat" value="DataSentSponsorCRO" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="EnterDataManagementWebsiteServerDriveEncrypted" label="Data will be sent and stored in an encrypted fashion (e.g. will only be shared and via Secure FX, Secure FTP, HTTPS, PGP) and the server/drive is configured to store data regulated by HIPAA" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="repeat" value="DataSentSponsorCRO" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="EnterDataManagementWebsiteServerDriveURL" label="Name (URL) of website (e.g. https://remote.sponsor.com/project name)" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="repeat" value="DataSentSponsorCRO" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_12bv2i4</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_08rwbhm</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1mnmo6p" sourceRef="ExclusiveGateway_0x3t2vl" targetRef="Task_EnterTransmissionMethod" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_08rwbhm" sourceRef="Task_EnterDataManagement" targetRef="ExclusiveGateway_1lpm3pa" />
|
||||
<bpmn:parallelGateway id="ExclusiveGateway_1lpm3pa">
|
||||
<bpmn:incoming>SequenceFlow_08rwbhm</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_0uewki3</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_0lere0k</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0k2r83n</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0uewki3" sourceRef="Task_EnterEmailMethods" targetRef="ExclusiveGateway_1lpm3pa" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0lere0k" sourceRef="Task_EnterTransmissionMethod" targetRef="ExclusiveGateway_1lpm3pa" />
|
||||
<bpmn:userTask id="Task_EnterTransmissionMethod" name="Enter Transmission Method" camunda:formKey="EnterTransmissionMethod">
|
||||
<bpmn:documentation>Transmission Method of data that will be mailed or faxed (FedEx, UPS, certified mail, etc.) Check all that apply:</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="DataTransmissionMethodPaper" label="Paper documents" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="radio" />
|
||||
<camunda:property id="markdown_description" value="**Note:** Paper documents shipped using non-trackable method - **Not Allowed.**" />
|
||||
</camunda:properties>
|
||||
<camunda:value id="Yes" name="Yes, paper documents will be shipped using trackable method" />
|
||||
<camunda:value id="No" name="No, paper documents will not be shipped" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="DataTransmissionMethodEncrypted" label="Individual Use Devices" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="radio" />
|
||||
<camunda:property id="markdown_description" value="**Note:** Examples of individual use devices: CD, thumb drive, etc." />
|
||||
</camunda:properties>
|
||||
<camunda:value id="Yes" name="Yes, individual use devices will be shipped using a trackable method with data encrypted and password to the encrypted data transmitted separately" />
|
||||
<camunda:value id="No" name="No, individual use devices will not be shipped" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="DataTransmissionMethodFaxed" label="Faxed" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="radio" />
|
||||
<camunda:property id="markdown_description" value="**Note:** By checking this option, you are also confirming you will verify FAX numbers before faxing and use FAX cover sheet with a confidentiality statement." />
|
||||
</camunda:properties>
|
||||
<camunda:value id="Yes" name="Yes, data wile be faxed to a receiving machine in a restricted-access location with the intended recipient is clearly indicated, alerted to the pending transmission and available to pick up immediately." />
|
||||
<camunda:value id="No" name="No, data will not be faxed" />
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_1mnmo6p</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0lere0k</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:endEvent id="EndEvent_151cj59">
|
||||
<bpmn:documentation>Please download and check your DSP doc.</bpmn:documentation>
|
||||
<bpmn:incoming>SequenceFlow_16kyite</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:exclusiveGateway id="ExclusiveGateway_0pi0c2d" name="Outside of UVa?">
|
||||
<bpmn:incoming>SequenceFlow_0mgwas4</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_01hl869</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_0t6xl9i</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_01hl869" name="Yes" sourceRef="ExclusiveGateway_0pi0c2d" targetRef="ExclusiveGateway_0x3t2vl">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">DateTransmittedOutside == True</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:parallelGateway id="ExclusiveGateway_0x3t2vl">
|
||||
<bpmn:incoming>SequenceFlow_01hl869</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1i8e52t</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_12bv2i4</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_1mnmo6p</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_100w7co" sourceRef="StartEvent_1co48s3" targetRef="Task_0q6ir2l" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1oq4w2h" sourceRef="Task_196zozc" targetRef="ExclusiveGateway_0b16kmf" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0m2op9s" sourceRef="Task_0q6ir2l" targetRef="Task_0uotpzg" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0jyty9m" sourceRef="Task_0uotpzg" targetRef="Task_EnterHIPAAIdentifiers" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0blyor8" sourceRef="Task_EnterHIPAAIdentifiers" targetRef="Task_196zozc" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0k2r83n" sourceRef="ExclusiveGateway_1lpm3pa" targetRef="Task_GenDoc" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0t6xl9i" sourceRef="ExclusiveGateway_0pi0c2d" targetRef="Task_GenDoc">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">DateTransmittedOutside == False</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
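The two conditional flows out of ExclusiveGateway_0pi0c2d route on the boolean captured in the Enter Outside of UVA task: DateTransmittedOutside == True fans out through the parallel gateway to the email, data-management, and transmission-method questions, while False skips straight to document generation. In plain Python the routing decision amounts to the following (a sketch of the conditions only, not the engine's actual evaluation code):

def outside_uva_route(task_data: dict) -> str:
    """Mirror of the ExclusiveGateway_0pi0c2d condition expressions (illustrative)."""
    if task_data.get("DateTransmittedOutside") is True:
        return "ExclusiveGateway_0x3t2vl"  # parallel split: email, data management, transmission
    return "Task_GenDoc"                   # DateTransmittedOutside == False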
|
||||
<bpmn:sequenceFlow id="SequenceFlow_16kyite" sourceRef="Task_GenDoc" targetRef="EndEvent_151cj59" />
|
||||
<bpmn:scriptTask id="Task_GenDoc" name="Generate DSP">
|
||||
<bpmn:incoming>SequenceFlow_0k2r83n</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_0t6xl9i</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_16kyite</bpmn:outgoing>
|
||||
<bpmn:script>complete_template('NEW_DSP_template.docx', 'Study_DataSecurityPlan')</bpmn:script>
|
||||
</bpmn:scriptTask>
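complete_template is one of the script-engine helpers the workflow calls; here it appears to fill NEW_DSP_template.docx with the collected answers and file the result under the Study_DataSecurityPlan code. As a rough illustration of that kind of step, assuming a docxtpl-style Jinja template (the actual helper is provided by the application and is not reproduced here; the function name and output path below are hypothetical):

from docxtpl import DocxTemplate  # assumption: a Jinja-based .docx templating library

def fill_dsp_template(context: dict, template="NEW_DSP_template.docx", out="Study_DataSecurityPlan.docx"):
    """Sketch of a template-fill step like complete_template (not the real implementation)."""
    doc = DocxTemplate(template)
    doc.render(context)  # context holds the form answers gathered in the tasks above
    doc.save(out)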
|
||||
<bpmn:manualTask id="Task_0q6ir2l" name="View Instructions">
|
||||
<bpmn:documentation>##### Instructions
|
||||
|
||||
Overview: You should consult with UVA Information Security (InfoSec) while completing this Step if your protocol will involve highly technical issues such as the creation of a website to collect data, software application development, the use of a smart phone app, or if you plan to store identifiable data onto an individual use device such as a tablet, laptop, or camera. Otherwise, answer the questions on this Step to create the Data Security Plan for your study. After you submit this Step, you can view the resulting Data Security Plan and assessment outcomes on the Data Security Plan Upload Step and submit your plan for UVA Information Security (InfoSec) or IRB-HSR Review.
|
||||
|
||||
You need a protocol to submit for this step. If you do not have a protocol, please go to IRB Protocol Builder to create the Protocol template. Complete the template and upload.
|
||||
|
||||
UVA Information Security Contact Information:
|
||||
|
||||
Website: UVA Office of Information Security
|
||||
Email: IT-Security@Virginia.edu
|
||||
|
||||
HIPAA Reference: https://www.hhs.gov/hipaa/for-professionals/privacy/laws-regulations/index.html
|
||||
|
||||
Process: The Study Team will answer the questions in this section to create the Data Security Plan and, if required, a Highly Sensitive Data Storage Request Form.
|
||||
|
||||
How to: The Data Security Plan is auto-generated based on your answers on this Step. You can save your information here and check the outcomes on the Data Security Plan Upload Step at any time.
|
||||
|
||||
Submit the step only when you are ready. After you "Submit" the step, the information will not be available for editing.
|
||||
|
||||
|
||||
# test</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_23d42bc" label="asdg" type="boolean" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_100w7co</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0m2op9s</bpmn:outgoing>
|
||||
</bpmn:manualTask>
|
||||
<bpmn:manualTask id="Task_0uotpzg" name="View HIPAA ID Instructions">
|
||||
<bpmn:documentation>• Include the HIPAA identifier if you are collecting, recording or receiving any of these items for a potential subject, an enrolled subject, a subject’s relative, household member or employer.
|
||||
|
||||
• Include the HIPAA identifier even if you are recording any item below temporarily while the information is being collected.
|
||||
|
||||
• Keep in mind that the information below includes data collected via photographs, video, audiotapes, and systems like IVRS (Interactive Voice Response System)
|
||||
|
||||
• If you list no HIPAA identifiers for any item, it means you would never be able to go back and obtain any additional information about an individual.</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_3l1fkli" label="Read?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="console.log('this', this)" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0m2op9s</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0jyty9m</bpmn:outgoing>
|
||||
</bpmn:manualTask>
|
||||
<bpmn:manualTask id="Task_196zozc" name="View Definitions & Instructions">
|
||||
<bpmn:documentation>#### Collection & storage of research data at UVA
|
||||
|
||||
• Include the HIPAA identifier if you are collecting, recording or receiving any of these items for a potential subject, an enrolled subject, a subject’s relative, household member or employer.
|
||||
|
||||
• Include the HIPAA identifier even if you are recording any item below temporarily while the information is being collected.
|
||||
|
||||
• Keep in mind that the information below includes data collected via photographs, video, audiotapes, and systems like IVRS (Interactive Voice Response System)
|
||||
|
||||
• If you list no HIPAA identifiers for any item, it means you would never be able to go back and obtain any additional information about an individual.
|
||||
|
||||
##### Important
|
||||
Indicate all the possible formats in which you will collect or receive your original source documentation at UVA. Also indicate any locations where it might be stored long term at UVA. Options should be selected even if the data is only temporarily stored in that format. Check all that apply for each category. If &#34;Other&#34; is selected, please provide a brief description.</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="def_ack" label="I have read definitions" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
</camunda:properties>
|
||||
<camunda:value id="def_ack" name="Yes" />
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0blyor8</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1oq4w2h</bpmn:outgoing>
|
||||
</bpmn:manualTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_19ej1y2">
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_16kyite_di" bpmnElement="SequenceFlow_16kyite">
|
||||
<di:waypoint x="2240" y="390" />
|
||||
<di:waypoint x="2322" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0t6xl9i_di" bpmnElement="SequenceFlow_0t6xl9i">
|
||||
<di:waypoint x="1620" y="415" />
|
||||
<di:waypoint x="1620" y="640" />
|
||||
<di:waypoint x="2190" y="640" />
|
||||
<di:waypoint x="2190" y="430" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0k2r83n_di" bpmnElement="SequenceFlow_0k2r83n">
|
||||
<di:waypoint x="2075" y="390" />
|
||||
<di:waypoint x="2140" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0blyor8_di" bpmnElement="SequenceFlow_0blyor8">
|
||||
<di:waypoint x="660" y="390" />
|
||||
<di:waypoint x="717" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0jyty9m_di" bpmnElement="SequenceFlow_0jyty9m">
|
||||
<di:waypoint x="498" y="390" />
|
||||
<di:waypoint x="560" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0m2op9s_di" bpmnElement="SequenceFlow_0m2op9s">
|
||||
<di:waypoint x="351" y="390" />
|
||||
<di:waypoint x="398" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1oq4w2h_di" bpmnElement="SequenceFlow_1oq4w2h">
|
||||
<di:waypoint x="817" y="390" />
|
||||
<di:waypoint x="875" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_100w7co_di" bpmnElement="SequenceFlow_100w7co">
|
||||
<di:waypoint x="178" y="390" />
|
||||
<di:waypoint x="251" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_01hl869_di" bpmnElement="SequenceFlow_01hl869">
|
||||
<di:waypoint x="1645" y="390" />
|
||||
<di:waypoint x="1725" y="390" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1677" y="372" width="18" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0lere0k_di" bpmnElement="SequenceFlow_0lere0k">
|
||||
<di:waypoint x="1950" y="530" />
|
||||
<di:waypoint x="2050" y="530" />
|
||||
<di:waypoint x="2050" y="415" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0uewki3_di" bpmnElement="SequenceFlow_0uewki3">
|
||||
<di:waypoint x="1950" y="250" />
|
||||
<di:waypoint x="2050" y="250" />
|
||||
<di:waypoint x="2050" y="365" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_08rwbhm_di" bpmnElement="SequenceFlow_08rwbhm">
|
||||
<di:waypoint x="1950" y="390" />
|
||||
<di:waypoint x="2025" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1mnmo6p_di" bpmnElement="SequenceFlow_1mnmo6p">
|
||||
<di:waypoint x="1750" y="415" />
|
||||
<di:waypoint x="1750" y="530" />
|
||||
<di:waypoint x="1850" y="530" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_12bv2i4_di" bpmnElement="SequenceFlow_12bv2i4">
|
||||
<di:waypoint x="1775" y="390" />
|
||||
<di:waypoint x="1850" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1i8e52t_di" bpmnElement="SequenceFlow_1i8e52t">
|
||||
<di:waypoint x="1750" y="365" />
|
||||
<di:waypoint x="1750" y="250" />
|
||||
<di:waypoint x="1850" y="250" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0mgwas4_di" bpmnElement="SequenceFlow_0mgwas4">
|
||||
<di:waypoint x="1530" y="390" />
|
||||
<di:waypoint x="1595" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0gp2pjm_di" bpmnElement="SequenceFlow_0gp2pjm">
|
||||
<di:waypoint x="1360" y="390" />
|
||||
<di:waypoint x="1430" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0nc6lcs_di" bpmnElement="SequenceFlow_0nc6lcs">
|
||||
<di:waypoint x="1185" y="390" />
|
||||
<di:waypoint x="1260" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_10fsxk4_di" bpmnElement="SequenceFlow_10fsxk4">
|
||||
<di:waypoint x="1080" y="640" />
|
||||
<di:waypoint x="1160" y="640" />
|
||||
<di:waypoint x="1160" y="415" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1xp62py_di" bpmnElement="SequenceFlow_1xp62py">
|
||||
<di:waypoint x="1080" y="540" />
|
||||
<di:waypoint x="1160" y="540" />
|
||||
<di:waypoint x="1160" y="415" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1agmshr_di" bpmnElement="SequenceFlow_1agmshr">
|
||||
<di:waypoint x="1080" y="440" />
|
||||
<di:waypoint x="1160" y="440" />
|
||||
<di:waypoint x="1160" y="415" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_12cos7w_di" bpmnElement="SequenceFlow_12cos7w">
|
||||
<di:waypoint x="1080" y="220" />
|
||||
<di:waypoint x="1160" y="220" />
|
||||
<di:waypoint x="1160" y="365" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1q6gf6w_di" bpmnElement="SequenceFlow_1q6gf6w">
|
||||
<di:waypoint x="1080" y="120" />
|
||||
<di:waypoint x="1160" y="120" />
|
||||
<di:waypoint x="1160" y="365" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0z10m1d_di" bpmnElement="SequenceFlow_0z10m1d">
|
||||
<di:waypoint x="1080" y="320" />
|
||||
<di:waypoint x="1160" y="320" />
|
||||
<di:waypoint x="1160" y="365" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0obqjjx_di" bpmnElement="SequenceFlow_0obqjjx">
|
||||
<di:waypoint x="900" y="415" />
|
||||
<di:waypoint x="900" y="640" />
|
||||
<di:waypoint x="980" y="640" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0ng3fm8_di" bpmnElement="SequenceFlow_0ng3fm8">
|
||||
<di:waypoint x="900" y="415" />
|
||||
<di:waypoint x="900" y="540" />
|
||||
<di:waypoint x="980" y="540" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0pw57x9_di" bpmnElement="SequenceFlow_0pw57x9">
|
||||
<di:waypoint x="900" y="415" />
|
||||
<di:waypoint x="900" y="440" />
|
||||
<di:waypoint x="980" y="440" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_10g92nf_di" bpmnElement="SequenceFlow_10g92nf">
|
||||
<di:waypoint x="900" y="365" />
|
||||
<di:waypoint x="900" y="320" />
|
||||
<di:waypoint x="980" y="320" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_084dyht_di" bpmnElement="SequenceFlow_084dyht">
|
||||
<di:waypoint x="900" y="365" />
|
||||
<di:waypoint x="900" y="220" />
|
||||
<di:waypoint x="980" y="220" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1i6ac9a_di" bpmnElement="SequenceFlow_1i6ac9a">
|
||||
<di:waypoint x="900" y="365" />
|
||||
<di:waypoint x="900" y="120" />
|
||||
<di:waypoint x="980" y="120" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="StartEvent_1co48s3_di" bpmnElement="StartEvent_1co48s3">
|
||||
<dc:Bounds x="142" y="372" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_16imtaa_di" bpmnElement="Task_EnterHIPAAIdentifiers">
|
||||
<dc:Bounds x="560" y="350" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ParallelGateway_03qblyb_di" bpmnElement="ExclusiveGateway_0b16kmf">
|
||||
<dc:Bounds x="875" y="365" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_1wsga3m_di" bpmnElement="Task_EnterPaperDocuments">
|
||||
<dc:Bounds x="980" y="80" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0o1xjub_di" bpmnElement="Task_EnterEmailedUVAPersonnel">
|
||||
<dc:Bounds x="980" y="180" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_1gnbchf_di" bpmnElement="Task_EnterEMR">
|
||||
<dc:Bounds x="980" y="280" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0a4bj92_di" bpmnElement="Task_EnterUVAApprovedECRF">
|
||||
<dc:Bounds x="980" y="400" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_1f2b80a_di" bpmnElement="Task_EnterUVaServersWebsites">
|
||||
<dc:Bounds x="980" y="500" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0n3jbd7_di" bpmnElement="Task_EnterWebCloudServer">
|
||||
<dc:Bounds x="980" y="600" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ParallelGateway_0zl5t7b_di" bpmnElement="ExclusiveGateway_06kvl84">
|
||||
<dc:Bounds x="1135" y="365" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0q8o038_di" bpmnElement="Task_EnterIndividualUseDevices">
|
||||
<dc:Bounds x="1260" y="350" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_10x8kgc_di" bpmnElement="Task_EnterOutsideUVA">
|
||||
<dc:Bounds x="1430" y="350" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_1x8azom_di" bpmnElement="Task_EnterEmailMethods">
|
||||
<dc:Bounds x="1850" y="210" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_1r640re_di" bpmnElement="Task_EnterDataManagement">
|
||||
<dc:Bounds x="1850" y="350" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ParallelGateway_0cignbh_di" bpmnElement="ExclusiveGateway_1lpm3pa">
|
||||
<dc:Bounds x="2025" y="365" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0ns9m8t_di" bpmnElement="Task_EnterTransmissionMethod">
|
||||
<dc:Bounds x="1850" y="490" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="EndEvent_151cj59_di" bpmnElement="EndEvent_151cj59">
|
||||
<dc:Bounds x="2322" y="372" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ExclusiveGateway_0pi0c2d_di" bpmnElement="ExclusiveGateway_0pi0c2d" isMarkerVisible="true">
|
||||
<dc:Bounds x="1595" y="365" width="50" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1580" y="341" width="80" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ParallelGateway_1284xgu_di" bpmnElement="ExclusiveGateway_0x3t2vl">
|
||||
<dc:Bounds x="1725" y="365" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ScriptTask_1616pnb_di" bpmnElement="Task_GenDoc">
|
||||
<dc:Bounds x="2140" y="350" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1l6rjbr_di" bpmnElement="Task_0q6ir2l">
|
||||
<dc:Bounds x="251" y="350" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_11u7de2_di" bpmnElement="Task_0uotpzg">
|
||||
<dc:Bounds x="398" y="350" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0nfmn0k_di" bpmnElement="Task_196zozc">
|
||||
<dc:Bounds x="717" y="350" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
File diff suppressed because it is too large
@ -1,214 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_ef00925" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
|
||||
<bpmn:collaboration id="Collaboration_1540cgn">
|
||||
<bpmn:participant id="Participant_17tc4sp" name="Department Chair Approval" processRef="Process_b47cbda" />
|
||||
</bpmn:collaboration>
|
||||
<bpmn:process id="Process_b47cbda" isExecutable="true">
|
||||
<bpmn:laneSet id="LaneSet_1de522h">
|
||||
<bpmn:lane id="Lane_1rq9xje">
|
||||
<bpmn:flowNodeRef>StartEvent_1</bpmn:flowNodeRef>
|
||||
<bpmn:flowNodeRef>Activity_0s4b6tq</bpmn:flowNodeRef>
|
||||
<bpmn:flowNodeRef>Activity_1cfbpwn</bpmn:flowNodeRef>
|
||||
<bpmn:flowNodeRef>Activity_0kyyjcu</bpmn:flowNodeRef>
|
||||
<bpmn:flowNodeRef>Activity_0pm3m9s</bpmn:flowNodeRef>
|
||||
</bpmn:lane>
|
||||
<bpmn:lane id="Lane_1b7d4a7" name="DeptChairApprover">
|
||||
<bpmn:flowNodeRef>Activity_1npbkhr</bpmn:flowNodeRef>
|
||||
<bpmn:flowNodeRef>Gateway_1mui28k</bpmn:flowNodeRef>
|
||||
<bpmn:flowNodeRef>Event_0t6gdb3</bpmn:flowNodeRef>
|
||||
</bpmn:lane>
|
||||
</bpmn:laneSet>
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0qf0ntn</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0qf0ntn" sourceRef="StartEvent_1" targetRef="Activity_0s4b6tq" />
|
||||
<bpmn:sequenceFlow id="Flow_0wxblf0" sourceRef="Activity_0pm3m9s" targetRef="Activity_1npbkhr" />
|
||||
<bpmn:sequenceFlow id="Flow_1qqzl3b" sourceRef="Activity_1npbkhr" targetRef="Gateway_1mui28k" />
|
||||
<bpmn:sequenceFlow id="Flow_1h379u7" name="Yes" sourceRef="Gateway_1mui28k" targetRef="Event_0t6gdb3" />
|
||||
<bpmn:sequenceFlow id="Flow_0qea2ru" name="No" sourceRef="Gateway_1mui28k" targetRef="Activity_0pm3m9s">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">isApproved == False</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:sequenceFlow id="Flow_1p4pvy1" sourceRef="Activity_0s4b6tq" targetRef="Activity_0kyyjcu" />
|
||||
<bpmn:sequenceFlow id="Flow_0bsmt3b" sourceRef="Activity_0kyyjcu" targetRef="Activity_1cfbpwn" />
|
||||
<bpmn:sequenceFlow id="Flow_0rqond3" sourceRef="Activity_1cfbpwn" targetRef="Activity_0pm3m9s" />
|
||||
<bpmn:userTask id="Activity_0s4b6tq" name="Temp Datastore" camunda:formKey="TempDatastore">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="Department" label="Department" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="description" value="Responsible Organization Department" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_0qf0ntn</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1p4pvy1</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:scriptTask id="Activity_1cfbpwn" name="Build Signee List">
|
||||
<bpmn:incoming>Flow_0bsmt3b</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0rqond3</bpmn:outgoing>
|
||||
<bpmn:script>investigators = study_info('investigators')
pi = investigators.get('PI', None)
subs_list = ""
# Collect the computing IDs of all sub-investigators (keys starting with 'SI')
# so they can be excluded from the signee choices along with the PI.
for k in investigators.keys():
    if k[:2] == 'SI':
        investigator = investigators.get(k)
        subs_list = subs_list + investigator["uid"] + ", "
# exclude_list_arg is a plain string; the "not in" checks below are substring tests.
exclude_list_arg = "[" + subs_list + pi.uid + "]"
del(pi)
del(k)
del(investigator)
del(investigators)

# Build the signee enum from up to three candidates (Chair, D1, D2),
# skipping anyone who is already on the study team.
dc_enum = []
if len(Chair_CID) > 0 and len(Chair_Name) > 0 and Chair_CID not in exclude_list_arg:
    dc_enum_dc = [
        {
            "uid": Chair_CID,
            "name": Chair_Name
        },
    ]
else:
    dc_enum_dc = []
if len(D1_CID) > 0 and len(D1_Name) > 0 and D1_CID not in exclude_list_arg:
    dc_enum_d1 = [
        {
            "uid": D1_CID,
            "name": D1_Name
        },
    ]
else:
    dc_enum_d1 = []
if len(D2_CID) > 0 and len(D2_Name) > 0 and D2_CID not in exclude_list_arg:
    dc_enum_d2 = [
        {
            "uid": D2_CID,
            "name": D2_Name
        },
    ]
else:
    dc_enum_d2 = []
dc_enum = dc_enum_dc + dc_enum_d1 + dc_enum_d2
del(dc_enum_dc)
del(dc_enum_d1)
del(dc_enum_d2)</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:businessRuleTask id="Activity_0kyyjcu" name="Determine Department Chair" camunda:decisionRef="Decision_Medicine_Dept_Chair">
|
||||
<bpmn:incoming>Flow_1p4pvy1</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0bsmt3b</bpmn:outgoing>
|
||||
</bpmn:businessRuleTask>
|
||||
<bpmn:userTask id="Activity_0pm3m9s" name="Select Signee" camunda:formKey="Signee">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="DeptChairApprover" label="Select Signee" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="data.name" value="dc_enum" />
|
||||
<camunda:property id="data.value.column" value="uid" />
|
||||
<camunda:property id="data.label.column" value="name" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_0rqond3</bpmn:incoming>
|
||||
<bpmn:incoming>Flow_0qea2ru</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0wxblf0</bpmn:outgoing>
|
||||
</bpmn:userTask>
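The Select Signee enum above is populated from task data rather than a spreadsheet: data.name points at the dc_enum list built by the Build Signee List script, and data.value.column / data.label.column select the uid and name keys. A small sketch of that resolution, with an illustrative helper name (the real lookup is done by the form tooling, not this function):

def data_backed_options(task_data: dict, name="dc_enum", value_col="uid", label_col="name"):
    """Turn a list of dicts stored in task data into enum options (sketch only)."""
    return [
        {"id": row[value_col], "name": row[label_col]}
        for row in task_data.get(name, [])
    ]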
|
||||
<bpmn:userTask id="Activity_1npbkhr" name="Review Approval Request" camunda:formKey="ReviewApporvalRequest">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="isApproved" label="Approve Request?" type="boolean" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_0wxblf0</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1qqzl3b</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:exclusiveGateway id="Gateway_1mui28k" name="Approved?" default="Flow_1h379u7">
|
||||
<bpmn:incoming>Flow_1qqzl3b</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1h379u7</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_0qea2ru</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:endEvent id="Event_0t6gdb3">
|
||||
<bpmn:incoming>Flow_1h379u7</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Collaboration_1540cgn">
|
||||
<bpmndi:BPMNShape id="Participant_17tc4sp_di" bpmnElement="Participant_17tc4sp" isHorizontal="true">
|
||||
<dc:Bounds x="175" y="130" width="1185" height="380" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Lane_1b7d4a7_di" bpmnElement="Lane_1b7d4a7" isHorizontal="true">
|
||||
<dc:Bounds x="205" y="317" width="1155" height="193" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Lane_1rq9xje_di" bpmnElement="Lane_1rq9xje" isHorizontal="true">
|
||||
<dc:Bounds x="205" y="130" width="1155" height="187" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_0rqond3_di" bpmnElement="Flow_0rqond3">
|
||||
<di:waypoint x="850" y="220" />
|
||||
<di:waypoint x="930" y="220" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0bsmt3b_di" bpmnElement="Flow_0bsmt3b">
|
||||
<di:waypoint x="660" y="220" />
|
||||
<di:waypoint x="750" y="220" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1p4pvy1_di" bpmnElement="Flow_1p4pvy1">
|
||||
<di:waypoint x="470" y="220" />
|
||||
<di:waypoint x="560" y="220" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0qea2ru_di" bpmnElement="Flow_0qea2ru">
|
||||
<di:waypoint x="1150" y="395" />
|
||||
<di:waypoint x="1150" y="220" />
|
||||
<di:waypoint x="1030" y="220" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1158" y="363" width="15" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1h379u7_di" bpmnElement="Flow_1h379u7">
|
||||
<di:waypoint x="1175" y="420" />
|
||||
<di:waypoint x="1292" y="420" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1225" y="402" width="18" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1qqzl3b_di" bpmnElement="Flow_1qqzl3b">
|
||||
<di:waypoint x="1030" y="420" />
|
||||
<di:waypoint x="1125" y="420" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0wxblf0_di" bpmnElement="Flow_0wxblf0">
|
||||
<di:waypoint x="980" y="260" />
|
||||
<di:waypoint x="980" y="380" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0qf0ntn_di" bpmnElement="Flow_0qf0ntn">
|
||||
<di:waypoint x="284" y="220" />
|
||||
<di:waypoint x="370" y="220" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="248" y="202" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_01qoifo_di" bpmnElement="Activity_0s4b6tq">
|
||||
<dc:Bounds x="370" y="180" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1sgan1v_di" bpmnElement="Activity_1cfbpwn">
|
||||
<dc:Bounds x="750" y="180" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1u532ks_di" bpmnElement="Activity_0kyyjcu">
|
||||
<dc:Bounds x="560" y="180" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1av9xpa_di" bpmnElement="Activity_0pm3m9s">
|
||||
<dc:Bounds x="930" y="180" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0mqofy6_di" bpmnElement="Activity_1npbkhr">
|
||||
<dc:Bounds x="930" y="380" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_1mui28k_di" bpmnElement="Gateway_1mui28k" isMarkerVisible="true">
|
||||
<dc:Bounds x="1125" y="395" width="50" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1123" y="452" width="54" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0t6gdb3_di" bpmnElement="Event_0t6gdb3">
|
||||
<dc:Bounds x="1292" y="402" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
||||
|
@ -1,102 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_00j2iu5" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
|
||||
<bpmn:process id="Process_1gmf4la" name="Documents and Approvals" isExecutable="true">
|
||||
<bpmn:documentation />
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_1k3su2q</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:endEvent id="EndEvent_1qvyxg7">
|
||||
<bpmn:incoming>Flow_0m7unlb</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0m7unlb" sourceRef="Activity_DisplayDocsAndApprovals" targetRef="EndEvent_1qvyxg7" />
|
||||
<bpmn:manualTask id="Activity_DisplayDocsAndApprovals" name="Display Documents and Approvals">
|
||||
<bpmn:documentation># Documents & Approvals

> ## Protocol Document Management
{% if documents.Study_Protocol_Document is defined -%}
{%- set p = documents.Study_Protocol_Document -%}
{% if p.files|length -%}
{%- set f = p.files[0] -%}
> [{{p.display_name}}](/study/{{p.study_id}}/workflow/{{f.workflow_id}}/task/{{f.task_id}})
{%- else -%}
> No protocol uploaded yet.
{% endif %}
{%- else -%}
> No protocol uploaded yet.
{% endif %}

> ## Approvals
> | Name | Status | Help |
|:---- |:------ |:---- |
{% for approval in approvals -%}
| [{{approval.display_name}}](/study/{{approval.study_id}}/workflow/{{approval.workflow_id}}) | {{approval.status}} | [?](/help/{{approval.workflow_spec_id}}) |
{% endfor %}

> ## Documents
> | Name | Status | Help | Download |
|:---- |:------ |:---- |:-------- |
{% for key, doc in documents.items() %}{% if doc.required -%}
{% if doc.files|length -%}
| [{{doc.display_name}}](/study/{{doc.study_id}}/workflow/{{doc.workflow_id}}/task/{{doc.task_id}}) | {{doc.status}} | [Context here](/help/documents/{{doc.code}}) | [Download](/file/{{doc.file_id}}/data) |
{%- else -%}
| {{doc.display_name}} | Not started | [?](/help/documents/{{doc.code}}) | No file yet |
{%- endif %}
{% endif %}{% endfor %}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:properties>
|
||||
<camunda:property name="display_name" value="Documents and Approvals" />
|
||||
</camunda:properties>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_142jtxs</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0m7unlb</bpmn:outgoing>
|
||||
</bpmn:manualTask>
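As a side note, the Markdown produced by the documentation template above can be previewed outside the engine. A minimal sketch, assuming the jinja2 package and made-up approval data; the field names mirror the template, but in the workflow they are supplied by study_info rather than hard-coded.

# Illustrative sketch only: render the Approvals portion of the template
# with hypothetical data to see the Markdown it produces.
from jinja2 import Template

approvals_template = Template(
    "> ## Approvals\n"
    "> | Name | Status | Help |\n"
    "|:---- |:------ |:---- |\n"
    "{% for approval in approvals -%}\n"
    "| [{{approval.display_name}}](/study/{{approval.study_id}}/workflow/{{approval.workflow_id}}) "
    "| {{approval.status}} | [?](/help/{{approval.workflow_spec_id}}) |\n"
    "{% endfor %}"
)

approvals = [
    {"display_name": "Department Chair Approval", "study_id": 1,
     "workflow_id": 12, "status": "PENDING", "workflow_spec_id": "dept_chair_approval"},
]
print(approvals_template.render(approvals=approvals))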
|
||||
<bpmn:scriptTask id="Activity_0a14x7j" name="Load Approvals">
|
||||
<bpmn:incoming>Flow_0c7ryff</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_142jtxs</bpmn:outgoing>
|
||||
<bpmn:script>approvals = study_info('approvals')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:scriptTask id="Activity_1aju60t" name="Load Documents">
|
||||
<bpmn:incoming>Flow_1k3su2q</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0c7ryff</bpmn:outgoing>
|
||||
<bpmn:script>documents = study_info('documents')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="Flow_142jtxs" sourceRef="Activity_0a14x7j" targetRef="Activity_DisplayDocsAndApprovals" />
|
||||
<bpmn:sequenceFlow id="Flow_0c7ryff" sourceRef="Activity_1aju60t" targetRef="Activity_0a14x7j" />
|
||||
<bpmn:sequenceFlow id="Flow_1k3su2q" sourceRef="StartEvent_1" targetRef="Activity_1aju60t" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1gmf4la">
|
||||
<bpmndi:BPMNEdge id="Flow_1k3su2q_di" bpmnElement="Flow_1k3su2q">
|
||||
<di:waypoint x="258" y="117" />
|
||||
<di:waypoint x="390" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0c7ryff_di" bpmnElement="Flow_0c7ryff">
|
||||
<di:waypoint x="490" y="117" />
|
||||
<di:waypoint x="540" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_142jtxs_di" bpmnElement="Flow_142jtxs">
|
||||
<di:waypoint x="640" y="117" />
|
||||
<di:waypoint x="710" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0m7unlb_di" bpmnElement="Flow_0m7unlb">
|
||||
<di:waypoint x="810" y="117" />
|
||||
<di:waypoint x="882" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="222" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="EndEvent_1qvyxg7_di" bpmnElement="EndEvent_1qvyxg7">
|
||||
<dc:Bounds x="882" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_19nawos_di" bpmnElement="Activity_DisplayDocsAndApprovals">
|
||||
<dc:Bounds x="710" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1bxk8h3_di" bpmnElement="Activity_0a14x7j">
|
||||
<dc:Bounds x="540" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_07ytvmv_di" bpmnElement="Activity_1aju60t">
|
||||
<dc:Bounds x="390" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
@ -1,43 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_380b1cb" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
|
||||
<bpmn:process id="Process_f6c78dd" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0eew51c</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0eew51c" sourceRef="StartEvent_1" targetRef="Activity_EnrollmentDate" />
|
||||
<bpmn:endEvent id="Event_0sbvtzq">
|
||||
<bpmn:incoming>Flow_17olkaa</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_17olkaa" sourceRef="Activity_EnrollmentDate" targetRef="Event_0sbvtzq" />
|
||||
<bpmn:userTask id="Activity_EnrollmentDate" name="Enter Enrollment Date" camunda:formKey="Enter First Date of Participant Enrollment">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="EnrollmentDate" label="Enrollment Date" type="date" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_0eew51c</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_17olkaa</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_f6c78dd">
|
||||
<bpmndi:BPMNEdge id="Flow_0eew51c_di" bpmnElement="Flow_0eew51c">
|
||||
<di:waypoint x="215" y="177" />
|
||||
<di:waypoint x="270" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_17olkaa_di" bpmnElement="Flow_17olkaa">
|
||||
<di:waypoint x="370" y="177" />
|
||||
<di:waypoint x="432" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0sbvtzq_di" bpmnElement="Event_0sbvtzq">
|
||||
<dc:Bounds x="432" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0inr3ph_di" bpmnElement="Activity_EnrollmentDate">
|
||||
<dc:Bounds x="270" y="137" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
@ -1,158 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1wrlvk8" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
|
||||
<bpmn:process id="Finance" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1p6s47e">
|
||||
<bpmn:outgoing>SequenceFlow_1uxqr6o</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:userTask id="UserTask_03iirsg" name="Enter Budget Info" camunda:formKey="Finance">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_isBudget" label="Is there a budget document for this study?" type="boolean">
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FormField_BudgetDraft" label="Draft Budget" type="file">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="!(model.FormField_isBudget) | model.FormField_isBudget == null" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FormField_Budget Final" label="Final Budget" type="file">
|
||||
<camunda:properties>
|
||||
<camunda:property id="description" value="This is the budget that you will negotiate with your funding source." />
|
||||
<camunda:property id="hide_expression" value="!(model.FormField_isBudget) | model.FormField_isBudget == null" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0bvhrqu</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0afrh4e</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:endEvent id="EndEvent_14p904o">
|
||||
<bpmn:incoming>SequenceFlow_160jfpk</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1uxqr6o" sourceRef="StartEvent_1p6s47e" targetRef="ExclusiveGateway_0m1n8mu" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0bvhrqu" sourceRef="ExclusiveGateway_0m1n8mu" targetRef="UserTask_03iirsg" />
|
||||
<bpmn:parallelGateway id="ExclusiveGateway_0m1n8mu">
|
||||
<bpmn:incoming>SequenceFlow_1uxqr6o</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0bvhrqu</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_157c6e9</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_1oh6eq7</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0afrh4e" sourceRef="UserTask_03iirsg" targetRef="ExclusiveGateway_0tqopul" />
|
||||
<bpmn:parallelGateway id="ExclusiveGateway_0tqopul">
|
||||
<bpmn:incoming>SequenceFlow_0afrh4e</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_1mv3dp4</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_0rr7ods</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_160jfpk</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_160jfpk" sourceRef="ExclusiveGateway_0tqopul" targetRef="EndEvent_14p904o" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_157c6e9" sourceRef="ExclusiveGateway_0m1n8mu" targetRef="Task_0xn3d6z" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1oh6eq7" sourceRef="ExclusiveGateway_0m1n8mu" targetRef="Task_0dj66yz" />
|
||||
<bpmn:userTask id="Task_0dj66yz" name="Enter Contract Funded" camunda:formKey="FormKey_ContractFunded">
|
||||
<bpmn:documentation>#### Process:

The study team uploads the executed copy of the contract(s) after receiving it from the Office of Grants and Contracts, once the following process components are completed outside of Clinical Research Connect:

ePRF is completed in ResearchUVa.
PI, Department Chair and SOM Dean’s Office signatures are completed (via ResearchUVa).
The Office of Grants and Contracts has completed the negotiation and execution of the contract.
If you have any questions about the process, contact your contract negotiator or the Office of Grants and Contracts at 434-924-8426.</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_FullyExecutedContract" label="Fully Executed Contract" type="file">
|
||||
<camunda:properties>
|
||||
<camunda:property id="group" value="funded" />
|
||||
<camunda:property id="repeat" value="funded" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_1oh6eq7</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1mv3dp4</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1mv3dp4" sourceRef="Task_0dj66yz" targetRef="ExclusiveGateway_0tqopul" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0rr7ods" sourceRef="Task_0xn3d6z" targetRef="ExclusiveGateway_0tqopul" />
|
||||
<bpmn:userTask id="Task_0xn3d6z" name="Enter PTAO" camunda:formKey="FormKey_PTAO">
|
||||
<bpmn:documentation>#### Enter PTAO Number</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_PTAO-Number" label="PTAO" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="description" value="Expected format is: 123456.TASK.AA12345.12345" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_157c6e9</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0rr7ods</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Finance">
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0rr7ods_di" bpmnElement="SequenceFlow_0rr7ods">
|
||||
<di:waypoint x="460" y="470" />
|
||||
<di:waypoint x="570" y="470" />
|
||||
<di:waypoint x="570" y="362" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1mv3dp4_di" bpmnElement="SequenceFlow_1mv3dp4">
|
||||
<di:waypoint x="460" y="337" />
|
||||
<di:waypoint x="545" y="337" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1oh6eq7_di" bpmnElement="SequenceFlow_1oh6eq7">
|
||||
<di:waypoint x="275" y="337" />
|
||||
<di:waypoint x="360" y="337" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_157c6e9_di" bpmnElement="SequenceFlow_157c6e9">
|
||||
<di:waypoint x="250" y="362" />
|
||||
<di:waypoint x="250" y="470" />
|
||||
<di:waypoint x="360" y="470" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_160jfpk_di" bpmnElement="SequenceFlow_160jfpk">
|
||||
<di:waypoint x="595" y="337" />
|
||||
<di:waypoint x="662" y="337" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0afrh4e_di" bpmnElement="SequenceFlow_0afrh4e">
|
||||
<di:waypoint x="460" y="190" />
|
||||
<di:waypoint x="570" y="190" />
|
||||
<di:waypoint x="570" y="312" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0bvhrqu_di" bpmnElement="SequenceFlow_0bvhrqu">
|
||||
<di:waypoint x="250" y="312" />
|
||||
<di:waypoint x="250" y="190" />
|
||||
<di:waypoint x="360" y="190" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1uxqr6o_di" bpmnElement="SequenceFlow_1uxqr6o">
|
||||
<di:waypoint x="158" y="337" />
|
||||
<di:waypoint x="225" y="337" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="StartEvent_1p6s47e_di" bpmnElement="StartEvent_1p6s47e">
|
||||
<dc:Bounds x="122" y="319" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_03iirsg_di" bpmnElement="UserTask_03iirsg">
|
||||
<dc:Bounds x="360" y="150" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ParallelGateway_02f3709_di" bpmnElement="ExclusiveGateway_0m1n8mu">
|
||||
<dc:Bounds x="225" y="312" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ParallelGateway_0f68xmc_di" bpmnElement="ExclusiveGateway_0tqopul">
|
||||
<dc:Bounds x="545" y="312" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0tz5ojx_di" bpmnElement="Task_0dj66yz">
|
||||
<dc:Bounds x="360" y="297" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_1fv8l08_di" bpmnElement="Task_0xn3d6z">
|
||||
<dc:Bounds x="360" y="430" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="EndEvent_14p904o_di" bpmnElement="EndEvent_14p904o">
|
||||
<dc:Bounds x="662" y="319" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
@ -1,55 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_30b31d2" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
|
||||
<bpmn:process id="Process_dfbd81d" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_1jv8wlf</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1jv8wlf" sourceRef="StartEvent_1" targetRef="Activity_1fri4xp" />
|
||||
<bpmn:userTask id="Activity_1fri4xp" name="Put Study On Hold" camunda:formKey="Hold status">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="isHold" label="Place the study on Hold?" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
</camunda:properties>
|
||||
<camunda:value id="yes" name="Yes" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="HoldNotes" label="Why was this study moved to Hold Status?" type="textarea">
|
||||
<camunda:properties>
|
||||
<camunda:property id="rows" value="5" />
|
||||
<camunda:property id="group" value="Reason for Hold Notes" />
|
||||
<camunda:property id="repeat" value="Reason for Hold Notes" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_1jv8wlf</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_006k4fa</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:endEvent id="Event_101lvwk">
|
||||
<bpmn:incoming>Flow_006k4fa</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_006k4fa" sourceRef="Activity_1fri4xp" targetRef="Event_101lvwk" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_dfbd81d">
|
||||
<bpmndi:BPMNEdge id="Flow_006k4fa_di" bpmnElement="Flow_006k4fa">
|
||||
<di:waypoint x="370" y="177" />
|
||||
<di:waypoint x="432" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1jv8wlf_di" bpmnElement="Flow_1jv8wlf">
|
||||
<di:waypoint x="215" y="177" />
|
||||
<di:waypoint x="270" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_05x7z7l_di" bpmnElement="Activity_1fri4xp">
|
||||
<dc:Bounds x="270" y="137" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_101lvwk_di" bpmnElement="Event_101lvwk">
|
||||
<dc:Bounds x="432" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
Binary file not shown.
@ -1,75 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" id="Definitions_0o0ff2r" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
|
||||
<decision id="decision_ide_check" name="IDE Check">
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="IS_IDE">
|
||||
<inputExpression id="inputExpression_1" typeRef="integer">
|
||||
<text>is_ide.IS_IDE</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<input id="InputClause_0x1r5qo" label="IDE Number?">
|
||||
<inputExpression id="LiteralExpression_1qxic4k" typeRef="string">
|
||||
<text>ide.IDE</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="OutputClause_1xvp2on" label="IDE Field Value" name="IDE_Number" typeRef="string" />
|
||||
<output id="OutputClause_0g5qbkb" label="IDE Message" name="ide_message" typeRef="string" />
|
||||
<rule id="DecisionRule_0h0od2e">
|
||||
<inputEntry id="UnaryTests_09ctq71">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1w71u9t">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1ynrc3b">
|
||||
<text>ide.IDE</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0n9fwk1">
|
||||
<text>"Provide additional information for the IDE assocoated with this study"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_199dgpt">
|
||||
<inputEntry id="UnaryTests_1ec0msc">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0w8ohet">
|
||||
<text>''</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0oyaymb">
|
||||
<text>"not entered in Procol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0tiaje9">
|
||||
<text>"The IDE number was not entered in Protocol Builder. You must provide a number if it is available prior to IRB submission"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1ok04ot">
|
||||
<inputEntry id="UnaryTests_0uhe0qi">
|
||||
<text>0</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1eivrcw">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_03uxiu8">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_080066t">
|
||||
<text>"You should not have access to this workflow since your Protocol Builder reposes indicate that a IDE is not associated with this study. Please contact Support"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1cj8yq8">
|
||||
<inputEntry id="UnaryTests_1fbn44t">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0hejiza">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_06fo7cc">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0ns0fff">
|
||||
<text>"You should not have access to this workflow since your Protocol Builder reposes indicate that a IDE is not associated with this study. Please contact Support"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
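To make the rule order in the IDE Check table above easier to follow, here is a rough Python equivalent. This is an illustrative sketch only, and it assumes the rows are evaluated top-down with the first match winning; the table itself does not declare a hit policy.

# Illustrative sketch only: decision_ide_check expressed as a function.
def ide_check(is_ide, ide_number):
    """Return (IDE_Number, ide_message) for the given Protocol Builder values."""
    if is_ide == 1 and ide_number != "":
        return ide_number, "Provide additional information for the IDE associated with this study"
    if is_ide == 1 and ide_number == "":
        return ("not entered in Protocol Builder",
                "The IDE number was not entered in Protocol Builder. You must provide a number "
                "if it is available prior to IRB submission")
    # Remaining rows (IS_IDE == 0, or anything else): the study should not be here.
    return "", ("You should not have access to this workflow since your Protocol Builder responses "
                "indicate that an IDE is not associated with this study. Please contact Support")

print(ide_check(1, "G012345"))  # first row fires
print(ide_check(1, ""))         # second row fires
print(ide_check(0, ""))         # fall-through row fires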
|
@ -1,223 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_1e7871f" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
|
||||
<bpmn:process id="Process_04jm0bm" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>SequenceFlow_1dhb8f4</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1dhb8f4" sourceRef="StartEvent_1" targetRef="Activity_18x16ff" />
|
||||
<bpmn:userTask id="IDE_Entry_Submit" name="Edit IDE Info" camunda:formKey="IDE">
|
||||
<bpmn:documentation>{{ ide_message }}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="IDE_Number" label="IDE #:" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="read_only" value="true" />
|
||||
<camunda:property id="hide_expression" value="model.IDE_HolderType.value === "Exempt" || model.IDE_HolderType.value === "NotExempt"" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IDE_NewDeviceOrNewIndication" label="New Device used or New Indication for Marketed Device" type="boolean" />
|
||||
<camunda:formField id="IDE_HolderType" label="IDE Holder Type" type="enum">
|
||||
<camunda:value id="Industry" name="Industry" />
|
||||
<camunda:value id="UVaPI" name="UVa PI" />
|
||||
<camunda:value id="OtherPI" name="Other PI" />
|
||||
<camunda:value id="UVaCenter" name="UVaCenter" />
|
||||
<camunda:value id="OtherCollUniv" name="Other Colleges and Universities" />
|
||||
<camunda:value id="NotExempt" name="IDE not exempt, but no IDE#" />
|
||||
<camunda:value id="Exempt" name="IDE Exempt" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IDE_HolderNameOSP" label="Holder Name" type="autocomplete">
|
||||
<camunda:properties>
|
||||
<camunda:property id="spreadsheet.name" value="SponsorList.xls" />
|
||||
<camunda:property id="spreadsheet.value.column" value="CUSTOMER_NUMBER" />
|
||||
<camunda:property id="spreadsheet.label.column" value="CUSTOMER_NAME" />
|
||||
<camunda:property id="autocomplete_num" value="15" />
|
||||
<camunda:property id="hide_expression" value="!model.IDE_HolderType || !model.IDE_HolderType.value || (model.IDE_HolderType.value !== "Industry" && model.IDE_HolderType.value !== "OtherCollUniv")" />
|
||||
<camunda:property id="description" value="Text" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IDE_HolderNameText" label="IDE Holder Name if not in above list" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="(!model.IDE_HolderType || !model.IDE_HolderType.value || model.IDE_HolderType.value === "UVaPI" || model.IDE_HolderType.value === "Exempt" || model.IDE_HolderType.value === "NotExempt") || ((model.IDE_HolderType.value === "Industry" || model.IDE_HolderType.value === "OtherCollUniv") && (!model.IDE_HolderNameOSP || !model.IDE_HolderNameOSP.value || model.IDE_HolderNameOSP.value !== "100"))" />
|
||||
<camunda:property id="description" value="Text" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IDE_HolderNameUVA" label="Holder Name" type="autocomplete">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="!model.IDE_HolderType || !model.IDE_HolderType.value || model.IDE_HolderType.value !== "UVaPI"" />
|
||||
<camunda:property id="description" value="LDAP" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IDE_DeviceName" label="Device Name" type="string" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_1yb1vma</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1t2ha54</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:endEvent id="EndEvent_1h89sl4">
|
||||
<bpmn:documentation>temp</bpmn:documentation>
|
||||
<bpmn:incoming>SequenceFlow_1t2ha54</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_1yhv1qz</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1t2ha54" sourceRef="IDE_Entry_Submit" targetRef="EndEvent_1h89sl4" />
|
||||
<bpmn:exclusiveGateway id="ExclusiveGateway_1fib89p" name="IS_IDE = 1?">
|
||||
<bpmn:incoming>SequenceFlow_1lazou8</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1yb1vma</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_011l5xt</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1yb1vma" name="Yes" sourceRef="ExclusiveGateway_1fib89p" targetRef="IDE_Entry_Submit">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">ide.IS_IDE == 1</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_011l5xt" name="No" sourceRef="ExclusiveGateway_1fib89p" targetRef="Task_NoIDE">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">not ide.IS_IDE or ide.IS_IDE == 0</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:manualTask id="Task_NoIDE" name="Show Invalid IDE Access">
|
||||
<bpmn:documentation>{{ ide_message }}</bpmn:documentation>
|
||||
<bpmn:incoming>SequenceFlow_011l5xt</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1yhv1qz</bpmn:outgoing>
|
||||
</bpmn:manualTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1lazou8" sourceRef="Task_SupplementIDE" targetRef="ExclusiveGateway_1fib89p" />
|
||||
<bpmn:businessRuleTask id="Task_SupplementIDE" name="IDE Info from PB" camunda:decisionRef="decision_ide_check">
|
||||
<bpmn:incoming>Flow_08ezwjq</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1lazou8</bpmn:outgoing>
|
||||
</bpmn:businessRuleTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1yhv1qz" sourceRef="Task_NoIDE" targetRef="EndEvent_1h89sl4" />
|
||||
<bpmn:sequenceFlow id="Flow_1majmgp" sourceRef="Activity_1tp43gs" targetRef="Gateway_1xe3e0h" />
|
||||
<bpmn:scriptTask id="Activity_1tp43gs" name="Extract IDE Data">
|
||||
<bpmn:incoming>Flow_0v7gh90</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1majmgp</bpmn:outgoing>
|
||||
<bpmn:script>ide = {x:details[x] for x in details.keys() if x == 'IDE'}
is_ide = {x:details[x] for x in details.keys() if x == 'IS_IDE'}
del(details)</bpmn:script>
|
||||
</bpmn:scriptTask>
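For clarity, the two comprehensions in the script above simply pick single keys out of the details payload. A tiny sketch with a hypothetical payload:

# Illustrative sketch only -- hypothetical details payload from study_info('details').
details = {"IDE": "G012345", "IS_IDE": 1, "TITLE": "Sample study"}
ide = {x: details[x] for x in details.keys() if x == 'IDE'}        # {'IDE': 'G012345'}
is_ide = {x: details[x] for x in details.keys() if x == 'IS_IDE'}  # {'IS_IDE': 1}
del details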
|
||||
<bpmn:sequenceFlow id="Flow_0v7gh90" sourceRef="Activity_18x16ff" targetRef="Activity_1tp43gs" />
|
||||
<bpmn:scriptTask id="Activity_18x16ff" name="Load IRB API Details">
|
||||
<bpmn:incoming>SequenceFlow_1dhb8f4</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0v7gh90</bpmn:outgoing>
|
||||
<bpmn:script>details = study_info('details')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:exclusiveGateway id="Gateway_1xe3e0h" name="IS_IDE = 1">
|
||||
<bpmn:incoming>Flow_1majmgp</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_08ezwjq</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_1l53x1e</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:sequenceFlow id="Flow_08ezwjq" name="Yes" sourceRef="Gateway_1xe3e0h" targetRef="Task_SupplementIDE">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">is_ide.IS_IDE == 1</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:sequenceFlow id="Flow_1l53x1e" name="No" sourceRef="Gateway_1xe3e0h" targetRef="Activity_1seaot7">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">not is_ide.IS_IDE or is_ide.IS_IDE == 0</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:manualTask id="Activity_1seaot7" name="Show Contact Support">
|
||||
<bpmn:documentation>{{ ide_message }}</bpmn:documentation>
|
||||
<bpmn:incoming>Flow_1l53x1e</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0pfii1m</bpmn:outgoing>
|
||||
</bpmn:manualTask>
|
||||
<bpmn:endEvent id="Event_16u0jqm">
|
||||
<bpmn:incoming>Flow_0pfii1m</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0pfii1m" sourceRef="Activity_1seaot7" targetRef="Event_16u0jqm" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_04jm0bm">
|
||||
<bpmndi:BPMNEdge id="Flow_0pfii1m_di" bpmnElement="Flow_0pfii1m">
|
||||
<di:waypoint x="530" y="250" />
|
||||
<di:waypoint x="582" y="250" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1l53x1e_di" bpmnElement="Flow_1l53x1e">
|
||||
<di:waypoint x="480" y="142" />
|
||||
<di:waypoint x="480" y="210" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="488" y="173" width="15" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_08ezwjq_di" bpmnElement="Flow_08ezwjq">
|
||||
<di:waypoint x="505" y="117" />
|
||||
<di:waypoint x="580" y="117" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="534" y="99" width="18" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0v7gh90_di" bpmnElement="Flow_0v7gh90">
|
||||
<di:waypoint x="210" y="117" />
|
||||
<di:waypoint x="280" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1majmgp_di" bpmnElement="Flow_1majmgp">
|
||||
<di:waypoint x="380" y="117" />
|
||||
<di:waypoint x="455" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1yhv1qz_di" bpmnElement="SequenceFlow_1yhv1qz">
|
||||
<di:waypoint x="1020" y="250" />
|
||||
<di:waypoint x="1170" y="250" />
|
||||
<di:waypoint x="1170" y="135" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1lazou8_di" bpmnElement="SequenceFlow_1lazou8">
|
||||
<di:waypoint x="680" y="117" />
|
||||
<di:waypoint x="785" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_011l5xt_di" bpmnElement="SequenceFlow_011l5xt">
|
||||
<di:waypoint x="810" y="142" />
|
||||
<di:waypoint x="810" y="250" />
|
||||
<di:waypoint x="920" y="250" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="819" y="194" width="15" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1yb1vma_di" bpmnElement="SequenceFlow_1yb1vma">
|
||||
<di:waypoint x="835" y="117" />
|
||||
<di:waypoint x="920" y="117" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="870" y="99" width="18" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1t2ha54_di" bpmnElement="SequenceFlow_1t2ha54">
|
||||
<di:waypoint x="1020" y="117" />
|
||||
<di:waypoint x="1152" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1dhb8f4_di" bpmnElement="SequenceFlow_1dhb8f4">
|
||||
<di:waypoint x="58" y="117" />
|
||||
<di:waypoint x="110" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="22" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_17syy68_di" bpmnElement="IDE_Entry_Submit">
|
||||
<dc:Bounds x="920" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="EndEvent_1h89sl4_di" bpmnElement="EndEvent_1h89sl4">
|
||||
<dc:Bounds x="1152" y="99" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="414" y="202" width="74" height="27" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ExclusiveGateway_1fib89p_di" bpmnElement="ExclusiveGateway_1fib89p" isMarkerVisible="true">
|
||||
<dc:Bounds x="785" y="92" width="50" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="782" y="47" width="60" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ManualTask_1f7z9wm_di" bpmnElement="Task_NoIDE">
|
||||
<dc:Bounds x="920" y="210" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="BusinessRuleTask_1cszgkx_di" bpmnElement="Task_SupplementIDE">
|
||||
<dc:Bounds x="580" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0sq4r9w_di" bpmnElement="Activity_18x16ff">
|
||||
<dc:Bounds x="110" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_1xe3e0h_di" bpmnElement="Gateway_1xe3e0h" isMarkerVisible="true">
|
||||
<dc:Bounds x="455" y="92" width="50" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="453" y="68" width="54" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_01v60lq_di" bpmnElement="Activity_1seaot7">
|
||||
<dc:Bounds x="430" y="210" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_16u0jqm_di" bpmnElement="Event_16u0jqm">
|
||||
<dc:Bounds x="582" y="232" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0u8knps_di" bpmnElement="Activity_1tp43gs">
|
||||
<dc:Bounds x="280" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
@ -1,345 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_07f7kut" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
|
||||
<bpmn:process id="Process_IDS" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>SequenceFlow_1dexemq</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:userTask id="Task_EnterIDSStudyIdentification" name="Enter IDS Study Identification" camunda:formKey="IDS Study ID">
|
||||
<bpmn:documentation>#### Instructions

According to the selected Study Type, Treatment (TRE), IDS is applicable for this type of study: Yes</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_PatientType" label="Select patient type (Select all applicable)" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="*Bottle label should include at a minimum the following: patient identification, drug name, instructions for use, and protocol number." />
|
||||
<camunda:property id="enum_type" value="checkbox" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
<camunda:value id="Inpatient" name="Inpatient - Including all procedural areas, ED, operating rooms, etc. IDS is REQUIRED" />
|
||||
<camunda:value id="Outpatient1" name="Outpatient - Product administered in clinic or infusion center. IDS is REQUIRED" />
|
||||
<camunda:value id="Outpatient2" name="Outpatient - Product taken at home and is NOT labeled for end use* by sponsor. IDS is REQUIRED" />
|
||||
<camunda:value id="Outpatient3" name="Outpatient - Product taken at home and is labeled for end use* by sponsor. Study will use IDS" />
|
||||
<camunda:value id="Outpatient4" name="Outpatient - Product taken at home and is labeled for end use* by sponsor. Study will NOT use IDS" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FormField_isAdultOncologyPatients" label="Is this study treating adult oncology patients?" type="boolean">
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FormField_isDrugDispenseNonBusinessHours" label="Will study drug be dispensed during non-business hours?" type="boolean">
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_1guaev4</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1iiazgn</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="Task_EnterIDSFeeStructure" name="Enter IDS Fee Structure" camunda:formKey="IDS Fee Structure">
|
||||
<bpmn:documentation>#### How to
IDS service fees are based on the Protocol Owner identified in the Common Study section of the workflow.

Visit [IDS Study Fee Schedule](https://www.medicalcenter.virginia.edu/pharmacy/investigational-drug-services/information-for-investigators) for specific cost information.

Protocol Owner: **(need to insert value here)**</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_StudyType" label="IDS Study Type*" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="**Biosafety Level 1 or 2:** Procedures that include biological microorganisms or material that are categorized with the CDC as potentially infectious to humans where exposure may result in limited to moderate disease must be approved through UVA’s Institutional Biosafety Committee (IBC) prior to use. Examples of these agents include human derived therapies, gene transfer therapies, recombinant DNA-based vectors, etc. For questions, please contact UVA’s IBC at 434-243-0726." />
|
||||
<camunda:property id="enum_type" value="radio" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
<camunda:value id="BiosafetyLevel" name="Biosafety Level 1 or 2" />
|
||||
<camunda:value id="NA" name="N/A" />
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0movigc</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_100vc9e</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1guaev4" sourceRef="ExclusiveGateway_14igy57" targetRef="Task_EnterIDSStudyIdentification" />
|
||||
<bpmn:parallelGateway id="ExclusiveGateway_14igy57">
|
||||
<bpmn:incoming>SequenceFlow_1bkjyhx</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1guaev4</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_0movigc</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_0y21euo</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_0jwnfzy</bpmn:outgoing>
|
||||
<bpmn:outgoing>SequenceFlow_1lys0jq</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0movigc" sourceRef="ExclusiveGateway_14igy57" targetRef="Task_EnterIDSFeeStructure" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_100vc9e" sourceRef="Task_EnterIDSFeeStructure" targetRef="ExclusiveGateway_1b69uum" />
|
||||
<bpmn:parallelGateway id="ExclusiveGateway_1b69uum">
|
||||
<bpmn:incoming>SequenceFlow_100vc9e</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_1iiazgn</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_1pg0dkw</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_13fzv9y</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_0m01j99</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0lixqzs</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1iiazgn" sourceRef="Task_EnterIDSStudyIdentification" targetRef="ExclusiveGateway_1b69uum" />
|
||||
<bpmn:endEvent id="EndEvent_0jypqha">
|
||||
<bpmn:incoming>SequenceFlow_1r7kcks</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0y21euo" sourceRef="ExclusiveGateway_14igy57" targetRef="Task_ReviewPharmacyManualStatus" />
|
||||
<bpmn:userTask id="Task_ReviewPharmacyManualStatus" name="Review Pharmacy Manual Status" camunda:formKey="Pharmacy Manual Status">
|
||||
<bpmn:documentation>{{ElementDoc_PharmMan}}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_isSponsor" label="Will the sponsor be providing the Pharmacy Manual?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="If you need to submit to IDS before you have a Pharmacy Manual, please provide it to them as soon as you receive a copy." />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FormField_Explain" label="Please explain" type="text_area">
|
||||
<camunda:properties>
|
||||
<camunda:property id="rows" value="5" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0y21euo</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1pg0dkw</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1pg0dkw" sourceRef="Task_ReviewPharmacyManualStatus" targetRef="ExclusiveGateway_1b69uum" />
|
||||
<bpmn:userTask id="UserTask_ReviewInvestigatorsBrochureStatus" name="Review Investigator's Brochure Status" camunda:formKey="Investigator's Brochure Status">
|
||||
<bpmn:documentation>{{ElementDoc_InvestigatorsBrochure}}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_isSponsor" label="Will the sponsor be providing the Investigator's Brochure?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="If you need to submit to IDS before you have a Investigator's Brochure, please provide it to them as soon as you receive a copy." />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FormField_Explain" label="Please explain" type="text_area">
|
||||
<camunda:properties>
|
||||
<camunda:property id="rows" value="5" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0jwnfzy</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_13fzv9y</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0jwnfzy" sourceRef="ExclusiveGateway_14igy57" targetRef="UserTask_ReviewInvestigatorsBrochureStatus" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_13fzv9y" sourceRef="UserTask_ReviewInvestigatorsBrochureStatus" targetRef="ExclusiveGateway_1b69uum" />
|
||||
<bpmn:userTask id="UserTask_ReviewIVRS-IWRS-IXRSManualStatus" name="Review IVRS/IWRS/IXRS Manual Status" camunda:formKey="IVRS/IWRS/IXRS Manual Status">
|
||||
<bpmn:documentation>{{ElementDoc_IVRSIWRSIXRS}}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_Required" label="Does the study require use of IVRS/IXRS/IWRS?" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="radio" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
<camunda:value id="yes_1" name="Yes - study team has requested IVRS/IXRS/IWRS access for IDS pharmacists and technicians" />
|
||||
<camunda:value id="yes_2" name="Yes - study team will request access for IDS pharmacists and technicians prior to study opening or initial drug shipment, whichever is earlier" />
|
||||
<camunda:value id="yes_3" name="Yes - but only CRC will have access" />
|
||||
<camunda:value id="no" name="no" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FormField_isSponsor" label="Will the sponsor be providing the IVRS/IWRS/IXRS Manual?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="help" value="If you need to submit to IDS before you have an IVRS/IWRS/IXRS Manual, please provide it to them as soon as you receive a copy." />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FormField_Explain" label="Please explain" type="text_area">
|
||||
<camunda:properties>
|
||||
<camunda:property id="rows" value="5" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FormField_Training" label="Is any training required prior to obtaining system access?" type="boolean" />
|
||||
<camunda:formField id="FormField_Details" label="If yes, provide details:" type="text_area">
|
||||
<camunda:properties>
|
||||
<camunda:property id="rows" value="5" />
|
||||
<camunda:property id="hide_expression" value="!model.FormField_Training | model.FormField_Training == null" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_1lys0jq</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0m01j99</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1lys0jq" sourceRef="ExclusiveGateway_14igy57" targetRef="UserTask_ReviewIVRS-IWRS-IXRSManualStatus" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0m01j99" sourceRef="UserTask_ReviewIVRS-IWRS-IXRSManualStatus" targetRef="ExclusiveGateway_1b69uum" />
|
||||
<bpmn:businessRuleTask id="BusinessRuleTask_PharmacyManual" name="Pharmacy Manual" camunda:decisionRef="Decision_PharmacyManual">
|
||||
<bpmn:incoming>SequenceFlow_1dcu8zu</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1kam5in</bpmn:outgoing>
|
||||
</bpmn:businessRuleTask>
|
||||
<bpmn:businessRuleTask id="BusinessRuleTask_InvestigatorsBrochure" name="Investigator's Brochure" camunda:decisionRef="Decision_InvestigatorBrochure">
|
||||
<bpmn:incoming>Flow_1x9d2mo</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1dcu8zu</bpmn:outgoing>
|
||||
</bpmn:businessRuleTask>
|
||||
<bpmn:businessRuleTask id="BusinessRuleTask_IVRS-IWRS-IXRSManual" name="IVRS/IWRS/IXRS Manual" camunda:decisionRef="Decision_IVRS-IWRS-IXRS">
|
||||
<bpmn:incoming>SequenceFlow_1kam5in</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1bkjyhx</bpmn:outgoing>
|
||||
</bpmn:businessRuleTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1dcu8zu" sourceRef="BusinessRuleTask_InvestigatorsBrochure" targetRef="BusinessRuleTask_PharmacyManual" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1kam5in" sourceRef="BusinessRuleTask_PharmacyManual" targetRef="BusinessRuleTask_IVRS-IWRS-IXRSManual" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1bkjyhx" sourceRef="BusinessRuleTask_IVRS-IWRS-IXRSManual" targetRef="ExclusiveGateway_14igy57" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1dexemq" sourceRef="StartEvent_1" targetRef="Activity_LoadDocuments" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0lixqzs" sourceRef="ExclusiveGateway_1b69uum" targetRef="UserTask_03o04d0" />
|
||||
<bpmn:userTask id="UserTask_03o04d0" name="Enter Staff Training" camunda:formKey="Staff Training">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_isStaffTraining" label="Is there any sponsor-required protocol training for IDS staff (web training, slide review, etc) that must be completed prior to beginning the study?" type="enum">
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
<camunda:value id="yes" name="Yes" />
|
||||
<camunda:value id="no" name="No" />
|
||||
<camunda:value id="unknown" name="Unknown" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="FormField_Details" label="If yes or unknown, provide details:" type="text_area" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0lixqzs</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1r7kcks</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1r7kcks" sourceRef="UserTask_03o04d0" targetRef="EndEvent_0jypqha" />
|
||||
<bpmn:sequenceFlow id="Flow_1x9d2mo" sourceRef="Activity_LoadDocuments" targetRef="BusinessRuleTask_InvestigatorsBrochure" />
|
||||
<bpmn:scriptTask id="Activity_LoadDocuments" name="Load Documents">
|
||||
<bpmn:incoming>SequenceFlow_1dexemq</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1x9d2mo</bpmn:outgoing>
|
||||
<bpmn:script>documents = study_info('documents')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_IDS">
|
||||
<bpmndi:BPMNEdge id="Flow_1x9d2mo_di" bpmnElement="Flow_1x9d2mo">
|
||||
<di:waypoint x="200" y="340" />
|
||||
<di:waypoint x="270" y="340" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1r7kcks_di" bpmnElement="SequenceFlow_1r7kcks">
|
||||
<di:waypoint x="1180" y="340" />
|
||||
<di:waypoint x="1252" y="340" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0lixqzs_di" bpmnElement="SequenceFlow_0lixqzs">
|
||||
<di:waypoint x="985" y="340" />
|
||||
<di:waypoint x="1080" y="340" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1dexemq_di" bpmnElement="SequenceFlow_1dexemq">
|
||||
<di:waypoint x="48" y="340" />
|
||||
<di:waypoint x="100" y="340" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1bkjyhx_di" bpmnElement="SequenceFlow_1bkjyhx">
|
||||
<di:waypoint x="645" y="340" />
|
||||
<di:waypoint x="695" y="340" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1kam5in_di" bpmnElement="SequenceFlow_1kam5in">
|
||||
<di:waypoint x="506" y="340" />
|
||||
<di:waypoint x="545" y="340" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1dcu8zu_di" bpmnElement="SequenceFlow_1dcu8zu">
|
||||
<di:waypoint x="370" y="340" />
|
||||
<di:waypoint x="406" y="340" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0m01j99_di" bpmnElement="SequenceFlow_0m01j99">
|
||||
<di:waypoint x="890" y="560" />
|
||||
<di:waypoint x="960" y="560" />
|
||||
<di:waypoint x="960" y="365" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1lys0jq_di" bpmnElement="SequenceFlow_1lys0jq">
|
||||
<di:waypoint x="720" y="365" />
|
||||
<di:waypoint x="720" y="560" />
|
||||
<di:waypoint x="790" y="560" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_13fzv9y_di" bpmnElement="SequenceFlow_13fzv9y">
|
||||
<di:waypoint x="890" y="450" />
|
||||
<di:waypoint x="960" y="450" />
|
||||
<di:waypoint x="960" y="365" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0jwnfzy_di" bpmnElement="SequenceFlow_0jwnfzy">
|
||||
<di:waypoint x="720" y="365" />
|
||||
<di:waypoint x="720" y="450" />
|
||||
<di:waypoint x="790" y="450" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1pg0dkw_di" bpmnElement="SequenceFlow_1pg0dkw">
|
||||
<di:waypoint x="890" y="340" />
|
||||
<di:waypoint x="935" y="340" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0y21euo_di" bpmnElement="SequenceFlow_0y21euo">
|
||||
<di:waypoint x="745" y="340" />
|
||||
<di:waypoint x="790" y="340" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1iiazgn_di" bpmnElement="SequenceFlow_1iiazgn">
|
||||
<di:waypoint x="890" y="120" />
|
||||
<di:waypoint x="960" y="120" />
|
||||
<di:waypoint x="960" y="315" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_100vc9e_di" bpmnElement="SequenceFlow_100vc9e">
|
||||
<di:waypoint x="890" y="230" />
|
||||
<di:waypoint x="960" y="230" />
|
||||
<di:waypoint x="960" y="315" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0movigc_di" bpmnElement="SequenceFlow_0movigc">
|
||||
<di:waypoint x="720" y="315" />
|
||||
<di:waypoint x="720" y="230" />
|
||||
<di:waypoint x="790" y="230" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1guaev4_di" bpmnElement="SequenceFlow_1guaev4">
|
||||
<di:waypoint x="720" y="315" />
|
||||
<di:waypoint x="720" y="120" />
|
||||
<di:waypoint x="790" y="120" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="12" y="322" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0wr3vp4_di" bpmnElement="Task_EnterIDSStudyIdentification">
|
||||
<dc:Bounds x="790" y="80" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0s34owg_di" bpmnElement="Task_EnterIDSFeeStructure">
|
||||
<dc:Bounds x="790" y="190" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ParallelGateway_1c14ymx_di" bpmnElement="ExclusiveGateway_14igy57">
|
||||
<dc:Bounds x="695" y="315" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ParallelGateway_188sr3c_di" bpmnElement="ExclusiveGateway_1b69uum">
|
||||
<dc:Bounds x="935" y="315" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="EndEvent_0jypqha_di" bpmnElement="EndEvent_0jypqha">
|
||||
<dc:Bounds x="1252" y="322" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0li1vo4_di" bpmnElement="Task_ReviewPharmacyManualStatus">
|
||||
<dc:Bounds x="790" y="300" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0uvz4r8_di" bpmnElement="UserTask_ReviewInvestigatorsBrochureStatus">
|
||||
<dc:Bounds x="790" y="410" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_06sfx4u_di" bpmnElement="UserTask_ReviewIVRS-IWRS-IXRSManualStatus">
|
||||
<dc:Bounds x="790" y="520" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="BusinessRuleTask_1ld7tdu_di" bpmnElement="BusinessRuleTask_PharmacyManual">
|
||||
<dc:Bounds x="406" y="300" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="BusinessRuleTask_04d0y1w_di" bpmnElement="BusinessRuleTask_InvestigatorsBrochure">
|
||||
<dc:Bounds x="270" y="300" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="BusinessRuleTask_03zh0rt_di" bpmnElement="BusinessRuleTask_IVRS-IWRS-IXRSManual">
|
||||
<dc:Bounds x="545" y="300" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_03o04d0_di" bpmnElement="UserTask_03o04d0">
|
||||
<dc:Bounds x="1080" y="300" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0f5ox7w_di" bpmnElement="Activity_LoadDocuments">
|
||||
<dc:Bounds x="100" y="300" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
@ -1,50 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_181emiu" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
|
||||
<decision id="Decision_InvestigatorBrochure" name="Decision_InvestigatorBrochure">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="150" y="150" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="Investigator's Brochure Form Upload Count">
|
||||
<inputExpression id="inputExpression_1" typeRef="integer">
|
||||
<text>documents.DrugDevDoc_InvestBrochure.count</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="Investigator's Brochure(s) Uploaded?" name="isInvestigatorsBrochure" typeRef="boolean" />
|
||||
<output id="OutputClause_1f3mlfn" label="Investigator's Brochure Form Banner" name="ElementDoc_InvestigatorsBrochure" typeRef="string" />
|
||||
<rule id="DecisionRule_0zvgaym">
|
||||
<inputEntry id="UnaryTests_0mwq0my">
|
||||
<text>0</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1egtix6">
|
||||
<text>false</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0syurh2">
|
||||
<text>"The Investigator's Brochure has not been uploaded. If you do not intend to upload a manual before you IDS submission, please complete the questions below. If you do intend to upload a brochure, you do not need to complete them."</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1tex88t">
|
||||
<inputEntry id="UnaryTests_02u2t7y">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_10fvqcp">
|
||||
<text>true</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_12xmgog">
|
||||
<text>"This brochure will be sent as part of your IDS Submission."</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1h9ji9j">
|
||||
<inputEntry id="UnaryTests_1q9mscj">
|
||||
<text>> 1</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0ybjs1p">
|
||||
<text>true</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1fhucbl">
|
||||
<text>"These brochures will be sent as part of your IDS Submission."</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
@ -1,50 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_13ibzgx" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
|
||||
<decision id="Decision_IVRS-IWRS-IXRS" name="IVRS-IWRS-IXRS Manual">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="150" y="150" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="IVRS-IWRS-IXRS Manual Count">
|
||||
<inputExpression id="inputExpression_1" typeRef="integer">
|
||||
<text>documents.DrugDevDoc_IVRSIWRSIXRSMan.count</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="IVRS-IWRS-IXRS Manual Uploaded?" name="isIVRS-IWRS-IXRS" typeRef="boolean" />
|
||||
<output id="OutputClause_00aciq3" label="IVRS-IWRS-IXRS Form Banner" name="ElementDoc_IVRSIWRSIXRS" typeRef="string" />
|
||||
<rule id="DecisionRule_0qbyqkn">
|
||||
<inputEntry id="UnaryTests_0k06tzv">
|
||||
<text>0</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1i0v4vv">
|
||||
<text>false</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1rmabtw">
|
||||
<text>"The IVRS/IWRS/IXRS Manual has not been uploaded. If you do not intend to upload a manual before you IDS submission, please complete the questions below. If you do intend to upload a manual, you do not need to complete them."</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1qiq2ek">
|
||||
<inputEntry id="UnaryTests_0rxyo8h">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_19ht3su">
|
||||
<text>true</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_00sov94">
|
||||
<text>"This manual will be sent as part of your IDS Submission"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1t2dpzl">
|
||||
<inputEntry id="UnaryTests_0z1i32u">
|
||||
<text>> 1</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_047zb7w">
|
||||
<text>true</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_00yj9ia">
|
||||
<text>"These manuals will be sent as part of your IDS Submission"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
@ -1,50 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_a268e14" name="DRD" namespace="http://camunda.org/schema/1.0/dmn">
|
||||
<decision id="Decision_PharmacyManual" name="Pharmacy Manual">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="190" y="140" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="Pharmacy Manual Upload Count">
|
||||
<inputExpression id="inputExpression_1" typeRef="integer">
|
||||
<text>documents.DrugDevDoc_PharmManual.count</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="Pharmacy Manual(s) Uploaded?" name="isPharmacyManual" typeRef="boolean" />
|
||||
<output id="OutputClause_1vbolzy" label="Pharmacy Manual Form Banner" name="ElementDoc_PharmMan" typeRef="string" />
|
||||
<rule id="DecisionRule_0hvsnu9">
|
||||
<inputEntry id="UnaryTests_1ll10nk">
|
||||
<text>0</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1kidcjg">
|
||||
<text>false</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0tk68d3">
|
||||
<text>"The Pharmacy Manual has not been uploaded. If you do not intend to upload a manual before your IDS submission, please complete the questions below. If you do intend to upload a manual, you do not need to complete them."</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0yqphk1">
|
||||
<inputEntry id="UnaryTests_0p3500o">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_08v3bfw">
|
||||
<text>true</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1espyxg">
|
||||
<text>"This manual will be sent as part of your IDS Submission"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1cwcqvd">
|
||||
<inputEntry id="UnaryTests_0b49gpa">
|
||||
<text>> 1</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1hb3ip1">
|
||||
<text>true</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0yslf6v">
|
||||
<text>"These manuals will be sent as part of your IDS Submission"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
@ -1,61 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_0gn2e8d" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
|
||||
<bpmn:process id="Process_1hssp5j" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>SequenceFlow_1aq1jk8</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:userTask id="UserTask_SubmitWaiver" name="Submit Waiver" camunda:formKey="Submit Waiver">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="UVACompl_IDSWaiverApp" label="Upload " type="file" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_1aq1jk8</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_0wdpi62</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="UserTask_ApproveWaiver" name="Approve Waiver" camunda:formKey="Approve Waiver">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_isApporved" label="Waiver Approved?" type="boolean" />
|
||||
<camunda:formField id="UVACompl_IDSNotification" label="IDS Waiver Application approved by IDS" type="file" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>SequenceFlow_0wdpi62</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1pbjfym</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_0wdpi62" sourceRef="UserTask_SubmitWaiver" targetRef="UserTask_ApproveWaiver" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1aq1jk8" sourceRef="StartEvent_1" targetRef="UserTask_SubmitWaiver" />
|
||||
<bpmn:endEvent id="EndEvent_1nf8um4">
|
||||
<bpmn:incoming>SequenceFlow_1pbjfym</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1pbjfym" sourceRef="UserTask_ApproveWaiver" targetRef="EndEvent_1nf8um4" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1hssp5j">
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1pbjfym_di" bpmnElement="SequenceFlow_1pbjfym">
|
||||
<di:waypoint x="620" y="120" />
|
||||
<di:waypoint x="692" y="120" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1aq1jk8_di" bpmnElement="SequenceFlow_1aq1jk8">
|
||||
<di:waypoint x="188" y="120" />
|
||||
<di:waypoint x="300" y="120" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_0wdpi62_di" bpmnElement="SequenceFlow_0wdpi62">
|
||||
<di:waypoint x="400" y="120" />
|
||||
<di:waypoint x="520" y="120" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="152" y="102" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_1ede5qu_di" bpmnElement="UserTask_SubmitWaiver">
|
||||
<dc:Bounds x="300" y="80" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0pe6rdg_di" bpmnElement="UserTask_ApproveWaiver">
|
||||
<dc:Bounds x="520" y="80" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="EndEvent_1nf8um4_di" bpmnElement="EndEvent_1nf8um4">
|
||||
<dc:Bounds x="692" y="102" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
Binary file not shown.
@ -1,428 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" id="Definitions_0o0ff2r" name="DRD" namespace="http://camunda.org/schema/1.0/dmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
|
||||
<decision id="decision_ind_check" name="IND Check">
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="IS_IND in PB">
|
||||
<inputExpression id="inputExpression_1" typeRef="integer">
|
||||
<text>is_ind</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<input id="InputClause_1yk6kx1" label="IND_1 Number in PB?">
|
||||
<inputExpression id="LiteralExpression_00xhtjw" typeRef="string">
|
||||
<text>ind_1</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<input id="InputClause_069sith" label="IND_2 Number?">
|
||||
<inputExpression id="LiteralExpression_1h9kd8o" typeRef="string">
|
||||
<text>ind_2</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<input id="InputClause_0d0vpur" label="IND_3 Number?">
|
||||
<inputExpression id="LiteralExpression_0zbsg01" typeRef="string">
|
||||
<text>ind_3</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="Non-sequential PB Entry" name="is_non_seq_pb_entry" typeRef="boolean" />
|
||||
<output id="OutputClause_08qk83g" label="IND 1 Field Value" name="IRB_IND1_Number" typeRef="string" />
|
||||
<output id="OutputClause_0yman8a" label="Has Second PB IND?" name="has_second_pb_ind" typeRef="boolean" />
|
||||
<output id="OutputClause_1a9ypxf" label="IND 2 Field Value" name="IRB_IND2_Number" typeRef="string" />
|
||||
<output id="OutputClause_19dr73j" label="Has Third PB IND" name="has_third_pb_ind" typeRef="boolean" />
|
||||
<output id="OutputClause_0ysbmmv" label="IND 3 Field Value" name="IRB_IND3_Number" typeRef="string" />
|
||||
<output id="OutputClause_0xcdkqm" label="IND Message" name="ind_message" typeRef="string" />
|
||||
<rule id="DecisionRule_0teanii">
|
||||
<description>3 IND #s</description>
|
||||
<inputEntry id="UnaryTests_0akfjdp">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1c88e2t">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0zfrdlt">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_07drghr">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1i7dtia">
|
||||
<text>False</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1fiijih">
|
||||
<text>ind_1</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0xfbzo0">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0c5sv9n">
|
||||
<text>ind_2</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0lu82no">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0ukag0c">
|
||||
<text>ind_3</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1tw8tzn">
|
||||
<text>"Provide requested information for the three IND numbers associated with this study entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_199dgpt">
|
||||
<description>2 IND #s</description>
|
||||
<inputEntry id="UnaryTests_1ec0msc">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0h3sj7g">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1ji4kgh">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_10gxrx9">
|
||||
<text>== ""</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1fhlpya">
|
||||
<text>False</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1rwd1ja">
|
||||
<text>ind_1</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0o8a5qg">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0iyqi80">
|
||||
<text>ind_2</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0d1tnpm">
|
||||
<text>False</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1munivg">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1nvcjhv">
|
||||
<text>"Provide requested information for the two IND numbers associated with this study entered in Protocol Builder and any others with numbers pending or exempt"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0h0od2e">
|
||||
<description>1 IND #</description>
|
||||
<inputEntry id="UnaryTests_09ctq71">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1cub5pk">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0aubvru">
|
||||
<text>== ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0rjeqez">
|
||||
<text>== ""</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1we3duh">
|
||||
<text>False</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_15ikz7u">
|
||||
<text>ind_1</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0scnjxc">
|
||||
<text>False</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1vmvj00">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_059bp6f">
|
||||
<text>False</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0bx4383">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_19cvvhd">
|
||||
<text>"Provide requested information for the IND number associated with this study entered in Protocol Builder and any others with numbers pending or exempt"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0z0tcm0">
|
||||
<description>Invalid entry sequence, IND_1 and IND_3 entered, no IND_2</description>
|
||||
<inputEntry id="UnaryTests_1kf86r3">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0jm1wzq">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_14itgac">
|
||||
<text>== ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1prht5p">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0pooubu">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1qul3vr">
|
||||
<text>ind_1</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1myb8bv">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1kw2w4x">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1xxycbv">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1ec0zoc">
|
||||
<text>ind_3</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1fa5e2o">
|
||||
<text>"Two IND #s entered, but not in sequential Protocol Builder fields. Please correct in Protocol Builder if this was not intended"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0bwkqh7">
|
||||
<description>Invalid entry sequence, IND_2 and IND_3 entered, no IND_1</description>
|
||||
<inputEntry id="UnaryTests_13ig4fh">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_11kb6cw">
|
||||
<text>== ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0sfwtwo">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0xxmh5j">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_14otjle">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_13g0u0n">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0goa7rm">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1s6utug">
|
||||
<text>ind_2</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1fcdh57">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0bacx0w">
|
||||
<text>ind_3</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0xhjgjn">
|
||||
<text>"Two IND #s entered, but not in sequential Protocol Builder fields. Please correct in Protocol Builder if this was not intended"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0aubiri">
|
||||
<description>Invalid entry sequence, IND_2 entered, no IND_1 and IND_3</description>
|
||||
<inputEntry id="UnaryTests_06o2bff">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0a79wl7">
|
||||
<text>== ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1o02de0">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1n6nr7w">
|
||||
<text>== ""</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1lujdoj">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_05b12fr">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1hsxec0">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1dr1sdq">
|
||||
<text>ind_2</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1piefr7">
|
||||
<text>False</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0kzme1x">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0oad9ah">
|
||||
<text>"One IND #s entered, but not in the first Protocol Builder field. Please correct in Protocol Builder if this was not intended"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1kp5i27">
|
||||
<description>Invalid entry sequence, IND_3 entered, no IND_1 and IND_2</description>
|
||||
<inputEntry id="UnaryTests_0c0oxqi">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1hcjrkk">
|
||||
<text>== ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0xao0nq">
|
||||
<text>== ""</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1eonsag">
|
||||
<text>!= ""</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0alpmli">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1qsomfm">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_00xrc5b">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0m8n2hh">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0e4liap">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0nb0ikb">
|
||||
<text>ind_3</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1mcyec7">
|
||||
<text>"One IND #s entered, but not in the first Protocol Builder field. Please correct in Protocol Builder if this was not intended"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1nitohs">
|
||||
<description>No</description>
|
||||
<inputEntry id="UnaryTests_19oot48">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0i2qyga">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_09wye05">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1g4y2ti">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0c2mi3l">
|
||||
<text>True</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_049iioi">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_09h1veb">
|
||||
<text>False</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_17kywu8">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_015vz5w">
|
||||
<text>False</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0fzubhr">
|
||||
<text>"not entered in Protocol Builder"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0wj4zzb">
|
||||
<text>"No IND Numbers entered in Protocol Builder. Please enter them and return to this workflow to save if they are availabile before submission to the IRB"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0m9aydp">
|
||||
<description>No IND, PB Q#56 answered as No; should not be needed, but included as a stopgap in case the menu check failed</description>
|
||||
<inputEntry id="UnaryTests_003n37j">
|
||||
<text>0</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1fcaod2">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0hmnsvb">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0y6xian">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1wuhxz7">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0tkt63s">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_05iughi">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0i6mpvj">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0a9ww2m">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1lrscuy">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1lbt5oy">
|
||||
<text>"You should not be in this workflow since no IND was indicated in PB. Please contact Support and inform them of this error"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1vodqp8">
|
||||
<inputEntry id="UnaryTests_1tpspfa">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0bkp3ds">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0dz5okk">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1f33zcd">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_169264f">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0qxk12m">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0wgsh79">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_07bzcpu">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_00x7ss1">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1fpotur">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0ysg90w">
|
||||
<text>"You should not be in this workflow since no IND was indicated in PB. Please contact Support and inform them of this error"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0wz09ul">
|
||||
<inputEntry id="UnaryTests_0ejngto">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0mf773o">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1g6bm6w">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_19oxlpg">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_11fxqu8">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1lb640f">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0tzuvxl">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1yh8b65">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_03ecp5l">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_10bphfn">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1bm8bkw">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
@ -1,40 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_d4461ee" name="DRD" namespace="http://camunda.org/schema/1.0/dmn">
|
||||
<decision id="Decision_UVA_IND_Check" name="UVA IND Check">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="280" y="120" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="UVA IND?">
|
||||
<inputExpression id="inputExpression_1" typeRef="integer">
|
||||
<text>is_uva_ind</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="PB IS UVA IND" name="pb_is_uva_ind" typeRef="string" />
|
||||
<rule id="DecisionRule_0zp23yq">
|
||||
<inputEntry id="UnaryTests_0ojkmct">
|
||||
<text>1</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1fbdkeh">
|
||||
<text>"UVaPI"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1708zy0">
|
||||
<inputEntry id="UnaryTests_0kl2qjr">
|
||||
<text>0</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1e29o43">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1l7fmr2">
|
||||
<inputEntry id="UnaryTests_0c05h8p">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1k7e72v">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
@ -1,385 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_1e7871f" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
|
||||
<bpmn:process id="Process_04jm0bm" isExecutable="true">
|
||||
<bpmn:documentation>Click Start over if you have updated your Protocol Builder entries.</bpmn:documentation>
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>SequenceFlow_1dhb8f4</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1dhb8f4" sourceRef="StartEvent_1" targetRef="Activity_LoadIND_Data" />
|
||||
<bpmn:endEvent id="EndEvent_1h89sl4">
|
||||
<bpmn:incoming>Flow_0jqdolk</bpmn:incoming>
|
||||
<bpmn:incoming>Flow_OneOnly</bpmn:incoming>
|
||||
<bpmn:incoming>Flow_13jejjr</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:businessRuleTask id="BusinessTask_IND_NumberCheck" name="IND Number Check" camunda:decisionRef="decision_ind_check">
|
||||
<bpmn:incoming>Flow_1rk7fmm</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1cwibmt</bpmn:outgoing>
|
||||
</bpmn:businessRuleTask>
|
||||
<bpmn:userTask id="IND_n1_info" name="Edit IND #1 Info" camunda:formKey="IND1_Info">
|
||||
<bpmn:documentation>{{ ind_message }}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="IND1_Number" label="IND1 number:" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="description" value="IND number from Protocol Builder, if available." />
|
||||
<camunda:property id="value_expression" value="model.IRB_IND1_Number" />
|
||||
<camunda:property id="read_only" value="true" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND1_UVA" label="Is UVA the IND number holder?" type="boolean" defaultValue="true" />
|
||||
<camunda:formField id="IND1_HolderType" label="IND Holder Type" type="enum">
|
||||
<camunda:value id="Industry" name="Industry" />
|
||||
<camunda:value id="UVaPI" name="UVa PI" />
|
||||
<camunda:value id="OtherPI" name="Other PI" />
|
||||
<camunda:value id="UVaCenter" name="UVaCenter" />
|
||||
<camunda:value id="OtherCollUniv" name="Other Colleges and Universities" />
|
||||
<camunda:value id="Exempt" name="IND Exempt" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND1_HolderNameOSP" label="Holder Name" type="autocomplete">
|
||||
<camunda:properties>
|
||||
<camunda:property id="spreadsheet.name" value="SponsorList.xls" />
|
||||
<camunda:property id="spreadsheet.value.column" value="CUSTOMER_NUMBER" />
|
||||
<camunda:property id="spreadsheet.label.column" value="CUSTOMER_NAME" />
|
||||
<camunda:property id="autocomplete_num" value="15" />
|
||||
<camunda:property id="description" value="OSP" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND1_HolderNameText" label="Holder Name" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="description" value="Text entry" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND1_HolderNameUVA" label="Holder Name" type="autocomplete">
|
||||
<camunda:properties>
|
||||
<camunda:property id="ldap.lookup" value="true" />
|
||||
<camunda:property id="autocomplete_num" value="10" />
|
||||
<camunda:property id="description" value="LDAP" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND1_DrugBiologicName" label="Drug/Biologic Name" type="string" />
|
||||
<camunda:formField id="IND1_AnotherIND" label="Another IND?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="value_expression" value="model.has_second_pb_ind" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_00wk9rz</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_10rb7gb</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="IND_n2_info" name="Edit IND #2 Info" camunda:formKey="IND2_Info">
|
||||
<bpmn:documentation>{{ ind_message }}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="IND2_Number" label="IND2 Number:" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="description" value="IND number from Protocol Builder, if available." />
|
||||
<camunda:property id="read_only" value="true" />
|
||||
<camunda:property id="hide_expression" value="model.IND2_HolderType === "Exempt"" />
|
||||
<camunda:property id="value_expression" value="IRB_IND2_Number" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND2_UVA" label="Is UVA the IND number holder?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="!model.pb_is_uva_ind || model.pb_is_uva_ind === 0" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND2_HolderType" label="IND Holder Type" type="enum">
|
||||
<camunda:value id="Industry" name="Industry" />
|
||||
<camunda:value id="UVaPI" name="UVa PI" />
|
||||
<camunda:value id="OtherPI" name="Other PI" />
|
||||
<camunda:value id="UVaCenter" name="UVaCenter" />
|
||||
<camunda:value id="OtherCollUniv" name="Other Colleges and Universities" />
|
||||
<camunda:value id="Exempt" name="IND Exempt" />
|
||||
<camunda:value id="NA" name="NA" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND2_HolderNameOSP" label="Holder Name" type="autocomplete">
|
||||
<camunda:properties>
|
||||
<camunda:property id="spreadsheet.name" value="SponsorList.xls" />
|
||||
<camunda:property id="spreadsheet.value.column" value="CUSTOMER_NUMBER" />
|
||||
<camunda:property id="spreadsheet.label.column" value="CUSTOMER_NAME" />
|
||||
<camunda:property id="autocomplete_num" value="15" />
|
||||
<camunda:property id="hide_expression" value="!model.IND2_HolderType || !model.IND2_HolderType.value || (model.IND2_HolderType.value !== "Industry" && model.IND2_HolderType.value !== "OtherCollUniv")" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND2_HolderNameText" label="IND Holder Name if not in above list" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="(!model.IND2_HolderType || !model.IND2_HolderType.value || model.IND2_HolderType.value === "UVaPI" || model.IND2_HolderType.value === "Exempt") || ((model.IND2_HolderType.value === "Industry" || model.IND2_HolderType.value === "OtherCollUniv") && (!model.IND2_HolderNameOSP || !model.IND2_HolderNameOSP.value || model.IND2_HolderNameOSP.value !== "100"))" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND2_HolderNameUVA" label="Holder Name" type="autocomplete">
|
||||
<camunda:properties>
|
||||
<camunda:property id="ldap.lookup" value="true" />
|
||||
<camunda:property id="autocomplete_num" value="10" />
|
||||
<camunda:property id="description" value="LDAP" />
|
||||
<camunda:property id="hide_expression" value="!model.IND2_HolderType || !model.IND2_HolderType.value || model.IND2_HolderType.value !== "UVaPI"" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND2_DrugBiologicName" label="Drug/Biologic Name" type="string" />
|
||||
<camunda:formField id="IND2_AnotherIND" label="Another IND?" type="boolean" defaultValue="false" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_TwoOrThree</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1p563xr</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:userTask id="IND_n3_info" name="Edit IND #3 Info" camunda:formKey="IND3_Info">
|
||||
<bpmn:documentation>{{ ind_message }}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="IND3_Number" label="IND3 Number:" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="description" value="IND number from Protocol Builder, if available." />
|
||||
<camunda:property id="read_only" value="true" />
|
||||
<camunda:property id="hide_expression" value="model.IND3_HolderType === "Exempt"" />
|
||||
<camunda:property id="value_expression" value="IRB_IND3_Number" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND3_UVA" label="Is UVA the IND number holder?" type="boolean">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="!model.pb_is_uva_ind || model.pb_is_uva_ind === 0" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND3_HolderType" label="IND Holder Type" type="enum">
|
||||
<camunda:value id="Industry" name="Industry" />
|
||||
<camunda:value id="UVaPI" name="UVa PI" />
|
||||
<camunda:value id="OtherPI" name="Other PI" />
|
||||
<camunda:value id="UVaCenter" name="UVaCenter" />
|
||||
<camunda:value id="OtherCollUniv" name="Other Colleges and Universities" />
|
||||
<camunda:value id="Exempt" name="IND Exempt" />
|
||||
<camunda:value id="NA" name="NA" />
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND3_HolderNameOSP" label="Holder Name" type="autocomplete">
|
||||
<camunda:properties>
|
||||
<camunda:property id="spreadsheet.name" value="SponsorList.xls" />
|
||||
<camunda:property id="spreadsheet.value.column" value="CUSTOMER_NUMBER" />
|
||||
<camunda:property id="spreadsheet.label.column" value="CUSTOMER_NAME" />
|
||||
<camunda:property id="autocomplete_num" value="15" />
|
||||
<camunda:property id="hide_expression" value="!model.IND3_HolderType || !model.IND3_HolderType.value || (model.IND3_HolderType.value !== "Industry" && model.IND3_HolderType.value !== "OtherCollUniv")" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND3_HolderNameText" label="IND Holder Name if not in above list" type="string">
|
||||
<camunda:properties>
|
||||
<camunda:property id="hide_expression" value="(!model.IND3_HolderType || !model.IND3_HolderType.value || model.IND3_HolderType.value === "UVaPI" || model.IND3_HolderType.value === "Exempt") || ((model.IND3_HolderType.value === "Industry" || model.IND3_HolderType.value === "OtherCollUniv") && (!model.IND3_HolderNameOSP || !model.IND3_HolderNameOSP.value || model.IND3_HolderNameOSP.value !== "100"))" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND3_HolderNameUVA" label="Holder Name" type="autocomplete">
|
||||
<camunda:properties>
|
||||
<camunda:property id="ldap.lookup" value="true" />
|
||||
<camunda:property id="autocomplete_num" value="10" />
|
||||
<camunda:property id="description" value="LDAP" />
|
||||
<camunda:property id="hide_expression" value="!model.IND3_HolderType || !model.IND3_HolderType.value || model.IND3_HolderType.value !== "UVaPI"" />
|
||||
</camunda:properties>
|
||||
</camunda:formField>
|
||||
<camunda:formField id="IND3_DrugBiologicName" label="Drug/Biologic Name" type="string" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_1wxvyqe</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0jqdolk</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1cwibmt" sourceRef="BusinessTask_IND_NumberCheck" targetRef="Activity_UVA_IND_Check" />
|
||||
<bpmn:sequenceFlow id="Flow_1p563xr" sourceRef="IND_n2_info" targetRef="Gateway_Count3" />
|
||||
<bpmn:sequenceFlow id="Flow_0jqdolk" sourceRef="IND_n3_info" targetRef="EndEvent_1h89sl4" />
|
||||
<bpmn:sequenceFlow id="Flow_10rb7gb" sourceRef="IND_n1_info" targetRef="Gateway_Count2" />
|
||||
<bpmn:exclusiveGateway id="Gateway_Count2" name="Another IND? " default="Flow_TwoOrThree">
|
||||
<bpmn:incoming>Flow_10rb7gb</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_TwoOrThree</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_OneOnly</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:sequenceFlow id="Flow_TwoOrThree" name="Yes" sourceRef="Gateway_Count2" targetRef="IND_n2_info" />
|
||||
<bpmn:sequenceFlow id="Flow_OneOnly" name="No" sourceRef="Gateway_Count2" targetRef="EndEvent_1h89sl4">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">IND1_AnotherIND != True</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:sequenceFlow id="Flow_1rk7fmm" sourceRef="Activity_LoadIND_Data" targetRef="BusinessTask_IND_NumberCheck" />
|
||||
<bpmn:scriptTask id="Activity_LoadIND_Data" name="Load IND Data">
|
||||
<bpmn:incoming>SequenceFlow_1dhb8f4</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1rk7fmm</bpmn:outgoing>
|
||||
<bpmn:script>details = study_info('details')
|
||||
ind_1 = details.IND_1
|
||||
ind_2 = details.IND_2
|
||||
ind_3 = details.IND_3
|
||||
is_ind = details.IS_IND
|
||||
is_uva_ind = details.IS_UVA_IND
|
||||
del(details)</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:exclusiveGateway id="Gateway_Count1" name="IS_IND?" default="Flow_00wk9rz">
|
||||
<bpmn:incoming>Flow_0pwur5c</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_04rl7gw</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_00wk9rz</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:sequenceFlow id="Flow_04rl7gw" name=" " sourceRef="Gateway_Count1" targetRef="Activity_0yf2ypo">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">is_ind != 1</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:sequenceFlow id="Flow_00wk9rz" name="= 1 (Yes)" sourceRef="Gateway_Count1" targetRef="IND_n1_info" />
|
||||
<bpmn:exclusiveGateway id="Gateway_Count3" name="Another IND?" default="Flow_1wxvyqe">
|
||||
<bpmn:incoming>Flow_1p563xr</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1wxvyqe</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_13jejjr</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:sequenceFlow id="Flow_1wxvyqe" name="Yes" sourceRef="Gateway_Count3" targetRef="IND_n3_info" />
|
||||
<bpmn:sequenceFlow id="Flow_13jejjr" name="No" sourceRef="Gateway_Count3" targetRef="EndEvent_1h89sl4">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">not IND2_AnotherIND or IND2_AnotherIND != True</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:endEvent id="EndEvent_NoOrNull_IS_IND">
|
||||
<bpmn:incoming>Flow_19usc52</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_19usc52" sourceRef="Activity_0yf2ypo" targetRef="EndEvent_NoOrNull_IS_IND" />
|
||||
<bpmn:manualTask id="Activity_0yf2ypo" name="Show Contact Admin">
|
||||
<bpmn:documentation>{{ ind_message }}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="IND_CntEntered" label="How Many?" type="enum">
|
||||
<camunda:properties>
|
||||
<camunda:property id="enum_type" value="radio" />
|
||||
</camunda:properties>
|
||||
<camunda:validation>
|
||||
<camunda:constraint name="required" config="true" />
|
||||
</camunda:validation>
|
||||
<camunda:value id="one" name="1" />
|
||||
<camunda:value id="two" name="2" />
|
||||
<camunda:value id="three" name="3" />
|
||||
</camunda:formField>
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_04rl7gw</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_19usc52</bpmn:outgoing>
|
||||
</bpmn:manualTask>
|
||||
<bpmn:sequenceFlow id="Flow_0pwur5c" sourceRef="Activity_UVA_IND_Check" targetRef="Gateway_Count1" />
|
||||
<bpmn:businessRuleTask id="Activity_UVA_IND_Check" name="UVA IND Check" camunda:decisionRef="Decision_UVA_IND_Check">
|
||||
<bpmn:incoming>SequenceFlow_1cwibmt</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0pwur5c</bpmn:outgoing>
|
||||
</bpmn:businessRuleTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_04jm0bm">
|
||||
<bpmndi:BPMNEdge id="Flow_0pwur5c_di" bpmnElement="Flow_0pwur5c">
|
||||
<di:waypoint x="900" y="197" />
|
||||
<di:waypoint x="965" y="197" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_19usc52_di" bpmnElement="Flow_19usc52">
|
||||
<di:waypoint x="1040" y="390" />
|
||||
<di:waypoint x="1112" y="390" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_13jejjr_di" bpmnElement="Flow_13jejjr">
|
||||
<di:waypoint x="1710" y="172" />
|
||||
<di:waypoint x="1710" y="100" />
|
||||
<di:waypoint x="2100" y="100" />
|
||||
<di:waypoint x="2100" y="179" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1899" y="82" width="15" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1wxvyqe_di" bpmnElement="Flow_1wxvyqe">
|
||||
<di:waypoint x="1735" y="197" />
|
||||
<di:waypoint x="1790" y="197" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1754" y="179" width="18" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_00wk9rz_di" bpmnElement="Flow_00wk9rz">
|
||||
<di:waypoint x="1015" y="197" />
|
||||
<di:waypoint x="1140" y="197" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1049" y="173" width="44" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_04rl7gw_di" bpmnElement="Flow_04rl7gw">
|
||||
<di:waypoint x="990" y="222" />
|
||||
<di:waypoint x="990" y="350" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1010" y="276" width="0" height="27" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1rk7fmm_di" bpmnElement="Flow_1rk7fmm">
|
||||
<di:waypoint x="550" y="197" />
|
||||
<di:waypoint x="640" y="197" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_00n2n7p_di" bpmnElement="Flow_OneOnly">
|
||||
<di:waypoint x="1370" y="222" />
|
||||
<di:waypoint x="1370" y="340" />
|
||||
<di:waypoint x="2100" y="340" />
|
||||
<di:waypoint x="2100" y="215" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1730" y="322" width="15" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1o2u7k3_di" bpmnElement="Flow_TwoOrThree">
|
||||
<di:waypoint x="1395" y="197" />
|
||||
<di:waypoint x="1480" y="197" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1432" y="166" width="18" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_10rb7gb_di" bpmnElement="Flow_10rb7gb">
|
||||
<di:waypoint x="1240" y="197" />
|
||||
<di:waypoint x="1345" y="197" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0jqdolk_di" bpmnElement="Flow_0jqdolk">
|
||||
<di:waypoint x="1890" y="197" />
|
||||
<di:waypoint x="2082" y="197" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1p563xr_di" bpmnElement="Flow_1p563xr">
|
||||
<di:waypoint x="1580" y="197" />
|
||||
<di:waypoint x="1685" y="197" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1cwibmt_di" bpmnElement="SequenceFlow_1cwibmt">
|
||||
<di:waypoint x="740" y="197" />
|
||||
<di:waypoint x="800" y="197" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1dhb8f4_di" bpmnElement="SequenceFlow_1dhb8f4">
|
||||
<di:waypoint x="358" y="197" />
|
||||
<di:waypoint x="450" y="197" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="322" y="179" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="EndEvent_1h89sl4_di" bpmnElement="EndEvent_1h89sl4">
|
||||
<dc:Bounds x="2082" y="179" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="414" y="202" width="74" height="27" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="BusinessRuleTask_1cszgkx_di" bpmnElement="BusinessTask_IND_NumberCheck">
|
||||
<dc:Bounds x="640" y="157" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_0a2dfa8_di" bpmnElement="IND_n1_info">
|
||||
<dc:Bounds x="1140" y="157" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_1smni98_di" bpmnElement="IND_n2_info">
|
||||
<dc:Bounds x="1480" y="157" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="UserTask_1378hd8_di" bpmnElement="IND_n3_info">
|
||||
<dc:Bounds x="1790" y="157" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_0ckycp9_di" bpmnElement="Gateway_Count2" isMarkerVisible="true">
|
||||
<dc:Bounds x="1345" y="172" width="50" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1338" y="146" width="67" height="27" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0el77c2_di" bpmnElement="Activity_LoadIND_Data">
|
||||
<dc:Bounds x="450" y="157" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_0nrywiv_di" bpmnElement="Gateway_Count1" isMarkerVisible="true">
|
||||
<dc:Bounds x="965" y="172" width="50" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="975" y="142" width="42" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_1iu0zpo_di" bpmnElement="Gateway_Count3" isMarkerVisible="true">
|
||||
<dc:Bounds x="1685" y="172" width="50" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="1677" y="229" width="67" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0dnhrh6_di" bpmnElement="EndEvent_NoOrNull_IS_IND">
|
||||
<dc:Bounds x="1112" y="372" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0362d0t_di" bpmnElement="Activity_0yf2ypo">
|
||||
<dc:Bounds x="940" y="350" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1w7kyic_di" bpmnElement="Activity_UVA_IND_Check">
|
||||
<dc:Bounds x="800" y="157" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
@ -1,75 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_00j2iu5" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
|
||||
<bpmn:process id="Process_1gmf4la" isExecutable="true">
|
||||
<bpmn:documentation />
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>SequenceFlow_1fmyo77</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:scriptTask id="ScriptTask_02924vs" name="Load IRB Details">
|
||||
<bpmn:incoming>SequenceFlow_1fmyo77</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_18nr0gf</bpmn:outgoing>
|
||||
<bpmn:script>details = study_info('details')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1fmyo77" sourceRef="StartEvent_1" targetRef="ScriptTask_02924vs" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_18nr0gf" sourceRef="ScriptTask_02924vs" targetRef="Activity_FromIRB-API" />
|
||||
<bpmn:endEvent id="EndEvent_1qvyxg7">
|
||||
<bpmn:incoming>Flow_0m7unlb</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0m7unlb" sourceRef="Activity_FromIRB-API" targetRef="EndEvent_1qvyxg7" />
|
||||
<bpmn:manualTask id="Activity_FromIRB-API" name="From IRB API">
|
||||
<bpmn:documentation>| Data Point | Value | Help |
|
||||
|:-------------- |:-------- |:------ |
|
||||
{% for key, value in details.items() -%}
|
||||
| {{key}} | {%- if value == None -%}
|
||||
\-\-
|
||||
{%- else -%}
|
||||
{%- if value is number -%}
|
||||
{%- if value == 1 -%}
|
||||
True
|
||||
{%- elif value == 0 -%}
|
||||
False
|
||||
{%- else -%}
|
||||
{{value}}
|
||||
{%- endif -%}
|
||||
{%- elif value is string -%}
|
||||
{%- if value|length -%}
|
||||
{{value}}
|
||||
{%- else -%}
|
||||
Question not presented
|
||||
{%- endif -%}
|
||||
{%- endif -%}
|
||||
{%- endif -%} | [Context here](/help)
|
||||
{% endfor -%}</bpmn:documentation>
|
||||
<bpmn:incoming>SequenceFlow_18nr0gf</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0m7unlb</bpmn:outgoing>
|
||||
</bpmn:manualTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1gmf4la">
|
||||
<bpmndi:BPMNEdge id="Flow_0m7unlb_di" bpmnElement="Flow_0m7unlb">
|
||||
<di:waypoint x="570" y="117" />
|
||||
<di:waypoint x="672" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_18nr0gf_di" bpmnElement="SequenceFlow_18nr0gf">
|
||||
<di:waypoint x="410" y="117" />
|
||||
<di:waypoint x="470" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="SequenceFlow_1fmyo77_di" bpmnElement="SequenceFlow_1fmyo77">
|
||||
<di:waypoint x="238" y="117" />
|
||||
<di:waypoint x="310" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="202" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="ScriptTask_02924vs_di" bpmnElement="ScriptTask_02924vs">
|
||||
<dc:Bounds x="310" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="EndEvent_1qvyxg7_di" bpmnElement="EndEvent_1qvyxg7">
|
||||
<dc:Bounds x="672" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_19nawos_di" bpmnElement="Activity_FromIRB-API">
|
||||
<dc:Bounds x="470" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,32 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_389ac74" name="DRD" namespace="http://camunda.org/schema/1.0/dmn">
|
||||
<decision id="Decision_CheckPI" name="Check for PI">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="157" y="81" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="Check for PI">
|
||||
<inputExpression id="inputExpression_1" typeRef="string">
|
||||
<text>investigators.get('PI','None Found')</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="PI Found" name="is_pi" typeRef="boolean" />
|
||||
<rule id="DecisionRule_0513h6e">
|
||||
<inputEntry id="UnaryTests_18pzg5h">
|
||||
<text>"None Found"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0voyjpp">
|
||||
<text>false</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1j7k6d3">
|
||||
<inputEntry id="UnaryTests_0ezhr0y">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_05plngz">
|
||||
<text>true</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
@ -1,951 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_d28686b" name="DRD" namespace="http://camunda.org/schema/1.0/dmn">
|
||||
<decision id="Decision_dept_chair" name="Department Chair">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="290" y="130" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="PI Department">
|
||||
<inputExpression id="inputExpression_1" typeRef="string">
|
||||
<text>PI_department</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="OutputClause_0a23e1m" label="Chair Computer ID" name="Chair_CID" typeRef="string" />
|
||||
<output id="output_1" label="Chair Name & Degree" name="Chair_Name_Degree" typeRef="string" />
|
||||
<output id="OutputClause_0ysj4lb" label="Chair Title" name="Chair_Title" typeRef="string" />
|
||||
<output id="OutputClause_1cw9kan" label="Chair Sig Block" name="Chair_Sig_Block" typeRef="string" />
|
||||
<output id="OutputClause_0lym39s" label="Designee 1 CID" name="D1_CID" typeRef="string" />
|
||||
<output id="OutputClause_1jjvdz1" label="Designee 1 Name & Degree" name="D1_Name_Degree" typeRef="string" />
|
||||
<output id="OutputClause_07o7aw8" label="Designee 1 Title" name="D1_Title" typeRef="string" />
|
||||
<output id="OutputClause_0vfjwcc" label="Designee 1 Sig Block" name="D1_Sig_Block" typeRef="string" />
|
||||
<rule id="DecisionRule_130my8k">
|
||||
<inputEntry id="UnaryTests_0utry5v">
|
||||
<text>"Anesthesiology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0npmzip">
|
||||
<text>"gfr2f"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0tk1vjc">
|
||||
<text>"George F. Rich, MD, PhD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0e67jw1">
|
||||
<text>"Harrison Medical Teaching Professor and Chair"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_01dz6m5">
|
||||
<text>"Department of Anesthesiology"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0n6oua7">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0zajaid">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0grhgrz">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1goqtom">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1p25iet">
|
||||
<inputEntry id="UnaryTests_1twyz8n">
|
||||
<text>"Biochemistry & Molecular Genetics"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0mk1ouw">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_02ypl0q">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1g170wj">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1jhsveh">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_082aspf">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0awx5vb">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0aukxvo">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0009kji">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_11vnz24">
|
||||
<inputEntry id="UnaryTests_1n5hdk0">
|
||||
<text>"Biomedical Engineering"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1ki2y5r">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1nbiz8o">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0zidebq">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_002cq1c">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0j5jqsg">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0lbxiqn">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0wkbs71">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1wlnj8l">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0o9l1g4">
|
||||
<inputEntry id="UnaryTests_00c9fvd">
|
||||
<text>"Brain Institute"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0a5eyr5">
|
||||
<text>"jk8t"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0e4jt7e">
|
||||
<text>"Jaideep Kapur, MD, PhD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1kjp55c">
|
||||
<text>"Director of UVA Brain Institute"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_17791yy">
|
||||
<text>"Eugene Meyer III Professor of Neuroscience"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0v9pxd6">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_07n4c21">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0hvam0u">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_19e4obt">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_07agfmq">
|
||||
<inputEntry id="UnaryTests_1ne2xho">
|
||||
<text>"Cell Biology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1mt2ueq">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0wfnk1a">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1bdq1nk">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_17cxb6g">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1n5mxii">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1tzwz8h">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_06ibam8">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0o5p6mi">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0tbdzg7">
|
||||
<inputEntry id="UnaryTests_192cb1q">
|
||||
<text>"Center for Diabetes Technology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_00jss4e">
|
||||
<text>"hgm7s"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_10za2my">
|
||||
<text>"Harry G. Mitchell"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_096h7xv">
|
||||
<text>"Chief Operating Officer"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_07qfbku">
|
||||
<text>"Center for Diabetes Technology"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0obq3ta">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0a8ph15">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1tscoeq">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_047859r">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0mnaifs">
|
||||
<inputEntry id="UnaryTests_1pbp49w">
|
||||
<text>"Center for Research in Reproduction"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0zkt3i2">
|
||||
<text>"jcm9h"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_19cb1bz">
|
||||
<text>"John C. Marshall, MD, PhD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0uktq93">
|
||||
<text>"Director, Center for Research in Reproduction"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0v8z227">
|
||||
<text>"Andrew D. Hart Professor of Medicine"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_18x17lq">
|
||||
<text>"cm2hq"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1l41pgn">
|
||||
<text>"Christopher McCartney, MD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1qhexw2">
|
||||
<text>"Associate Professor of Medicine"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0th4s9k">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1g9heo9">
|
||||
<inputEntry id="UnaryTests_06vfwmk">
|
||||
<text>"Dermatology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1yav83a">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1apthzx">
|
||||
<text>"Art P. Saavedra, MD, PhD, MBA"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0hhk5mn">
|
||||
<text>"Endowed Professor and Chair of Dermatology"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1yp78ih">
|
||||
<text>"Chief of Ambulatory Strategy and Operations, Department of Dematology"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_163a3e0">
|
||||
<text>"ltg4ga"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0nknugg">
|
||||
<text>"Leisa Gonnella"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1rj0p0p">
|
||||
<text>"Chief Operating Officer, Administrator"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1p9cuj3">
|
||||
<text>"Department of Dermatology"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1w4vnv2">
|
||||
<inputEntry id="UnaryTests_1uxzlwk">
|
||||
<text>"Emergency Medicine"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0y4klbl">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_00rdpy1">
|
||||
<text>"Robert O'Connor, MD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_00v04f6">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0rse4cx">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1w8ejrj">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0qdmvgy">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0tjzvg5">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0jfgfhp">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0v9kyu9">
|
||||
<inputEntry id="UnaryTests_0obk165">
|
||||
<text>"Family Medicine"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1u4cfnj">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0g02cea">
|
||||
<text>"Li Li, MD, PhD, MPH"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_06vuegj">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1qhnttc">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_12xt0ep">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1qa5kcs">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0za5gpp">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1twuim4">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0yft3o2">
|
||||
<inputEntry id="UnaryTests_11hmf6p">
|
||||
<text>"Institute of Law, Psychiatry and Public Policy (institutional)"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1k99mrq">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1en1sr3">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0q5jqja">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_011ucce">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0bogqw7">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0jxkapm">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_13xl7b5">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1pwpak6">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0tn1ntu">
|
||||
<inputEntry id="UnaryTests_03sw24v">
|
||||
<text>"Keck Center for Cellular Imaging (institutional)"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_13i4uts">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0rzmxbc">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1c1hbm4">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_01151r2">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_087iw0o">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0snuc6d">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_13kstqw">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1e8264r">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1xa7pks">
|
||||
<inputEntry id="UnaryTests_1pppuin">
|
||||
<text>"Kinesiology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_17bh2dl">
|
||||
<text>"alw2v"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0f8mbsy">
|
||||
<text>"Arthur L. Weltman"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1ktzrw6">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0bs8674">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_02nf3re">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1oxr1hm">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0n7l563">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0opge44">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_07gq82a">
|
||||
<inputEntry id="UnaryTests_1usw6cv">
|
||||
<text>"Microbiology, Immunology, and Cancer Biology (MIC)"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0ayt0hb">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0zas7lc">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1t5vcgd">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0nfe8zu">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_02lfwjn">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1qgnopz">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1mw0evh">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_109mquo">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1ugttjx">
|
||||
<inputEntry id="UnaryTests_0l14jnz">
|
||||
<text>"Molecular Physiology & Biological Physics"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0cmnhcl">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0nz91ut">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1mvclh2">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1vz32qe">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0ra8i39">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1ssvvwf">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_09y17lm">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1jl7ljp">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_13zj816">
|
||||
<inputEntry id="UnaryTests_03te6ro">
|
||||
<text>"Neurology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0i51oau">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1ym154j">
|
||||
<text>"Howard Goodkin MD, PhD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0txl5cj">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0sekcao">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0gfwwav">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0aszk0t">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1co84ru">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_03q6up9">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1d162e6">
|
||||
<inputEntry id="UnaryTests_0t4sokv">
|
||||
<text>"Neuroscience"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1tfzksp">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1lszybr">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1976phh">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1h2752z">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0jnhvv5">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1rzehm7">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0n5fge7">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1a80hnb">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1qa4tbk">
|
||||
<inputEntry id="UnaryTests_0h7ex0k">
|
||||
<text>"Neurosurgery"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1jrc8uu">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1ivww3e">
|
||||
<text>"Mark E. Shaffrey, MD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1xdcxk9">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1d9vr6b">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1fd2214">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0lce0kt">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1nlp8dp">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1uy1fel">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_194hj7g">
|
||||
<inputEntry id="UnaryTests_1lmoxki">
|
||||
<text>"Obstetrics & Gynecology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1d2368t">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0l5nykm">
|
||||
<text>"James (Jef) E Ferguson II, MD, MBA"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0jwhpxm">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1391acv">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0si3942">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0d1n3k1">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1i5o5yn">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0dn6xa0">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1feefxw">
|
||||
<inputEntry id="UnaryTests_1bquriu">
|
||||
<text>"Ophthalmology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1cvve9k">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0rcmv2x">
|
||||
<text>"Peter Netland, MD, PhD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0lb9uaq">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_063dzsz">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_097bq13">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1pu38e4">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0sea7rb">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0uhn2ma">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0kk6ajr">
|
||||
<inputEntry id="UnaryTests_0j98tua">
|
||||
<text>"Orthopedic Surgery"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1jpdhy8">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_14lbprk">
|
||||
<text>"A. Bobby Chhabra, MD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_16su4fp">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0br0ljc">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_18tsp9o">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0iytnrt">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1sqb3bn">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_00hwx9m">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1ckz8ox">
|
||||
<inputEntry id="UnaryTests_1gkxt51">
|
||||
<text>"Otolaryngology- Head & Neck Surgery"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1jxatpo">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1urdeg3">
|
||||
<text>"Stephen S. Park, MD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1puli8h">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_05hanjd">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_05z5vca">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_140631p">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0sg2tkh">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1cco0sk">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_04eribm">
|
||||
<inputEntry id="UnaryTests_1a11t50">
|
||||
<text>"Pathology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0kh06ih">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1ovk0xq">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0dc4w43">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_14xd721">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1w3elof">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0cndgkq">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_09cqz3x">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_19ke3cp">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_04do91b">
|
||||
<inputEntry id="UnaryTests_1hg6qgn">
|
||||
<text>"Pediatrics"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0wipbsc">
|
||||
<text>"jpn2r"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1h4gtcc">
|
||||
<text>"James P. Nataro, MD, PhD, MBA, FAAP"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_18uyr1o">
|
||||
<text>"Chair, Department of Pediatrics"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_04ajeps">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_130lm87">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1ghciu1">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0ywnhpr">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1lgvui4">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0lb8hi5">
|
||||
<inputEntry id="UnaryTests_0y76uqi">
|
||||
<text>"Pharmacology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0zjqu5t">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0xu1r2k">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_12a5zfs">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1t7jwlp">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0c3vy0f">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0mdcrsx">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1hup339">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0mqbucf">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0csfjb9">
|
||||
<inputEntry id="UnaryTests_0ccnf2c">
|
||||
<text>"Plastic and Maxillofacial Surgery"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0so2ly5">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_09ynoch">
|
||||
<text>"Thomas J. Gampper, MD, FACS"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0ized9e">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0i6xbfl">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0ip9317">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1nurrpl">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0cdiqdv">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_12im461">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0ie3f70">
|
||||
<inputEntry id="UnaryTests_10hi0vn">
|
||||
<text>"Psychiatry and Neurobehavioral Sciences"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_05qrc3z">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1tcqtd0">
|
||||
<text>"Anita H. Clayton, MD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_16paqdh">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1lomodj">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_128hjyq">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1ct7zgi">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1g86k3w">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_05bq3ae">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1iavxz0">
|
||||
<inputEntry id="UnaryTests_1myl3be">
|
||||
<text>"Public Health Sciences"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0kdn3sp">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1ayhurb">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_193gp8u">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0t4xqbq">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1ct3aon">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_11975jk">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0c75zal">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1joannt">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1d7j3pd">
|
||||
<inputEntry id="UnaryTests_1m7gkcr">
|
||||
<text>"Radiation Oncology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1xg47wl">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_11ufvnv">
|
||||
<text>"James M. Larner, MD, FASTRO"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1bm58kb">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1uqjshy">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0pombk0">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0yxaw3s">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_00i47mj">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0z7nmmd">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0gzdz53">
|
||||
<inputEntry id="UnaryTests_0mo9711">
|
||||
<text>"Radiology and Medical Imaging"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_07shsb0">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1v4defw">
|
||||
<text>"Alan H. Matsumoto, MD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0gqqxxj">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1oowa0l">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0p9rbw9">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1rmnnwe">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1p31rj5">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0zhi6dq">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1immrvr">
|
||||
<inputEntry id="UnaryTests_01pmp6n">
|
||||
<text>"Surgery"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_026jlgr">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_01ao8qh">
|
||||
<text>"Alexander S. Krupnick , MD"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1yqde5y">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0obxa2t">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0tvp046">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0yww9ti">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_19qm59b">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1rjo16e">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_148egsn">
|
||||
<inputEntry id="UnaryTests_0x77krc">
|
||||
<text>"Urology"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1xwdb9q">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1qsi7o3">
|
||||
<text>"Kristen L.Greene, MD, MAS, FACS"</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0hi3yzf">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_04tmu5b">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_15ixegr">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1mjzs1w">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_16wfyws">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0ehj16p">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
@ -1,224 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_a3b9c9b" name="DRD" namespace="http://camunda.org/schema/1.0/dmn">
|
||||
<decision id="Decision_PI_Dept" name="PI Department">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="300" y="140" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="InputClause_12xvnxx" label="E0 Dept">
|
||||
<inputExpression id="LiteralExpression_1q9d9zi" typeRef="string">
|
||||
<text>pi.E0.deptAbbrv</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="PI Department" name="PI_E0_deptName" typeRef="string" />
|
||||
<rule id="DecisionRule_1b5ywn5">
|
||||
<inputEntry id="UnaryTests_1bev7id">
|
||||
<text>"ANES"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1nzq40i">
|
||||
<text>"Anesthesiology"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0wbq6tr">
|
||||
<inputEntry id="UnaryTests_1vs880z">
|
||||
<text>"BIOC"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0tgoozf">
|
||||
<text>"Biochemistry & Molecular Genetics"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0zojm1d">
|
||||
<inputEntry id="UnaryTests_0kgwioh">
|
||||
<text>"BIOM"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_08w2wq9">
|
||||
<text>"Biomedical Engineering"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0owmu0q">
|
||||
<inputEntry id="UnaryTests_0rywcw8">
|
||||
<text>"CELL"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0ru3sax">
|
||||
<text>"Cell Biology"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1ryvd9v">
|
||||
<inputEntry id="UnaryTests_0yrysju">
|
||||
<text>"DMED"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1c4iwlq">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_11nfq9u">
|
||||
<inputEntry id="UnaryTests_15017iw">
|
||||
<text>"INMD"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_193ae27">
|
||||
<text>"Institute of Law, Psychiatry and Public Policy (institutional)"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0lwmys9">
|
||||
<inputEntry id="UnaryTests_0bgwlbf">
|
||||
<text>"INMD-Ctr"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1p0b3ea">
|
||||
<text>"Keck Center for Cellular Imaging (institutional)"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1qjzff0">
|
||||
<inputEntry id="UnaryTests_10jnj9r">
|
||||
<text>"MICR"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1qpt4pk">
|
||||
<text>"Microbiology, Immunology, and Cancer Biology (MIC)"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_08qhcy9">
|
||||
<inputEntry id="UnaryTests_19uyawr">
|
||||
<text>"MPHY"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_06z2wux">
|
||||
<text>"Molecular Physiology & Biological Physics"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0ovrx5p">
|
||||
<inputEntry id="UnaryTests_0pg1um2">
|
||||
<text>"NERS"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0p5gvct">
|
||||
<text>"Neurosurgery"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_135q0hq">
|
||||
<inputEntry id="UnaryTests_0e11w4s">
|
||||
<text>"NESC"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0bu5hgk">
|
||||
<text>"Neuroscience"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_18zb09n">
|
||||
<inputEntry id="UnaryTests_0fvagjn">
|
||||
<text>"NEUR"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0tl3ksn">
|
||||
<text>"Neurology"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1vev1e3">
|
||||
<inputEntry id="UnaryTests_07qj3jf">
|
||||
<text>"OBGY"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_067ehpk">
|
||||
<text>"Obstetrics and Gynecology"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_08k4jec">
|
||||
<inputEntry id="UnaryTests_0nlzxc2">
|
||||
<text>"OPHT"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_103y6qq">
|
||||
<text>"Ophthalmology"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0a48i89">
|
||||
<inputEntry id="UnaryTests_1y5nfzo">
|
||||
<text>"ORTP"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1cr3wq0">
|
||||
<text>"Orthopaedic Surgery"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0km2u3f">
|
||||
<inputEntry id="UnaryTests_1buhr78">
|
||||
<text>"PATH"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0nx1reo">
|
||||
<text>"Pathology"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1gc10ny">
|
||||
<inputEntry id="UnaryTests_1uru4m4">
|
||||
<text>"PBHS"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_073f0bn">
|
||||
<text>"Public Health Sciences"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_145vhtj">
|
||||
<inputEntry id="UnaryTests_1y8kr8n">
|
||||
<text>"PEDT"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1k444fj">
|
||||
<text>"Pediatrics"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_05u0zki">
|
||||
<inputEntry id="UnaryTests_1uudg05">
|
||||
<text>"PHAR"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1mz3u7d">
|
||||
<text>"Pharmacology"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0o9ozyh">
|
||||
<inputEntry id="UnaryTests_1ytw7l4">
|
||||
<text>"PLSR"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0mxv6ov">
|
||||
<text>"Plastic and Maxillofacial Surgery"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0wbzqhg">
|
||||
<inputEntry id="UnaryTests_0uwi3mu">
|
||||
<text>"PSCH"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1lsruwb">
|
||||
<text>"Psychiatry and Neurobehavioral Sciences"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1ukpgze">
|
||||
<inputEntry id="UnaryTests_0ijuf1f">
|
||||
<text>"RADL"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1tjwp0q">
|
||||
<text>"Radiology and Medical Imaging"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_04la8a6">
|
||||
<inputEntry id="UnaryTests_1f5hv2r">
|
||||
<text>"RONC"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0g10afk">
|
||||
<text>"Radiation Oncology"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0qwccau">
|
||||
<inputEntry id="UnaryTests_0661n6g">
|
||||
<text>"SURG"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1iuug6l">
|
||||
<text>"Surgery"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0jleevh">
|
||||
<inputEntry id="UnaryTests_1cpprhv">
|
||||
<text>"UROL"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0kllkvf">
|
||||
<text>"Urology"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_08vdy19">
|
||||
<inputEntry id="UnaryTests_1rl58pv">
|
||||
<text>"Not in LDAP"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0rueyva">
|
||||
<text>"Not in LDAP"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
@ -1,72 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_f88e4e8" name="DRD" namespace="http://camunda.org/schema/1.0/dmn">
|
||||
<decision id="Decision_PI_School" name="PI School">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="300" y="150" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="E0 School">
|
||||
<inputExpression id="inputExpression_1" typeRef="string">
|
||||
<text>pi.E0.schoolAbbrv</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="PI Schnool" name="PI_E0_schoolName" typeRef="string" />
|
||||
<rule id="DecisionRule_0iw6vvy">
|
||||
<inputEntry id="UnaryTests_12b09kx">
|
||||
<text>"AS"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1v41ba6">
|
||||
<text>"Arts & Sciences"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_1a10g2y">
|
||||
<inputEntry id="UnaryTests_0m9zhs2">
|
||||
<text>"EN"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_042fedg">
|
||||
<text>"Engineering"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_08mkkmy">
|
||||
<inputEntry id="UnaryTests_15khdr9">
|
||||
<text>"MD"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1htytl3">
|
||||
<text>"Medicine"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_028hdln">
|
||||
<inputEntry id="UnaryTests_0f8ykny">
|
||||
<text>"NR"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0worgwg">
|
||||
<text>"Nursing"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_00xmnyh">
|
||||
<inputEntry id="UnaryTests_1vhujxy">
|
||||
<text>"RS"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0n7lwiw">
|
||||
<text>"Provost Office"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0hmk2dt">
|
||||
<inputEntry id="UnaryTests_133zdb4">
|
||||
<text>"Not in LDAP"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_06p5yz2">
|
||||
<text>"Not in LDAP"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_01w7s67">
|
||||
<inputEntry id="UnaryTests_1p7ofcc">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0bciq1t">
|
||||
<text>"Not found in scrtpts"</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
@ -1,78 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_0adc6e9" name="DRD" namespace="http://camunda.org/schema/1.0/dmn">
|
||||
<decision id="Decision_RO" name="Determine RO">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="420" y="180" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="RO Same As PI's Primary Dept?">
|
||||
<inputExpression id="inputExpression_1" typeRef="boolean">
|
||||
<text>isRO_SameAsPIsPriDept</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<input id="InputClause_1xgnpdy" label="RO School">
|
||||
<inputExpression id="LiteralExpression_1bqutsa" typeRef="string">
|
||||
<text>RO_StudySchool.value</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="RO School" name="RO_School" typeRef="string" />
|
||||
<output id="OutputClause_0f76t6l" label="RO Department" name="RO_Dept" typeRef="string" />
|
||||
<rule id="DecisionRule_0218zu9">
|
||||
<inputEntry id="UnaryTests_061tvgo">
|
||||
<text>True</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_1r0zkd8">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1p32kxk">
|
||||
<text>pi.E0.schoolName</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_07rulgc" expressionLanguage="feel">
|
||||
<text>pi.E0.deptName</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_04pw6py">
|
||||
<inputEntry id="UnaryTests_0u65ktf">
|
||||
<text>False</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_14yvbz3">
|
||||
<text>"AS"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1ujjyis">
|
||||
<text>RO_StudySchool.label</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_06p10h4">
|
||||
<text>RO_StudyDeptArtsSciences.label</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0l0orcb">
|
||||
<inputEntry id="UnaryTests_1nfg5uh">
|
||||
<text>False</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_0o5tflu">
|
||||
<text>"CU"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0ukcma3">
|
||||
<text>RO_StudySchool.label</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_03wc2vv">
|
||||
<text>RO_StudyDeptEducatoin.label</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_0j2d370">
|
||||
<inputEntry id="UnaryTests_0m6l42r">
|
||||
<text>False</text>
|
||||
</inputEntry>
|
||||
<inputEntry id="UnaryTests_00yvxdr">
|
||||
<text>"MD"</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0vy8mcu">
|
||||
<text>RO_StudySchool.label</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_0a2j3zg">
|
||||
<text>RO_StudyDeptMedicine.label</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
File diff suppressed because it is too large
@ -1,40 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<definitions xmlns="http://www.omg.org/spec/DMN/20151101/dmn.xsd" xmlns:biodi="http://bpmn.io/schema/dmn/biodi/1.0" id="Definitions_b2e2651" name="DRD" namespace="http://camunda.org/schema/1.0/dmn">
|
||||
<decision id="Decision_RO_Dept" name="Responsible Org Department">
|
||||
<extensionElements>
|
||||
<biodi:bounds x="300" y="140" width="180" height="80" />
|
||||
</extensionElements>
|
||||
<decisionTable id="decisionTable_1">
|
||||
<input id="input_1" label="RO Same As PIs Primary Department">
|
||||
<inputExpression id="inputExpression_1" typeRef="boolean">
|
||||
<text>isRO_SameAsPIsPriDept</text>
|
||||
</inputExpression>
|
||||
</input>
|
||||
<output id="output_1" label="RO School" name="RO_School" typeRef="string" />
|
||||
<output id="OutputClause_0f6vbck" label="RO Department" name="RO_Department" typeRef="string" />
|
||||
<rule id="DecisionRule_0e2hvpp">
|
||||
<description>RO same as PI's Primary Department</description>
|
||||
<inputEntry id="UnaryTests_1eenh83">
|
||||
<text>True</text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_1tq8k0h">
|
||||
<text>pi.E0.schoolName</text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_1g8im4z">
|
||||
<text>pi.E0.deptName</text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
<rule id="DecisionRule_04ff0di">
|
||||
<inputEntry id="UnaryTests_1fsa0de">
|
||||
<text></text>
|
||||
</inputEntry>
|
||||
<outputEntry id="LiteralExpression_0phratv">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
<outputEntry id="LiteralExpression_13dyeq0">
|
||||
<text></text>
|
||||
</outputEntry>
|
||||
</rule>
|
||||
</decisionTable>
|
||||
</decision>
|
||||
</definitions>
|
File diff suppressed because it is too large
@ -1,65 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_413aa25" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
|
||||
<bpmn:process id="Process_dbd342e" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0je7686</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0je7686" sourceRef="StartEvent_1" targetRef="Activity_08san8i" />
|
||||
<bpmn:endEvent id="Event_1owauyk">
|
||||
<bpmn:incoming>Flow_0iah4ly</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0iah4ly" sourceRef="Activity_08san8i" targetRef="Event_1owauyk" />
|
||||
<bpmn:userTask id="Activity_08san8i" name="Show upload Non-UVA approval letter" camunda:formKey="Non-UVA IRB Approval Letter">
|
||||
<bpmn:documentation>### This step is Open when:
|
||||
|
||||
|
||||
-"IRB Review Type" Step is Complete and IRB Review Type is 'Full Board', 'Expedited', 'Non-Engaged' or 'Exempt'.
|
||||
"Compliance Requirements Checklist (by IRB-HSR)" Step is Complete and the question "Non-UVA Institutional Approval" is 'Applicable'.
|
||||
Compliance Requirement: Certain studies require either the IRB Approval from a non-UVA site or a letter of approval from the institution of a non-UVA site.
|
||||
|
||||
|
||||
**Contact Information:**
|
||||
|
||||
|
||||
Name: Eileen Sembrowich – Full Board Protocols
|
||||
Email: ecs3b@virginia.edu
|
||||
Phone: 434-243-6542
|
||||
Name: Amy Blackman – Expedited Protocols
|
||||
Email: as5v@virginia.edu
|
||||
Phone: 434-924-2546
|
||||
## Process: Upload the applicable document(s) below. The file(s) uploaded here will be automatically included in the submission to the IRB-HSR.</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="Non-UVA Approval Type_Foreign IRB Approval" label="Foreign IRB Approval" type="boolean" defaultValue="true" />
|
||||
<camunda:formField id="Non-UVA Approval Type_USA IRB Approval" label="USA IRB Approval" type="boolean" defaultValue="true" />
|
||||
<camunda:formField id="Non-UVA Approval Type_Institutional Site Approval" label="Institutional Site Approval" type="boolean" defaultValue="true" />
|
||||
<camunda:formField id="Non-UVA Documentation_Upload" label="Non-UVA IRB Approval Letter" type="File" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_0je7686</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0iah4ly</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_dbd342e">
|
||||
<bpmndi:BPMNEdge id="Flow_0iah4ly_di" bpmnElement="Flow_0iah4ly">
|
||||
<di:waypoint x="370" y="177" />
|
||||
<di:waypoint x="432" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0je7686_di" bpmnElement="Flow_0je7686">
|
||||
<di:waypoint x="215" y="177" />
|
||||
<di:waypoint x="270" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1owauyk_di" bpmnElement="Event_1owauyk">
|
||||
<dc:Bounds x="432" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1sdsx38_di" bpmnElement="Activity_08san8i">
|
||||
<dc:Bounds x="270" y="137" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
||||
|
@ -1,138 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_300b2c3" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.0.0">
  <bpmn:collaboration id="Collaboration_163c7c8">
    <bpmn:participant id="Participant_1mnua71" name="team" processRef="Process_cd666f3" />
  </bpmn:collaboration>
  <bpmn:process id="Process_cd666f3" isExecutable="true">
    <bpmn:laneSet id="LaneSet_0ucxzw3">
      <bpmn:lane id="Lane_16ml9fk">
        <bpmn:flowNodeRef>StartEvent_1</bpmn:flowNodeRef>
        <bpmn:flowNodeRef>Activity_1qpy9ra</bpmn:flowNodeRef>
        <bpmn:flowNodeRef>Event_1m9fnmv</bpmn:flowNodeRef>
        <bpmn:flowNodeRef>Activity_0c5drp3</bpmn:flowNodeRef>
      </bpmn:lane>
      <bpmn:lane id="Lane_1jw70kl" name="supervisor">
        <bpmn:flowNodeRef>Gateway_0ved0t9</bpmn:flowNodeRef>
        <bpmn:flowNodeRef>Activity_107ojvq</bpmn:flowNodeRef>
      </bpmn:lane>
    </bpmn:laneSet>
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0q51aiq</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:userTask id="Activity_1qpy9ra" name="Assign Approver" camunda:formKey="form_assign_approver">
      <bpmn:extensionElements>
        <camunda:formData>
          <camunda:formField id="supervisor" label="Approver UID" type="string" defaultValue="dhf8r">
            <camunda:validation>
              <camunda:constraint name="required" config="true" />
            </camunda:validation>
          </camunda:formField>
        </camunda:formData>
      </bpmn:extensionElements>
      <bpmn:incoming>Flow_0q51aiq</bpmn:incoming>
      <bpmn:incoming>Flow_1ugh4wn</bpmn:incoming>
      <bpmn:outgoing>Flow_0d2snmk</bpmn:outgoing>
    </bpmn:userTask>
    <bpmn:sequenceFlow id="Flow_0q51aiq" sourceRef="StartEvent_1" targetRef="Activity_1qpy9ra" />
    <bpmn:sequenceFlow id="Flow_0d2snmk" sourceRef="Activity_1qpy9ra" targetRef="Activity_107ojvq" />
    <bpmn:exclusiveGateway id="Gateway_0ved0t9" name="Approved?">
      <bpmn:incoming>Flow_0apr3nj</bpmn:incoming>
      <bpmn:outgoing>Flow_0mhtlkt</bpmn:outgoing>
      <bpmn:outgoing>Flow_11tnx3n</bpmn:outgoing>
    </bpmn:exclusiveGateway>
    <bpmn:sequenceFlow id="Flow_0apr3nj" sourceRef="Activity_107ojvq" targetRef="Gateway_0ved0t9" />
    <bpmn:sequenceFlow id="Flow_0mhtlkt" name="Yes" sourceRef="Gateway_0ved0t9" targetRef="Event_1m9fnmv">
      <bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">is_study_approved == True</bpmn:conditionExpression>
    </bpmn:sequenceFlow>
    <bpmn:endEvent id="Event_1m9fnmv">
      <bpmn:incoming>Flow_0mhtlkt</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_11tnx3n" name="No" sourceRef="Gateway_0ved0t9" targetRef="Activity_0c5drp3">
      <bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">is_study_approved == False</bpmn:conditionExpression>
    </bpmn:sequenceFlow>
    <bpmn:userTask id="Activity_107ojvq" name="Approve Study" camunda:formKey="form_approve_study">
      <bpmn:extensionElements>
        <camunda:formData>
          <camunda:formField id="is_study_approved" label="Approve this study?" type="boolean">
            <camunda:validation>
              <camunda:constraint name="required" config="true" />
            </camunda:validation>
          </camunda:formField>
        </camunda:formData>
      </bpmn:extensionElements>
      <bpmn:incoming>Flow_0d2snmk</bpmn:incoming>
      <bpmn:outgoing>Flow_0apr3nj</bpmn:outgoing>
    </bpmn:userTask>
    <bpmn:sequenceFlow id="Flow_1ugh4wn" sourceRef="Activity_0c5drp3" targetRef="Activity_1qpy9ra" />
    <bpmn:manualTask id="Activity_0c5drp3" name="Review Feedback">
      <bpmn:documentation>Your request was not approved. Try again.</bpmn:documentation>
      <bpmn:incoming>Flow_11tnx3n</bpmn:incoming>
      <bpmn:outgoing>Flow_1ugh4wn</bpmn:outgoing>
    </bpmn:manualTask>
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Collaboration_163c7c8">
      <bpmndi:BPMNShape id="Participant_1mnua71_di" bpmnElement="Participant_1mnua71" isHorizontal="true">
        <dc:Bounds x="129" y="117" width="600" height="250" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Lane_1jw70kl_di" bpmnElement="Lane_1jw70kl" isHorizontal="true">
        <dc:Bounds x="159" y="242" width="570" height="125" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Lane_16ml9fk_di" bpmnElement="Lane_16ml9fk" isHorizontal="true">
        <dc:Bounds x="159" y="117" width="570" height="125" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_11tnx3n_di" bpmnElement="Flow_11tnx3n">
        <di:waypoint x="460" y="275" />
        <di:waypoint x="460" y="217" />
        <bpmndi:BPMNLabel>
          <dc:Bounds x="468" y="241" width="15" height="14" />
        </bpmndi:BPMNLabel>
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0mhtlkt_di" bpmnElement="Flow_0mhtlkt">
        <di:waypoint x="485" y="300" />
        <di:waypoint x="660" y="300" />
        <di:waypoint x="660" y="195" />
        <bpmndi:BPMNLabel>
          <dc:Bounds x="563" y="282" width="19" height="14" />
        </bpmndi:BPMNLabel>
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0apr3nj_di" bpmnElement="Flow_0apr3nj">
        <di:waypoint x="370" y="300" />
        <di:waypoint x="435" y="300" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0d2snmk_di" bpmnElement="Flow_0d2snmk">
        <di:waypoint x="320" y="217" />
        <di:waypoint x="320" y="260" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0q51aiq_di" bpmnElement="Flow_0q51aiq">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="270" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1ugh4wn_di" bpmnElement="Flow_1ugh4wn">
        <di:waypoint x="400" y="177" />
        <di:waypoint x="370" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_14cpuv6_di" bpmnElement="Activity_1qpy9ra">
        <dc:Bounds x="270" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Gateway_0ved0t9_di" bpmnElement="Gateway_0ved0t9" isMarkerVisible="true">
        <dc:Bounds x="435" y="275" width="50" height="50" />
        <bpmndi:BPMNLabel>
          <dc:Bounds x="435" y="332" width="54" height="14" />
        </bpmndi:BPMNLabel>
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1m9fnmv_di" bpmnElement="Event_1m9fnmv">
        <dc:Bounds x="642" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1ps6jft_di" bpmnElement="Activity_107ojvq">
        <dc:Bounds x="270" y="260" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1al86eb_di" bpmnElement="Activity_0c5drp3">
        <dc:Bounds x="400" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>
@ -1,43 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_5d31ba5" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="Process_1e4920c" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0kanc90</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_0kanc90" sourceRef="StartEvent_1" targetRef="Activity_UploadProtocol" />
    <bpmn:endEvent id="Event_0zakcui">
      <bpmn:incoming>Flow_0oswmz6</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_0oswmz6" sourceRef="Activity_UploadProtocol" targetRef="Event_0zakcui" />
    <bpmn:userTask id="Activity_UploadProtocol" name="Upload Protocol" camunda:formKey="Upload Protocol">
      <bpmn:extensionElements>
        <camunda:formData>
          <camunda:formField id="Study_Protocol_Document" label="Protocol" type="file" />
        </camunda:formData>
      </bpmn:extensionElements>
      <bpmn:incoming>Flow_0kanc90</bpmn:incoming>
      <bpmn:outgoing>Flow_0oswmz6</bpmn:outgoing>
    </bpmn:userTask>
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1e4920c">
      <bpmndi:BPMNEdge id="Flow_0oswmz6_di" bpmnElement="Flow_0oswmz6">
        <di:waypoint x="380" y="177" />
        <di:waypoint x="432" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0kanc90_di" bpmnElement="Flow_0kanc90">
        <di:waypoint x="218" y="177" />
        <di:waypoint x="280" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNShape id="Event_0zakcui_di" bpmnElement="Event_0zakcui">
        <dc:Bounds x="432" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1rp9n6c_di" bpmnElement="Activity_UploadProtocol">
        <dc:Bounds x="280" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="182" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff.