Merge branch 'dev' into 321-Markdown-Macro

This commit is contained in:
Dan Funk 2021-05-14 12:32:41 -04:00 committed by GitHub
commit 99da502912
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
61 changed files with 850 additions and 419 deletions

389
Pipfile.lock generated
View File

@ -25,19 +25,11 @@
}, },
"alembic": { "alembic": {
"hashes": [ "hashes": [
"sha256:8a259f0a4c8b350b03579d77ce9e810b19c65bf0af05f84efb69af13ad50801e", "sha256:3ff4f90d23dd283d7822d78ffbc07cb256344ae1d60500b933378bc13407efcc",
"sha256:e27fd67732c97a1c370c33169ef4578cf96436fa0e7dcfaeeef4a917d0737d56" "sha256:d7f6d4dc6abed18e1591932a85349a7d621298ef0daa40021609cdca54a6047c"
], ],
"index": "pypi", "index": "pypi",
"version": "==1.5.8" "version": "==1.6.0"
},
"amqp": {
"hashes": [
"sha256:03e16e94f2b34c31f8bf1206d8ddd3ccaa4c315f7f6a1879b7b1210d229568c2",
"sha256:493a2ac6788ce270a2f6a765b017299f60c1998f5a8617908ee9be082f7300fb"
],
"markers": "python_version >= '3.6'",
"version": "==5.0.6"
}, },
"aniso8601": { "aniso8601": {
"hashes": [ "hashes": [
@ -51,16 +43,14 @@
"sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6",
"sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.3.0" "version": "==20.3.0"
}, },
"babel": { "babel": {
"hashes": [ "hashes": [
"sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5", "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9",
"sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05" "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.9.1"
"version": "==2.9.0"
}, },
"bcrypt": { "bcrypt": {
"hashes": [ "hashes": [
@ -72,7 +62,6 @@
"sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1", "sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1",
"sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d" "sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d"
], ],
"markers": "python_version >= '3.6'",
"version": "==3.2.0" "version": "==3.2.0"
}, },
"beautifulsoup4": { "beautifulsoup4": {
@ -83,27 +72,12 @@
], ],
"version": "==4.9.3" "version": "==4.9.3"
}, },
"billiard": {
"hashes": [
"sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547",
"sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"
],
"version": "==3.6.4.0"
},
"blinker": { "blinker": {
"hashes": [ "hashes": [
"sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6" "sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6"
], ],
"version": "==1.4" "version": "==1.4"
}, },
"celery": {
"hashes": [
"sha256:5e8d364e058554e83bbb116e8377d90c79be254785f357cb2cec026e79febe13",
"sha256:f4efebe6f8629b0da2b8e529424de376494f5b7a743c321c8a2ddc2b1414921c"
],
"markers": "python_version >= '3.6'",
"version": "==5.0.5"
},
"certifi": { "certifi": {
"hashes": [ "hashes": [
"sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c",
@ -158,7 +132,6 @@
"sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa",
"sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==4.0.0" "version": "==4.0.0"
}, },
"click": { "click": {
@ -166,29 +139,8 @@
"sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a",
"sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==7.1.2" "version": "==7.1.2"
}, },
"click-didyoumean": {
"hashes": [
"sha256:112229485c9704ff51362fe34b2d4f0b12fc71cc20f6d2b3afabed4b8bfa6aeb"
],
"version": "==0.0.3"
},
"click-plugins": {
"hashes": [
"sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b",
"sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"
],
"version": "==1.1.1"
},
"click-repl": {
"hashes": [
"sha256:9c4c3d022789cae912aad8a3f5e1d7c2cdd016ee1225b5212ad3e8691563cda5",
"sha256:b9f29d52abc4d6059f8e276132a111ab8d94980afe6a5432b9d996544afa95d5"
],
"version": "==0.1.6"
},
"clickclick": { "clickclick": {
"hashes": [ "hashes": [
"sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c", "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c",
@ -203,14 +155,6 @@
], ],
"version": "==0.9.1" "version": "==0.9.1"
}, },
"configparser": {
"hashes": [
"sha256:85d5de102cfe6d14a5172676f09d19c465ce63d6019cf0a4ef13385fc535e828",
"sha256:af59f2cdd7efbdd5d111c1976ecd0b82db9066653362f0962d7bf1d3ab89a1fa"
],
"markers": "python_version >= '3.6'",
"version": "==5.0.2"
},
"connexion": { "connexion": {
"extras": [ "extras": [
"swagger-ui" "swagger-ui"
@ -285,30 +229,29 @@
"sha256:08452d69b6b5bc66e8330adde0a4f8642e969b9e1702904d137eeb29c8ffc771", "sha256:08452d69b6b5bc66e8330adde0a4f8642e969b9e1702904d137eeb29c8ffc771",
"sha256:6d2de2de7931a968874481ef30208fd4e08da39177d61d3d4ebdf4366e7dbca1" "sha256:6d2de2de7931a968874481ef30208fd4e08da39177d61d3d4ebdf4366e7dbca1"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.2.12" "version": "==1.2.12"
}, },
"docutils": { "docutils": {
"hashes": [ "hashes": [
"sha256:a71042bb7207c03d5647f280427f14bfbd1a65c9eb84f4b341d85fafb6bb4bdf", "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
"sha256:e2ffeea817964356ba4470efba7c2f42b6b0de0b04e66378507e3e2504bbff4c" "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==0.16"
"version": "==0.17"
}, },
"docxtpl": { "docxtpl": {
"hashes": [ "hashes": [
"sha256:18b81c072254b2eef3cbf878951a93515f4d287c319856d3dd7ada53a3ffd5ad", "sha256:b7370b11fd226a712fcbf36ff45d34993253cce268846ee1c8309cef649cffa2",
"sha256:c26ad7a0e6e9aedc7dfaf6b09af60ee0a4f001a065896503056ce9171bd9024a" "sha256:d7b78a5da704800c53e4434a7ef397faa19e56d21595af23f70b89a84e56ac1d"
], ],
"index": "pypi", "index": "pypi",
"version": "==0.11.3" "version": "==0.11.4"
}, },
"et-xmlfile": { "et-xmlfile": {
"hashes": [ "hashes": [
"sha256:614d9722d572f6246302c4491846d2c393c199cfa4edc9af593437691683335b" "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c",
"sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"
], ],
"version": "==1.0.1" "version": "==1.1.0"
}, },
"flask": { "flask": {
"hashes": [ "hashes": [
@ -320,10 +263,10 @@
}, },
"flask-admin": { "flask-admin": {
"hashes": [ "hashes": [
"sha256:145f59407d78319925e20f7c3021f60c71f0cacc98e916e52000845dc4c63621" "sha256:eb06a1f31b98881dee53a55c64faebd1990d6aac38826364b280df0b2679ff74"
], ],
"index": "pypi", "index": "pypi",
"version": "==1.5.7" "version": "==1.5.8"
}, },
"flask-bcrypt": { "flask-bcrypt": {
"hashes": [ "hashes": [
@ -376,16 +319,8 @@
"sha256:2bda44b43e7cacb15d4e05ff3cc1f8bc97936cc464623424102bfc2c35e95912", "sha256:2bda44b43e7cacb15d4e05ff3cc1f8bc97936cc464623424102bfc2c35e95912",
"sha256:f12c3d4cc5cc7fdcc148b9527ea05671718c3ea45d50c7e732cceb33f574b390" "sha256:f12c3d4cc5cc7fdcc148b9527ea05671718c3ea45d50c7e732cceb33f574b390"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.5.1" "version": "==2.5.1"
}, },
"future": {
"hashes": [
"sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.18.2"
},
"greenlet": { "greenlet": {
"hashes": [ "hashes": [
"sha256:0a77691f0080c9da8dfc81e23f4e3cffa5accf0f5b56478951016d7cfead9196", "sha256:0a77691f0080c9da8dfc81e23f4e3cffa5accf0f5b56478951016d7cfead9196",
@ -437,6 +372,7 @@
}, },
"gunicorn": { "gunicorn": {
"hashes": [ "hashes": [
"sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e",
"sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8" "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"
], ],
"index": "pypi", "index": "pypi",
@ -454,7 +390,6 @@
"sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
"sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.10" "version": "==2.10"
}, },
"imagesize": { "imagesize": {
@ -462,7 +397,6 @@
"sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1",
"sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.2.0" "version": "==1.2.0"
}, },
"inflection": { "inflection": {
@ -470,7 +404,6 @@
"sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417", "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417",
"sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2" "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"
], ],
"markers": "python_version >= '3.5'",
"version": "==0.5.1" "version": "==0.5.1"
}, },
"isodate": { "isodate": {
@ -485,7 +418,6 @@
"sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19",
"sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.0" "version": "==1.1.0"
}, },
"jinja2": { "jinja2": {
@ -493,7 +425,6 @@
"sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419",
"sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==2.11.3" "version": "==2.11.3"
}, },
"jsonschema": { "jsonschema": {
@ -503,21 +434,10 @@
], ],
"version": "==3.2.0" "version": "==3.2.0"
}, },
"kombu": {
"hashes": [
"sha256:6dc509178ac4269b0e66ab4881f70a2035c33d3a622e20585f965986a5182006",
"sha256:f4965fba0a4718d47d470beeb5d6446e3357a62402b16c510b6a2f251e05ac3c"
],
"markers": "python_version >= '3.6'",
"version": "==5.0.2"
},
"ldap3": { "ldap3": {
"hashes": [ "hashes": [
"sha256:18c3ee656a6775b9b0d60f7c6c5b094d878d1d90fc03d56731039f0a4b546a91", "sha256:18c3ee656a6775b9b0d60f7c6c5b094d878d1d90fc03d56731039f0a4b546a91",
"sha256:afc6fc0d01f02af82cd7bfabd3bbfd5dc96a6ae91e97db0a2dab8a0f1b436056", "sha256:c1df41d89459be6f304e0ceec4b00fdea533dbbcd83c802b1272dcdb94620b57"
"sha256:c1df41d89459be6f304e0ceec4b00fdea533dbbcd83c802b1272dcdb94620b57",
"sha256:8c949edbad2be8a03e719ba48bd6779f327ec156929562814b3e84ab56889c8c",
"sha256:4139c91f0eef9782df7b77c8cbc6243086affcb6a8a249b768a9658438e5da59"
], ],
"index": "pypi", "index": "pypi",
"version": "==2.9" "version": "==2.9"
@ -527,18 +447,24 @@
"sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d", "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d",
"sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3", "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3",
"sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2", "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2",
"sha256:1b38116b6e628118dea5b2186ee6820ab138dbb1e24a13e478490c7db2f326ae",
"sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f", "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f",
"sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927", "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927",
"sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3", "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3",
"sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7", "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7",
"sha256:3082c518be8e97324390614dacd041bb1358c882d77108ca1957ba47738d9d59",
"sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f", "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f",
"sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade", "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade",
"sha256:36108c73739985979bf302006527cf8a20515ce444ba916281d1c43938b8bb96",
"sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468", "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468",
"sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b", "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b",
"sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4", "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4",
"sha256:4c61b3a0db43a1607d6264166b230438f85bfed02e8cff20c22e564d0faff354",
"sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83", "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83",
"sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04", "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04",
"sha256:5c8c163396cc0df3fd151b927e74f6e4acd67160d6c33304e805b84293351d16",
"sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791", "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791",
"sha256:6f12e1427285008fd32a6025e38e977d44d6382cf28e7201ed10d6c1698d2a9a",
"sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51", "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51",
"sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1", "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1",
"sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a", "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a",
@ -551,10 +477,14 @@
"sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa", "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa",
"sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106", "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106",
"sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d", "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d",
"sha256:c47ff7e0a36d4efac9fd692cfa33fbd0636674c102e9e8d9b26e1b93a94e7617",
"sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4", "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4",
"sha256:cdaf11d2bd275bf391b5308f86731e5194a21af45fbaaaf1d9e8147b9160ea92",
"sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0", "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0",
"sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4", "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4",
"sha256:d916d31fd85b2f78c76400d625076d9124de3e4bda8b016d25a050cc7d603f24",
"sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2", "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2",
"sha256:e1cbd3f19a61e27e011e02f9600837b921ac661f0c40560eefb366e4e4fb275e",
"sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0", "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0",
"sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654", "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654",
"sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2", "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2",
@ -569,7 +499,6 @@
"sha256:17831f0b7087c313c0ffae2bcbbd3c1d5ba9eeac9c38f2eb7b50e8c99fe9d5ab", "sha256:17831f0b7087c313c0ffae2bcbbd3c1d5ba9eeac9c38f2eb7b50e8c99fe9d5ab",
"sha256:aea166356da44b9b830c8023cd9b557fa856bd8b4035d6de771ca027dfc5cc6e" "sha256:aea166356da44b9b830c8023cd9b557fa856bd8b4035d6de771ca027dfc5cc6e"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.4" "version": "==1.1.4"
}, },
"markdown": { "markdown": {
@ -635,7 +564,6 @@
"sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be",
"sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.1" "version": "==1.1.1"
}, },
"marshmallow": { "marshmallow": {
@ -656,11 +584,11 @@
}, },
"marshmallow-sqlalchemy": { "marshmallow-sqlalchemy": {
"hashes": [ "hashes": [
"sha256:b217c6327bcf291e843dc1c2c20f0915061d4ecc303f0c5be40f23206607f702", "sha256:f1491f83833ac9c8406ba603458b1447fdfd904194833aab4b3cc01ef3646944",
"sha256:ee3ead3b83de6608c6850ff60515691b0dc556ca226680f8a82b9f785cdb71b1" "sha256:f861888ae3299f2c1f18cd94f02147ced70cd1b4986b2c5077e4a1036018d2a2"
], ],
"index": "pypi", "index": "pypi",
"version": "==0.24.2" "version": "==0.25.0"
}, },
"numpy": { "numpy": {
"hashes": [ "hashes": [
@ -689,7 +617,6 @@
"sha256:e9459f40244bb02b2f14f6af0cd0732791d72232bbb0dc4bab57ef88e75f6935", "sha256:e9459f40244bb02b2f14f6af0cd0732791d72232bbb0dc4bab57ef88e75f6935",
"sha256:edb1f041a9146dcf02cd7df7187db46ab524b9af2515f392f337c7cbbf5b52cd" "sha256:edb1f041a9146dcf02cd7df7187db46ab524b9af2515f392f337c7cbbf5b52cd"
], ],
"markers": "python_version >= '3.7'",
"version": "==1.20.2" "version": "==1.20.2"
}, },
"openapi-schema-validator": { "openapi-schema-validator": {
@ -698,7 +625,6 @@
"sha256:a4b2712020284cee880b4c55faa513fbc2f8f07f365deda6098f8ab943c9f0df", "sha256:a4b2712020284cee880b4c55faa513fbc2f8f07f365deda6098f8ab943c9f0df",
"sha256:b65d6c2242620bfe76d4c749b61cd9657e4528895a8f4fb6f916085b508ebd24" "sha256:b65d6c2242620bfe76d4c749b61cd9657e4528895a8f4fb6f916085b508ebd24"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==0.1.5" "version": "==0.1.5"
}, },
"openapi-spec-validator": { "openapi-spec-validator": {
@ -707,7 +633,6 @@
"sha256:53ba3d884e98ff2062d5ada025aa590541dcd665b8f81067dc82dd61c0923759", "sha256:53ba3d884e98ff2062d5ada025aa590541dcd665b8f81067dc82dd61c0923759",
"sha256:e11df7c559339027bd04f2399bc82474983129a6a7a6a0421eaa95e2c844d686" "sha256:e11df7c559339027bd04f2399bc82474983129a6a7a6a0421eaa95e2c844d686"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==0.3.0" "version": "==0.3.0"
}, },
"openpyxl": { "openpyxl": {
@ -723,38 +648,29 @@
"sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5",
"sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.9" "version": "==20.9"
}, },
"pandas": { "pandas": {
"hashes": [ "hashes": [
"sha256:09761bf5f8c741d47d4b8b9073288de1be39bbfccc281d70b889ade12b2aad29", "sha256:167693a80abc8eb28051fbd184c1b7afd13ce2c727a5af47b048f1ea3afefff4",
"sha256:0f27fd1adfa256388dc34895ca5437eaf254832223812afd817a6f73127f969c", "sha256:2111c25e69fa9365ba80bbf4f959400054b2771ac5d041ed19415a8b488dc70a",
"sha256:43e00770552595c2250d8d712ec8b6e08ca73089ac823122344f023efa4abea3", "sha256:298f0553fd3ba8e002c4070a723a59cdb28eda579f3e243bc2ee397773f5398b",
"sha256:46fc671c542a8392a4f4c13edc8527e3a10f6cb62912d856f82248feb747f06e", "sha256:2b063d41803b6a19703b845609c0b700913593de067b552a8b24dd8eeb8c9895",
"sha256:475b7772b6e18a93a43ea83517932deff33954a10d4fbae18d0c1aba4182310f", "sha256:2cb7e8f4f152f27dc93f30b5c7a98f6c748601ea65da359af734dd0cf3fa733f",
"sha256:4d821b9b911fc1b7d428978d04ace33f0af32bb7549525c8a7b08444bce46b74", "sha256:52d2472acbb8a56819a87aafdb8b5b6d2b3386e15c95bde56b281882529a7ded",
"sha256:5e3c8c60541396110586bcbe6eccdc335a38e7de8c217060edaf4722260b158f", "sha256:612add929bf3ba9d27b436cc8853f5acc337242d6b584203f207e364bb46cb12",
"sha256:621c044a1b5e535cf7dcb3ab39fca6f867095c3ef223a524f18f60c7fee028ea", "sha256:649ecab692fade3cbfcf967ff936496b0cfba0af00a55dfaacd82bdda5cb2279",
"sha256:72ffcea00ae8ffcdbdefff800284311e155fbb5ed6758f1a6110fc1f8f8f0c1c", "sha256:68d7baa80c74aaacbed597265ca2308f017859123231542ff8a5266d489e1858",
"sha256:8a051e957c5206f722e83f295f95a2cf053e890f9a1fba0065780a8c2d045f5d", "sha256:8d4c74177c26aadcfb4fd1de6c1c43c2bf822b3e0fc7a9b409eeaf84b3e92aaa",
"sha256:97b1954533b2a74c7e20d1342c4f01311d3203b48f2ebf651891e6a6eaf01104", "sha256:971e2a414fce20cc5331fe791153513d076814d30a60cd7348466943e6e909e4",
"sha256:9f5829e64507ad10e2561b60baf285c470f3c4454b007c860e77849b88865ae7", "sha256:9db70ffa8b280bb4de83f9739d514cd0735825e79eef3a61d312420b9f16b758",
"sha256:a93e34f10f67d81de706ce00bf8bb3798403cabce4ccb2de10c61b5ae8786ab5", "sha256:b730add5267f873b3383c18cac4df2527ac4f0f0eed1c6cf37fcb437e25cf558",
"sha256:d59842a5aa89ca03c2099312163ffdd06f56486050e641a45d926a072f04d994", "sha256:bd659c11a4578af740782288cac141a322057a2e36920016e0fc7b25c5a4b686",
"sha256:dbb255975eb94143f2e6ec7dadda671d25147939047839cd6b8a4aff0379bb9b", "sha256:c601c6fdebc729df4438ec1f62275d6136a0dd14d332fc0e8ce3f7d2aadb4dd6",
"sha256:df6f10b85aef7a5bb25259ad651ad1cc1d6bb09000595cab47e718cbac250b1d" "sha256:d0877407359811f7b853b548a614aacd7dea83b0c0c84620a9a643f180060950"
], ],
"index": "pypi", "index": "pypi",
"version": "==1.2.3" "version": "==1.2.4"
},
"prompt-toolkit": {
"hashes": [
"sha256:bf00f22079f5fadc949f42ae8ff7f05702826a97059ffcc6281036ad40ac6f04",
"sha256:e1b4f11b9336a28fa11810bc623c357420f69dfdb6d2dac41ca2c21a55c033bc"
],
"markers": "python_full_version >= '3.6.1'",
"version": "==3.0.18"
}, },
"psycopg2-binary": { "psycopg2-binary": {
"hashes": [ "hashes": [
@ -799,19 +715,8 @@
}, },
"pyasn1": { "pyasn1": {
"hashes": [ "hashes": [
"sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2",
"sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf",
"sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776",
"sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba",
"sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7",
"sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d", "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d",
"sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86", "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"
"sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359",
"sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576",
"sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00",
"sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12",
"sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3",
"sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"
], ],
"version": "==0.4.8" "version": "==0.4.8"
}, },
@ -820,56 +725,67 @@
"sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
"sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.20" "version": "==2.20"
}, },
"pygithub": { "pygithub": {
"hashes": [ "hashes": [
"sha256:300bc16e62886ca6537b0830e8f516ea4bc3ef12d308e0c5aff8bdbd099173d4", "sha256:1bbfff9372047ff3f21d5cd8e07720f3dbfdaf6462fcaed9d815f528f1ba7283",
"sha256:87afd6a67ea582aa7533afdbf41635725f13d12581faed7e3e04b1579c0c0627" "sha256:2caf0054ea079b71e539741ae56c5a95e073b81fa472ce222e81667381b9601b"
], ],
"index": "pypi", "index": "pypi",
"version": "==1.54.1" "version": "==1.55"
}, },
"pygments": { "pygments": {
"hashes": [ "hashes": [
"sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94", "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f",
"sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8" "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"
], ],
"markers": "python_version >= '3.5'", "version": "==2.9.0"
"version": "==2.8.1"
}, },
"pyjwt": { "pyjwt": {
"hashes": [ "hashes": [
"sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e", "sha256:934d73fbba91b0483d3857d1aff50e96b2a892384ee2c17417ed3203f173fca1",
"sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96" "sha256:fba44e7898bbca160a2b2b501f492824fc8382485d3a6f11ba5d0c1937ce6130"
], ],
"index": "pypi", "index": "pypi",
"version": "==1.7.1" "version": "==2.1.0"
},
"pynacl": {
"hashes": [
"sha256:06cbb4d9b2c4bd3c8dc0d267416aaed79906e7b33f114ddbf0911969794b1cc4",
"sha256:11335f09060af52c97137d4ac54285bcb7df0cef29014a1a4efe64ac065434c4",
"sha256:2fe0fc5a2480361dcaf4e6e7cea00e078fcda07ba45f811b167e3f99e8cff574",
"sha256:30f9b96db44e09b3304f9ea95079b1b7316b2b4f3744fe3aaecccd95d547063d",
"sha256:4e10569f8cbed81cb7526ae137049759d2a8d57726d52c1a000a3ce366779634",
"sha256:511d269ee845037b95c9781aa702f90ccc36036f95d0f31373a6a79bd8242e25",
"sha256:537a7ccbea22905a0ab36ea58577b39d1fa9b1884869d173b5cf111f006f689f",
"sha256:54e9a2c849c742006516ad56a88f5c74bf2ce92c9f67435187c3c5953b346505",
"sha256:757250ddb3bff1eecd7e41e65f7f833a8405fede0194319f87899690624f2122",
"sha256:7757ae33dae81c300487591c68790dfb5145c7d03324000433d9a2c141f82af7",
"sha256:7c6092102219f59ff29788860ccb021e80fffd953920c4a8653889c029b2d420",
"sha256:8122ba5f2a2169ca5da936b2e5a511740ffb73979381b4229d9188f6dcb22f1f",
"sha256:9c4a7ea4fb81536c1b1f5cc44d54a296f96ae78c1ebd2311bd0b60be45a48d96",
"sha256:c914f78da4953b33d4685e3cdc7ce63401247a21425c16a39760e282075ac4a6",
"sha256:cd401ccbc2a249a47a3a1724c2918fcd04be1f7b54eb2a5a71ff915db0ac51c6",
"sha256:d452a6746f0a7e11121e64625109bc4468fc3100452817001dbe018bb8b08514",
"sha256:ea6841bc3a76fa4942ce00f3bda7d436fda21e2d91602b9e21b7ca9ecab8f3ff",
"sha256:f8851ab9041756003119368c1e6cd0b9c631f46d686b3904b18c0139f4419f80"
],
"version": "==1.4.0"
}, },
"pyparsing": { "pyparsing": {
"hashes": [ "hashes": [
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
"sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
], ],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.7" "version": "==2.4.7"
}, },
"pyrsistent": { "pyrsistent": {
"hashes": [ "hashes": [
"sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e" "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"
], ],
"markers": "python_version >= '3.5'",
"version": "==0.17.3" "version": "==0.17.3"
}, },
"python-box": {
"hashes": [
"sha256:4ed4ef5d34de505a65c01e3f1911de8cdb29484fcae0c035141dce535c6c194a",
"sha256:f2a531f9f5bbef078c175fad6abb31e9b59d40d121ea79993197e6bb221c6be6"
],
"markers": "python_version >= '3.6'",
"version": "==5.3.0"
},
"python-dateutil": { "python-dateutil": {
"hashes": [ "hashes": [
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
@ -887,10 +803,8 @@
"python-editor": { "python-editor": {
"hashes": [ "hashes": [
"sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d", "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d",
"sha256:ea87e17f6ec459e780e4221f295411462e0d0810858e055fc514684350a2f522", "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b",
"sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77", "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"
"sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8",
"sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b"
], ],
"version": "==1.0.4" "version": "==1.0.4"
}, },
@ -940,7 +854,6 @@
"sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6",
"sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
"version": "==5.4.1" "version": "==5.4.1"
}, },
"recommonmark": { "recommonmark": {
@ -972,11 +885,10 @@
}, },
"six": { "six": {
"hashes": [ "hashes": [
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.16.0"
"version": "==1.15.0"
}, },
"snowballstemmer": { "snowballstemmer": {
"hashes": [ "hashes": [
@ -995,18 +907,17 @@
}, },
"sphinx": { "sphinx": {
"hashes": [ "hashes": [
"sha256:3f01732296465648da43dec8fb40dc451ba79eb3e2cc5c6d79005fd98197107d", "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1",
"sha256:ce9c228456131bab09a3d7d10ae58474de562a6f79abb3dc811ae401cf8c1abc" "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8"
], ],
"index": "pypi", "index": "pypi",
"version": "==3.5.3" "version": "==3.5.4"
}, },
"sphinxcontrib-applehelp": { "sphinxcontrib-applehelp": {
"hashes": [ "hashes": [
"sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a", "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a",
"sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58" "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"
], ],
"markers": "python_version >= '3.5'",
"version": "==1.0.2" "version": "==1.0.2"
}, },
"sphinxcontrib-devhelp": { "sphinxcontrib-devhelp": {
@ -1014,7 +925,6 @@
"sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e",
"sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4" "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"
], ],
"markers": "python_version >= '3.5'",
"version": "==1.0.2" "version": "==1.0.2"
}, },
"sphinxcontrib-htmlhelp": { "sphinxcontrib-htmlhelp": {
@ -1022,7 +932,6 @@
"sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f", "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f",
"sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b" "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"
], ],
"markers": "python_version >= '3.5'",
"version": "==1.0.3" "version": "==1.0.3"
}, },
"sphinxcontrib-jsmath": { "sphinxcontrib-jsmath": {
@ -1030,7 +939,6 @@
"sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178",
"sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"
], ],
"markers": "python_version >= '3.5'",
"version": "==1.0.1" "version": "==1.0.1"
}, },
"sphinxcontrib-qthelp": { "sphinxcontrib-qthelp": {
@ -1038,7 +946,6 @@
"sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72",
"sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6" "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"
], ],
"markers": "python_version >= '3.5'",
"version": "==1.0.3" "version": "==1.0.3"
}, },
"sphinxcontrib-serializinghtml": { "sphinxcontrib-serializinghtml": {
@ -1046,52 +953,50 @@
"sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc", "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc",
"sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a" "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"
], ],
"markers": "python_version >= '3.5'",
"version": "==1.1.4" "version": "==1.1.4"
}, },
"spiffworkflow": { "spiffworkflow": {
"git": "https://github.com/sartography/SpiffWorkflow.git", "git": "https://github.com/sartography/SpiffWorkflow.git",
"ref": "382048e31e872d23188fab6bec68323f593ccc19" "ref": "1a44d004d657bc5773551254aafba88993ae6d35"
}, },
"sqlalchemy": { "sqlalchemy": {
"hashes": [ "hashes": [
"sha256:02b039e0e7e6de2f15ea2d2de3995e31a170e700ec0b37b4eded662171711d19", "sha256:08a00a955c5cb1d3a610f9735e0e9ca64f2fd2540c942ab84dc9a71433940f86",
"sha256:08943201a1e3c6238e48f4d5d56c27ea1e1b39d3d9f36a9d81fc3cfb0e1b83bd", "sha256:1b2b0199153a4ecbb57ec09ff8a3693dcb2c134fef217379e2761f27bccf3a14",
"sha256:0ee0054d4a598d2920cae14bcbd33e200e02c5e3b47b902627f8cf5d4c9a2a4b", "sha256:1d8a71c2bf21437d6216ba1963507d4d1a37920429eafd09d85387d0d078fa5a",
"sha256:11e7a86209f69273e75d2dd64b06c0c2660e39cd942fce2170515c404ed7358a", "sha256:36bcf7530ca070e89f29e2f6e05c5566c9ab3a2e493608437a230253ecf112a7",
"sha256:1294f05916c044631fd626a4866326bbfbd17f62bd37510d000afaef4b35bd74", "sha256:375cde7038d3c4493e2e61273ed2a3be04b5845e9bea5c662543c22935fb439b",
"sha256:2f11b5783933bff55291ca06496124347627d211ff2e509e846af1c35de0a3fb", "sha256:384c0ecc845b597eda2519de2f8dd66770e76f8f39e0d21f00dd5affaf293787",
"sha256:301d0cd6ef1dc73b607748183da857e712d6f743de8d92b1e1f8facfb0ba2aa2", "sha256:46737cd87a57e03ab20e79d29ad931b842e7b3226a169ae9b36babe69d92256f",
"sha256:344b58b4b4193b72e8b768a51ef6eb5a4c948ce313a0f23e2ea081e71ce8ac0e", "sha256:49fc18facca9ecb29308e486de53e7d9ab7d7b02d6705158fa34af0c1a6c3b0b",
"sha256:44e11a06168782b6d485daef197783366ce7ab0d5eea0066c899ae06cef47bbc", "sha256:4b9e7764638910c43eea6e6e367395dce3d1c6acc17f8550e66cd913725491d2",
"sha256:45b091ccbf94374ed14abde17e9a04522b0493a17282eaaf4383efdd413f5243", "sha256:50dba4adb0f7cafb5c05e3e9734b7d84f0b009daf17ca5a3c1560be7dbcaaba7",
"sha256:48540072f43b3c080159ec1f24a4b014c0ee83d3b73795399974aa358a8cf71b", "sha256:586eb3698e616fe044472e7a249d24a5b05dc5c714dc0b9744417031988df3af",
"sha256:4df07161897191ed8d4a0cfc92425c81296160e5c5f76c9256716d3085172883", "sha256:58bee8384a7e32846e560da0ad595cf0dd5046b286aafa8d000312c5db8899bf",
"sha256:4f7ce3bfdab6520554af4a5b1df4513d45388624d015ba4d921daf48ce1d6503", "sha256:5e7e9a7092aea03c68318d390f39dab75422143354543244b6e1b2b31848a494",
"sha256:5361e25181b9872d6906c8c9be7dc05cb0a0951d71ee59ee5a71c1deb301b8a8", "sha256:6adf973e7e27bce34c6bb14f62368b99e53a55226836ac93ff1352fe467dc966",
"sha256:6f8fdad2f335d2f3ca2f3ee3b01404f7abcf519b03de2c510f1f42d16e39ffb4", "sha256:6d01d83d290db9e27ea02183e56ba548a48143b3b1b7977d07cedafc3606f91d",
"sha256:70a1387396ea5b3022539b560c287daf79403d8b4b365f89b56d660e625a4457", "sha256:6f0bd9b2cf1c555c6bfbb71d58750d096f7462a582abf6994cff80fbfe0d8c94",
"sha256:7481f9c2c832a3bf37c80bee44d91ac9938b815cc06f7e795b976e300914aab9", "sha256:74cd7afd1789eabe42c838747c5680d78317aee448a22de75638ac0735ae3284",
"sha256:7c0c7bb49167ac738ca6ee6e7f94a9988a7e4e261d8da335341e8c8c8f3b2e9b", "sha256:79286d63e5f92340357bc2a0801637b2accc95d7e0044768c3eea5e8271cc300",
"sha256:7de84feb31af3d8fdf819cac2042928d0b60d3cb16f49c4b2f48d88db46e79f6", "sha256:8162f379edc3c1c0c4ac7436b3a8baa8ca7754913ed81002f631bc066486803e",
"sha256:7f5087104c3c5af11ea59e49ae66c33ca98b14a47d3796ae97498fca53f84aef", "sha256:85bd128ebb3c47615496778fedbe334094cf6133c6933804e237c741fce4f20c",
"sha256:81badd7d3e0e6aba70a5d1b50fabe8112e9835a6fdb0684054c3fe5378ce0d01", "sha256:8a00c3494a1553e171c77505653cca22f5fadf09a0af4a020243f1baaad412b3",
"sha256:82f11b679df91275788be6734dd4a9dfa29bac67b85326992609f62b05bdab37", "sha256:8dd79b534516b9b792dbb319324962d02c69a50a390cb2387e360bebe5d7b280",
"sha256:8301ecf3e819eb5dbc171e84654ff60872807775301a55fe35b0ab2ba3742031", "sha256:938e819bc74c95466c7f6d5dc7e2d08142c116c380992aa36d60e64e7a62ffe7",
"sha256:8d6a9feb5efd2fdab25c6d5a0a5589fed9d789f5ec57ec12263fd0e60ce1dea6", "sha256:98270f1c52dc4a62279aee7c0a134e84182372e4b3c7ee35cafd906c11f4e218",
"sha256:915d4fa08776c0252dc5a34fa15c6490f66f411ea1ac9492022f98875d6baf20", "sha256:9c2afd9ad52387d32b2a856b19352d605213a06b4684a3b469ff8f39a27fb3a2",
"sha256:94040a92b6676f9ffdab6c6b479b3554b927a635c90698c761960b266b04fc88", "sha256:a35d909327a1c3bc407689179101af93de34bc6af8c6f07d5d29e4eaab54a9f4",
"sha256:a08027ae84efc563f0f2f341dda572eadebeca38c0ae028a009988f27e9e6230", "sha256:a63848afe8f909d1dcea286c3856c1cc1de6e8908e9ce1bdb672c9f19b2d2aa7",
"sha256:a103294583383660d9e06dbd82037dc8e94c184bdcb27b2be44ae4457dafc6b4", "sha256:b12b39ded8cee6c4fdd0b8aa5afdb8cb5641098f2625acc9175effdc064b5c9f",
"sha256:c22bfac8d3b955cdb13f0fcd6343156bf56d925196cf7d9ab9ce9f61d3f1e11c", "sha256:b53a0faf32cde49eb04ad81f8ff60cfa1dcc024aa6a6bb8b545621339395e640",
"sha256:c3810ebcf1d42c532c8f5c3f442c705d94442a27a32f2df5344f0857306ab321", "sha256:c9937cb1061042fb09c4b622884407525a0a595e300ef199d80a7290ca2c71ea",
"sha256:ee4ddc904fb6414b5118af5b8d45e428aac2ccda01326b2ba2fe4354b0d8d1ae", "sha256:e21ca6ecf2a48a53856562af3380f2a64a1ce08ae2d17c800095f4685ab499b1",
"sha256:f16801795f1ffe9472360589a04301018c79e4582a85e68067275bb4f765e4e2", "sha256:e25d48233f5501b41c7d561cfd9ec9c89a891643aaf282750c129d627cc5a547",
"sha256:f62c57ceadedeb8e7b98b48ac4d684bf2b0f73b9d882fed3ca260d9aedf6403f", "sha256:e288a3640c3c9311bb223c13e6ecb2ae4c5fb018756b5fbf82b9a1f13c6c6111",
"sha256:fbb0fda1c574975807aceb0e2332e0ecfe9e5656c191ed482c1a5eafe7a33823" "sha256:ed96e1f28708c5a00fb371971d6634210afdcabb439dd488d41e1cfc2c906459"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", "version": "==1.4.13"
"version": "==1.4.5"
}, },
"swagger-ui-bundle": { "swagger-ui-bundle": {
"hashes": [ "hashes": [
@ -1106,38 +1011,20 @@
"sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df", "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df",
"sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937" "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
"version": "==1.26.4" "version": "==1.26.4"
}, },
"vine": {
"hashes": [
"sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30",
"sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"
],
"markers": "python_version >= '3.6'",
"version": "==5.0.0"
},
"waitress": { "waitress": {
"hashes": [ "hashes": [
"sha256:29af5a53e9fb4e158f525367678b50053808ca6c21ba585754c77d790008c746", "sha256:29af5a53e9fb4e158f525367678b50053808ca6c21ba585754c77d790008c746",
"sha256:69e1f242c7f80273490d3403c3976f3ac3b26e289856936d1f620ed48f321897" "sha256:69e1f242c7f80273490d3403c3976f3ac3b26e289856936d1f620ed48f321897"
], ],
"markers": "python_full_version >= '3.6.0'",
"version": "==2.0.0" "version": "==2.0.0"
}, },
"wcwidth": {
"hashes": [
"sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784",
"sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"
],
"version": "==0.2.5"
},
"webob": { "webob": {
"hashes": [ "hashes": [
"sha256:73aae30359291c14fa3b956f8b5ca31960e420c28c1bec002547fb04928cf89b", "sha256:73aae30359291c14fa3b956f8b5ca31960e420c28c1bec002547fb04928cf89b",
"sha256:b64ef5141be559cfade448f044fa45c2260351edcb6a8ef6b7e00c7dcef0c323" "sha256:b64ef5141be559cfade448f044fa45c2260351edcb6a8ef6b7e00c7dcef0c323"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.8.7" "version": "==1.8.7"
}, },
"webtest": { "webtest": {
@ -1179,11 +1066,11 @@
}, },
"xlsxwriter": { "xlsxwriter": {
"hashes": [ "hashes": [
"sha256:2b7e22b1268c2ed85d73e5629097c9a63357f2429667ada9863cd05ff8ee33aa", "sha256:1a6dd98892e8010d3e089d1cb61385baa8f76fa547598df2c221cc37238c72d3",
"sha256:30ebc19d0f201fafa34a6c622050ed2a268ac8dee24037a61605caa801dc8af5" "sha256:82be5a58c09bdc2ff8afc25acc815c465275239ddfc56d6e7b2a7e6c5d2e213b"
], ],
"index": "pypi", "index": "pypi",
"version": "==1.3.8" "version": "==1.4.0"
} }
}, },
"develop": { "develop": {
@ -1192,7 +1079,6 @@
"sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6",
"sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.3.0" "version": "==20.3.0"
}, },
"coverage": { "coverage": {
@ -1265,23 +1151,21 @@
"sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5",
"sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.9" "version": "==20.9"
}, },
"pbr": { "pbr": {
"hashes": [ "hashes": [
"sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9", "sha256:42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd",
"sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00" "sha256:c68c661ac5cc81058ac94247278eeda6d2e6aecb3e227b0387c30d277e7ef8d4"
], ],
"index": "pypi", "index": "pypi",
"version": "==5.5.1" "version": "==5.6.0"
}, },
"pluggy": { "pluggy": {
"hashes": [ "hashes": [
"sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.13.1" "version": "==0.13.1"
}, },
"py": { "py": {
@ -1289,7 +1173,6 @@
"sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3",
"sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"
], ],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.10.0" "version": "==1.10.0"
}, },
"pyparsing": { "pyparsing": {
@ -1297,23 +1180,21 @@
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
"sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
], ],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.7" "version": "==2.4.7"
}, },
"pytest": { "pytest": {
"hashes": [ "hashes": [
"sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634", "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b",
"sha256:6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc" "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"
], ],
"index": "pypi", "index": "pypi",
"version": "==6.2.3" "version": "==6.2.4"
}, },
"toml": { "toml": {
"hashes": [ "hashes": [
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
"sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
], ],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.10.2" "version": "==0.10.2"
} }
} }

View File

@ -60,6 +60,7 @@ PB_INVESTIGATORS_URL = environ.get('PB_INVESTIGATORS_URL', default=PB_BASE_URL +
PB_REQUIRED_DOCS_URL = environ.get('PB_REQUIRED_DOCS_URL', default=PB_BASE_URL + "required_docs?studyid=%i") PB_REQUIRED_DOCS_URL = environ.get('PB_REQUIRED_DOCS_URL', default=PB_BASE_URL + "required_docs?studyid=%i")
PB_STUDY_DETAILS_URL = environ.get('PB_STUDY_DETAILS_URL', default=PB_BASE_URL + "study?studyid=%i") PB_STUDY_DETAILS_URL = environ.get('PB_STUDY_DETAILS_URL', default=PB_BASE_URL + "study?studyid=%i")
PB_SPONSORS_URL = environ.get('PB_SPONSORS_URL', default=PB_BASE_URL + "sponsors?studyid=%i") PB_SPONSORS_URL = environ.get('PB_SPONSORS_URL', default=PB_BASE_URL + "sponsors?studyid=%i")
PB_IRB_INFO_URL = environ.get('PB_IRB_INFO_URL', default=PB_BASE_URL + "current_irb_info/%i")
# Ldap Configuration # Ldap Configuration
LDAP_URL = environ.get('LDAP_URL', default="ldap.virginia.edu").strip('/') # No trailing slash or http:// LDAP_URL = environ.get('LDAP_URL', default="ldap.virginia.edu").strip('/') # No trailing slash or http://

View File

@ -83,6 +83,8 @@ paths:
type : integer type : integer
get: get:
operationId: crc.api.file.get_document_directory operationId: crc.api.file.get_document_directory
security:
- auth_admin: ['secret']
summary: Returns a directory of all files for study in a nested structure summary: Returns a directory of all files for study in a nested structure
tags: tags:
- Document Categories - Document Categories
@ -349,6 +351,12 @@ paths:
schema: schema:
type: integer type: integer
format: int32 format: int32
- name: update_status
in: query
required: false
description: If set to true, will synch the study with protocol builder and assure the status of all workflows is up to date (expensive).
schema:
type: boolean
get: get:
operationId: crc.api.study.get_study operationId: crc.api.study.get_study
summary: Provides a single study summary: Provides a single study
@ -425,7 +433,7 @@ paths:
- name: spec_id - name: spec_id
in: path in: path
required: true required: true
description: The unique id of an existing workflow specification to modify. description: The unique id of an existing workflow specification.
schema: schema:
type: string type: string
get: get:
@ -440,6 +448,18 @@ paths:
application/json: application/json:
schema: schema:
$ref: "#/components/schemas/WorkflowSpec" $ref: "#/components/schemas/WorkflowSpec"
post:
operationId: crc.api.workflow.get_workflow_from_spec
summary: Creates a workflow from a workflow spec and returns the workflow
tags:
- Workflow Specifications
responses:
'200':
description: Workflow generated successfully
content:
application/json:
schema:
$ref: "#/components/schemas/Workflow"
put: put:
operationId: crc.api.workflow.update_workflow_specification operationId: crc.api.workflow.update_workflow_specification
security: security:
@ -469,6 +489,21 @@ paths:
responses: responses:
'204': '204':
description: The workflow specification has been removed. description: The workflow specification has been removed.
/workflow-specification/standalone:
get:
operationId: crc.api.workflow.standalone_workflow_specs
summary: Provides a list of workflow specifications that can be run outside a study.
tags:
- Workflow Specifications
responses:
'200':
description: A list of workflow specifications
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/WorkflowSpec"
/workflow-specification/{spec_id}/validate: /workflow-specification/{spec_id}/validate:
parameters: parameters:
- name: spec_id - name: spec_id
@ -1571,6 +1606,9 @@ components:
category_id: category_id:
type: integer type: integer
nullable: true nullable: true
standalone:
type: boolean
example: false
workflow_spec_category: workflow_spec_category:
$ref: "#/components/schemas/WorkflowSpecCategory" $ref: "#/components/schemas/WorkflowSpecCategory"
is_status: is_status:
@ -1643,6 +1681,8 @@ components:
type: integer type: integer
num_tasks_incomplete: num_tasks_incomplete:
type: integer type: integer
study_id:
type: integer
example: example:
id: 291234 id: 291234

View File

@ -55,7 +55,7 @@ def update_datastore(id, body):
raise ApiError('unknown_item', 'The item "' + id + '" is not recognized.') raise ApiError('unknown_item', 'The item "' + id + '" is not recognized.')
DataStoreSchema().load(body, instance=item, session=session) DataStoreSchema().load(body, instance=item, session=session)
item.last_updated = datetime.now() item.last_updated = datetime.utcnow()
session.add(item) session.add(item)
session.commit() session.commit()
return DataStoreSchema().dump(item) return DataStoreSchema().dump(item)
@ -87,7 +87,7 @@ def add_datastore(body):
'but not more than one of these') 'but not more than one of these')
item = DataStoreSchema().load(body) item = DataStoreSchema().load(body)
item.last_updated = datetime.now() item.last_updated = datetime.utcnow()
session.add(item) session.add(item)
session.commit() session.commit()
return DataStoreSchema().dump(item) return DataStoreSchema().dump(item)

View File

@ -23,7 +23,7 @@ def add_study(body):
study_model = StudyModel(user_uid=UserService.current_user().uid, study_model = StudyModel(user_uid=UserService.current_user().uid,
title=body['title'], title=body['title'],
primary_investigator_id=body['primary_investigator_id'], primary_investigator_id=body['primary_investigator_id'],
last_updated=datetime.now(), last_updated=datetime.utcnow(),
status=StudyStatus.in_progress) status=StudyStatus.in_progress)
session.add(study_model) session.add(study_model)
StudyService.add_study_update_event(study_model, StudyService.add_study_update_event(study_model,
@ -33,7 +33,7 @@ def add_study(body):
errors = StudyService._add_all_workflow_specs_to_study(study_model) errors = StudyService._add_all_workflow_specs_to_study(study_model)
session.commit() session.commit()
study = StudyService().get_study(study_model.id) study = StudyService().get_study(study_model.id, do_status=True)
study_data = StudySchema().dump(study) study_data = StudySchema().dump(study)
study_data["errors"] = ApiErrorSchema(many=True).dump(errors) study_data["errors"] = ApiErrorSchema(many=True).dump(errors)
return study_data return study_data
@ -51,7 +51,7 @@ def update_study(study_id, body):
study: Study = StudyForUpdateSchema().load(body) study: Study = StudyForUpdateSchema().load(body)
status = StudyStatus(study.status) status = StudyStatus(study.status)
study_model.last_updated = datetime.now() study_model.last_updated = datetime.utcnow()
if study_model.status != status: if study_model.status != status:
study_model.status = status study_model.status = status
@ -74,8 +74,8 @@ def update_study(study_id, body):
return StudySchema().dump(study) return StudySchema().dump(study)
def get_study(study_id): def get_study(study_id, update_status=False):
study = StudyService.get_study(study_id) study = StudyService.get_study(study_id, do_status=update_status)
if (study is None): if (study is None):
raise ApiError("unknown_study", 'The study "' + study_id + '" is not recognized.', status_code=404) raise ApiError("unknown_study", 'The study "' + study_id + '" is not recognized.', status_code=404)
return StudySchema().dump(study) return StudySchema().dump(study)

View File

@ -214,7 +214,7 @@ def _handle_login(user_info: LdapModel, redirect_url=None):
g.user = user g.user = user
# Return the frontend auth callback URL, with auth token appended. # Return the frontend auth callback URL, with auth token appended.
auth_token = user.encode_auth_token().decode() auth_token = user.encode_auth_token()
g.token = auth_token g.token = auth_token
if redirect_url is not None: if redirect_url is not None:

View File

@ -101,6 +101,24 @@ def delete_workflow_specification(spec_id):
session.commit() session.commit()
def get_workflow_from_spec(spec_id):
workflow_model = WorkflowService.get_workflow_from_spec(spec_id, g.user)
processor = WorkflowProcessor(workflow_model)
processor.do_engine_steps()
processor.save()
WorkflowService.update_task_assignments(processor)
workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
return WorkflowApiSchema().dump(workflow_api_model)
def standalone_workflow_specs():
schema = WorkflowSpecModelSchema(many=True)
specs = WorkflowService.get_standalone_workflow_specs()
return schema.dump(specs)
def get_workflow(workflow_id, do_engine_steps=True): def get_workflow(workflow_id, do_engine_steps=True):
"""Retrieve workflow based on workflow_id, and return it in the last saved State. """Retrieve workflow based on workflow_id, and return it in the last saved State.
If do_engine_steps is False, return the workflow without running any engine tasks or logging any events. """ If do_engine_steps is False, return the workflow without running any engine tasks or logging any events. """
@ -185,9 +203,6 @@ def update_task(workflow_id, task_id, body, terminate_loop=None, update_all=Fals
if workflow_model is None: if workflow_model is None:
raise ApiError("invalid_workflow_id", "The given workflow id is not valid.", status_code=404) raise ApiError("invalid_workflow_id", "The given workflow id is not valid.", status_code=404)
elif workflow_model.study is None:
raise ApiError("invalid_study", "There is no study associated with the given workflow.", status_code=404)
processor = WorkflowProcessor(workflow_model) processor = WorkflowProcessor(workflow_model)
task_id = uuid.UUID(task_id) task_id = uuid.UUID(task_id)
spiff_task = processor.bpmn_workflow.get_task(task_id) spiff_task = processor.bpmn_workflow.get_task(task_id)

View File

@ -184,9 +184,12 @@ def update_or_create_current_file(remote,workflow_spec_id,updatefile):
currentfile.content_type = updatefile['content_type'] currentfile.content_type = updatefile['content_type']
currentfile.primary_process_id = updatefile['primary_process_id'] currentfile.primary_process_id = updatefile['primary_process_id']
session.add(currentfile) session.add(currentfile)
content = WorkflowSyncService.get_remote_file_by_hash(remote, updatefile['md5_hash']) try:
FileService.update_file(currentfile, content, updatefile['type']) content = WorkflowSyncService.get_remote_file_by_hash(remote, updatefile['md5_hash'])
FileService.update_file(currentfile, content, updatefile['type'])
except ApiError:
# Remote files doesn't exist, don't update it.
print("Remote file " + currentfile.name + " does not exist, so not syncing.")
def sync_changed_files(remote,workflow_spec_id): def sync_changed_files(remote,workflow_spec_id):
""" """

View File

@ -191,7 +191,7 @@ class DocumentDirectory(object):
class WorkflowApi(object): class WorkflowApi(object):
def __init__(self, id, status, next_task, navigation, def __init__(self, id, status, next_task, navigation,
spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks, spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks,
last_updated, is_review, title): last_updated, is_review, title, study_id):
self.id = id self.id = id
self.status = status self.status = status
self.next_task = next_task # The next task that requires user input. self.next_task = next_task # The next task that requires user input.
@ -204,13 +204,14 @@ class WorkflowApi(object):
self.last_updated = last_updated self.last_updated = last_updated
self.title = title self.title = title
self.is_review = is_review self.is_review = is_review
self.study_id = study_id or ''
class WorkflowApiSchema(ma.Schema): class WorkflowApiSchema(ma.Schema):
class Meta: class Meta:
model = WorkflowApi model = WorkflowApi
fields = ["id", "status", "next_task", "navigation", fields = ["id", "status", "next_task", "navigation",
"workflow_spec_id", "spec_version", "is_latest_spec", "total_tasks", "completed_tasks", "workflow_spec_id", "spec_version", "is_latest_spec", "total_tasks", "completed_tasks",
"last_updated", "is_review", "title"] "last_updated", "is_review", "title", "study_id"]
unknown = INCLUDE unknown = INCLUDE
status = EnumField(WorkflowStatus) status = EnumField(WorkflowStatus)
@ -221,7 +222,7 @@ class WorkflowApiSchema(ma.Schema):
def make_workflow(self, data, **kwargs): def make_workflow(self, data, **kwargs):
keys = ['id', 'status', 'next_task', 'navigation', keys = ['id', 'status', 'next_task', 'navigation',
'workflow_spec_id', 'spec_version', 'is_latest_spec', "total_tasks", "completed_tasks", 'workflow_spec_id', 'spec_version', 'is_latest_spec', "total_tasks", "completed_tasks",
"last_updated", "is_review", "title"] "last_updated", "is_review", "title", "study_id"]
filtered_fields = {key: data[key] for key in keys} filtered_fields = {key: data[key] for key in keys}
filtered_fields['next_task'] = TaskSchema().make_task(data['next_task']) filtered_fields['next_task'] = TaskSchema().make_task(data['next_task'])
return WorkflowApi(**filtered_fields) return WorkflowApi(**filtered_fields)

View File

@ -10,7 +10,7 @@ from crc import db, ma
class DataStoreModel(db.Model): class DataStoreModel(db.Model):
__tablename__ = 'data_store' __tablename__ = 'data_store'
id = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer, primary_key=True)
last_updated = db.Column(db.DateTime(timezone=True), default=func.now()) last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now())
key = db.Column(db.String, nullable=False) key = db.Column(db.String, nullable=False)
workflow_id = db.Column(db.Integer) workflow_id = db.Column(db.Integer)
study_id = db.Column(db.Integer, nullable=True) study_id = db.Column(db.Integer, nullable=True)

View File

@ -1,7 +1,7 @@
import enum import enum
from typing import cast from typing import cast
from marshmallow import INCLUDE, EXCLUDE from marshmallow import INCLUDE, EXCLUDE, fields, Schema
from marshmallow_enum import EnumField from marshmallow_enum import EnumField
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from sqlalchemy import func, Index from sqlalchemy import func, Index
@ -65,11 +65,13 @@ class FileDataModel(db.Model):
md5_hash = db.Column(UUID(as_uuid=True), unique=False, nullable=False) md5_hash = db.Column(UUID(as_uuid=True), unique=False, nullable=False)
data = deferred(db.Column(db.LargeBinary)) # Don't load it unless you have to. data = deferred(db.Column(db.LargeBinary)) # Don't load it unless you have to.
version = db.Column(db.Integer, default=0) version = db.Column(db.Integer, default=0)
date_created = db.Column(db.DateTime(timezone=True), default=func.now()) size = db.Column(db.Integer, default=0)
date_created = db.Column(db.DateTime(timezone=True), server_default=func.now())
file_model_id = db.Column(db.Integer, db.ForeignKey('file.id')) file_model_id = db.Column(db.Integer, db.ForeignKey('file.id'))
file_model = db.relationship("FileModel", foreign_keys=[file_model_id]) file_model = db.relationship("FileModel", foreign_keys=[file_model_id])
class FileModel(db.Model): class FileModel(db.Model):
__tablename__ = 'file' __tablename__ = 'file'
id = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer, primary_key=True)
@ -117,11 +119,13 @@ class File(object):
if data_model: if data_model:
instance.last_modified = data_model.date_created instance.last_modified = data_model.date_created
instance.latest_version = data_model.version instance.latest_version = data_model.version
instance.size = data_model.size
else: else:
instance.last_modified = None instance.last_modified = None
instance.latest_version = None instance.latest_version = None
return instance return instance
class FileModelSchema(SQLAlchemyAutoSchema): class FileModelSchema(SQLAlchemyAutoSchema):
class Meta: class Meta:
model = FileModel model = FileModel
@ -132,17 +136,19 @@ class FileModelSchema(SQLAlchemyAutoSchema):
type = EnumField(FileType) type = EnumField(FileType)
class FileSchema(ma.Schema): class FileSchema(Schema):
class Meta: class Meta:
model = File model = File
fields = ["id", "name", "is_status", "is_reference", "content_type", fields = ["id", "name", "is_status", "is_reference", "content_type",
"primary", "primary_process_id", "workflow_spec_id", "workflow_id", "primary", "primary_process_id", "workflow_spec_id", "workflow_id",
"irb_doc_code", "last_modified", "latest_version", "type", "categories", "irb_doc_code", "last_modified", "latest_version", "type", "categories",
"description", "category", "description", "download_name"] "description", "category", "description", "download_name", "size"]
unknown = INCLUDE unknown = INCLUDE
type = EnumField(FileType) type = EnumField(FileType)
class LookupFileModel(db.Model): class LookupFileModel(db.Model):
"""Gives us a quick way to tell what kind of lookup is set on a form field. """Gives us a quick way to tell what kind of lookup is set on a form field.
Connected to the file data model, so that if a new version of the same file is Connected to the file data model, so that if a new version of the same file is

View File

@ -15,7 +15,7 @@ class LdapModel(db.Model):
department = db.Column(db.String) department = db.Column(db.String)
affiliation = db.Column(db.String) affiliation = db.Column(db.String)
sponsor_type = db.Column(db.String) sponsor_type = db.Column(db.String)
date_cached = db.Column(db.DateTime(timezone=True), default=func.now()) date_cached = db.Column(db.DateTime(timezone=True), server_default=func.now())
@classmethod @classmethod
def from_entry(cls, entry): def from_entry(cls, entry):

View File

@ -41,7 +41,7 @@ class StudyModel(db.Model):
id = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String) title = db.Column(db.String)
short_title = db.Column(db.String, nullable=True) short_title = db.Column(db.String, nullable=True)
last_updated = db.Column(db.DateTime(timezone=True), default=func.now()) last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now())
status = db.Column(db.Enum(StudyStatus)) status = db.Column(db.Enum(StudyStatus))
irb_status = db.Column(db.Enum(IrbStatus)) irb_status = db.Column(db.Enum(IrbStatus))
primary_investigator_id = db.Column(db.String, nullable=True) primary_investigator_id = db.Column(db.String, nullable=True)
@ -85,7 +85,7 @@ class StudyEvent(db.Model):
id = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer, primary_key=True)
study_id = db.Column(db.Integer, db.ForeignKey(StudyModel.id), nullable=False) study_id = db.Column(db.Integer, db.ForeignKey(StudyModel.id), nullable=False)
study = db.relationship(StudyModel, back_populates='events_history') study = db.relationship(StudyModel, back_populates='events_history')
create_date = db.Column(db.DateTime(timezone=True), default=func.now()) create_date = db.Column(db.DateTime(timezone=True), server_default=func.now())
status = db.Column(db.Enum(StudyStatus)) status = db.Column(db.Enum(StudyStatus))
comment = db.Column(db.String, default='') comment = db.Column(db.String, default='')
event_type = db.Column(db.Enum(StudyEventType)) event_type = db.Column(db.Enum(StudyEventType))

View File

@ -5,12 +5,13 @@ from crc import db, ma
from crc.models.study import StudyModel, StudySchema, WorkflowMetadataSchema, WorkflowMetadata from crc.models.study import StudyModel, StudySchema, WorkflowMetadataSchema, WorkflowMetadata
from crc.models.workflow import WorkflowModel from crc.models.workflow import WorkflowModel
from crc.services.ldap_service import LdapService from crc.services.ldap_service import LdapService
from sqlalchemy import func
class TaskEventModel(db.Model): class TaskEventModel(db.Model):
__tablename__ = 'task_event' __tablename__ = 'task_event'
id = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer, primary_key=True)
study_id = db.Column(db.Integer, db.ForeignKey('study.id'), nullable=False) study_id = db.Column(db.Integer, db.ForeignKey('study.id'))
user_uid = db.Column(db.String, nullable=False) # In some cases the unique user id may not exist in the db yet. user_uid = db.Column(db.String, nullable=False) # In some cases the unique user id may not exist in the db yet.
workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=False) workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=False)
workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id')) workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'))
@ -27,7 +28,7 @@ class TaskEventModel(db.Model):
mi_count = db.Column(db.Integer) mi_count = db.Column(db.Integer)
mi_index = db.Column(db.Integer) mi_index = db.Column(db.Integer)
process_name = db.Column(db.String) process_name = db.Column(db.String)
date = db.Column(db.DateTime) date = db.Column(db.DateTime(timezone=True),default=func.now())
class TaskEventModelSchema(SQLAlchemyAutoSchema): class TaskEventModelSchema(SQLAlchemyAutoSchema):

View File

@ -3,6 +3,7 @@ import enum
import marshmallow import marshmallow
from marshmallow import EXCLUDE from marshmallow import EXCLUDE
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from sqlalchemy import func
from crc import db from crc import db
from crc.models.file import FileModel, FileDataModel from crc.models.file import FileModel, FileDataModel
@ -33,6 +34,7 @@ class WorkflowSpecModel(db.Model):
category_id = db.Column(db.Integer, db.ForeignKey('workflow_spec_category.id'), nullable=True) category_id = db.Column(db.Integer, db.ForeignKey('workflow_spec_category.id'), nullable=True)
category = db.relationship("WorkflowSpecCategoryModel") category = db.relationship("WorkflowSpecCategoryModel")
is_master_spec = db.Column(db.Boolean, default=False) is_master_spec = db.Column(db.Boolean, default=False)
standalone = db.Column(db.Boolean, default=False)
class WorkflowSpecModelSchema(SQLAlchemyAutoSchema): class WorkflowSpecModelSchema(SQLAlchemyAutoSchema):
@ -87,7 +89,8 @@ class WorkflowModel(db.Model):
workflow_spec = db.relationship("WorkflowSpecModel") workflow_spec = db.relationship("WorkflowSpecModel")
total_tasks = db.Column(db.Integer, default=0) total_tasks = db.Column(db.Integer, default=0)
completed_tasks = db.Column(db.Integer, default=0) completed_tasks = db.Column(db.Integer, default=0)
last_updated = db.Column(db.DateTime) last_updated = db.Column(db.DateTime(timezone=True), server_default=func.now())
user_id = db.Column(db.String, default=None)
# Order By is important or generating hashes on reviews. # Order By is important or generating hashes on reviews.
dependencies = db.relationship(WorkflowSpecDependencyFile, cascade="all, delete, delete-orphan", dependencies = db.relationship(WorkflowSpecDependencyFile, cascade="all, delete, delete-orphan",
order_by="WorkflowSpecDependencyFile.file_data_id") order_by="WorkflowSpecDependencyFile.file_data_id")

View File

@ -114,7 +114,10 @@ Takes two arguments:
doc_context = self.rich_text_update(doc_context) doc_context = self.rich_text_update(doc_context)
doc_context = self.append_images(doc, doc_context, image_file_data) doc_context = self.append_images(doc, doc_context, image_file_data)
jinja_env = jinja2.Environment(autoescape=True) jinja_env = jinja2.Environment(autoescape=True)
doc.render(doc_context, jinja_env) try:
doc.render(doc_context, jinja_env)
except Exception as e:
print (e)
target_stream = BytesIO() target_stream = BytesIO()
doc.save(target_stream) doc.save(target_stream)
target_stream.seek(0) # move to the beginning of the stream. target_stream.seek(0) # move to the beginning of the stream.

View File

@ -76,7 +76,7 @@ class DataStoreBase(object):
workflow_id=workflow_id, workflow_id=workflow_id,
spec_id=workflow_spec_id) spec_id=workflow_spec_id)
study.value = args[1] study.value = args[1]
study.last_updated = datetime.now() study.last_updated = datetime.utcnow()
overwritten = self.overwritten(study.value, prev_value) overwritten = self.overwritten(study.value, prev_value)
session.add(study) session.add(study)
session.commit() session.commit()

View File

@ -10,7 +10,9 @@ class FileDataGet(Script, DataStoreBase):
return """Gets user data from the data store - takes only two keyword arguments arguments: 'file_id' and 'key' """ return """Gets user data from the data store - takes only two keyword arguments arguments: 'file_id' and 'key' """
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs): def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
self.do_task(task, study_id, workflow_id, *args, **kwargs) if self.validate_kw_args(**kwargs):
myargs = [kwargs['key']]
return True
def validate_kw_args(self,**kwargs): def validate_kw_args(self,**kwargs):
if kwargs.get('key',None) is None: if kwargs.get('key',None) is None:

View File

@ -10,7 +10,11 @@ class FileDataSet(Script, DataStoreBase):
return """Sets data the data store - takes three keyword arguments arguments: 'file_id' and 'key' and 'value'""" return """Sets data the data store - takes three keyword arguments arguments: 'file_id' and 'key' and 'value'"""
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs): def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
self.do_task(task, study_id, workflow_id, *args, **kwargs) if self.validate_kw_args(**kwargs):
myargs = [kwargs['key'],kwargs['value']]
fileid = kwargs['file_id']
del(kwargs['file_id'])
return True
def validate_kw_args(self,**kwargs): def validate_kw_args(self,**kwargs):
if kwargs.get('key',None) is None: if kwargs.get('key',None) is None:

View File

@ -0,0 +1,24 @@
from crc.scripts.script import Script
from crc.api.common import ApiError
from crc.services.protocol_builder import ProtocolBuilderService
class IRBInfo(Script):
    """Workflow script that fetches Protocol Builder IRB information for a study."""

    # Shared service client; one instance is enough for all script invocations.
    pb = ProtocolBuilderService()

    def get_description(self):
        return """Returns the IRB Info data for a Study"""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # Validation never hits Protocol Builder; just confirm a usable study id.
        return isinstance(study_id, int)

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        """Return the IRB info for *study_id*, or raise a task-scoped ApiError."""
        irb_info = self.pb.get_irb_info(study_id)
        # Guard clause: an empty/falsy response means the lookup failed.
        if not irb_info:
            raise ApiError.from_task(
                code='missing_irb_info',
                message=f'There was a problem retrieving IRB Info for study {study_id}.',
                task=task)
        return irb_info

View File

@ -10,7 +10,7 @@ def firsttime():
def sincetime(txt,lasttime): def sincetime(txt,lasttime):
thistime=firsttime() thistime=firsttime()
print('%s runtime was %2f'%(txt,thistime-lasttime)) print('%2.4f sec | %s' % (thistime-lasttime, txt))
return thistime return thistime
def timeit(f): def timeit(f):
@ -20,7 +20,7 @@ def timeit(f):
ts = time.time() ts = time.time()
result = f(*args, **kw) result = f(*args, **kw)
te = time.time() te = time.time()
print('func:%r args:[%r, %r] took: %2.4f sec' % (f.__name__, args, kw, te-ts)) print('%2.4f sec | func:%r args:[%r, %r] ' % (te-ts, f.__name__, args, kw))
return result return result
return timed return timed

View File

@ -14,6 +14,7 @@ from sqlalchemy.exc import IntegrityError
from crc import session, app from crc import session, app
from crc.api.common import ApiError from crc.api.common import ApiError
from crc.models.data_store import DataStoreModel
from crc.models.file import FileType, FileDataModel, FileModel, LookupFileModel, LookupDataModel from crc.models.file import FileType, FileDataModel, FileModel, LookupFileModel, LookupDataModel
from crc.models.workflow import WorkflowSpecModel, WorkflowModel, WorkflowSpecDependencyFile from crc.models.workflow import WorkflowSpecModel, WorkflowModel, WorkflowSpecDependencyFile
from crc.services.cache_service import cache from crc.services.cache_service import cache
@ -175,6 +176,8 @@ class FileService(object):
order_by(desc(FileDataModel.date_created)).first() order_by(desc(FileDataModel.date_created)).first()
md5_checksum = UUID(hashlib.md5(binary_data).hexdigest()) md5_checksum = UUID(hashlib.md5(binary_data).hexdigest())
size = len(binary_data)
if (latest_data_model is not None) and (md5_checksum == latest_data_model.md5_hash): if (latest_data_model is not None) and (md5_checksum == latest_data_model.md5_hash):
# This file does not need to be updated, it's the same file. If it is arhived, # This file does not need to be updated, it's the same file. If it is arhived,
# then de-arvhive it. # then de-arvhive it.
@ -210,7 +213,8 @@ class FileService(object):
new_file_data_model = FileDataModel( new_file_data_model = FileDataModel(
data=binary_data, file_model_id=file_model.id, file_model=file_model, data=binary_data, file_model_id=file_model.id, file_model=file_model,
version=version, md5_hash=md5_checksum, date_created=datetime.now() version=version, md5_hash=md5_checksum, date_created=datetime.utcnow(),
size=size
) )
session.add_all([file_model, new_file_data_model]) session.add_all([file_model, new_file_data_model])
session.commit() session.commit()
@ -389,6 +393,7 @@ class FileService(object):
session.query(LookupDataModel).filter_by(lookup_file_model_id=lf.id).delete() session.query(LookupDataModel).filter_by(lookup_file_model_id=lf.id).delete()
session.query(LookupFileModel).filter_by(id=lf.id).delete() session.query(LookupFileModel).filter_by(id=lf.id).delete()
session.query(FileDataModel).filter_by(file_model_id=file_id).delete() session.query(FileDataModel).filter_by(file_model_id=file_id).delete()
session.query(DataStoreModel).filter_by(file_id=file_id).delete()
session.query(FileModel).filter_by(id=file_id).delete() session.query(FileModel).filter_by(id=file_id).delete()
session.commit() session.commit()
except IntegrityError as ie: except IntegrityError as ie:

View File

@ -14,6 +14,7 @@ class ProtocolBuilderService(object):
REQUIRED_DOCS_URL = app.config['PB_REQUIRED_DOCS_URL'] REQUIRED_DOCS_URL = app.config['PB_REQUIRED_DOCS_URL']
STUDY_DETAILS_URL = app.config['PB_STUDY_DETAILS_URL'] STUDY_DETAILS_URL = app.config['PB_STUDY_DETAILS_URL']
SPONSORS_URL = app.config['PB_SPONSORS_URL'] SPONSORS_URL = app.config['PB_SPONSORS_URL']
IRB_INFO_URL = app.config['PB_IRB_INFO_URL']
@staticmethod @staticmethod
def is_enabled(): def is_enabled():
@ -55,6 +56,10 @@ class ProtocolBuilderService(object):
def get_study_details(study_id) -> {}: def get_study_details(study_id) -> {}:
return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.STUDY_DETAILS_URL) return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.STUDY_DETAILS_URL)
    @staticmethod
    def get_irb_info(study_id) -> {}:
        """Fetch the IRB info record for *study_id* from Protocol Builder.

        Issues a request against IRB_INFO_URL via the shared __make_request
        helper; presumably returns the decoded response body — confirm the
        exact shape against the Protocol Builder API.
        """
        return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.IRB_INFO_URL)
@staticmethod @staticmethod
def get_sponsors(study_id) -> {}: def get_sponsors(study_id) -> {}:
return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.SPONSORS_URL) return ProtocolBuilderService.__make_request(study_id, ProtocolBuilderService.SPONSORS_URL)

View File

@ -55,7 +55,7 @@ class StudyService(object):
return studies return studies
@staticmethod @staticmethod
def get_study(study_id, study_model: StudyModel = None, do_status=True): def get_study(study_id, study_model: StudyModel = None, do_status=False):
"""Returns a study model that contains all the workflows organized by category. """Returns a study model that contains all the workflows organized by category.
IMPORTANT: This is intended to be a lightweight call, it should never involve IMPORTANT: This is intended to be a lightweight call, it should never involve
loading up and executing all the workflows in a study to calculate information.""" loading up and executing all the workflows in a study to calculate information."""
@ -508,8 +508,9 @@ class StudyService(object):
def _create_workflow_model(study: StudyModel, spec): def _create_workflow_model(study: StudyModel, spec):
workflow_model = WorkflowModel(status=WorkflowStatus.not_started, workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
study=study, study=study,
user_id=None,
workflow_spec_id=spec.id, workflow_spec_id=spec.id,
last_updated=datetime.now()) last_updated=datetime.utcnow())
session.add(workflow_model) session.add(workflow_model)
session.commit() session.commit()
return workflow_model return workflow_model

View File

@ -42,7 +42,7 @@ class CustomBpmnScriptEngine(BpmnScriptEngine):
""" """
return self.evaluate_expression(task, expression) return self.evaluate_expression(task, expression)
@timeit
def execute(self, task: SpiffTask, script, data): def execute(self, task: SpiffTask, script, data):
study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY] study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
@ -219,7 +219,7 @@ class WorkflowProcessor(object):
self.workflow_model.status = self.get_status() self.workflow_model.status = self.get_status()
self.workflow_model.total_tasks = len(tasks) self.workflow_model.total_tasks = len(tasks)
self.workflow_model.completed_tasks = sum(1 for t in tasks if t.state in complete_states) self.workflow_model.completed_tasks = sum(1 for t in tasks if t.state in complete_states)
self.workflow_model.last_updated = datetime.now() self.workflow_model.last_updated = datetime.utcnow()
self.update_dependencies(self.spec_data_files) self.update_dependencies(self.spec_data_files)
session.add(self.workflow_model) session.add(self.workflow_model)
session.commit() session.commit()

View File

@ -57,13 +57,15 @@ class WorkflowService(object):
user = db.session.query(UserModel).filter_by(uid="test").first() user = db.session.query(UserModel).filter_by(uid="test").first()
if not user: if not user:
db.session.add(UserModel(uid="test")) db.session.add(UserModel(uid="test"))
db.session.commit()
study = db.session.query(StudyModel).filter_by(user_uid="test").first() study = db.session.query(StudyModel).filter_by(user_uid="test").first()
if not study: if not study:
db.session.add(StudyModel(user_uid="test", title="test")) db.session.add(StudyModel(user_uid="test", title="test"))
db.session.commit() db.session.commit()
study = db.session.query(StudyModel).filter_by(user_uid="test").first()
workflow_model = WorkflowModel(status=WorkflowStatus.not_started, workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
workflow_spec_id=spec_id, workflow_spec_id=spec_id,
last_updated=datetime.now(), last_updated=datetime.utcnow(),
study=study) study=study)
return workflow_model return workflow_model
@ -408,7 +410,8 @@ class WorkflowService(object):
completed_tasks=processor.workflow_model.completed_tasks, completed_tasks=processor.workflow_model.completed_tasks,
last_updated=processor.workflow_model.last_updated, last_updated=processor.workflow_model.last_updated,
is_review=is_review, is_review=is_review,
title=spec.display_name title=spec.display_name,
study_id=processor.workflow_model.study_id or None
) )
if not next_task: # The Next Task can be requested to be a certain task, useful for parallel tasks. if not next_task: # The Next Task can be requested to be a certain task, useful for parallel tasks.
# This may or may not work, sometimes there is no next task to complete. # This may or may not work, sometimes there is no next task to complete.
@ -667,30 +670,39 @@ class WorkflowService(object):
@staticmethod @staticmethod
def get_users_assigned_to_task(processor, spiff_task) -> List[str]: def get_users_assigned_to_task(processor, spiff_task) -> List[str]:
if not hasattr(spiff_task.task_spec, 'lane') or spiff_task.task_spec.lane is None: if processor.workflow_model.study_id is None and processor.workflow_model.user_id is None:
associated = StudyService.get_study_associates(processor.workflow_model.study.id) raise ApiError.from_task(code='invalid_workflow',
return [user['uid'] for user in associated if user['access']] message='A workflow must have either a study_id or a user_id.',
if spiff_task.task_spec.lane not in spiff_task.data: task=spiff_task)
return [] # No users are assignable to the task at this moment # Standalone workflow - we only care about the current user
lane_users = spiff_task.data[spiff_task.task_spec.lane] elif processor.workflow_model.study_id is None and processor.workflow_model.user_id is not None:
if not isinstance(lane_users, list): return [processor.workflow_model.user_id]
lane_users = [lane_users] # Workflow associated with a study - get all the users
else:
if not hasattr(spiff_task.task_spec, 'lane') or spiff_task.task_spec.lane is None:
associated = StudyService.get_study_associates(processor.workflow_model.study.id)
return [user['uid'] for user in associated if user['access']]
if spiff_task.task_spec.lane not in spiff_task.data:
return [] # No users are assignable to the task at this moment
lane_users = spiff_task.data[spiff_task.task_spec.lane]
if not isinstance(lane_users, list):
lane_users = [lane_users]
lane_uids = [] lane_uids = []
for user in lane_users: for user in lane_users:
if isinstance(user, dict): if isinstance(user, dict):
if 'value' in user and user['value'] is not None: if 'value' in user and user['value'] is not None:
lane_uids.append(user['value']) lane_uids.append(user['value'])
else:
raise ApiError.from_task(code="task_lane_user_error", message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it." %
spiff_task.task_spec.name, task=spiff_task)
elif isinstance(user, str):
lane_uids.append(user)
else: else:
raise ApiError.from_task(code="task_lane_user_error", message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it." % raise ApiError.from_task(code="task_lane_user_error", message="Spiff Task %s lane user is not a string or dict" %
spiff_task.task_spec.name, task=spiff_task) spiff_task.task_spec.name, task=spiff_task)
elif isinstance(user, str):
lane_uids.append(user)
else:
raise ApiError.from_task(code="task_lane_user_error", message="Spiff Task %s lane user is not a string or dict" %
spiff_task.task_spec.name, task=spiff_task)
return lane_uids return lane_uids
@staticmethod @staticmethod
def log_task_action(user_uid, processor, spiff_task, action): def log_task_action(user_uid, processor, spiff_task, action):
@ -714,7 +726,7 @@ class WorkflowService(object):
mi_count=task.multi_instance_count, # This is the number of times the task could repeat. mi_count=task.multi_instance_count, # This is the number of times the task could repeat.
mi_index=task.multi_instance_index, # And the index of the currently repeating task. mi_index=task.multi_instance_index, # And the index of the currently repeating task.
process_name=task.process_name, process_name=task.process_name,
date=datetime.now(), date=datetime.utcnow(),
) )
db.session.add(task_event) db.session.add(task_event)
db.session.commit() db.session.commit()
@ -783,3 +795,19 @@ class WorkflowService(object):
for workflow in workflows: for workflow in workflows:
if workflow.status == WorkflowStatus.user_input_required or workflow.status == WorkflowStatus.waiting: if workflow.status == WorkflowStatus.user_input_required or workflow.status == WorkflowStatus.waiting:
WorkflowProcessor.reset(workflow, clear_data=False) WorkflowProcessor.reset(workflow, clear_data=False)
@staticmethod
def get_workflow_from_spec(workflow_spec_id, user):
    """Create, persist, and return a standalone workflow for *user*.

    The workflow is deliberately created with ``study=None``: standalone
    workflows belong to a user rather than a study.

    :param workflow_spec_id: id of the WorkflowSpecModel to instantiate.
    :param user: user object whose ``uid`` owns the new workflow.
    :returns: the committed WorkflowModel.
    """
    workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
                                   study=None,
                                   user_id=user.uid,
                                   workflow_spec_id=workflow_spec_id,
                                   # utcnow() for consistency: every other
                                   # last_updated write in this service uses
                                   # UTC, not local time.
                                   last_updated=datetime.utcnow())
    db.session.add(workflow_model)
    db.session.commit()
    return workflow_model
@staticmethod
def get_standalone_workflow_specs():
    """Return every workflow spec flagged as standalone (runnable outside a study)."""
    query = db.session.query(WorkflowSpecModel)
    return query.filter_by(standalone=True).all()

View File

@ -47,6 +47,6 @@ class WorkflowSyncService(object):
return json.loads(response.text) return json.loads(response.text)
else: else:
raise ApiError("workflow_sync_error", raise ApiError("workflow_sync_error",
"Received an invalid response from the protocol builder (status %s): %s when calling " "Received an invalid response from the remote CR-Connect API (status %s): %s when calling "
"url '%s'." % "url '%s'." %
(response.status_code, response.text, url)) (response.status_code, response.text, url))

View File

@ -266,7 +266,7 @@ class ExampleDataLoader:
from_tests=True) from_tests=True)
def create_spec(self, id, name, display_name="", description="", filepath=None, master_spec=False, def create_spec(self, id, name, display_name="", description="", filepath=None, master_spec=False,
category_id=None, display_order=None, from_tests=False): category_id=None, display_order=None, from_tests=False, standalone=False):
"""Assumes that a directory exists in static/bpmn with the same name as the given id. """Assumes that a directory exists in static/bpmn with the same name as the given id.
further assumes that the [id].bpmn is the primary file for the workflow. further assumes that the [id].bpmn is the primary file for the workflow.
returns an array of data models to be added to the database.""" returns an array of data models to be added to the database."""
@ -278,7 +278,8 @@ class ExampleDataLoader:
description=description, description=description,
is_master_spec=master_spec, is_master_spec=master_spec,
category_id=category_id, category_id=category_id,
display_order=display_order) display_order=display_order,
standalone=standalone)
db.session.add(spec) db.session.add(spec)
db.session.commit() db.session.commit()
if not filepath and not from_tests: if not filepath and not from_tests:

View File

@ -0,0 +1,28 @@
"""empty message
Revision ID: 62910318009f
Revises: 665624ac29f1
Create Date: 2021-04-28 14:09:57.648732
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '62910318009f'
down_revision = '665624ac29f1'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('file_data', sa.Column('size', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('file_data', 'size')
# ### end Alembic commands ###

View File

@ -19,12 +19,11 @@ depends_on = None
def upgrade(): def upgrade():
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.add_column('data_store', sa.Column('file_id', sa.Integer(), nullable=True)) op.add_column('data_store', sa.Column('file_id', sa.Integer(), nullable=True))
op.create_foreign_key(None, 'data_store', 'file', ['file_id'], ['id']) op.create_foreign_key('file_id_key', 'data_store', 'file', ['file_id'], ['id'])
# ### end Alembic commands ### # ### end Alembic commands ###
def downgrade(): def downgrade():
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'data_store', type_='foreignkey') op.drop_constraint('file_id_key', 'data_store', type_='foreignkey')
op.drop_column('data_store', 'file_id') op.drop_column('data_store', 'file_id')
# ### end Alembic commands ### # ### end Alembic commands ###

View File

@ -0,0 +1,30 @@
"""empty message
Revision ID: 8b976945a54e
Revises: c872232ebdcb
Create Date: 2021-04-18 11:42:41.894378
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8b976945a54e'
down_revision = 'c872232ebdcb'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('workflow', sa.Column('user_id', sa.String(), nullable=True))
op.add_column('workflow_spec', sa.Column('standalone', sa.Boolean(), default=False))
op.execute("UPDATE workflow_spec SET standalone=False WHERE standalone is null;")
op.execute("ALTER TABLE task_event ALTER COLUMN study_id DROP NOT NULL")
def downgrade():
op.execute("UPDATE workflow SET user_id=NULL WHERE user_id is not NULL")
op.drop_column('workflow', 'user_id')
op.drop_column('workflow_spec', 'standalone')
op.execute("ALTER TABLE task_event ALTER COLUMN study_id SET NOT NULL ")

View File

@ -0,0 +1,28 @@
"""update type on task_events table and workflow table
Revision ID: abeffe547305
Revises: 665624ac29f1
Create Date: 2021-04-28 08:51:16.220260
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'abeffe547305'
down_revision = '665624ac29f1'
branch_labels = None
depends_on = None
def upgrade():
op.execute("alter table task_event alter column date type timestamp with time zone")
op.execute("alter table workflow alter column last_updated type timestamp with time zone")
pass
def downgrade():
op.execute("alter table task_event alter column date type timestamp without time zone")
op.execute("alter table workflow alter column last_updated type timestamp without time zone")
pass

View File

@ -0,0 +1,38 @@
"""empty message
Revision ID: bbf064082623
Revises: c1449d1d1681
Create Date: 2021-05-13 15:07:44.463757
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
from sqlalchemy import func
revision = 'bbf064082623'
down_revision = 'c1449d1d1681'
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('data_store', 'last_updated', server_default=func.now())
op.alter_column('file_data', 'date_created', server_default=func.now())
op.alter_column('data_store', 'last_updated', server_default=func.now())
op.alter_column('ldap_model', 'date_cached', server_default=func.now())
op.alter_column('study', 'last_updated', server_default=func.now())
op.alter_column('study_event', 'create_date', server_default=func.now())
op.alter_column('workflow', 'last_updated', server_default=func.now())
def downgrade():
op.alter_column('data_store', 'last_updated', server_default=None)
op.alter_column('file_data', 'date_created', server_default=None)
op.alter_column('data_store', 'last_updated', server_default=None)
op.alter_column('ldap_model', 'date_cached', server_default=None)
op.alter_column('study', 'last_updated', server_default=None)
op.alter_column('study_event', 'create_date', server_default=None)
op.alter_column('workflow', 'last_updated', server_default=None)

View File

@ -0,0 +1,24 @@
"""empty message
Revision ID: c1449d1d1681
Revises: abeffe547305, 8b976945a54e, 62910318009f
Create Date: 2021-05-04 13:20:55.447143
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c1449d1d1681'
down_revision = ('abeffe547305', '8b976945a54e', '62910318009f')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass

View File

@ -10,3 +10,5 @@ services:
- POSTGRES_USER=${DB_USER} - POSTGRES_USER=${DB_USER}
- POSTGRES_PASSWORD=${DB_PASS} - POSTGRES_PASSWORD=${DB_PASS}
- POSTGRES_MULTIPLE_DATABASES=crc_dev,crc_test,pb,pb_test - POSTGRES_MULTIPLE_DATABASES=crc_dev,crc_test,pb,pb_test
- TZ=America/New_York
- PGTZ=America/New_York

View File

@ -70,7 +70,7 @@ class BaseTest(unittest.TestCase):
{ {
'id': 0, 'id': 0,
'title': 'The impact of fried pickles on beer consumption in bipedal software developers.', 'title': 'The impact of fried pickles on beer consumption in bipedal software developers.',
'last_updated': datetime.datetime.now(), 'last_updated': datetime.datetime.utcnow(),
'status': StudyStatus.in_progress, 'status': StudyStatus.in_progress,
'primary_investigator_id': 'dhf8r', 'primary_investigator_id': 'dhf8r',
'sponsor': 'Sartography Pharmaceuticals', 'sponsor': 'Sartography Pharmaceuticals',
@ -80,7 +80,7 @@ class BaseTest(unittest.TestCase):
{ {
'id': 1, 'id': 1,
'title': 'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels', 'title': 'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
'last_updated': datetime.datetime.now(), 'last_updated': datetime.datetime.utcnow(),
'status': StudyStatus.in_progress, 'status': StudyStatus.in_progress,
'primary_investigator_id': 'dhf8r', 'primary_investigator_id': 'dhf8r',
'sponsor': 'Makerspace & Co.', 'sponsor': 'Makerspace & Co.',
@ -131,7 +131,7 @@ class BaseTest(unittest.TestCase):
user = UserService.current_user(allow_admin_impersonate=True) user = UserService.current_user(allow_admin_impersonate=True)
self.assertEqual(uid, user.uid, 'Logged in user should match given user uid') self.assertEqual(uid, user.uid, 'Logged in user should match given user uid')
return dict(Authorization='Bearer ' + user_model.encode_auth_token().decode()) return dict(Authorization='Bearer ' + user_model.encode_auth_token())
def delete_example_data(self, use_crc_data=False, use_rrt_data=False): def delete_example_data(self, use_crc_data=False, use_rrt_data=False):
""" """
@ -175,11 +175,6 @@ class BaseTest(unittest.TestCase):
specs = session.query(WorkflowSpecModel).all() specs = session.query(WorkflowSpecModel).all()
self.assertIsNotNone(specs) self.assertIsNotNone(specs)
for spec in specs:
files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all()
self.assertIsNotNone(files)
self.assertGreater(len(files), 0)
for spec in specs: for spec in specs:
files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all() files = session.query(FileModel).filter_by(workflow_spec_id=spec.id).all()
self.assertIsNotNone(files) self.assertIsNotNone(files)
@ -379,6 +374,10 @@ class BaseTest(unittest.TestCase):
def complete_form(self, workflow_in, task_in, dict_data, update_all=False, error_code=None, terminate_loop=None, def complete_form(self, workflow_in, task_in, dict_data, update_all=False, error_code=None, terminate_loop=None,
user_uid="dhf8r"): user_uid="dhf8r"):
# workflow_in should be a workflow, not a workflow_api
# we were passing in workflow_api in many of our tests, and
# this caused problems testing standalone workflows
standalone = getattr(workflow_in.workflow_spec, 'standalone', False)
prev_completed_task_count = workflow_in.completed_tasks prev_completed_task_count = workflow_in.completed_tasks
if isinstance(task_in, dict): if isinstance(task_in, dict):
task_id = task_in["id"] task_id = task_in["id"]
@ -421,7 +420,8 @@ class BaseTest(unittest.TestCase):
.order_by(TaskEventModel.date.desc()).all() .order_by(TaskEventModel.date.desc()).all()
self.assertGreater(len(task_events), 0) self.assertGreater(len(task_events), 0)
event = task_events[0] event = task_events[0]
self.assertIsNotNone(event.study_id) if not standalone:
self.assertIsNotNone(event.study_id)
self.assertEqual(user_uid, event.user_uid) self.assertEqual(user_uid, event.user_uid)
self.assertEqual(workflow.id, event.workflow_id) self.assertEqual(workflow.id, event.workflow_id)
self.assertEqual(workflow.workflow_spec_id, event.workflow_spec_id) self.assertEqual(workflow.workflow_spec_id, event.workflow_spec_id)

View File

@ -0,0 +1,58 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_0ixyfs0" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_HelloWorld" name="Hello World Process" isExecutable="true">
<bpmn:documentation>This workflow asks for a name and says hello</bpmn:documentation>
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_0qyd2b7</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_0qyd2b7" sourceRef="StartEvent_1" targetRef="Task_GetName" />
<bpmn:userTask id="Task_GetName" name="Get Name" camunda:formKey="Name">
<bpmn:documentation>Hello</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="name" label="Name" type="string" defaultValue="World" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0qyd2b7</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1h46b40</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="SequenceFlow_1h46b40" sourceRef="Task_GetName" targetRef="Task_SayHello" />
<bpmn:manualTask id="Task_SayHello" name="Say Hello">
<bpmn:documentation>Hello {{name}}</bpmn:documentation>
<bpmn:incoming>SequenceFlow_1h46b40</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0lqrc6e</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:endEvent id="EndEvent_1l03lqw">
<bpmn:incoming>SequenceFlow_0lqrc6e</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_0lqrc6e" sourceRef="Task_SayHello" targetRef="EndEvent_1l03lqw" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_HelloWorld">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0qyd2b7_di" bpmnElement="SequenceFlow_0qyd2b7">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="UserTask_0fbucz7_di" bpmnElement="Task_GetName">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1h46b40_di" bpmnElement="SequenceFlow_1h46b40">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ManualTask_1tia2zr_di" bpmnElement="Task_SayHello">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_1l03lqw_di" bpmnElement="EndEvent_1l03lqw">
<dc:Bounds x="592" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0lqrc6e_di" bpmnElement="SequenceFlow_0lqrc6e">
<di:waypoint x="530" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,52 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1e8c8os" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.5.0">
<bpmn:process id="Process_GetIRBInfo" name="Get IRB Info" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_0xey0zw</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_0xey0zw" sourceRef="StartEvent_1" targetRef="Task_GetInfo" />
<bpmn:scriptTask id="Task_GetInfo" name="Get IRB Info">
<bpmn:incoming>SequenceFlow_0xey0zw</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_03hympo</bpmn:outgoing>
<bpmn:script>irb_info = get_irb_info()</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_03hympo" sourceRef="Task_GetInfo" targetRef="Task_PrintInfo" />
<bpmn:endEvent id="EndEvent_0qdzlqr">
<bpmn:incoming>SequenceFlow_1s6cthx</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_1s6cthx" sourceRef="Task_PrintInfo" targetRef="EndEvent_0qdzlqr" />
<bpmn:manualTask id="Task_PrintInfo" name="Print IRB Info">
<bpmn:documentation>IRB Info: {{irb_info}}</bpmn:documentation>
<bpmn:incoming>SequenceFlow_03hympo</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1s6cthx</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_GetIRBInfo">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0xey0zw_di" bpmnElement="SequenceFlow_0xey0zw">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ScriptTask_0xmrk10_di" bpmnElement="Task_GetInfo">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_03hympo_di" bpmnElement="SequenceFlow_03hympo">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="EndEvent_0qdzlqr_di" bpmnElement="EndEvent_0qdzlqr">
<dc:Bounds x="592" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1s6cthx_di" bpmnElement="SequenceFlow_1s6cthx">
<di:waypoint x="530" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ManualTask_1uh1r6q_di" bpmnElement="Task_PrintInfo">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,38 @@
[
{
"AGENDA_DATE": "2021-04-15T00:00:00+00:00",
"DATE_MODIFIED": "2021-04-15T00:00:00+00:00",
"IRBEVENT": "IRB Event 1",
"IRB_ADMINISTRATIVE_REVIEWER": "IRB Admin Reviewer 1",
"IRB_OF_RECORD": "IRB of Record 1",
"IRB_REVIEW_TYPE": "IRB Review Type 1",
"IRB_STATUS": "IRB Status 1",
"STUDYIRBREVIEWERADMIN": "Study IRB Review Admin 1",
"UVA_IRB_HSR_IS_IRB_OF_RECORD_FOR_ALL_SITES": 1,
"UVA_STUDY_TRACKING": "UVA Study Tracking 1"
},
{
"AGENDA_DATE": "2021-04-15T00:00:00+00:00",
"DATE_MODIFIED": "2021-04-15T00:00:00+00:00",
"IRBEVENT": "IRB Event 2",
"IRB_ADMINISTRATIVE_REVIEWER": "IRB Admin Reviewer 2",
"IRB_OF_RECORD": "IRB of Record 2",
"IRB_REVIEW_TYPE": "IRB Review Type 2",
"IRB_STATUS": "IRB Status 2",
"STUDYIRBREVIEWERADMIN": "Study IRB Review Admin 2",
"UVA_IRB_HSR_IS_IRB_OF_RECORD_FOR_ALL_SITES": 2,
"UVA_STUDY_TRACKING": "UVA Study Tracking 2"
},
{
"AGENDA_DATE": "2021-04-15T00:00:00+00:00",
"DATE_MODIFIED": "2021-04-15T00:00:00+00:00",
"IRBEVENT": "IRB Event 3",
"IRB_ADMINISTRATIVE_REVIEWER": "IRB Admin Reviewer 3",
"IRB_OF_RECORD": "IRB of Record 3",
"IRB_REVIEW_TYPE": "IRB Review Type 3",
"IRB_STATUS": "IRB Status 3",
"STUDYIRBREVIEWERADMIN": "Study IRB Review Admin 3",
"UVA_IRB_HSR_IS_IRB_OF_RECORD_FOR_ALL_SITES": 3,
"UVA_STUDY_TRACKING": "UVA Study Tracking 3"
}
]

View File

@ -15,7 +15,7 @@ class FakeGithubCreates(Mock):
def get_repo(var, name): def get_repo(var, name):
class FakeRepo(Mock): class FakeRepo(Mock):
def get_contents(var, filename, ref): def get_contents(var, filename, ref):
raise UnknownObjectException(status='Failure', data='Failed data') raise UnknownObjectException(status='Failure', data='Failed data', headers=[])
def update_file(var, path, message, content, sha, branch): def update_file(var, path, message, content, sha, branch):
pass pass
return FakeRepo() return FakeRepo()
@ -72,7 +72,7 @@ class TestFileService(BaseTest):
file_data = FileService.get_workflow_data_files(workflow_id=workflow.id) file_data = FileService.get_workflow_data_files(workflow_id=workflow.id)
self.assertEqual(1, len(file_data)) self.assertEqual(1, len(file_data))
self.assertEqual(2, file_data[0].version) self.assertEqual(2, file_data[0].version)
self.assertEquals(4, file_data[0].size) # File dat size is included.
def test_add_file_from_form_increments_version_and_replaces_on_subsequent_add_with_same_name(self): def test_add_file_from_form_increments_version_and_replaces_on_subsequent_add_with_same_name(self):
self.load_example_data() self.load_example_data()

View File

@ -181,11 +181,11 @@ class TestFilesApi(BaseTest):
data['file'] = io.BytesIO(self.minimal_bpmn("abcdef")), 'my_new_file.bpmn' data['file'] = io.BytesIO(self.minimal_bpmn("abcdef")), 'my_new_file.bpmn'
rv = self.app.post('/v1.0/file?workflow_spec_id=%s' % spec.id, data=data, follow_redirects=True, rv = self.app.post('/v1.0/file?workflow_spec_id=%s' % spec.id, data=data, follow_redirects=True,
content_type='multipart/form-data', headers=self.logged_in_headers()) content_type='multipart/form-data', headers=self.logged_in_headers())
json_data = json.loads(rv.get_data(as_text=True)) file_json = json.loads(rv.get_data(as_text=True))
file = FileModelSchema().load(json_data, session=session) self.assertEquals(80, file_json['size'])
data['file'] = io.BytesIO(self.minimal_bpmn("efghijk")), 'my_new_file.bpmn' data['file'] = io.BytesIO(self.minimal_bpmn("efghijk")), 'my_new_file.bpmn'
rv = self.app.put('/v1.0/file/%i/data' % file.id, data=data, follow_redirects=True, rv = self.app.put('/v1.0/file/%i/data' % file_json['id'], data=data, follow_redirects=True,
content_type='multipart/form-data', headers=self.logged_in_headers()) content_type='multipart/form-data', headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
self.assertIsNotNone(rv.get_data()) self.assertIsNotNone(rv.get_data())
@ -193,14 +193,14 @@ class TestFilesApi(BaseTest):
self.assertEqual(2, file_json['latest_version']) self.assertEqual(2, file_json['latest_version'])
self.assertEqual(FileType.bpmn.value, file_json['type']) self.assertEqual(FileType.bpmn.value, file_json['type'])
self.assertEqual("application/octet-stream", file_json['content_type']) self.assertEqual("application/octet-stream", file_json['content_type'])
self.assertEqual(spec.id, file.workflow_spec_id) self.assertEqual(spec.id, file_json['workflow_spec_id'])
# Assure it is updated in the database and properly persisted. # Assure it is updated in the database and properly persisted.
file_model = session.query(FileModel).filter(FileModel.id == file.id).first() file_model = session.query(FileModel).filter(FileModel.id == file_json['id']).first()
file_data = FileService.get_file_data(file_model.id) file_data = FileService.get_file_data(file_model.id)
self.assertEqual(2, file_data.version) self.assertEqual(2, file_data.version)
rv = self.app.get('/v1.0/file/%i/data' % file.id, headers=self.logged_in_headers()) rv = self.app.get('/v1.0/file/%i/data' % file_json['id'], headers=self.logged_in_headers())
self.assert_success(rv) self.assert_success(rv)
data = rv.get_data() data = rv.get_data()
self.assertIsNotNone(data) self.assertIsNotNone(data)

View File

@ -1,8 +1,8 @@
import json import json
from profile import Profile from profile import Profile
from tests.base_test import BaseTest
from crc.services.ldap_service import LdapService from crc.services.ldap_service import LdapService
from tests.base_test import BaseTest
from datetime import datetime, timezone from datetime import datetime, timezone
from unittest.mock import patch from unittest.mock import patch
@ -25,7 +25,7 @@ class TestStudyApi(BaseTest):
TEST_STUDY = { TEST_STUDY = {
"title": "Phase III Trial of Genuine People Personalities (GPP) Autonomous Intelligent Emotional Agents " "title": "Phase III Trial of Genuine People Personalities (GPP) Autonomous Intelligent Emotional Agents "
"for Interstellar Spacecraft", "for Interstellar Spacecraft",
"last_updated": datetime.now(tz=timezone.utc), "last_updated": datetime.utcnow(),
"primary_investigator_id": "tmm2x", "primary_investigator_id": "tmm2x",
"user_uid": "dhf8r", "user_uid": "dhf8r",
} }
@ -113,10 +113,9 @@ class TestStudyApi(BaseTest):
self.assertEqual(study["ind_number"], db_study.ind_number) self.assertEqual(study["ind_number"], db_study.ind_number)
self.assertEqual(study["user_uid"], db_study.user_uid) self.assertEqual(study["user_uid"], db_study.user_uid)
workflow_spec_count =session.query(WorkflowSpecModel).filter(WorkflowSpecModel.is_master_spec == False).count() workflow_spec_count =session.query(WorkflowSpecModel).count()
workflow_count = session.query(WorkflowModel).filter(WorkflowModel.study_id == study['id']).count() workflow_count = session.query(WorkflowModel).filter(WorkflowModel.study_id == study['id']).count()
error_count = len(study["errors"]) self.assertEqual(workflow_spec_count, workflow_count)
self.assertEqual(workflow_spec_count, workflow_count + error_count)
study_event = session.query(StudyEvent).first() study_event = session.query(StudyEvent).first()
self.assertIsNotNone(study_event) self.assertIsNotNone(study_event)

View File

@ -72,7 +72,7 @@ class TestStudyCancellations(BaseTest):
workflow, study_id = self.load_workflow() workflow, study_id = self.load_workflow()
workflow_api, first_task = self.get_first_task(workflow) workflow_api, first_task = self.get_first_task(workflow)
self.complete_form(workflow_api, first_task, {}) self.complete_form(workflow, first_task, {})
study_result = self.put_study_on_hold(study_id) study_result = self.put_study_on_hold(study_id)
self.assertEqual('New Title', study_result.title) self.assertEqual('New Title', study_result.title)
@ -82,10 +82,10 @@ class TestStudyCancellations(BaseTest):
workflow, study_id = self.load_workflow() workflow, study_id = self.load_workflow()
workflow_api, first_task = self.get_first_task(workflow) workflow_api, first_task = self.get_first_task(workflow)
self.complete_form(workflow_api, first_task, {}) self.complete_form(workflow, first_task, {})
workflow_api, next_task = self.get_second_task(workflow) workflow_api, next_task = self.get_second_task(workflow)
self.complete_form(workflow_api, next_task, {'how_many': 3}) self.complete_form(workflow, next_task, {'how_many': 3})
study_result = self.put_study_on_hold(study_id) study_result = self.put_study_on_hold(study_id)
self.assertEqual('Second Title', study_result.title) self.assertEqual('Second Title', study_result.title)
@ -95,13 +95,13 @@ class TestStudyCancellations(BaseTest):
workflow, study_id = self.load_workflow() workflow, study_id = self.load_workflow()
workflow_api, first_task = self.get_first_task(workflow) workflow_api, first_task = self.get_first_task(workflow)
self.complete_form(workflow_api, first_task, {}) self.complete_form(workflow, first_task, {})
workflow_api, second_task = self.get_second_task(workflow) workflow_api, second_task = self.get_second_task(workflow)
self.complete_form(workflow_api, second_task, {'how_many': 3}) self.complete_form(workflow, second_task, {'how_many': 3})
workflow_api, third_task = self.get_third_task(workflow) workflow_api, third_task = self.get_third_task(workflow)
self.complete_form(workflow_api, third_task, {}) self.complete_form(workflow, third_task, {})
study_result = self.put_study_on_hold(study_id) study_result = self.put_study_on_hold(study_id)
self.assertEqual('Beer consumption in the bipedal software engineer', study_result.title) self.assertEqual('Beer consumption in the bipedal software engineer', study_result.title)

View File

@ -47,7 +47,7 @@ class TestStudyService(BaseTest):
self.assertIsNotNone(study.id) self.assertIsNotNone(study.id)
workflow = WorkflowModel(workflow_spec_id="random_fact", study_id=study.id, workflow = WorkflowModel(workflow_spec_id="random_fact", study_id=study.id,
status=WorkflowStatus.not_started, last_updated=datetime.now()) status=WorkflowStatus.not_started, last_updated=datetime.utcnow())
db.session.add(workflow) db.session.add(workflow)
db.session.commit() db.session.commit()
# Assure there is a master specification, one standard spec, and lookup tables. # Assure there is a master specification, one standard spec, and lookup tables.

View File

@ -33,7 +33,7 @@ class TestAuthentication(BaseTest):
user_1 = UserModel(uid="dhf8r") user_1 = UserModel(uid="dhf8r")
expected_exp_1 = timegm((datetime.utcnow() + timedelta(hours=new_ttl)).utctimetuple()) expected_exp_1 = timegm((datetime.utcnow() + timedelta(hours=new_ttl)).utctimetuple())
auth_token_1 = user_1.encode_auth_token() auth_token_1 = user_1.encode_auth_token()
self.assertTrue(isinstance(auth_token_1, bytes)) self.assertTrue(isinstance(auth_token_1, str))
self.assertEqual("dhf8r", user_1.decode_auth_token(auth_token_1).get("sub")) self.assertEqual("dhf8r", user_1.decode_auth_token(auth_token_1).get("sub"))
#actual_exp_1 = user_1.decode_auth_token(auth_token_1).get("exp") #actual_exp_1 = user_1.decode_auth_token(auth_token_1).get("exp")
#self.assertTrue(expected_exp_1 - 1000 <= actual_exp_1 <= expected_exp_1 + 1000) #self.assertTrue(expected_exp_1 - 1000 <= actual_exp_1 <= expected_exp_1 + 1000)
@ -131,7 +131,7 @@ class TestAuthentication(BaseTest):
admin_user = self._login_as_admin() admin_user = self._login_as_admin()
admin_study = self._make_fake_study(admin_user.uid) admin_study = self._make_fake_study(admin_user.uid)
admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode()) admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token())
rv_add_study = self.app.post( rv_add_study = self.app.post(
'/v1.0/study', '/v1.0/study',
@ -164,7 +164,7 @@ class TestAuthentication(BaseTest):
# Non-admin user should not be able to delete a study # Non-admin user should not be able to delete a study
non_admin_user = self._login_as_non_admin() non_admin_user = self._login_as_non_admin()
non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode()) non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token())
non_admin_study = self._make_fake_study(non_admin_user.uid) non_admin_study = self._make_fake_study(non_admin_user.uid)
rv_add_study = self.app.post( rv_add_study = self.app.post(
@ -211,7 +211,7 @@ class TestAuthentication(BaseTest):
self.load_example_data() self.load_example_data()
admin_user = self._login_as_admin() admin_user = self._login_as_admin()
admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode()) admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token())
# User should not be in the system yet. # User should not be in the system yet.
non_admin_user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first() non_admin_user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first()
@ -230,7 +230,7 @@ class TestAuthentication(BaseTest):
self.logout() self.logout()
non_admin_user = self._login_as_non_admin() non_admin_user = self._login_as_non_admin()
self.assertEqual(non_admin_user.uid, self.non_admin_uid) self.assertEqual(non_admin_user.uid, self.non_admin_uid)
non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode()) non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token())
# Add a study for the non-admin user # Add a study for the non-admin user
non_admin_study = self._make_fake_study(self.non_admin_uid) non_admin_study = self._make_fake_study(self.non_admin_uid)
@ -273,7 +273,7 @@ class TestAuthentication(BaseTest):
def _make_fake_study(self, uid): def _make_fake_study(self, uid):
return { return {
"title": "blah", "title": "blah",
"last_updated": datetime.now(tz=timezone.utc), "last_updated": datetime.utcnow(),
"status": StudyStatus.in_progress, "status": StudyStatus.in_progress,
"primary_investigator_id": uid, "primary_investigator_id": uid,
"user_uid": uid, "user_uid": uid,

View File

@ -13,7 +13,8 @@ class TestAutoSetPrimaryBPMN(BaseTest):
category_id = session.query(WorkflowSpecCategoryModel).first().id category_id = session.query(WorkflowSpecCategoryModel).first().id
# Add a workflow spec # Add a workflow spec
spec = WorkflowSpecModel(id='make_cookies', name='make_cookies', display_name='Cooooookies', spec = WorkflowSpecModel(id='make_cookies', name='make_cookies', display_name='Cooooookies',
description='Om nom nom delicious cookies', category_id=category_id) description='Om nom nom delicious cookies', category_id=category_id,
standalone=False)
rv = self.app.post('/v1.0/workflow-specification', rv = self.app.post('/v1.0/workflow-specification',
headers=self.logged_in_headers(), headers=self.logged_in_headers(),
content_type="application/json", content_type="application/json",

View File

@ -23,7 +23,6 @@ class TestEmailScript(BaseTest):
first_task = self.get_workflow_api(workflow).next_task first_task = self.get_workflow_api(workflow).next_task
workflow = self.get_workflow_api(workflow)
self.complete_form(workflow, first_task, {'subject': 'My Email Subject', 'recipients': 'test@example.com'}) self.complete_form(workflow, first_task, {'subject': 'My Email Subject', 'recipients': 'test@example.com'})
self.assertEqual(1, len(outbox)) self.assertEqual(1, len(outbox))
@ -49,7 +48,6 @@ class TestEmailScript(BaseTest):
def test_bad_email_address_1(self): def test_bad_email_address_1(self):
workflow = self.create_workflow('email_script') workflow = self.create_workflow('email_script')
first_task = self.get_workflow_api(workflow).next_task first_task = self.get_workflow_api(workflow).next_task
workflow = self.get_workflow_api(workflow)
with self.assertRaises(AssertionError): with self.assertRaises(AssertionError):
self.complete_form(workflow, first_task, {'recipients': 'test@example'}) self.complete_form(workflow, first_task, {'recipients': 'test@example'})
@ -57,7 +55,6 @@ class TestEmailScript(BaseTest):
def test_bad_email_address_2(self): def test_bad_email_address_2(self):
workflow = self.create_workflow('email_script') workflow = self.create_workflow('email_script')
first_task = self.get_workflow_api(workflow).next_task first_task = self.get_workflow_api(workflow).next_task
workflow = self.get_workflow_api(workflow)
with self.assertRaises(AssertionError): with self.assertRaises(AssertionError):
self.complete_form(workflow, first_task, {'recipients': 'test'}) self.complete_form(workflow, first_task, {'recipients': 'test'})

View File

@ -0,0 +1,21 @@
from tests.base_test import BaseTest
from crc import app, session
from crc.services.protocol_builder import ProtocolBuilderService
from unittest.mock import patch
class TestIRBInfo(BaseTest):
@patch('crc.services.protocol_builder.requests.get')
def test_irb_info_script(self, mock_get):
app.config['PB_ENABLED'] = True
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('irb_info.json')
workflow = self.create_workflow('irb_info_script')
irb_info = ProtocolBuilderService.get_irb_info(workflow.study_id)
workflow_api = self.get_workflow_api(workflow)
first_task = workflow_api.next_task
self.assertEqual('Task_PrintInfo', first_task.name)
self.assertEqual(f'IRB Info: {irb_info}', first_task.documentation)

View File

@ -0,0 +1,23 @@
from tests.base_test import BaseTest
from crc import session
from crc.models.user import UserModel
from crc.services.workflow_service import WorkflowService
from example_data import ExampleDataLoader
class TestNoStudyWorkflow(BaseTest):
def test_no_study_workflow(self):
self.load_example_data()
spec = ExampleDataLoader().create_spec('hello_world', 'Hello World', standalone=True, from_tests=True)
user = session.query(UserModel).first()
self.assertIsNotNone(user)
workflow_model = WorkflowService.get_workflow_from_spec(spec.id, user)
workflow_api = self.get_workflow_api(workflow_model)
first_task = workflow_api.next_task
self.complete_form(workflow_model, first_task, {'name': 'Big Guy'})
workflow_api = self.get_workflow_api(workflow_model)
second_task = workflow_api.next_task
self.assertEqual(second_task.documentation, 'Hello Big Guy')

View File

@ -13,10 +13,10 @@ class TestMessageEvent(BaseTest):
# Start the workflow. # Start the workflow.
first_task = self.get_workflow_api(workflow).next_task first_task = self.get_workflow_api(workflow).next_task
self.assertEqual('Activity_GetData', first_task.name) self.assertEqual('Activity_GetData', first_task.name)
workflow = self.get_workflow_api(workflow)
self.complete_form(workflow, first_task, {'formdata': 'asdf'}) self.complete_form(workflow, first_task, {'formdata': 'asdf'})
workflow = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
self.assertEqual('Activity_HowMany', workflow.next_task.name) self.assertEqual('Activity_HowMany', workflow_api.next_task.name)
# reset the workflow # reset the workflow
# this ultimately calls crc.api.workflow.set_current_task # this ultimately calls crc.api.workflow.set_current_task

View File

@ -67,14 +67,14 @@ class TestMultiinstanceTasksApi(BaseTest):
content_type="application/json") content_type="application/json")
self.assert_success(rv) self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True)) json_data = json.loads(rv.get_data(as_text=True))
workflow = WorkflowApiSchema().load(json_data) workflow_api = WorkflowApiSchema().load(json_data)
data = workflow.next_task.data data = workflow_api.next_task.data
data['investigator']['email'] = "dhf8r@virginia.edu" data['investigator']['email'] = "dhf8r@virginia.edu"
self.complete_form(workflow, workflow.next_task, data) self.complete_form(workflow, workflow_api.next_task, data)
#tasks = self.get_workflow_api(workflow).user_tasks #tasks = self.get_workflow_api(workflow).user_tasks
workflow = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
self.assertEqual(WorkflowStatus.complete, workflow.status) self.assertEqual(WorkflowStatus.complete, workflow_api.status)
@patch('crc.services.protocol_builder.requests.get') @patch('crc.services.protocol_builder.requests.get')

View File

@ -60,3 +60,15 @@ class TestProtocolBuilder(BaseTest):
self.assertEqual(2, response[0]["SS_STUDY"]) self.assertEqual(2, response[0]["SS_STUDY"])
self.assertEqual(2453, response[0]["SPONSOR_ID"]) self.assertEqual(2453, response[0]["SPONSOR_ID"])
self.assertEqual("Abbott Ltd", response[0]["SP_NAME"]) self.assertEqual("Abbott Ltd", response[0]["SP_NAME"])
@patch('crc.services.protocol_builder.requests.get')
def test_get_irb_info(self, mock_get):
app.config['PB_ENABLED'] = True
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('irb_info.json')
response = ProtocolBuilderService.get_irb_info(self.test_study_id)
self.assertIsNotNone(response)
self.assertEqual(3, len(response))
self.assertEqual('IRB Event 1', response[0]["IRBEVENT"])
self.assertEqual('IRB Event 2', response[1]["IRBEVENT"])
self.assertEqual('IRB Event 3', response[2]["IRBEVENT"])

View File

@ -386,15 +386,15 @@ class TestTasksApi(BaseTest):
# Start the workflow. # Start the workflow.
first_task = self.get_workflow_api(workflow).next_task first_task = self.get_workflow_api(workflow).next_task
self.complete_form(workflow, first_task, {"has_bananas": True}) self.complete_form(workflow, first_task, {"has_bananas": True})
workflow = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
self.assertEqual('Task_Num_Bananas', workflow.next_task.name) self.assertEqual('Task_Num_Bananas', workflow_api.next_task.name)
# Trying to re-submit the initial task, and answer differently, should result in an error. # Trying to re-submit the initial task, and answer differently, should result in an error.
self.complete_form(workflow, first_task, {"has_bananas": False}, error_code="invalid_state") self.complete_form(workflow, first_task, {"has_bananas": False}, error_code="invalid_state")
# Go ahead and set the number of bananas. # Go ahead and set the number of bananas.
workflow = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
task = workflow.next_task task = workflow_api.next_task
self.complete_form(workflow, task, {"num_bananas": 4}) self.complete_form(workflow, task, {"num_bananas": 4})
# We are now at the end of the workflow. # We are now at the end of the workflow.
@ -405,19 +405,19 @@ class TestTasksApi(BaseTest):
content_type="application/json") content_type="application/json")
self.assert_success(rv) self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True)) json_data = json.loads(rv.get_data(as_text=True))
workflow = WorkflowApiSchema().load(json_data) workflow_api = WorkflowApiSchema().load(json_data)
# Assure the Next Task is the one we just reset the token to be on. # Assure the Next Task is the one we just reset the token to be on.
self.assertEqual("Task_Has_Bananas", workflow.next_task.name) self.assertEqual("Task_Has_Bananas", workflow_api.next_task.name)
# Go ahead and get that workflow one more time, it should still be right. # Go ahead and get that workflow one more time, it should still be right.
workflow = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
# Assure the Next Task is the one we just reset the token to be on. # Assure the Next Task is the one we just reset the token to be on.
self.assertEqual("Task_Has_Bananas", workflow.next_task.name) self.assertEqual("Task_Has_Bananas", workflow_api.next_task.name)
# The next task should be a different value. # The next task should be a different value.
self.complete_form(workflow, workflow.next_task, {"has_bananas": False}) self.complete_form(workflow, workflow_api.next_task, {"has_bananas": False})
workflow = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
self.assertEqual('Task_Why_No_Bananas', workflow.next_task.name) self.assertEqual('Task_Why_No_Bananas', workflow_api.next_task.name)

View File

@ -52,7 +52,7 @@ class TestWorkflowSync(BaseTest):
self.load_example_data() self.load_example_data()
othersys = get_all_spec_state() othersys = get_all_spec_state()
rf2pos = get_random_fact_pos(othersys) rf2pos = get_random_fact_pos(othersys)
othersys[rf2pos]['date_created'] = str(datetime.now()) othersys[rf2pos]['date_created'] = str(datetime.utcnow())
othersys[rf2pos]['md5_hash'] = '12345' othersys[rf2pos]['md5_hash'] = '12345'
mock_get.return_value = othersys mock_get.return_value = othersys
response = get_changed_workflows('localhost:0000') #endpoint is not used due to mock response = get_changed_workflows('localhost:0000') #endpoint is not used due to mock
@ -69,7 +69,7 @@ class TestWorkflowSync(BaseTest):
self.load_example_data() self.load_example_data()
othersys = get_all_spec_state() othersys = get_all_spec_state()
othersys.append({'workflow_spec_id':'my_new_workflow', othersys.append({'workflow_spec_id':'my_new_workflow',
'date_created':str(datetime.now()), 'date_created':str(datetime.utcnow()),
'md5_hash': '12345'}) 'md5_hash': '12345'})
mock_get.return_value = othersys mock_get.return_value = othersys
response = get_changed_workflows('localhost:0000') #endpoint is not used due to mock response = get_changed_workflows('localhost:0000') #endpoint is not used due to mock
@ -121,7 +121,7 @@ class TestWorkflowSync(BaseTest):
self.load_example_data() self.load_example_data()
othersys = get_workflow_spec_files('random_fact') othersys = get_workflow_spec_files('random_fact')
rf2pos = get_random_fact_2_pos(othersys) rf2pos = get_random_fact_2_pos(othersys)
othersys[rf2pos]['date_created'] = str(datetime.now()) othersys[rf2pos]['date_created'] = str(datetime.utcnow())
othersys[rf2pos]['md5_hash'] = '12345' othersys[rf2pos]['md5_hash'] = '12345'
mock_get.return_value = othersys mock_get.return_value = othersys
response = get_changed_files('localhost:0000','random_fact',as_df=False) #endpoint is not used due to mock response = get_changed_files('localhost:0000','random_fact',as_df=False) #endpoint is not used due to mock
@ -145,7 +145,7 @@ class TestWorkflowSync(BaseTest):
# change the remote file date and hash # change the remote file date and hash
othersys = get_workflow_spec_files('random_fact') othersys = get_workflow_spec_files('random_fact')
rf2pos = get_random_fact_2_pos(othersys) rf2pos = get_random_fact_2_pos(othersys)
othersys[rf2pos]['date_created'] = str(datetime.now()) othersys[rf2pos]['date_created'] = str(datetime.utcnow())
othersys[rf2pos]['md5_hash'] = '12345' othersys[rf2pos]['md5_hash'] = '12345'
spec_files_mock.return_value = othersys spec_files_mock.return_value = othersys
# actually go get a different file # actually go get a different file
@ -179,7 +179,7 @@ class TestWorkflowSync(BaseTest):
'primary':False, 'primary':False,
'content_type':'text/text', 'content_type':'text/text',
'primary_process_id':None, 'primary_process_id':None,
'date_created':str(datetime.now()), 'date_created':str(datetime.utcnow()),
'md5_hash':'12345' 'md5_hash':'12345'
} }
othersys.append(newfile) othersys.append(newfile)

View File

@ -7,7 +7,7 @@ class TestBooleanDefault(BaseTest):
workflow = self.create_workflow('boolean_default_value') workflow = self.create_workflow('boolean_default_value')
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
set_default_task = workflow_api.next_task set_default_task = workflow_api.next_task
result = self.complete_form(workflow_api, set_default_task, {'yes_no': yes_no}) result = self.complete_form(workflow, set_default_task, {'yes_no': yes_no})
return result return result
def test_boolean_true_string(self): def test_boolean_true_string(self):

View File

@ -7,35 +7,35 @@ class TestWorkflowEnumDefault(BaseTest):
def test_enum_default_from_value_expression(self): def test_enum_default_from_value_expression(self):
workflow = self.create_workflow('enum_value_expression') workflow = self.create_workflow('enum_value_expression')
first_task = self.get_workflow_api(workflow).next_task
self.assertEqual('Activity_UserInput', first_task.name)
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
first_task = workflow_api.next_task
self.assertEqual('Activity_UserInput', first_task.name)
result = self.complete_form(workflow_api, first_task, {'user_input': True}) result = self.complete_form(workflow, first_task, {'user_input': True})
self.assertIn('user_input', result.next_task.data) self.assertIn('user_input', result.next_task.data)
self.assertEqual(True, result.next_task.data['user_input']) self.assertEqual(True, result.next_task.data['user_input'])
self.assertIn('lookup_output', result.next_task.data) self.assertIn('lookup_output', result.next_task.data)
self.assertEqual('black', result.next_task.data['lookup_output']) self.assertEqual('black', result.next_task.data['lookup_output'])
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
self.assertEqual('Activity_PickColor', self.get_workflow_api(workflow_api).next_task.name) self.assertEqual('Activity_PickColor', workflow_api.next_task.name)
self.assertEqual({'value': 'black', 'label': 'Black'}, workflow_api.next_task.data['color_select']) self.assertEqual({'value': 'black', 'label': 'Black'}, workflow_api.next_task.data['color_select'])
# #
workflow = self.create_workflow('enum_value_expression') workflow = self.create_workflow('enum_value_expression')
first_task = self.get_workflow_api(workflow).next_task
self.assertEqual('Activity_UserInput', first_task.name)
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
first_task = workflow_api.next_task
self.assertEqual('Activity_UserInput', first_task.name)
result = self.complete_form(workflow_api, first_task, {'user_input': False}) result = self.complete_form(workflow, first_task, {'user_input': False})
self.assertIn('user_input', result.next_task.data) self.assertIn('user_input', result.next_task.data)
self.assertEqual(False, result.next_task.data['user_input']) self.assertEqual(False, result.next_task.data['user_input'])
self.assertIn('lookup_output', result.next_task.data) self.assertIn('lookup_output', result.next_task.data)
self.assertEqual('white', result.next_task.data['lookup_output']) self.assertEqual('white', result.next_task.data['lookup_output'])
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
self.assertEqual('Activity_PickColor', self.get_workflow_api(workflow_api).next_task.name) self.assertEqual('Activity_PickColor', workflow_api.next_task.name)
self.assertEqual({'value': 'white', 'label': 'White'}, workflow_api.next_task.data['color_select']) self.assertEqual({'value': 'white', 'label': 'White'}, workflow_api.next_task.data['color_select'])
def test_enum_value_expression_and_default(self): def test_enum_value_expression_and_default(self):

View File

@ -18,7 +18,7 @@ class TestFormFieldName(BaseTest):
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
first_task = workflow_api.next_task first_task = workflow_api.next_task
self.complete_form(workflow_api, first_task, {}) self.complete_form(workflow, first_task, {})
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
second_task = workflow_api.next_task second_task = workflow_api.next_task

View File

@ -34,14 +34,13 @@ class TestWorkflowHiddenRequiredField(BaseTest):
first_task = workflow_api.next_task first_task = workflow_api.next_task
self.assertEqual('Activity_Hello', first_task.name) self.assertEqual('Activity_Hello', first_task.name)
workflow_api = self.get_workflow_api(workflow)
self.complete_form(workflow_api, first_task, {}) self.complete_form(workflow, first_task, {})
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
second_task = workflow_api.next_task second_task = workflow_api.next_task
self.assertEqual('Activity_HiddenField', second_task.name) self.assertEqual('Activity_HiddenField', second_task.name)
self.complete_form(workflow_api, second_task, {}) self.complete_form(workflow, second_task, {})
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
# The color field is hidden and required. Make sure we use the default value # The color field is hidden and required. Make sure we use the default value

View File

@ -12,20 +12,20 @@ class TestWorkflowRestart(BaseTest):
workflow = self.create_workflow('message_event') workflow = self.create_workflow('message_event')
first_task = self.get_workflow_api(workflow).next_task
self.assertEqual('Activity_GetData', first_task.name)
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
first_task = workflow_api.next_task
self.assertEqual('Activity_GetData', first_task.name)
result = self.complete_form(workflow_api, first_task, {'formdata': 'asdf'}) result = self.complete_form(workflow, first_task, {'formdata': 'asdf'})
self.assertIn('formdata', result.next_task.data) self.assertIn('formdata', result.next_task.data)
self.assertEqual('asdf', result.next_task.data['formdata']) self.assertEqual('asdf', result.next_task.data['formdata'])
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
self.assertEqual('Activity_HowMany', self.get_workflow_api(workflow_api).next_task.name) self.assertEqual('Activity_HowMany', workflow_api.next_task.name)
# restart with data. should land at beginning with data # restart with data. should land at beginning with data
workflow_api = self.restart_workflow_api(result) workflow_api = self.restart_workflow_api(result)
first_task = self.get_workflow_api(workflow_api).next_task first_task = workflow_api.next_task
self.assertEqual('Activity_GetData', first_task.name) self.assertEqual('Activity_GetData', first_task.name)
self.assertIn('formdata', workflow_api.next_task.data) self.assertIn('formdata', workflow_api.next_task.data)
self.assertEqual('asdf', workflow_api.next_task.data['formdata']) self.assertEqual('asdf', workflow_api.next_task.data['formdata'])
@ -36,7 +36,6 @@ class TestWorkflowRestart(BaseTest):
self.assertEqual('Activity_GetData', first_task.name) self.assertEqual('Activity_GetData', first_task.name)
self.assertNotIn('formdata', workflow_api.next_task.data) self.assertNotIn('formdata', workflow_api.next_task.data)
def test_workflow_restart_delete_files(self): def test_workflow_restart_delete_files(self):
self.load_example_data() self.load_example_data()
irb_code = 'Study_Protocol_Document' irb_code = 'Study_Protocol_Document'
@ -80,14 +79,14 @@ class TestWorkflowRestart(BaseTest):
study_id = workflow.study_id study_id = workflow.study_id
# Start the workflow. # Start the workflow.
first_task = self.get_workflow_api(workflow).next_task
self.assertEqual('Activity_GetData', first_task.name)
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
self.complete_form(workflow_api, first_task, {'formdata': 'asdf'}) first_task = workflow_api.next_task
self.assertEqual('Activity_GetData', first_task.name)
self.complete_form(workflow, first_task, {'formdata': 'asdf'})
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
self.assertEqual('Activity_HowMany', workflow_api.next_task.name) self.assertEqual('Activity_HowMany', workflow_api.next_task.name)
workflow_api = self.restart_workflow_api(workflow) self.restart_workflow_api(workflow)
study_result = session.query(StudyModel).filter(StudyModel.id == study_id).first() study_result = session.query(StudyModel).filter(StudyModel.id == study_id).first()
self.assertEqual('New Title', study_result.title) self.assertEqual('New Title', study_result.title)
@ -106,17 +105,16 @@ class TestWorkflowRestart(BaseTest):
study_id = workflow.study_id study_id = workflow.study_id
# Start the workflow. # Start the workflow.
first_task = self.get_workflow_api(workflow).next_task
self.assertEqual('Activity_GetData', first_task.name)
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
self.complete_form(workflow_api, first_task, {'formdata': 'asdf'}) first_task = workflow_api.next_task
self.assertEqual('Activity_GetData', first_task.name)
self.complete_form(workflow, first_task, {'formdata': 'asdf'})
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
next_task = workflow_api.next_task next_task = workflow_api.next_task
self.assertEqual('Activity_HowMany', next_task.name) self.assertEqual('Activity_HowMany', next_task.name)
self.complete_form(workflow_api, next_task, {'how_many': 3}) self.complete_form(workflow, next_task, {'how_many': 3})
workflow_api = self.restart_workflow_api(workflow)
study_result = session.query(StudyModel).filter(StudyModel.id == study_id).first() study_result = session.query(StudyModel).filter(StudyModel.id == study_id).first()
self.assertEqual('Beer consumption in the bipedal software engineer', study_result.title) self.assertEqual('Beer consumption in the bipedal software engineer', study_result.title)

View File

@ -3,7 +3,9 @@ import json
from tests.base_test import BaseTest from tests.base_test import BaseTest
from crc import session from crc import session
from crc.models.file import FileModel from crc.models.file import FileModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel, WorkflowSpecCategoryModelSchema
from example_data import ExampleDataLoader
class TestWorkflowSpec(BaseTest): class TestWorkflowSpec(BaseTest):
@ -28,7 +30,8 @@ class TestWorkflowSpec(BaseTest):
category_id = session.query(WorkflowSpecCategoryModel).first().id category_id = session.query(WorkflowSpecCategoryModel).first().id
category_count = session.query(WorkflowSpecModel).filter_by(category_id=category_id).count() category_count = session.query(WorkflowSpecModel).filter_by(category_id=category_id).count()
spec = WorkflowSpecModel(id='make_cookies', name='make_cookies', display_name='Cooooookies', spec = WorkflowSpecModel(id='make_cookies', name='make_cookies', display_name='Cooooookies',
description='Om nom nom delicious cookies', category_id=category_id) description='Om nom nom delicious cookies', category_id=category_id,
standalone=False)
rv = self.app.post('/v1.0/workflow-specification', rv = self.app.post('/v1.0/workflow-specification',
headers=self.logged_in_headers(), headers=self.logged_in_headers(),
content_type="application/json", content_type="application/json",
@ -101,3 +104,60 @@ class TestWorkflowSpec(BaseTest):
num_workflows_after = session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id).count() num_workflows_after = session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id).count()
self.assertEqual(num_files_after + num_workflows_after, 0) self.assertEqual(num_files_after + num_workflows_after, 0)
def test_get_standalone_workflow_specs(self):
self.load_example_data()
category = session.query(WorkflowSpecCategoryModel).first()
ExampleDataLoader().create_spec('hello_world', 'Hello World', category_id=category.id,
standalone=True, from_tests=True)
rv = self.app.get('/v1.0/workflow-specification/standalone', headers=self.logged_in_headers())
self.assertEqual(1, len(rv.json))
ExampleDataLoader().create_spec('email_script', 'Email Script', category_id=category.id,
standalone=True, from_tests=True)
rv = self.app.get('/v1.0/workflow-specification/standalone', headers=self.logged_in_headers())
self.assertEqual(2, len(rv.json))
def test_get_workflow_from_workflow_spec(self):
self.load_example_data()
spec = ExampleDataLoader().create_spec('hello_world', 'Hello World', standalone=True, from_tests=True)
rv = self.app.post(f'/v1.0/workflow-specification/{spec.id}', headers=self.logged_in_headers())
self.assert_success(rv)
self.assertEqual('hello_world', rv.json['workflow_spec_id'])
self.assertEqual('Task_GetName', rv.json['next_task']['name'])
def test_add_workflow_spec_category(self):
self.load_example_data()
count = session.query(WorkflowSpecCategoryModel).count()
category = WorkflowSpecCategoryModel(
id=count,
name='another_test_category',
display_name='Another Test Category',
display_order=0
)
rv = self.app.post(f'/v1.0/workflow-specification-category',
headers=self.logged_in_headers(),
content_type="application/json",
data=json.dumps(WorkflowSpecCategoryModelSchema().dump(category))
)
self.assert_success(rv)
result = session.query(WorkflowSpecCategoryModel).filter(WorkflowSpecCategoryModel.name=='another_test_category').first()
self.assertEqual('Another Test Category', result.display_name)
self.assertEqual(count, result.id)
def test_update_workflow_spec_category(self):
self.load_example_data()
category = session.query(WorkflowSpecCategoryModel).first()
category_name_before = category.name
new_category_name = category_name_before + '_asdf'
self.assertNotEqual(category_name_before, new_category_name)
category.name = new_category_name
rv = self.app.put(f'/v1.0/workflow-specification-category/{category.id}',
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(WorkflowSpecCategoryModelSchema().dump(category)))
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
self.assertEqual(new_category_name, json_data['name'])

View File

@ -9,7 +9,7 @@ class TestValueExpression(BaseTest):
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
first_task = workflow_api.next_task first_task = workflow_api.next_task
self.complete_form(workflow_api, first_task, {'value_expression_value': ''}) self.complete_form(workflow, first_task, {'value_expression_value': ''})
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
second_task = workflow_api.next_task second_task = workflow_api.next_task
@ -26,7 +26,7 @@ class TestValueExpression(BaseTest):
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
first_task = workflow_api.next_task first_task = workflow_api.next_task
self.complete_form(workflow_api, first_task, {'value_expression_value': 'black'}) self.complete_form(workflow, first_task, {'value_expression_value': 'black'})
workflow_api = self.get_workflow_api(workflow) workflow_api = self.get_workflow_api(workflow)
second_task = workflow_api.next_task second_task = workflow_api.next_task