Merge branch 'dev' into cr-workflow-108

This commit is contained in:
Kelly McDonald 2020-08-12 10:40:54 -04:00
commit 2b0f1acb72
66 changed files with 1839 additions and 559 deletions

View File

@ -38,13 +38,14 @@ recommonmark = "*"
requests = "*"
sentry-sdk = {extras = ["flask"],version = "==0.14.4"}
sphinx = "*"
spiffworkflow = {editable = true,git = "https://github.com/sartography/SpiffWorkflow.git",ref = "master"}
#spiffworkflow = {editable = true,path="/home/kelly/sartography/SpiffWorkflow/"}
swagger-ui-bundle = "*"
spiffworkflow = {editable = true, git = "https://github.com/sartography/SpiffWorkflow.git", ref = "master"}
webtest = "*"
werkzeug = "*"
xlrd = "*"
xlsxwriter = "*"
pygithub = "*"
python-box = "*"
[requires]
python_version = "3.7"

406
Pipfile.lock generated
View File

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "97a15c4ade88db2b384d52436633889a4d9b0bdcaeea86b8a679ebda6f73fb59"
"sha256": "096abf7ce152358489282a004ed634ca64730cb98276f3a513ed2d5b8a6635c6"
},
"pipfile-spec": 6,
"requires": {
@ -32,10 +32,11 @@
},
"amqp": {
"hashes": [
"sha256:24dbaff8ce4f30566bb88976b398e8c4e77637171af3af6f1b9650f48890e60b",
"sha256:bb68f8d2bced8f93ccfd07d96c689b716b3227720add971be980accfc2952139"
"sha256:70cdb10628468ff14e57ec2f751c7aa9e48e7e3651cfd62d431213c0c4e58f21",
"sha256:aa7f313fb887c91f15474c1229907a04dac0b8135822d6603437803424c0aa59"
],
"version": "==2.6.0"
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==2.6.1"
},
"aniso8601": {
"hashes": [
@ -49,6 +50,7 @@
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==19.3.0"
},
"babel": {
@ -56,6 +58,7 @@
"sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38",
"sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.8.0"
},
"bcrypt": {
@ -79,6 +82,7 @@
"sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7",
"sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==3.1.7"
},
"beautifulsoup4": {
@ -107,6 +111,7 @@
"sha256:ef17d7dffde7fc73ecab3a3b6389d93d3213bac53fa7f28e68e33647ad50b916",
"sha256:fd77e4248bb1b7af5f7922dd8e81156f540306e3a5c4b1c24167c1f5f06025da"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==4.4.6"
},
"certifi": {
@ -118,36 +123,36 @@
},
"cffi": {
"hashes": [
"sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff",
"sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b",
"sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac",
"sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0",
"sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384",
"sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26",
"sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6",
"sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b",
"sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e",
"sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd",
"sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2",
"sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66",
"sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc",
"sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8",
"sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55",
"sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4",
"sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5",
"sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d",
"sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78",
"sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa",
"sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793",
"sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f",
"sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a",
"sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f",
"sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30",
"sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f",
"sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3",
"sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c"
"sha256:267adcf6e68d77ba154334a3e4fc921b8e63cbb38ca00d33d40655d4228502bc",
"sha256:26f33e8f6a70c255767e3c3f957ccafc7f1f706b966e110b855bfe944511f1f9",
"sha256:3cd2c044517f38d1b577f05927fb9729d3396f1d44d0c659a445599e79519792",
"sha256:4a03416915b82b81af5502459a8a9dd62a3c299b295dcdf470877cb948d655f2",
"sha256:4ce1e995aeecf7cc32380bc11598bfdfa017d592259d5da00fc7ded11e61d022",
"sha256:4f53e4128c81ca3212ff4cf097c797ab44646a40b42ec02a891155cd7a2ba4d8",
"sha256:4fa72a52a906425416f41738728268072d5acfd48cbe7796af07a923236bcf96",
"sha256:66dd45eb9530e3dde8f7c009f84568bc7cac489b93d04ac86e3111fb46e470c2",
"sha256:6923d077d9ae9e8bacbdb1c07ae78405a9306c8fd1af13bfa06ca891095eb995",
"sha256:833401b15de1bb92791d7b6fb353d4af60dc688eaa521bd97203dcd2d124a7c1",
"sha256:8416ed88ddc057bab0526d4e4e9f3660f614ac2394b5e019a628cdfff3733849",
"sha256:892daa86384994fdf4856cb43c93f40cbe80f7f95bb5da94971b39c7f54b3a9c",
"sha256:98be759efdb5e5fa161e46d404f4e0ce388e72fbf7d9baf010aff16689e22abe",
"sha256:a6d28e7f14ecf3b2ad67c4f106841218c8ab12a0683b1528534a6c87d2307af3",
"sha256:b1d6ebc891607e71fd9da71688fcf332a6630b7f5b7f5549e6e631821c0e5d90",
"sha256:b2a2b0d276a136146e012154baefaea2758ef1f56ae9f4e01c612b0831e0bd2f",
"sha256:b87dfa9f10a470eee7f24234a37d1d5f51e5f5fa9eeffda7c282e2b8f5162eb1",
"sha256:bac0d6f7728a9cc3c1e06d4fcbac12aaa70e9379b3025b27ec1226f0e2d404cf",
"sha256:c991112622baee0ae4d55c008380c32ecfd0ad417bcd0417ba432e6ba7328caa",
"sha256:cda422d54ee7905bfc53ee6915ab68fe7b230cacf581110df4272ee10462aadc",
"sha256:d3148b6ba3923c5850ea197a91a42683f946dba7e8eb82dfa211ab7e708de939",
"sha256:d6033b4ffa34ef70f0b8086fd4c3df4bf801fee485a8a7d4519399818351aa8e",
"sha256:ddff0b2bd7edcc8c82d1adde6dbbf5e60d57ce985402541cd2985c27f7bec2a0",
"sha256:e23cb7f1d8e0f93addf0cae3c5b6f00324cccb4a7949ee558d7b6ca973ab8ae9",
"sha256:effd2ba52cee4ceff1a77f20d2a9f9bf8d50353c854a282b8760ac15b9833168",
"sha256:f90c2267101010de42f7273c94a1f026e56cbc043f9330acd8a80e64300aba33",
"sha256:f960375e9823ae6a07072ff7f8a85954e5a6434f97869f50d0e41649a1c8144f",
"sha256:fcf32bf76dc25e30ed793145a57426064520890d7c02866eb93d3e4abe516948"
],
"version": "==1.14.0"
"version": "==1.14.1"
},
"chardet": {
"hashes": [
@ -161,6 +166,7 @@
"sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a",
"sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==7.1.2"
},
"clickclick": {
@ -182,6 +188,7 @@
"sha256:2ca44140ee259b5e3d8aaf47c79c36a7ab0d5e94d70bd4105c03ede7a20ea5a1",
"sha256:cffc044844040c7ce04e9acd1838b5f2e5fa3170182f6fda4d2ea8b0099dbadd"
],
"markers": "python_version >= '3.6'",
"version": "==5.0.0"
},
"connexion": {
@ -197,49 +204,58 @@
},
"coverage": {
"hashes": [
"sha256:0fc4e0d91350d6f43ef6a61f64a48e917637e1dcfcba4b4b7d543c628ef82c2d",
"sha256:10f2a618a6e75adf64329f828a6a5b40244c1c50f5ef4ce4109e904e69c71bd2",
"sha256:12eaccd86d9a373aea59869bc9cfa0ab6ba8b1477752110cb4c10d165474f703",
"sha256:1874bdc943654ba46d28f179c1846f5710eda3aeb265ff029e0ac2b52daae404",
"sha256:1dcebae667b73fd4aa69237e6afb39abc2f27520f2358590c1b13dd90e32abe7",
"sha256:1e58fca3d9ec1a423f1b7f2aa34af4f733cbfa9020c8fe39ca451b6071237405",
"sha256:214eb2110217f2636a9329bc766507ab71a3a06a8ea30cdeebb47c24dce5972d",
"sha256:25fe74b5b2f1b4abb11e103bb7984daca8f8292683957d0738cd692f6a7cc64c",
"sha256:32ecee61a43be509b91a526819717d5e5650e009a8d5eda8631a59c721d5f3b6",
"sha256:3740b796015b889e46c260ff18b84683fa2e30f0f75a171fb10d2bf9fb91fc70",
"sha256:3b2c34690f613525672697910894b60d15800ac7e779fbd0fccf532486c1ba40",
"sha256:41d88736c42f4a22c494c32cc48a05828236e37c991bd9760f8923415e3169e4",
"sha256:42fa45a29f1059eda4d3c7b509589cc0343cd6bbf083d6118216830cd1a51613",
"sha256:4bb385a747e6ae8a65290b3df60d6c8a692a5599dc66c9fa3520e667886f2e10",
"sha256:509294f3e76d3f26b35083973fbc952e01e1727656d979b11182f273f08aa80b",
"sha256:5c74c5b6045969b07c9fb36b665c9cac84d6c174a809fc1b21bdc06c7836d9a0",
"sha256:60a3d36297b65c7f78329b80120f72947140f45b5c7a017ea730f9112b40f2ec",
"sha256:6f91b4492c5cde83bfe462f5b2b997cdf96a138f7c58b1140f05de5751623cf1",
"sha256:7403675df5e27745571aba1c957c7da2dacb537c21e14007ec3a417bf31f7f3d",
"sha256:87bdc8135b8ee739840eee19b184804e5d57f518578ffc797f5afa2c3c297913",
"sha256:8a3decd12e7934d0254939e2bf434bf04a5890c5bf91a982685021786a08087e",
"sha256:9702e2cb1c6dec01fb8e1a64c015817c0800a6eca287552c47a5ee0ebddccf62",
"sha256:a4d511012beb967a39580ba7d2549edf1e6865a33e5fe51e4dce550522b3ac0e",
"sha256:bbb387811f7a18bdc61a2ea3d102be0c7e239b0db9c83be7bfa50f095db5b92a",
"sha256:bfcc811883699ed49afc58b1ed9f80428a18eb9166422bce3c31a53dba00fd1d",
"sha256:c32aa13cc3fe86b0f744dfe35a7f879ee33ac0a560684fef0f3e1580352b818f",
"sha256:ca63dae130a2e788f2b249200f01d7fa240f24da0596501d387a50e57aa7075e",
"sha256:d54d7ea74cc00482a2410d63bf10aa34ebe1c49ac50779652106c867f9986d6b",
"sha256:d67599521dff98ec8c34cd9652cbcfe16ed076a2209625fca9dc7419b6370e5c",
"sha256:d82db1b9a92cb5c67661ca6616bdca6ff931deceebb98eecbd328812dab52032",
"sha256:d9ad0a988ae20face62520785ec3595a5e64f35a21762a57d115dae0b8fb894a",
"sha256:ebf2431b2d457ae5217f3a1179533c456f3272ded16f8ed0b32961a6d90e38ee",
"sha256:ed9a21502e9223f563e071759f769c3d6a2e1ba5328c31e86830368e8d78bc9c",
"sha256:f50632ef2d749f541ca8e6c07c9928a37f87505ce3a9f20c8446ad310f1aa87b"
"sha256:098a703d913be6fbd146a8c50cc76513d726b022d170e5e98dc56d958fd592fb",
"sha256:16042dc7f8e632e0dcd5206a5095ebd18cb1d005f4c89694f7f8aafd96dd43a3",
"sha256:1adb6be0dcef0cf9434619d3b892772fdb48e793300f9d762e480e043bd8e716",
"sha256:27ca5a2bc04d68f0776f2cdcb8bbd508bbe430a7bf9c02315cd05fb1d86d0034",
"sha256:28f42dc5172ebdc32622a2c3f7ead1b836cdbf253569ae5673f499e35db0bac3",
"sha256:2fcc8b58953d74d199a1a4d633df8146f0ac36c4e720b4a1997e9b6327af43a8",
"sha256:304fbe451698373dc6653772c72c5d5e883a4aadaf20343592a7abb2e643dae0",
"sha256:30bc103587e0d3df9e52cd9da1dd915265a22fad0b72afe54daf840c984b564f",
"sha256:40f70f81be4d34f8d491e55936904db5c527b0711b2a46513641a5729783c2e4",
"sha256:4186fc95c9febeab5681bc3248553d5ec8c2999b8424d4fc3a39c9cba5796962",
"sha256:46794c815e56f1431c66d81943fa90721bb858375fb36e5903697d5eef88627d",
"sha256:4869ab1c1ed33953bb2433ce7b894a28d724b7aa76c19b11e2878034a4e4680b",
"sha256:4f6428b55d2916a69f8d6453e48a505c07b2245653b0aa9f0dee38785939f5e4",
"sha256:52f185ffd3291196dc1aae506b42e178a592b0b60a8610b108e6ad892cfc1bb3",
"sha256:538f2fd5eb64366f37c97fdb3077d665fa946d2b6d95447622292f38407f9258",
"sha256:64c4f340338c68c463f1b56e3f2f0423f7b17ba6c3febae80b81f0e093077f59",
"sha256:675192fca634f0df69af3493a48224f211f8db4e84452b08d5fcebb9167adb01",
"sha256:700997b77cfab016533b3e7dbc03b71d33ee4df1d79f2463a318ca0263fc29dd",
"sha256:8505e614c983834239f865da2dd336dcf9d72776b951d5dfa5ac36b987726e1b",
"sha256:962c44070c281d86398aeb8f64e1bf37816a4dfc6f4c0f114756b14fc575621d",
"sha256:9e536783a5acee79a9b308be97d3952b662748c4037b6a24cbb339dc7ed8eb89",
"sha256:9ea749fd447ce7fb1ac71f7616371f04054d969d412d37611716721931e36efd",
"sha256:a34cb28e0747ea15e82d13e14de606747e9e484fb28d63c999483f5d5188e89b",
"sha256:a3ee9c793ffefe2944d3a2bd928a0e436cd0ac2d9e3723152d6fd5398838ce7d",
"sha256:aab75d99f3f2874733946a7648ce87a50019eb90baef931698f96b76b6769a46",
"sha256:b1ed2bdb27b4c9fc87058a1cb751c4df8752002143ed393899edb82b131e0546",
"sha256:b360d8fd88d2bad01cb953d81fd2edd4be539df7bfec41e8753fe9f4456a5082",
"sha256:b8f58c7db64d8f27078cbf2a4391af6aa4e4767cc08b37555c4ae064b8558d9b",
"sha256:c1bbb628ed5192124889b51204de27c575b3ffc05a5a91307e7640eff1d48da4",
"sha256:c2ff24df02a125b7b346c4c9078c8936da06964cc2d276292c357d64378158f8",
"sha256:c890728a93fffd0407d7d37c1e6083ff3f9f211c83b4316fae3778417eab9811",
"sha256:c96472b8ca5dc135fb0aa62f79b033f02aa434fb03a8b190600a5ae4102df1fd",
"sha256:ce7866f29d3025b5b34c2e944e66ebef0d92e4a4f2463f7266daa03a1332a651",
"sha256:e26c993bd4b220429d4ec8c1468eca445a4064a61c74ca08da7429af9bc53bb0"
],
"index": "pypi",
"version": "==5.2"
"version": "==5.2.1"
},
"deprecated": {
"hashes": [
"sha256:525ba66fb5f90b07169fdd48b6373c18f1ee12728ca277ca44567a367d9d7f74",
"sha256:a766c1dccb30c5f6eb2b203f87edd1d8588847709c78589e1521d769addc8218"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.2.10"
},
"docutils": {
"hashes": [
"sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
"sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==0.16"
},
"docxtpl": {
@ -322,12 +338,14 @@
"sha256:05b31d2034dd3f2a685cbbae4cfc4ed906b2a733cff7964ada450fd5e462b84e",
"sha256:bfc7150eaf809b1c283879302f04c42791136060c6eeb12c0c6674fb1291fae5"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.4"
},
"future": {
"hashes": [
"sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.18.2"
},
"gunicorn": {
@ -350,6 +368,7 @@
"sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
"sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.10"
},
"imagesize": {
@ -357,21 +376,15 @@
"sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1",
"sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.2.0"
},
"importlib-metadata": {
"hashes": [
"sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83",
"sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"
],
"markers": "python_version < '3.8'",
"version": "==1.7.0"
},
"inflection": {
"hashes": [
"sha256:88b101b2668a1d81d6d72d4c2018e53bc6c7fc544c987849da1c7f77545c3bc9",
"sha256:f576e85132d34f5bf7df5183c2c6f94cfb32e528f53065345cf71329ba0b8924"
],
"markers": "python_version >= '3.5'",
"version": "==0.5.0"
},
"itsdangerous": {
@ -379,6 +392,7 @@
"sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19",
"sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.0"
},
"jdcal": {
@ -393,6 +407,7 @@
"sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0",
"sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==2.11.2"
},
"jsonschema": {
@ -407,11 +422,16 @@
"sha256:be48cdffb54a2194d93ad6533d73f69408486483d189fe9f5990ee24255b0e0a",
"sha256:ca1b45faac8c0b18493d02a8571792f3c40291cf2bcf1f55afed3d8f3aa7ba74"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==4.6.11"
},
"ldap3": {
"hashes": [
"sha256:17f04298b70bf7ecaa5db8a7d8622b5a962ef7fc2b245b2eea705ac1c24338c0",
"sha256:298769ab0232b3a3efa1e84881096c24526fe37911c83a11285f222fe4975efd",
"sha256:4fd2db72d0412cc16ee86be01332095e86e361329c3579b314231eb2e56c7871",
"sha256:52ab557b3c4908db4a90bea16731aa714b1b54e039b54fd4c4b83994c6c48c0c",
"sha256:53aaae5bf14f3827c69600ddf4d61b88f49c055bb93060e9702c5bafd206c744",
"sha256:81df4ac8b6df10fb1f05b17c18d0cb8c4c344d5a03083c382824960ed959cf5b"
],
"index": "pypi",
@ -459,6 +479,7 @@
"sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27",
"sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.3"
},
"markdown": {
@ -505,6 +526,7 @@
"sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",
"sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.1"
},
"marshmallow": {
@ -560,15 +582,16 @@
"sha256:ed8a311493cf5480a2ebc597d1e177231984c818a86875126cfd004241a73c3e",
"sha256:ef71a1d4fd4858596ae80ad1ec76404ad29701f8ca7cdcebc50300178db14dfc"
],
"markers": "python_version >= '3.6'",
"version": "==1.19.1"
},
"openapi-spec-validator": {
"hashes": [
"sha256:0caacd9829e9e3051e830165367bf58d436d9487b29a09220fa7edb9f47ff81b",
"sha256:d4da8aef72bf5be40cf0df444abd20009a41baf9048a8e03750c07a934f1bdd8",
"sha256:e489c7a273284bc78277ac22791482e8058d323b4a265015e9fcddf6a8045bcd"
"sha256:6dd75e50c94f1bb454d0e374a56418e7e06a07affb2c7f1df88564c5d728dac3",
"sha256:79381a69b33423ee400ae1624a461dae7725e450e2e306e32f2dd8d16a4d85cb",
"sha256:ec1b01a00e20955a527358886991ae34b4b791b253027ee9f7df5f84b59d91c7"
],
"version": "==0.2.8"
"version": "==0.2.9"
},
"openpyxl": {
"hashes": [
@ -583,29 +606,30 @@
"sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",
"sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.4"
},
"pandas": {
"hashes": [
"sha256:02f1e8f71cd994ed7fcb9a35b6ddddeb4314822a0e09a9c5b2d278f8cb5d4096",
"sha256:13f75fb18486759da3ff40f5345d9dd20e7d78f2a39c5884d013456cec9876f0",
"sha256:35b670b0abcfed7cad76f2834041dcf7ae47fd9b22b63622d67cdc933d79f453",
"sha256:4c73f373b0800eb3062ffd13d4a7a2a6d522792fa6eb204d67a4fad0a40f03dc",
"sha256:5759edf0b686b6f25a5d4a447ea588983a33afc8a0081a0954184a4a87fd0dd7",
"sha256:5a7cf6044467c1356b2b49ef69e50bf4d231e773c3ca0558807cdba56b76820b",
"sha256:69c5d920a0b2a9838e677f78f4dde506b95ea8e4d30da25859db6469ded84fa8",
"sha256:8778a5cc5a8437a561e3276b85367412e10ae9fff07db1eed986e427d9a674f8",
"sha256:9871ef5ee17f388f1cb35f76dc6106d40cb8165c562d573470672f4cdefa59ef",
"sha256:9c31d52f1a7dd2bb4681d9f62646c7aa554f19e8e9addc17e8b1b20011d7522d",
"sha256:ab8173a8efe5418bbe50e43f321994ac6673afc5c7c4839014cf6401bbdd0705",
"sha256:ae961f1f0e270f1e4e2273f6a539b2ea33248e0e3a11ffb479d757918a5e03a9",
"sha256:b3c4f93fcb6e97d993bf87cdd917883b7dab7d20c627699f360a8fb49e9e0b91",
"sha256:c9410ce8a3dee77653bc0684cfa1535a7f9c291663bd7ad79e39f5ab58f67ab3",
"sha256:f69e0f7b7c09f1f612b1f8f59e2df72faa8a6b41c5a436dde5b615aaf948f107",
"sha256:faa42a78d1350b02a7d2f0dbe3c80791cf785663d6997891549d0f86dc49125e"
"sha256:0210f8fe19c2667a3817adb6de2c4fd92b1b78e1975ca60c0efa908e0985cbdb",
"sha256:0227e3a6e3a22c0e283a5041f1e3064d78fbde811217668bb966ed05386d8a7e",
"sha256:0bc440493cf9dc5b36d5d46bbd5508f6547ba68b02a28234cd8e81fdce42744d",
"sha256:16504f915f1ae424052f1e9b7cd2d01786f098fbb00fa4e0f69d42b22952d798",
"sha256:182a5aeae319df391c3df4740bb17d5300dcd78034b17732c12e62e6dd79e4a4",
"sha256:35db623487f00d9392d8af44a24516d6cb9f274afaf73cfcfe180b9c54e007d2",
"sha256:40ec0a7f611a3d00d3c666c4cceb9aa3f5bf9fbd81392948a93663064f527203",
"sha256:47a03bfef80d6812c91ed6fae43f04f2fa80a4e1b82b35aa4d9002e39529e0b8",
"sha256:4b21d46728f8a6be537716035b445e7ef3a75dbd30bd31aa1b251323219d853e",
"sha256:4d1a806252001c5db7caecbe1a26e49a6c23421d85a700960f6ba093112f54a1",
"sha256:60e20a4ab4d4fec253557d0fc9a4e4095c37b664f78c72af24860c8adcd07088",
"sha256:9f61cca5262840ff46ef857d4f5f65679b82188709d0e5e086a9123791f721c8",
"sha256:a15835c8409d5edc50b4af93be3377b5dd3eb53517e7f785060df1f06f6da0e2",
"sha256:b39508562ad0bb3f384b0db24da7d68a2608b9ddc85b1d931ccaaa92d5e45273",
"sha256:ed60848caadeacecefd0b1de81b91beff23960032cded0ac1449242b506a3b3f",
"sha256:fc714895b6de6803ac9f661abb316853d0cd657f5d23985222255ad76ccedc25"
],
"index": "pypi",
"version": "==1.0.5"
"version": "==1.1.0"
},
"psycopg2-binary": {
"hashes": [
@ -645,8 +669,19 @@
},
"pyasn1": {
"hashes": [
"sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359",
"sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576",
"sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf",
"sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7",
"sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d",
"sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"
"sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00",
"sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8",
"sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86",
"sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12",
"sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776",
"sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba",
"sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2",
"sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"
],
"version": "==0.4.8"
},
@ -655,13 +690,23 @@
"sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
"sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.20"
},
"pygithub": {
"hashes": [
"sha256:8375a058ec651cc0774244a3bc7395cf93617298735934cdd59e5bcd9a1df96e",
"sha256:d2d17d1e3f4474e070353f201164685a95b5a92f5ee0897442504e399c7bc249"
],
"index": "pypi",
"version": "==1.51"
},
"pygments": {
"hashes": [
"sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",
"sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"
],
"markers": "python_version >= '3.5'",
"version": "==2.6.1"
},
"pyjwt": {
@ -677,6 +722,7 @@
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
"sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.7"
},
"pyrsistent": {
@ -687,10 +733,11 @@
},
"python-box": {
"hashes": [
"sha256:2df0d0e0769b6d6e7daed8d5e0b10a38e0b5486ee75914c30f2a927f7a374111",
"sha256:ddea019b4ee53fe3f822407b0b26ec54ff6233042c68b54244d3503ae4d6218f"
"sha256:bcb057e8960f4d888a4caf8f668eeca3c5c61ad349d8d81c4339414984fa9454",
"sha256:f02e059a299cac0515687aafec7543d401b12759d6578e53fae74154e0cbaa79"
],
"version": "==5.0.1"
"index": "pypi",
"version": "==5.1.0"
},
"python-dateutil": {
"hashes": [
@ -710,7 +757,9 @@
"hashes": [
"sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d",
"sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b",
"sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"
"sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8",
"sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77",
"sha256:ea87e17f6ec459e780e4221f295411462e0d0810858e055fc514684350a2f522"
],
"version": "==1.0.4"
},
@ -824,6 +873,7 @@
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.15.0"
},
"snowballstemmer": {
@ -838,6 +888,7 @@
"sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55",
"sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232"
],
"markers": "python_version >= '3.5'",
"version": "==2.0.1"
},
"sphinx": {
@ -853,6 +904,7 @@
"sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a",
"sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.2"
},
"sphinxcontrib-devhelp": {
@ -860,6 +912,7 @@
"sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e",
"sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.2"
},
"sphinxcontrib-htmlhelp": {
@ -867,6 +920,7 @@
"sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f",
"sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.3"
},
"sphinxcontrib-jsmath": {
@ -874,6 +928,7 @@
"sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178",
"sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.1"
},
"sphinxcontrib-qthelp": {
@ -881,6 +936,7 @@
"sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72",
"sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.3"
},
"sphinxcontrib-serializinghtml": {
@ -888,12 +944,13 @@
"sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc",
"sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"
],
"markers": "python_version >= '3.5'",
"version": "==1.1.4"
},
"spiffworkflow": {
"editable": true,
"git": "https://github.com/sartography/SpiffWorkflow.git",
"ref": "74529738b4e16be5aadd846669a201560f81a6d4"
"ref": "7c8d59e7b9a978795bc8d1f354002fdc89540672"
},
"sqlalchemy": {
"hashes": [
@ -926,6 +983,7 @@
"sha256:f57be5673e12763dd400fea568608700a63ce1c6bd5bdbc3cc3a2c5fdb045274",
"sha256:fc728ece3d5c772c196fd338a99798e7efac7a04f9cb6416299a3638ee9a94cd"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.3.18"
},
"swagger-ui-bundle": {
@ -938,16 +996,18 @@
},
"urllib3": {
"hashes": [
"sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527",
"sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"
"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a",
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"
],
"version": "==1.25.9"
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
"version": "==1.25.10"
},
"vine": {
"hashes": [
"sha256:133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87",
"sha256:ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.3.0"
},
"waitress": {
@ -955,6 +1015,7 @@
"sha256:1bb436508a7487ac6cb097ae7a7fe5413aefca610550baf58f0940e51ecfb261",
"sha256:3d633e78149eb83b60a07dfabb35579c29aac2d24bb803c18b26fb2ab1a584db"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==1.4.4"
},
"webob": {
@ -962,6 +1023,7 @@
"sha256:a3c89a8e9ba0aeb17382836cdb73c516d0ecf6630ec40ec28288f3ed459ce87b",
"sha256:aa3a917ed752ba3e0b242234b2a373f9c4e2a75d35291dcbe977649bd21fd108"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.8.6"
},
"webtest": {
@ -980,12 +1042,18 @@
"index": "pypi",
"version": "==1.0.1"
},
"wrapt": {
"hashes": [
"sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"
],
"version": "==1.12.1"
},
"wtforms": {
"hashes": [
"sha256:6ff8635f4caeed9f38641d48cfe019d0d3896f41910ab04494143fc027866e1b",
"sha256:861a13b3ae521d6700dac3b2771970bd354a63ba7043ecc3a82b5288596a1972"
"sha256:7b504fc724d0d1d4d5d5c114e778ec88c37ea53144683e084215eed5155ada4c",
"sha256:81195de0ac94fbc8368abbaf9197b88c4f3ffd6c2719b5bf5fc9da744f3d829c"
],
"version": "==2.3.1"
"version": "==2.3.3"
},
"xlrd": {
"hashes": [
@ -997,18 +1065,11 @@
},
"xlsxwriter": {
"hashes": [
"sha256:828b3285fc95105f5b1946a6a015b31cf388bd5378fdc6604e4d1b7839df2e77",
"sha256:82a3b0e73e3913483da23791d1a25e4d2dbb3837d1be4129473526b9a270a5cc"
"sha256:3015f707cf237d277cf1b2d7805f409f0387e32bc52f3c76db9f85098980e828",
"sha256:ee3fc2f32890246aba44dd14d777d6b3135e3454f865d8cc669618e20152296b"
],
"index": "pypi",
"version": "==1.2.9"
},
"zipp": {
"hashes": [
"sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
"sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
],
"version": "==3.1.0"
"version": "==1.3.0"
}
},
"develop": {
@ -1017,61 +1078,62 @@
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==19.3.0"
},
"coverage": {
"hashes": [
"sha256:0fc4e0d91350d6f43ef6a61f64a48e917637e1dcfcba4b4b7d543c628ef82c2d",
"sha256:10f2a618a6e75adf64329f828a6a5b40244c1c50f5ef4ce4109e904e69c71bd2",
"sha256:12eaccd86d9a373aea59869bc9cfa0ab6ba8b1477752110cb4c10d165474f703",
"sha256:1874bdc943654ba46d28f179c1846f5710eda3aeb265ff029e0ac2b52daae404",
"sha256:1dcebae667b73fd4aa69237e6afb39abc2f27520f2358590c1b13dd90e32abe7",
"sha256:1e58fca3d9ec1a423f1b7f2aa34af4f733cbfa9020c8fe39ca451b6071237405",
"sha256:214eb2110217f2636a9329bc766507ab71a3a06a8ea30cdeebb47c24dce5972d",
"sha256:25fe74b5b2f1b4abb11e103bb7984daca8f8292683957d0738cd692f6a7cc64c",
"sha256:32ecee61a43be509b91a526819717d5e5650e009a8d5eda8631a59c721d5f3b6",
"sha256:3740b796015b889e46c260ff18b84683fa2e30f0f75a171fb10d2bf9fb91fc70",
"sha256:3b2c34690f613525672697910894b60d15800ac7e779fbd0fccf532486c1ba40",
"sha256:41d88736c42f4a22c494c32cc48a05828236e37c991bd9760f8923415e3169e4",
"sha256:42fa45a29f1059eda4d3c7b509589cc0343cd6bbf083d6118216830cd1a51613",
"sha256:4bb385a747e6ae8a65290b3df60d6c8a692a5599dc66c9fa3520e667886f2e10",
"sha256:509294f3e76d3f26b35083973fbc952e01e1727656d979b11182f273f08aa80b",
"sha256:5c74c5b6045969b07c9fb36b665c9cac84d6c174a809fc1b21bdc06c7836d9a0",
"sha256:60a3d36297b65c7f78329b80120f72947140f45b5c7a017ea730f9112b40f2ec",
"sha256:6f91b4492c5cde83bfe462f5b2b997cdf96a138f7c58b1140f05de5751623cf1",
"sha256:7403675df5e27745571aba1c957c7da2dacb537c21e14007ec3a417bf31f7f3d",
"sha256:87bdc8135b8ee739840eee19b184804e5d57f518578ffc797f5afa2c3c297913",
"sha256:8a3decd12e7934d0254939e2bf434bf04a5890c5bf91a982685021786a08087e",
"sha256:9702e2cb1c6dec01fb8e1a64c015817c0800a6eca287552c47a5ee0ebddccf62",
"sha256:a4d511012beb967a39580ba7d2549edf1e6865a33e5fe51e4dce550522b3ac0e",
"sha256:bbb387811f7a18bdc61a2ea3d102be0c7e239b0db9c83be7bfa50f095db5b92a",
"sha256:bfcc811883699ed49afc58b1ed9f80428a18eb9166422bce3c31a53dba00fd1d",
"sha256:c32aa13cc3fe86b0f744dfe35a7f879ee33ac0a560684fef0f3e1580352b818f",
"sha256:ca63dae130a2e788f2b249200f01d7fa240f24da0596501d387a50e57aa7075e",
"sha256:d54d7ea74cc00482a2410d63bf10aa34ebe1c49ac50779652106c867f9986d6b",
"sha256:d67599521dff98ec8c34cd9652cbcfe16ed076a2209625fca9dc7419b6370e5c",
"sha256:d82db1b9a92cb5c67661ca6616bdca6ff931deceebb98eecbd328812dab52032",
"sha256:d9ad0a988ae20face62520785ec3595a5e64f35a21762a57d115dae0b8fb894a",
"sha256:ebf2431b2d457ae5217f3a1179533c456f3272ded16f8ed0b32961a6d90e38ee",
"sha256:ed9a21502e9223f563e071759f769c3d6a2e1ba5328c31e86830368e8d78bc9c",
"sha256:f50632ef2d749f541ca8e6c07c9928a37f87505ce3a9f20c8446ad310f1aa87b"
"sha256:098a703d913be6fbd146a8c50cc76513d726b022d170e5e98dc56d958fd592fb",
"sha256:16042dc7f8e632e0dcd5206a5095ebd18cb1d005f4c89694f7f8aafd96dd43a3",
"sha256:1adb6be0dcef0cf9434619d3b892772fdb48e793300f9d762e480e043bd8e716",
"sha256:27ca5a2bc04d68f0776f2cdcb8bbd508bbe430a7bf9c02315cd05fb1d86d0034",
"sha256:28f42dc5172ebdc32622a2c3f7ead1b836cdbf253569ae5673f499e35db0bac3",
"sha256:2fcc8b58953d74d199a1a4d633df8146f0ac36c4e720b4a1997e9b6327af43a8",
"sha256:304fbe451698373dc6653772c72c5d5e883a4aadaf20343592a7abb2e643dae0",
"sha256:30bc103587e0d3df9e52cd9da1dd915265a22fad0b72afe54daf840c984b564f",
"sha256:40f70f81be4d34f8d491e55936904db5c527b0711b2a46513641a5729783c2e4",
"sha256:4186fc95c9febeab5681bc3248553d5ec8c2999b8424d4fc3a39c9cba5796962",
"sha256:46794c815e56f1431c66d81943fa90721bb858375fb36e5903697d5eef88627d",
"sha256:4869ab1c1ed33953bb2433ce7b894a28d724b7aa76c19b11e2878034a4e4680b",
"sha256:4f6428b55d2916a69f8d6453e48a505c07b2245653b0aa9f0dee38785939f5e4",
"sha256:52f185ffd3291196dc1aae506b42e178a592b0b60a8610b108e6ad892cfc1bb3",
"sha256:538f2fd5eb64366f37c97fdb3077d665fa946d2b6d95447622292f38407f9258",
"sha256:64c4f340338c68c463f1b56e3f2f0423f7b17ba6c3febae80b81f0e093077f59",
"sha256:675192fca634f0df69af3493a48224f211f8db4e84452b08d5fcebb9167adb01",
"sha256:700997b77cfab016533b3e7dbc03b71d33ee4df1d79f2463a318ca0263fc29dd",
"sha256:8505e614c983834239f865da2dd336dcf9d72776b951d5dfa5ac36b987726e1b",
"sha256:962c44070c281d86398aeb8f64e1bf37816a4dfc6f4c0f114756b14fc575621d",
"sha256:9e536783a5acee79a9b308be97d3952b662748c4037b6a24cbb339dc7ed8eb89",
"sha256:9ea749fd447ce7fb1ac71f7616371f04054d969d412d37611716721931e36efd",
"sha256:a34cb28e0747ea15e82d13e14de606747e9e484fb28d63c999483f5d5188e89b",
"sha256:a3ee9c793ffefe2944d3a2bd928a0e436cd0ac2d9e3723152d6fd5398838ce7d",
"sha256:aab75d99f3f2874733946a7648ce87a50019eb90baef931698f96b76b6769a46",
"sha256:b1ed2bdb27b4c9fc87058a1cb751c4df8752002143ed393899edb82b131e0546",
"sha256:b360d8fd88d2bad01cb953d81fd2edd4be539df7bfec41e8753fe9f4456a5082",
"sha256:b8f58c7db64d8f27078cbf2a4391af6aa4e4767cc08b37555c4ae064b8558d9b",
"sha256:c1bbb628ed5192124889b51204de27c575b3ffc05a5a91307e7640eff1d48da4",
"sha256:c2ff24df02a125b7b346c4c9078c8936da06964cc2d276292c357d64378158f8",
"sha256:c890728a93fffd0407d7d37c1e6083ff3f9f211c83b4316fae3778417eab9811",
"sha256:c96472b8ca5dc135fb0aa62f79b033f02aa434fb03a8b190600a5ae4102df1fd",
"sha256:ce7866f29d3025b5b34c2e944e66ebef0d92e4a4f2463f7266daa03a1332a651",
"sha256:e26c993bd4b220429d4ec8c1468eca445a4064a61c74ca08da7429af9bc53bb0"
],
"index": "pypi",
"version": "==5.2"
"version": "==5.2.1"
},
"importlib-metadata": {
"iniconfig": {
"hashes": [
"sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83",
"sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"
"sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437",
"sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69"
],
"markers": "python_version < '3.8'",
"version": "==1.7.0"
"version": "==1.0.1"
},
"more-itertools": {
"hashes": [
"sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5",
"sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2"
],
"markers": "python_version >= '3.5'",
"version": "==8.4.0"
},
"packaging": {
@ -1079,6 +1141,7 @@
"sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",
"sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.4"
},
"pbr": {
@ -1094,6 +1157,7 @@
"sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.13.1"
},
"py": {
@ -1101,6 +1165,7 @@
"sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2",
"sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.9.0"
},
"pyparsing": {
@ -1108,36 +1173,31 @@
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
"sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.7"
},
"pytest": {
"hashes": [
"sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1",
"sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"
"sha256:85228d75db9f45e06e57ef9bf4429267f81ac7c0d742cc9ed63d09886a9fe6f4",
"sha256:8b6007800c53fdacd5a5c192203f4e531eb2a1540ad9c752e052ec0f7143dbad"
],
"index": "pypi",
"version": "==5.4.3"
"version": "==6.0.1"
},
"six": {
"hashes": [
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.15.0"
},
"wcwidth": {
"toml": {
"hashes": [
"sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784",
"sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"
"sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f",
"sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"
],
"version": "==0.2.5"
},
"zipp": {
"hashes": [
"sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
"sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
],
"version": "==3.1.0"
"version": "==0.10.1"
}
}
}

View File

@ -18,6 +18,9 @@ Make sure all of the following are properly installed on your system:
- [Install pipenv](https://pipenv-es.readthedocs.io/es/stable/)
- [Add ${HOME}/.local/bin to your PATH](https://github.com/pypa/pipenv/issues/2122#issue-319600584)
### Running Postgres
### Project Initialization
1. Clone this repository.
2. In PyCharm:

View File

@ -46,6 +46,9 @@ PB_STUDY_DETAILS_URL = environ.get('PB_STUDY_DETAILS_URL', default=PB_BASE_URL +
LDAP_URL = environ.get('LDAP_URL', default="ldap.virginia.edu").strip('/') # No trailing slash or http://
LDAP_TIMEOUT_SEC = int(environ.get('LDAP_TIMEOUT_SEC', default=1))
# Github token
GITHUB_TOKEN = environ.get('GITHUB_TOKEN', None)
# Email configuration
DEFAULT_SENDER = 'askresearch@virginia.edu'
FALLBACK_EMAILS = ['askresearch@virginia.edu', 'sartographysupport@googlegroups.com']

View File

@ -31,6 +31,13 @@ paths:
'304':
description: Redirection to the hosted frontend with an auth_token header.
/user:
parameters:
- name: admin_impersonate_uid
in: query
required: false
description: For admins, the unique uid of an existing user to impersonate.
schema:
type: string
get:
operationId: crc.api.user.get_current_user
summary: Returns the current user.
@ -38,11 +45,27 @@ paths:
- Users
responses:
'200':
description: The currently authenticated user.
description: The currently-authenticated user, or, if the current user is an admin and admin_impersonate_uid is provided, this will be the user with the given uid.
content:
application/json:
schema:
$ref: "#/components/schemas/User"
/list_users:
get:
operationId: crc.api.user.get_all_users
security:
- auth_admin: ['secret']
summary: Returns a list of all users in the database.
tags:
- Users
responses:
'200':
description: All users in the database.
content:
application/json:
schema:
type: array
$ref: "#/components/schemas/User"
# /v1.0/study
/study:
get:
@ -56,6 +79,7 @@ paths:
content:
application/json:
schema:
type: array
$ref: "#/components/schemas/Study"
post:
operationId: crc.api.study.add_study
@ -572,6 +596,18 @@ paths:
description: The type of action the event documents, options include "ASSIGNMENT" for tasks that are waiting on you, "COMPLETE" for things that have completed.
schema:
type: string
- name: workflow
in: query
required: false
description: Restrict results to the given workflow.
schema:
type: number
- name: study
in: query
required: false
description: Restrict results to the given study.
schema:
type: number
get:
operationId: crc.api.workflow.get_task_events
summary: Returns a list of task events related to the current user. Can be filtered by type.
@ -610,6 +646,12 @@ paths:
description: Set this to true to reset the workflow
schema:
type: boolean
- name: do_engine_steps
in: query
required: false
description: Defaults to true, can be set to false if you are just looking at the workflow, not completing it.
schema:
type: boolean
tags:
- Workflows and Tasks
responses:
@ -1044,9 +1086,9 @@ components:
user_uid:
type: string
example: dhf8r
protocol_builder_status:
status:
type: string
enum: [INCOMPLETE, ACTIVE, HOLD, OPEN, ABANDONED]
enum: ['in_progress', 'hold', 'open_for_enrollment', 'abandoned']
example: done
sponsor:
type: string

View File

@ -3,19 +3,22 @@ import json
from flask import url_for
from flask_admin import Admin
from flask_admin.actions import action
from flask_admin.contrib import sqla
from flask_admin.contrib.sqla import ModelView
from sqlalchemy import desc
from werkzeug.utils import redirect
from jinja2 import Markup
from crc import db, app
from crc.api.user import verify_token, verify_token_admin
from crc.models.approval import ApprovalModel
from crc.models.file import FileModel
from crc.models.file import FileModel, FileDataModel
from crc.models.task_event import TaskEventModel
from crc.models.study import StudyModel
from crc.models.user import UserModel
from crc.models.workflow import WorkflowModel
from crc.services.file_service import FileService
class AdminModelView(sqla.ModelView):
@ -34,26 +37,40 @@ class AdminModelView(sqla.ModelView):
# redirect to login page if user doesn't have access
return redirect(url_for('home'))
class UserView(AdminModelView):
    """Flask-Admin view for user records; access is restricted by AdminModelView."""
    # Allow admins to filter the list view by the user's uid column.
    column_filters = ['uid']
class StudyView(AdminModelView):
    """Flask-Admin view for study records; access is restricted by AdminModelView."""
    # Filter by id or the primary investigator; free-text search on the title.
    column_filters = ['id', 'primary_investigator_id']
    column_searchable_list = ['title']
class ApprovalView(AdminModelView):
    """Flask-Admin view for approval records; access is restricted by AdminModelView."""
    # Filter by the owning study or by the approver's uid.
    column_filters = ['study_id', 'approver_uid']
class WorkflowView(AdminModelView):
    """Flask-Admin view for workflow records; access is restricted by AdminModelView."""
    # Filter by the owning study or the workflow's own id.
    column_filters = ['study_id', 'id']
class FileView(AdminModelView):
column_filters = ['workflow_id']
column_filters = ['workflow_id', 'type']
@action('publish', 'Publish', 'Are you sure you want to publish this file(s)?')
def action_publish(self, ids):
FileService.publish_to_github(ids)
@action('update', 'Update', 'Are you sure you want to update this file(s)?')
def action_update(self, ids):
FileService.update_from_github(ids)
def json_formatter(view, context, model, name):
value = getattr(model, name)
json_value = json.dumps(value, ensure_ascii=False, indent=2)
return Markup('<pre>{}</pre>'.format(json_value))
return Markup(f'<pre>{json_value}</pre>')
class TaskEventView(AdminModelView):
column_filters = ['workflow_id', 'action']
@ -62,6 +79,7 @@ class TaskEventView(AdminModelView):
'form_data': json_formatter,
}
admin = Admin(app)
admin.add_view(StudyView(StudyModel, db.session))

View File

@ -1,5 +1,6 @@
from SpiffWorkflow import WorkflowException
from SpiffWorkflow.exceptions import WorkflowTaskExecException
from flask import g
from crc import ma, app
@ -24,6 +25,11 @@ class ApiError(Exception):
instance.task_id = task.task_spec.name or ""
instance.task_name = task.task_spec.description or ""
instance.file_name = task.workflow.spec.file or ""
# Fixme: spiffworkflow is doing something weird where task ends up referenced in the data in some cases.
if "task" in task.data:
task.data.pop("task")
instance.task_data = task.data
app.logger.error(message, exc_info=True)
return instance
@ -60,3 +66,5 @@ class ApiErrorSchema(ma.Schema):
def handle_invalid_usage(error):
response = ApiErrorSchema().dump(error)
return response, error.status_code

View File

@ -6,8 +6,9 @@ from sqlalchemy.exc import IntegrityError
from crc import session
from crc.api.common import ApiError, ApiErrorSchema
from crc.models.protocol_builder import ProtocolBuilderStatus
from crc.models.study import StudySchema, StudyModel, Study
from crc.models.study import Study, StudyModel, StudySchema, StudyStatus
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
def add_study(body):
@ -17,11 +18,11 @@ def add_study(body):
if 'title' not in body:
raise ApiError("missing_title", "Can't create a new study without a title.")
study_model = StudyModel(user_uid=g.user.uid,
study_model = StudyModel(user_uid=UserService.current_user().uid,
title=body['title'],
primary_investigator_id=body['primary_investigator_id'],
last_updated=datetime.now(),
protocol_builder_status=ProtocolBuilderStatus.ACTIVE)
status=StudyStatus.in_progress)
session.add(study_model)
errors = StudyService._add_all_workflow_specs_to_study(study_model)
@ -65,8 +66,9 @@ def delete_study(study_id):
def user_studies():
"""Returns all the studies associated with the current user. """
StudyService.synch_with_protocol_builder_if_enabled(g.user)
studies = StudyService.get_studies_for_user(g.user)
user = UserService.current_user(allow_admin_impersonate=True)
StudyService.synch_with_protocol_builder_if_enabled(user)
studies = StudyService.get_studies_for_user(user)
results = StudySchema(many=True).dump(studies)
return results

View File

@ -1,10 +1,11 @@
import flask
from flask import g, request
from crc import app, db
from crc import app, session
from crc.api.common import ApiError
from crc.models.user import UserModel, UserModelSchema
from crc.services.ldap_service import LdapService, LdapModel
from crc.services.user_service import UserService
"""
.. module:: crc.api.user
@ -31,14 +32,14 @@ def verify_token(token=None):
failure_error = ApiError("invalid_token", "Unable to decode the token you provided. Please re-authenticate",
status_code=403)
if not _is_production() and (token is None or 'user' not in g):
g.user = UserModel.query.first()
token = g.user.encode_auth_token()
if token:
try:
token_info = UserModel.decode_auth_token(token)
g.user = UserModel.query.filter_by(uid=token_info['sub']).first()
# If the user is valid, store the token for this session
if g.user:
g.token = token
except:
raise failure_error
if g.user is not None:
@ -47,27 +48,37 @@ def verify_token(token=None):
raise failure_error
# If there's no token and we're in production, get the user from the SSO headers and return their token
if not token and _is_production():
elif _is_production():
uid = _get_request_uid(request)
if uid is not None:
db_user = UserModel.query.filter_by(uid=uid).first()
# If the user is valid, store the user and token for this session
if db_user is not None:
g.user = db_user
token = g.user.encode_auth_token().decode()
g.token = token
token_info = UserModel.decode_auth_token(token)
return token_info
else:
raise ApiError("no_user", "User not found. Please login via the frontend app before accessing this feature.",
status_code=403)
raise ApiError("no_user",
"User not found. Please login via the frontend app before accessing this feature.",
status_code=403)
else:
# Fall back to a default user if this is not production.
g.user = UserModel.query.first()
token = g.user.encode_auth_token()
token_info = UserModel.decode_auth_token(token)
return token_info
def verify_token_admin(token=None):
"""
Verifies the token for the user (if provided) in non-production environment. If in production environment,
checks that the user is in the list of authorized admins
Verifies the token for the user (if provided) in non-production environment.
If in production environment, checks that the user is in the list of authorized admins
Args:
token: Optional[str]
@ -75,21 +86,44 @@ def verify_token_admin(token=None):
Returns:
token: str
"""
# If this is production, check that the user is in the list of admins
if _is_production():
uid = _get_request_uid(request)
if uid is not None and uid in app.config['ADMIN_UIDS']:
return verify_token()
# If we're not in production, just use the normal verify_token method
else:
return verify_token(token)
verify_token(token)
if "user" in g and g.user.is_admin():
token = g.user.encode_auth_token()
token_info = UserModel.decode_auth_token(token)
return token_info
def get_current_user():
return UserModelSchema().dump(g.user)
def start_impersonating(uid):
    """Begin impersonating the user with the given uid (admins only).

    If the caller is not an admin, or no uid was supplied, the impersonation
    state is left untouched.  The serialized current user — the impersonated
    one when impersonation is active — is returned either way.
    """
    if uid is not None and UserService.user_is_admin():
        UserService.start_impersonating(uid)
    effective_user = UserService.current_user(allow_admin_impersonate=True)
    return UserModelSchema().dump(effective_user)
def stop_impersonating():
    """End any active admin impersonation and return the real current user.

    Non-admin callers simply get their own serialized user back.
    """
    if UserService.user_is_admin():
        UserService.stop_impersonating()
    real_user = UserService.current_user(allow_admin_impersonate=False)
    return UserModelSchema().dump(real_user)
def get_current_user(admin_impersonate_uid=None):
    """Return the serialized current user.

    If the caller is an admin, a non-None admin_impersonate_uid switches
    impersonation to that user; passing None clears any active impersonation.
    Non-admin callers always get their own user back.
    """
    if UserService.user_is_admin():
        if admin_impersonate_uid is not None:
            UserService.start_impersonating(admin_impersonate_uid)
        else:
            UserService.stop_impersonating()

    # Name the flag instead of passing a bare positional boolean, matching
    # every other call to UserService.current_user in this module.
    allow_impersonate = UserService.user_is_admin() and UserService.admin_is_impersonating()
    user = UserService.current_user(allow_admin_impersonate=allow_impersonate)
    return UserModelSchema().dump(user)
def get_all_users():
    """Return every user in the database, serialized as a list (admins only).

    Anonymous or non-admin callers fall through and receive None, exactly as
    the original implicit return did.
    """
    if "user" not in g or not g.user.is_admin():
        return None
    users = session.query(UserModel).all()
    return UserModelSchema(many=True).dump(users)
def login(
@ -132,7 +166,6 @@ def login(
# X-Forwarded-Server: dev.crconnect.uvadcos.io
# Connection: Keep-Alive
# If we're in production, override any uid with the uid from the SSO request headers
if _is_production():
uid = _get_request_uid(request)
@ -180,6 +213,8 @@ def _handle_login(user_info: LdapModel, redirect_url=None):
# Return the frontend auth callback URL, with auth token appended.
auth_token = user.encode_auth_token().decode()
g.token = auth_token
if redirect_url is not None:
if redirect_url.find("http://") != 0 and redirect_url.find("https://") != 0:
redirect_url = "http://" + redirect_url
@ -192,13 +227,13 @@ def _handle_login(user_info: LdapModel, redirect_url=None):
def _upsert_user(user_info):
user = db.session.query(UserModel).filter(UserModel.uid == user_info.uid).first()
user = session.query(UserModel).filter(UserModel.uid == user_info.uid).first()
if user is None:
# Add new user
user = UserModel()
else:
user = db.session.query(UserModel).filter(UserModel.uid == user_info.uid).with_for_update().first()
user = session.query(UserModel).filter(UserModel.uid == user_info.uid).with_for_update().first()
user.uid = user_info.uid
user.display_name = user_info.display_name
@ -206,8 +241,8 @@ def _upsert_user(user_info):
user.affiliation = user_info.affiliation
user.title = user_info.title
db.session.add(user)
db.session.commit()
session.add(user)
session.commit()
return user

View File

@ -13,6 +13,7 @@ from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, Workflow
from crc.services.file_service import FileService
from crc.services.lookup_service import LookupService
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.workflow_service import WorkflowService
@ -95,19 +96,30 @@ def delete_workflow_specification(spec_id):
session.commit()
def get_workflow(workflow_id, soft_reset=False, hard_reset=False):
def get_workflow(workflow_id, soft_reset=False, hard_reset=False, do_engine_steps=True):
"""Soft reset will attempt to update to the latest spec without starting over,
Hard reset will update to the latest spec and start from the beginning.
Read Only will return the workflow in a read only state, without running any
engine tasks or logging any events. """
workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset)
if do_engine_steps:
processor.do_engine_steps()
processor.save()
WorkflowService.update_task_assignments(processor)
workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
WorkflowService.update_task_assignments(processor)
return WorkflowApiSchema().dump(workflow_api_model)
def get_task_events(action):
def get_task_events(action = None, workflow = None, study = None):
"""Provides a way to see a history of what has happened, or get a list of tasks that need your attention."""
query = session.query(TaskEventModel).filter(TaskEventModel.user_uid == g.user.uid)
if action:
query = query.filter(TaskEventModel.action == action)
if workflow:
query = query.filter(TaskEventModel.workflow_id == workflow)
if study:
query = query.filter(TaskEventModel.study_id == study)
events = query.all()
# Turn the database records into something a little richer for the UI to use.
@ -130,7 +142,7 @@ def set_current_task(workflow_id, task_id):
task_id = uuid.UUID(task_id)
spiff_task = processor.bpmn_workflow.get_task(task_id)
_verify_user_and_role(processor, spiff_task)
user_uid = g.user.uid
user_uid = UserService.current_user(allow_admin_impersonate=True).uid
if spiff_task.state != spiff_task.COMPLETED and spiff_task.state != spiff_task.READY:
raise ApiError("invalid_state", "You may not move the token to a task who's state is not "
"currently set to COMPLETE or READY.")
@ -173,7 +185,8 @@ def update_task(workflow_id, task_id, body, terminate_loop=None):
processor.save()
# Log the action, and any pending task assignments in the event of lanes in the workflow.
WorkflowService.log_task_action(g.user.uid, processor, spiff_task, WorkflowService.TASK_ACTION_COMPLETE)
user = UserService.current_user(allow_admin_impersonate=False) # Always log as the real user.
WorkflowService.log_task_action(user.uid, processor, spiff_task, WorkflowService.TASK_ACTION_COMPLETE)
WorkflowService.update_task_assignments(processor)
workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
@ -233,19 +246,11 @@ def lookup(workflow_id, field_id, query=None, value=None, limit=10):
def _verify_user_and_role(processor, spiff_task):
"""Assures the currently logged in user can access the given workflow and task, or
raises an error.
Allow administrators to modify tasks, otherwise assure that the current user
is allowed to edit or update the task. Will raise the appropriate error if user
is not authorized. """
if 'user' not in g:
raise ApiError("logged_out", "You are no longer logged in.", status_code=401)
if g.user.uid in app.config['ADMIN_UIDS']:
return g.user.uid
raises an error. """
user = UserService.current_user(allow_admin_impersonate=True)
allowed_users = WorkflowService.get_users_assigned_to_task(processor, spiff_task)
if g.user.uid not in allowed_users:
if user.uid not in allowed_users:
raise ApiError.from_task("permission_denied",
f"This task must be completed by '{allowed_users}', "
f"but you are {g.user.uid}", spiff_task)
f"but you are {user.uid}", spiff_task)

View File

@ -143,7 +143,8 @@ class NavigationItemSchema(ma.Schema):
class WorkflowApi(object):
def __init__(self, id, status, next_task, navigation,
spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks, last_updated, title):
spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks,
last_updated, title):
self.id = id
self.status = status
self.next_task = next_task # The next task that requires user input.

View File

@ -17,16 +17,17 @@ class ProtocolBuilderInvestigatorType(enum.Enum):
SCI = "Scientific Contact"
# Deprecated: Marked for removal
class ProtocolBuilderStatus(enum.Enum):
# • Active: found in PB and no HSR number and not hold
# • Hold: store boolean value in CR Connect (add to Study Model)
# • Open To Enrollment: has start date and HSR number?
# • Abandoned: deleted in PB
INCOMPLETE = 'incomplete' # Found in PB but not ready to start (not q_complete)
ACTIVE = 'active', # found in PB, marked as "q_complete" and no HSR number and not hold
HOLD = 'hold', # CR Connect side, if the Study ias marked as "hold".
OPEN = 'open', # Open To Enrollment: has start date and HSR number?
ABANDONED = 'Abandoned' # Not found in PB
incomplete = 'incomplete' # Found in PB but not ready to start (not q_complete)
active = 'active' # found in PB, marked as "q_complete" and no HSR number and not hold
hold = 'hold'  # CR Connect side, if the Study is marked as "hold".
open = 'open' # Open To Enrollment: has start date and HSR number?
abandoned = 'abandoned' # Not found in PB
#DRAFT = 'draft', # !Q_COMPLETE

View File

@ -1,3 +1,7 @@
import datetime
import enum
import json
import marshmallow
from marshmallow import INCLUDE, fields
from marshmallow_enum import EnumField
@ -11,12 +15,26 @@ from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowState, Workfl
WorkflowModel
class StudyStatus(enum.Enum):
    """Lifecycle states for a study as tracked on the CR Connect side."""
    in_progress = 'in_progress'
    hold = 'hold'
    open_for_enrollment = 'open_for_enrollment'
    abandoned = 'abandoned'
class IrbStatus(enum.Enum):
    """IRB progress states, mirroring the study's state in Protocol Builder."""
    incomplete_in_protocol_builder = 'incomplete in protocol builder'
    completed_in_protocol_builder = 'completed in protocol builder'
    # presumably set once an HSR number has been assigned to the study — TODO confirm
    hsr_assigned = 'hsr number assigned'
class StudyModel(db.Model):
__tablename__ = 'study'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String)
last_updated = db.Column(db.DateTime(timezone=True), default=func.now())
protocol_builder_status = db.Column(db.Enum(ProtocolBuilderStatus))
status = db.Column(db.Enum(StudyStatus))
irb_status = db.Column(db.Enum(IrbStatus))
primary_investigator_id = db.Column(db.String, nullable=True)
sponsor = db.Column(db.String, nullable=True)
hsr_number = db.Column(db.String, nullable=True)
@ -25,6 +43,7 @@ class StudyModel(db.Model):
investigator_uids = db.Column(db.ARRAY(db.String), nullable=True)
requirements = db.Column(db.ARRAY(db.Integer), nullable=True)
on_hold = db.Column(db.Boolean, default=False)
enrollment_date = db.Column(db.DateTime(timezone=True), nullable=True)
def update_from_protocol_builder(self, pbs: ProtocolBuilderStudy):
self.hsr_number = pbs.HSRNUMBER
@ -32,22 +51,27 @@ class StudyModel(db.Model):
self.user_uid = pbs.NETBADGEID
self.last_updated = pbs.DATE_MODIFIED
self.protocol_builder_status = ProtocolBuilderStatus.ACTIVE
self.irb_status = IrbStatus.incomplete_in_protocol_builder
self.status = StudyStatus.in_progress
if pbs.HSRNUMBER:
self.protocol_builder_status = ProtocolBuilderStatus.OPEN
self.irb_status = IrbStatus.hsr_assigned
self.status = StudyStatus.open_for_enrollment
if self.on_hold:
self.protocol_builder_status = ProtocolBuilderStatus.HOLD
self.status = StudyStatus.hold
class WorkflowMetadata(object):
def __init__(self, id, name, display_name, description, spec_version, category_id, state: WorkflowState, status: WorkflowStatus,
total_tasks, completed_tasks, display_order):
def __init__(self, id, name = None, display_name = None, description = None, spec_version = None,
category_id = None, category_display_name = None, state: WorkflowState = None,
status: WorkflowStatus = None, total_tasks = None, completed_tasks = None,
display_order = None):
self.id = id
self.name = name
self.display_name = display_name
self.description = description
self.spec_version = spec_version
self.category_id = category_id
self.category_display_name = category_display_name
self.state = state
self.status = status
self.total_tasks = total_tasks
@ -64,6 +88,7 @@ class WorkflowMetadata(object):
description=workflow.workflow_spec.description,
spec_version=workflow.spec_version(),
category_id=workflow.workflow_spec.category_id,
category_display_name=workflow.workflow_spec.category.display_name,
state=WorkflowState.optional,
status=workflow.status,
total_tasks=workflow.total_tasks,
@ -79,7 +104,8 @@ class WorkflowMetadataSchema(ma.Schema):
class Meta:
model = WorkflowMetadata
additional = ["id", "name", "display_name", "description",
"total_tasks", "completed_tasks", "display_order"]
"total_tasks", "completed_tasks", "display_order",
"category_id", "category_display_name"]
unknown = INCLUDE
@ -102,15 +128,15 @@ class CategorySchema(ma.Schema):
class Study(object):
def __init__(self, title, last_updated, primary_investigator_id, user_uid,
id=None,
protocol_builder_status=None,
id=None, status=None, irb_status=None,
sponsor="", hsr_number="", ind_number="", categories=[],
files=[], approvals=[], **argsv):
files=[], approvals=[], enrollment_date=None, **argsv):
self.id = id
self.user_uid = user_uid
self.title = title
self.last_updated = last_updated
self.protocol_builder_status = protocol_builder_status
self.status = status
self.irb_status = irb_status
self.primary_investigator_id = primary_investigator_id
self.sponsor = sponsor
self.hsr_number = hsr_number
@ -119,6 +145,7 @@ class Study(object):
self.approvals = approvals
self.warnings = []
self.files = files
self.enrollment_date = enrollment_date
@classmethod
def from_model(cls, study_model: StudyModel):
@ -128,9 +155,29 @@ class Study(object):
return instance
def update_model(self, study_model: StudyModel):
for k,v in self.__dict__.items():
if not k.startswith('_'):
study_model.__dict__[k] = v
"""As the case for update was very reduced, it's mostly and specifically
updating only the study status and generating a history record
"""
status = StudyStatus(self.status)
study_model.last_updated = datetime.datetime.now()
study_model.status = status
if status == StudyStatus.open_for_enrollment:
study_model.enrollment_date = self.enrollment_date
# change = {
# 'status': ProtocolBuilderStatus(self.protocol_builder_status).value,
# 'comment': '' if not hasattr(self, 'comment') else self.comment,
# 'date': str(datetime.datetime.now())
# }
# if study_model.changes_history:
# changes_history = json.loads(study_model.changes_history)
# changes_history.append(change)
# else:
# changes_history = [change]
# study_model.changes_history = json.dumps(changes_history)
def model_args(self):
"""Arguments that can be passed into the Study Model to update it."""
@ -140,22 +187,44 @@ class Study(object):
return self_dict
class StudySchema(ma.Schema):
class StudyForUpdateSchema(ma.Schema):
id = fields.Integer(required=False, allow_none=True)
categories = fields.List(fields.Nested(CategorySchema), dump_only=True)
warnings = fields.List(fields.Nested(ApiErrorSchema), dump_only=True)
protocol_builder_status = EnumField(ProtocolBuilderStatus)
status = EnumField(StudyStatus, by_value=True)
hsr_number = fields.String(allow_none=True)
sponsor = fields.String(allow_none=True)
ind_number = fields.String(allow_none=True)
files = fields.List(fields.Nested(FileSchema), dump_only=True)
approvals = fields.List(fields.Nested('ApprovalSchema'), dump_only=True)
enrollment_date = fields.DateTime(allow_none=True)
comment = fields.String(allow_none=True)
class Meta:
model = Study
additional = ["id", "title", "last_updated", "primary_investigator_id", "user_uid",
"sponsor", "ind_number", "approvals", "files"]
unknown = INCLUDE
@marshmallow.post_load
def make_study(self, data, **kwargs):
"""Can load the basic study data for updates to the database, but categories are write only"""
return Study(**data)
class StudySchema(ma.Schema):
id = fields.Integer(required=False, allow_none=True)
categories = fields.List(fields.Nested(CategorySchema), dump_only=True)
warnings = fields.List(fields.Nested(ApiErrorSchema), dump_only=True)
protocol_builder_status = EnumField(StudyStatus, by_value=True)
status = EnumField(StudyStatus, by_value=True)
hsr_number = fields.String(allow_none=True)
sponsor = fields.String(allow_none=True)
ind_number = fields.String(allow_none=True)
files = fields.List(fields.Nested(FileSchema), dump_only=True)
approvals = fields.List(fields.Nested('ApprovalSchema'), dump_only=True)
enrollment_date = fields.Date(allow_none=True)
class Meta:
model = Study
additional = ["id", "title", "last_updated", "primary_investigator_id", "user_uid",
"sponsor", "ind_number", "approvals", "files", "enrollment_date"]
unknown = INCLUDE
@marshmallow.post_load

View File

@ -50,15 +50,16 @@ class TaskEvent(object):
self.task_type = model.task_type
self.task_state = model.task_state
self.task_lane = model.task_lane
self.date = model.date
class TaskEventSchema(ma.Schema):
study = fields.Nested(StudySchema, dump_only=True)
workflow = fields.Nested(WorkflowMetadataSchema, dump_only=True)
task_lane = fields.String(allow_none=True, required=False)
class Meta:
model = TaskEvent
additional = ["id", "user_uid", "action", "task_id", "task_title",
"task_name", "task_type", "task_state", "task_lane"]
"task_name", "task_type", "task_state", "task_lane", "date"]
unknown = INCLUDE

View File

@ -1,6 +1,7 @@
import datetime
import jwt
from marshmallow import fields
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from crc import db, app
@ -21,6 +22,10 @@ class UserModel(db.Model):
# TODO: Add Department and School
def is_admin(self):
    """Return True if this user's uid is in the configured ADMIN_UIDS list."""
    # Currently admin abilities are set in the configuration, but this
    # may change in the future.
    return self.uid in app.config['ADMIN_UIDS']
def encode_auth_token(self):
"""
@ -60,4 +65,14 @@ class UserModelSchema(SQLAlchemyAutoSchema):
model = UserModel
load_instance = True
include_relationships = True
is_admin = fields.Method('get_is_admin', dump_only=True)
def get_is_admin(self, obj):
    """Serializer method backing the dump-only `is_admin` schema field."""
    return obj.is_admin()
class AdminSessionModel(db.Model):
    """Records an admin impersonation session: maps the admin's auth
    token to the uid of the user being impersonated."""
    __tablename__ = 'admin_session'
    id = db.Column(db.Integer, primary_key=True)
    # The admin's session token; unique so one impersonation per session.
    token = db.Column(db.String, unique=True)
    # uid of the user the admin is currently impersonating.
    admin_impersonate_uid = db.Column(db.String)

View File

@ -40,7 +40,7 @@ class FactService(Script):
else:
details = "unknown fact type."
self.add_data_to_task(task, details)
#self.add_data_to_task(task, details)
print(details)
return details

50
crc/scripts/ldap.py Normal file
View File

@ -0,0 +1,50 @@
import copy
from crc import app
from crc.api.common import ApiError
from crc.scripts.script import Script
from crc.services.ldap_service import LdapService
class Ldap(Script):
    """This Script allows to be introduced as part of a workflow and called from there, taking
    a UID (or several) as input and looking it up through LDAP to return the person's details """

    def get_description(self):
        return """
Attempts to create a dictionary with person details, using the
provided argument (a UID) and look it up through LDAP.

Examples:
supervisor_info = ldap(supervisor_uid) // Sets the supervisor information to ldap details for the given uid.
"""

    def do_task_validate_only(self, task, *args, **kwargs):
        # NOTE(review): validation performs the same LDAP lookup as a real
        # run — confirm that is acceptable for validate-only execution.
        return self.set_users_info_in_task(task, args)

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        return self.set_users_info_in_task(task, args)

    def set_users_info_in_task(self, task, args):
        """Look up the single UID in args via LDAP and return a dict of
        the person's details.

        :param task: the current workflow task (unused beyond the Script API).
        :param args: positional script arguments; must contain exactly one UID.
        :returns: dict of LDAP attributes for the person.
        :raises ApiError: if anything other than exactly one argument is given.
        """
        if len(args) != 1:
            raise ApiError(code="missing_argument",
                           message="Ldap takes a single argument, the "
                                   "UID for the person we want to look up")
        uid = args[0]
        user_info = LdapService.user_info(uid)
        # Build the result directly; the original assigned an empty dict
        # first and immediately overwrote it (dead store, removed).
        return {
            "display_name": user_info.display_name,
            "given_name": user_info.given_name,
            "email_address": user_info.email_address,
            "telephone_number": user_info.telephone_number,
            "title": user_info.title,
            "department": user_info.department,
            "affiliation": user_info.affiliation,
            "sponsor_type": user_info.sponsor_type,
            "uid": user_info.uid,
            "proper_name": user_info.proper_name()
        }

View File

@ -11,7 +11,7 @@ class RequestApproval(Script):
return """
Creates an approval request on this workflow, by the given approver_uid(s),"
Takes multiple arguments, which should point to data located in current task
or be quoted strings. The order is important. Approvals will be processed
or be quoted strings. The order is important. Approvals will be processed
in this order.
Example:

View File

@ -23,6 +23,53 @@ class Script(object):
"This is an internal error. The script you are trying to execute '%s' " % self.__class__.__name__ +
"does must provide a validate_only option that mimics the do_task, " +
"but does not make external calls or database updates." )
@staticmethod
def generate_augmented_list(task, study_id, workflow_id):
    """Build a dict mapping each Script subclass's module name to a callable.

    Each callable closes over a fresh instance of the subclass and
    forwards to its do_task method, returning a value rather than
    updating the task data.  The dict is handed to the script engine as
    helper functions available to workflow scripts.

    :returns: dict of {module_basename: callable(*args)}.
    """
    def make_closure(subclass, task, study_id, workflow_id):
        # One instance per script; the lambda reuses it on every call.
        instance = subclass()
        return lambda *a: subclass.do_task(instance, task, study_id, workflow_id, *a)

    execlist = {}
    for subclass in Script.get_all_subclasses():
        # Key by the last module-path segment, e.g. crc.scripts.ldap -> "ldap".
        execlist[subclass.__module__.split('.')[-1]] = make_closure(
            subclass, task, study_id, workflow_id)
    return execlist
@staticmethod
def generate_augmented_validate_list(task, study_id, workflow_id):
    """Build a dict mapping each Script subclass's module name to a callable.

    Validation twin of generate_augmented_list: the callables forward to
    do_task_validate_only so no external calls or database updates are
    made, while still returning a value for the workflow script.

    :returns: dict of {module_basename: callable(*args)}.
    """
    def make_closure_validate(subclass, task, study_id, workflow_id):
        # One instance per script; the lambda reuses it on every call.
        instance = subclass()
        return lambda *a: subclass.do_task_validate_only(instance, task, study_id, workflow_id, *a)

    execlist = {}
    for subclass in Script.get_all_subclasses():
        # Key by the last module-path segment, e.g. crc.scripts.ldap -> "ldap".
        execlist[subclass.__module__.split('.')[-1]] = make_closure_validate(
            subclass, task, study_id, workflow_id)
    return execlist
@staticmethod
def get_all_subclasses():

View File

@ -8,7 +8,7 @@ from crc.scripts.script import Script
from crc.services.file_service import FileService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.study_service import StudyService
from box import Box
class StudyInfo(Script):
"""Please see the detailed description that is provided below. """
@ -149,11 +149,11 @@ Returns information specific to the protocol.
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
"""For validation only, pretend no results come back from pb"""
self.check_args(args)
self.check_args(args,2)
# Assure the reference file exists (a bit hacky, but we want to raise this error early, and cleanly.)
FileService.get_reference_file_data(FileService.DOCUMENT_LIST)
FileService.get_reference_file_data(FileService.INVESTIGATOR_LIST)
data = {
data = Box({
"study":{
"info": {
"id": 12,
@ -195,38 +195,50 @@ Returns information specific to the protocol.
'id': 0,
}
}
}
self.add_data_to_task(task=task, data=data["study"])
self.add_data_to_task(task, {"documents": StudyService().get_documents_status(study_id)})
})
if args[0]=='documents':
return StudyService().get_documents_status(study_id)
return data['study'][args[0]]
#self.add_data_to_task(task=task, data=data["study"])
#self.add_data_to_task(task, {"documents": StudyService().get_documents_status(study_id)})
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
    """Return the requested piece of study information.

    :param args: args[0] selects the info type (one of
        StudyInfo.type_options); optional args[1] is a key prefix used
        to filter dict-shaped results.
    :returns: a Box for dict results (attribute-style access in BPMN
        scripts), otherwise the raw value; None for an unmatched command
        (check_args should prevent that).
    :raises ApiError: via check_args for bad arguments.
    """
    self.check_args(args, 2)
    prefix = args[1] if len(args) > 1 else None
    cmd = args[0]
    retval = None
    # The commands are mutually exclusive, so use an elif chain instead
    # of the original's repeated independent ifs.
    if cmd == 'info':
        study = session.query(StudyModel).filter_by(id=study_id).first()
        schema = StudySchema()
        retval = schema.dump(study)
    elif cmd == 'investigators':
        retval = StudyService().get_investigators(study_id)
    elif cmd == 'roles':
        retval = StudyService().get_investigators(study_id, all=True)
    elif cmd == 'details':
        retval = self.pb.get_study_details(study_id)
    elif cmd == 'approvals':
        retval = StudyService().get_approvals(study_id)
    elif cmd == 'documents':
        retval = StudyService().get_documents_status(study_id)
    elif cmd == 'protocol':
        retval = StudyService().get_protocol(study_id)

    if isinstance(retval, dict) and prefix is not None:
        # Keep only the keys that start with the requested prefix.
        return Box({k: v for k, v in retval.items() if k.startswith(prefix)})
    elif isinstance(retval, dict):
        return Box(retval)
    else:
        return retval
def check_args(self, args):
if len(args) != 1 or (args[0] not in StudyInfo.type_options):
def check_args(self, args, maxlen=1):
    """Validate the script's positional arguments.

    Requires between 1 and maxlen arguments, the first of which must be
    a recognized info type.

    :raises ApiError: when the argument count or info type is invalid.
    """
    if len(args) < 1 or len(args) > maxlen or (args[0] not in StudyInfo.type_options):
        # Fixed message: the old text claimed "a single argument" even
        # though up to maxlen arguments are accepted.
        raise ApiError(code="missing_argument",
                       message="The StudyInfo script requires at least one argument "
                               "(and at most %i), the first of which must be "
                               "one of %s" % (maxlen, ",".join(StudyInfo.type_options)))

View File

@ -2,6 +2,7 @@ import hashlib
import json
import os
from datetime import datetime
from github import Github, UnknownObjectException
from uuid import UUID
from lxml import etree
@ -78,7 +79,8 @@ class FileService(object):
""" Opens a reference file (assumes that it is xls file) and returns the data as a
dictionary, each row keyed on the given index_column name. If there are columns
that should be represented as integers, pass these as an array of int_columns, lest
you get '1.0' rather than '1' """
you get '1.0' rather than '1'
fixme: This is stupid stupid slow. Place it in the database and just check if it is up to date."""
data_model = FileService.get_reference_file_data(reference_file_name)
xls = ExcelFile(data_model.data)
df = xls.parse(xls.sheet_names[0])
@ -332,3 +334,51 @@ class FileService(object):
file_model.archived = True
session.commit()
app.logger.info("Failed to delete file, so archiving it instead. %i, due to %s" % (file_id, str(ie)))
@staticmethod
def update_from_github(file_ids):
    """Pull the content of each given file from GitHub into the database.

    For every file id, the newest FileDataModel row (highest version) is
    overwritten with the matching file's content from the repository.
    Files not present in the repo are skipped.
    """
    gh_token = app.config['GITHUB_TOKEN']
    _github = Github(gh_token)
    # NOTE(review): repository name is hard-coded — presumably a dev
    # placeholder; confirm whether this should come from configuration.
    repo = _github.get_user().get_repo('crispy-fiesta')
    for file_id in file_ids:
        # Latest version only: order by version descending, take first row.
        file_data_model = FileDataModel.query.filter_by(
            file_model_id=file_id
        ).order_by(
            desc(FileDataModel.version)
        ).first()
        try:
            repo_file = repo.get_contents(file_data_model.file_model.name)
        except UnknownObjectException:
            # TODO: Add message indicating file is not in the repo
            pass
        else:
            file_data_model.data = repo_file.decoded_content
            session.add(file_data_model)
            session.commit()
@staticmethod
def publish_to_github(file_ids):
    """Create or update each of the given files in the GitHub repository.

    Bug fix: the original returned from inside the loop, so only the
    FIRST file id was ever published.  All files are now processed; the
    return value reports {'created': True} if any file was newly
    created, otherwise {'updated': True} — identical to the original
    for a single-element file_ids list.

    :param file_ids: iterable of FileModel ids to publish.
    :returns: {'created': True} / {'updated': True}, or None when
        file_ids is empty.
    """
    gh_token = app.config['GITHUB_TOKEN']
    _github = Github(gh_token)
    # NOTE(review): repository name is hard-coded — confirm whether this
    # should come from configuration.
    repo = _github.get_user().get_repo('crispy-fiesta')
    created = False
    updated = False
    for file_id in file_ids:
        file_data_model = FileDataModel.query.filter_by(file_model_id=file_id).first()
        try:
            repo_file = repo.get_contents(file_data_model.file_model.name)
        except UnknownObjectException:
            # Not in the repo yet: create it.
            repo.create_file(
                path=file_data_model.file_model.name,
                message=f'Creating {file_data_model.file_model.name}',
                content=file_data_model.data
            )
            created = True
        else:
            # Already present: push the new content against the known sha.
            repo.update_file(
                path=repo_file.path,
                message=f'Updating {file_data_model.file_model.name}',
                content=file_data_model.data,
                sha=repo_file.sha
            )
            updated = True
    if created:
        return {'created': True}
    if updated:
        return {'updated': True}

View File

@ -1,6 +1,5 @@
from copy import copy
from datetime import datetime
import json
from typing import List
import requests
@ -13,16 +12,15 @@ from crc.api.common import ApiError
from crc.models.file import FileModel, FileModelSchema, File
from crc.models.ldap import LdapSchema
from crc.models.protocol_builder import ProtocolBuilderStudy, ProtocolBuilderStatus
from crc.models.task_event import TaskEventModel
from crc.models.study import StudyModel, Study, Category, WorkflowMetadata
from crc.models.study import StudyModel, Study, StudyStatus, Category, WorkflowMetadata
from crc.models.task_event import TaskEventModel, TaskEvent
from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowModel, WorkflowSpecModel, WorkflowState, \
WorkflowStatus
from crc.services.approval_service import ApprovalService
from crc.services.file_service import FileService
from crc.services.ldap_service import LdapService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.approval_service import ApprovalService
from crc.models.approval import Approval
class StudyService(object):
@ -63,11 +61,10 @@ class StudyService(object):
files = (File.from_models(model, FileService.get_file_data(model.id),
FileService.get_doc_dictionary()) for model in files)
study.files = list(files)
# Calling this line repeatedly is very very slow. It creates the
# master spec and runs it. Don't execute this for Abandoned studies, as
# we don't have the information to process them.
if study.protocol_builder_status != ProtocolBuilderStatus.ABANDONED:
if study.status != StudyStatus.abandoned:
status = StudyService.__get_study_status(study_model)
study.warnings = StudyService.__update_status_of_workflow_meta(workflow_metas, status)
@ -268,7 +265,7 @@ class StudyService(object):
for study in db_studies:
pb_study = next((pbs for pbs in pb_studies if pbs.STUDYID == study.id), None)
if not pb_study:
study.protocol_builder_status = ProtocolBuilderStatus.ABANDONED
study.status = StudyStatus.abandoned
db.session.commit()

View File

@ -0,0 +1,117 @@
from flask import g
from crc import session
from crc.api.common import ApiError
from crc.models.user import UserModel, AdminSessionModel
class UserService(object):
    """Provides common tools for working with users, including admin
    impersonation of other users."""

    @staticmethod
    def has_user():
        """Return True if a user is currently logged in (token and user on g)."""
        return 'token' in g and \
               bool(g.token) and \
               'user' in g and \
               bool(g.user)

    @staticmethod
    def user_is_admin():
        """Return True if the current user is logged in and is an admin."""
        return UserService.has_user() and g.user.is_admin()

    @staticmethod
    def admin_is_impersonating():
        """Return True if the current admin user is impersonating another user.

        :raises ApiError: 403 when the current user is not an admin.
        """
        if UserService.user_is_admin():
            admin_session: AdminSessionModel = UserService.get_admin_session()
            return admin_session is not None
        else:
            raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)

    @staticmethod
    def is_different_user(uid):
        """Return True if the given uid differs from the current user's uid.

        Bug fix: the original compared uids with `is not` (object
        identity), which is unreliable for strings; use `!=` (equality).
        """
        return UserService.has_user() and uid is not None and uid != g.user.uid

    @staticmethod
    def current_user(allow_admin_impersonate=False) -> UserModel:
        """Return the effective current user.

        When allow_admin_impersonate is True and an admin is
        impersonating someone, the impersonated user is returned.

        :raises ApiError: 401 when no user is logged in.
        """
        if not UserService.has_user():
            raise ApiError("logged_out", "You are no longer logged in.", status_code=401)

        # Admins can pretend to be different users and act on a user's behalf in
        # some circumstances.
        if UserService.user_is_admin() and allow_admin_impersonate and UserService.admin_is_impersonating():
            return UserService.get_admin_session_user()
        else:
            return g.user

    # Admins can pretend to be different users and act on a user's behalf in some circumstances.
    # This method allows an admin user to start impersonating another user with the given uid.
    # Stops impersonating if the uid is None or invalid.
    @staticmethod
    def start_impersonating(uid=None):
        """Begin impersonating the user with the given uid.

        :raises ApiError: 401 when logged out, 403 when not an admin,
            or invalid_uid when uid is missing or unknown.
        """
        if not UserService.has_user():
            raise ApiError("logged_out", "You are no longer logged in.", status_code=401)

        if not UserService.user_is_admin():
            raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)

        if uid is None:
            raise ApiError("invalid_uid", "Please provide a valid user uid.")

        # Only start a new impersonation when none is active and the
        # target really is someone else.
        if not UserService.admin_is_impersonating() and UserService.is_different_user(uid):
            # Impersonate the user if the given uid is valid.
            impersonate_user = session.query(UserModel).filter(UserModel.uid == uid).first()

            if impersonate_user is not None:
                g.impersonate_user = impersonate_user

                # Store the uid and user session token.
                session.add(AdminSessionModel(token=g.token, admin_impersonate_uid=uid))
                session.commit()
            else:
                raise ApiError("invalid_uid", "The uid provided is not valid.")

    @staticmethod
    def stop_impersonating():
        """End any active impersonation for the current session.

        :raises ApiError: 401 when no user is logged in.
        """
        if not UserService.has_user():
            raise ApiError("logged_out", "You are no longer logged in.", status_code=401)

        # Clear out the current impersonating user.
        if 'impersonate_user' in g:
            del g.impersonate_user

        admin_session: AdminSessionModel = UserService.get_admin_session()
        if admin_session:
            session.delete(admin_session)
            session.commit()

    @staticmethod
    def in_list(uids, allow_admin_impersonate=False):
        """Returns true if the current user's id is in the given list of ids. False if there
        is no user, or the user is not in the list."""
        if UserService.has_user():  # If someone is logged in, lock tasks that don't belong to them.
            user = UserService.current_user(allow_admin_impersonate)
            if user.uid in uids:
                return True
        return False

    @staticmethod
    def get_admin_session() -> AdminSessionModel:
        """Return the AdminSessionModel for the current admin's token, or None.

        :raises ApiError: 403 when the current user is not an admin.
        """
        if UserService.user_is_admin():
            return session.query(AdminSessionModel).filter(AdminSessionModel.token == g.token).first()
        else:
            raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)

    @staticmethod
    def get_admin_session_user() -> UserModel:
        """Return the UserModel being impersonated in the current admin session.

        Returns None when there is no active admin session.

        :raises ApiError: 403 when the current user is not an admin.
        """
        if UserService.user_is_admin():
            admin_session = UserService.get_admin_session()

            if admin_session is not None:
                return session.query(UserModel).filter(UserModel.uid == admin_session.admin_impersonate_uid).first()
        else:
            raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)

View File

@ -17,6 +17,7 @@ from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
from SpiffWorkflow.exceptions import WorkflowTaskExecException
from SpiffWorkflow.specs import WorkflowSpec
import crc
from crc import session, app
from crc.api.common import ApiError
from crc.models.file import FileDataModel, FileModel, FileType
@ -28,64 +29,71 @@ from crc import app
class CustomBpmnScriptEngine(BpmnScriptEngine):
"""This is a custom script processor that can be easily injected into Spiff Workflow.
Rather than execute arbitrary code, this assumes the script references a fully qualified python class
such as myapp.RandomFact. """
It will execute python code read in from the bpmn. It will also make any scripts in the
scripts directory available for execution. """
def execute(self, task: SpiffTask, script, data):
"""
Functions in two modes.
1. If the command is proceeded by #! then this is assumed to be a python script, and will
attempt to load that python module and execute the do_task method on that script. Scripts
must be located in the scripts package and they must extend the script.py class.
2. If not proceeded by the #! this will attempt to execute the script directly and assumes it is
valid Python.
"""
# Shlex splits the whole string while respecting double quoted strings within
if not script.startswith('#!'):
try:
super().execute(task, script, data)
except SyntaxError as e:
raise ApiError.from_task('syntax_error',
f'If you are running a pre-defined script, please'
f' proceed the script with "#!", otherwise this is assumed to be'
f' pure python: {script}, {e.msg}', task=task)
else:
self.run_predefined_script(task, script[2:], data) # strip off the first two characters.
def run_predefined_script(self, task: SpiffTask, script, data):
commands = shlex.split(script)
path_and_command = commands[0].rsplit(".", 1)
if len(path_and_command) == 1:
module_name = "crc.scripts." + self.camel_to_snake(path_and_command[0])
class_name = path_and_command[0]
study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
if WorkflowProcessor.WORKFLOW_ID_KEY in task.workflow.data:
workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
else:
module_name = "crc.scripts." + path_and_command[0] + "." + self.camel_to_snake(path_and_command[1])
class_name = path_and_command[1]
workflow_id = None
try:
mod = __import__(module_name, fromlist=[class_name])
klass = getattr(mod, class_name)
study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
if WorkflowProcessor.WORKFLOW_ID_KEY in task.workflow.data:
workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
else:
workflow_id = None
if not isinstance(klass(), Script):
raise ApiError.from_task("invalid_script",
"This is an internal error. The script '%s:%s' you called " %
(module_name, class_name) +
"does not properly implement the CRC Script class.",
task=task)
if task.workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY]:
"""If this is running a validation, and not a normal process, then we want to
mimic running the script, but not make any external calls or database changes."""
klass().do_task_validate_only(task, study_id, workflow_id, *commands[1:])
augmentMethods = Script.generate_augmented_validate_list(task, study_id, workflow_id)
else:
klass().do_task(task, study_id, workflow_id, *commands[1:])
except ModuleNotFoundError:
raise ApiError.from_task("invalid_script",
"Unable to locate Script: '%s:%s'" % (module_name, class_name),
task=task)
augmentMethods = Script.generate_augmented_list(task, study_id, workflow_id)
super().execute(task, script, data, externalMethods=augmentMethods)
except SyntaxError as e:
raise ApiError('syntax_error',
f'Something is wrong with your python script '
f'please correct the following:'
f' {script}, {e.msg}')
except NameError as e:
raise ApiError('name_error',
f'something you are referencing does not exist:'
f' {script}, {e.name}')
# else:
# self.run_predefined_script(task, script[2:], data) # strip off the first two characters.
# def run_predefined_script(self, task: SpiffTask, script, data):
# commands = shlex.split(script)
# path_and_command = commands[0].rsplit(".", 1)
# if len(path_and_command) == 1:
# module_name = "crc.scripts." + self.camel_to_snake(path_and_command[0])
# class_name = path_and_command[0]
# else:
# module_name = "crc.scripts." + path_and_command[0] + "." + self.camel_to_snake(path_and_command[1])
# class_name = path_and_command[1]
# try:
# mod = __import__(module_name, fromlist=[class_name])
# klass = getattr(mod, class_name)
# study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
# if WorkflowProcessor.WORKFLOW_ID_KEY in task.workflow.data:
# workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
# else:
# workflow_id = None
#
# if not isinstance(klass(), Script):
# raise ApiError.from_task("invalid_script",
# "This is an internal error. The script '%s:%s' you called " %
# (module_name, class_name) +
# "does not properly implement the CRC Script class.",
# task=task)
# if task.workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY]:
# """If this is running a validation, and not a normal process, then we want to
# mimic running the script, but not make any external calls or database changes."""
# klass().do_task_validate_only(task, study_id, workflow_id, *commands[1:])
# else:
# klass().do_task(task, study_id, workflow_id, *commands[1:])
# except ModuleNotFoundError:
# raise ApiError.from_task("invalid_script",
# "Unable to locate Script: '%s:%s'" % (module_name, class_name),
# task=task)
def evaluate_expression(self, task, expression):
"""
@ -117,7 +125,8 @@ class WorkflowProcessor(object):
STUDY_ID_KEY = "study_id"
VALIDATION_PROCESS_KEY = "validate_only"
def __init__(self, workflow_model: WorkflowModel, soft_reset=False, hard_reset=False, validate_only=False):
def __init__(self, workflow_model: WorkflowModel,
soft_reset=False, hard_reset=False, validate_only=False):
"""Create a Workflow Processor based on the serialized information available in the workflow model.
If soft_reset is set to true, it will try to use the latest version of the workflow specification
without resetting to the beginning of the workflow. This will work for some minor changes to the spec.
@ -180,10 +189,10 @@ class WorkflowProcessor(object):
bpmn_workflow = BpmnWorkflow(spec, script_engine=self._script_engine)
bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = workflow_model.study_id
bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = validate_only
try:
bpmn_workflow.do_engine_steps()
except WorkflowException as we:
raise ApiError.from_task_spec("error_loading_workflow", str(we), we.sender)
# try:
# bpmn_workflow.do_engine_steps()
# except WorkflowException as we:
# raise ApiError.from_task_spec("error_loading_workflow", str(we), we.sender)
return bpmn_workflow
def save(self):

View File

@ -30,6 +30,7 @@ from crc.models.workflow import WorkflowModel, WorkflowStatus, WorkflowSpecModel
from crc.services.file_service import FileService
from crc.services.lookup_service import LookupService
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
from crc.services.workflow_processor import WorkflowProcessor
@ -154,10 +155,9 @@ class WorkflowService(object):
if len(field.options) > 0:
random_choice = random.choice(field.options)
if isinstance(random_choice, dict):
choice = random.choice(field.options)
return {
'value': choice['id'],
'label': choice['name']
'value': random_choice['id'],
'label': random_choice['name']
}
else:
# fixme: why it is sometimes an EnumFormFieldOption, and other times not?
@ -239,7 +239,7 @@ class WorkflowService(object):
nav_item['title'] = nav_item['task'].title # Prefer the task title.
user_uids = WorkflowService.get_users_assigned_to_task(processor, spiff_task)
if 'user' not in g or not g.user or g.user.uid not in user_uids:
if not UserService.in_list(user_uids, allow_admin_impersonate=True):
nav_item['state'] = WorkflowService.TASK_STATE_LOCKED
else:
@ -272,7 +272,7 @@ class WorkflowService(object):
workflow_api.next_task = WorkflowService.spiff_task_to_api_task(next_task, add_docs_and_forms=True)
# Update the state of the task to locked if the current user does not own the task.
user_uids = WorkflowService.get_users_assigned_to_task(processor, next_task)
if 'user' not in g or not g.user or g.user.uid not in user_uids:
if not UserService.in_list(user_uids, allow_admin_impersonate=True):
workflow_api.next_task.state = WorkflowService.TASK_STATE_LOCKED
return workflow_api
@ -471,6 +471,7 @@ class WorkflowService(object):
db.session.query(TaskEventModel). \
filter(TaskEventModel.workflow_id == processor.workflow_model.id). \
filter(TaskEventModel.action == WorkflowService.TASK_ACTION_ASSIGNMENT).delete()
db.session.commit()
for task in processor.get_current_user_tasks():
user_ids = WorkflowService.get_users_assigned_to_task(processor, task)

View File

@ -212,7 +212,8 @@
<bpmn:scriptTask id="Activity_10nxpt2" name="Load Study Details">
<bpmn:incoming>SequenceFlow_1r3yrhy</bpmn:incoming>
<bpmn:outgoing>Flow_09h1imz</bpmn:outgoing>
<bpmn:script>#! StudyInfo details</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['details'] = study_info('details')</bpmn:script>
</bpmn:scriptTask>
<bpmn:businessRuleTask id="Activity_PBMultiSiteCheckQ12" name="PB Multi-Site Check Q12" camunda:decisionRef="Decision_core_info_multi_site_q12">
<bpmn:incoming>Flow_09h1imz</bpmn:incoming>

View File

@ -453,7 +453,7 @@ Indicate all the possible formats in which you will transmit your data outside o
<bpmn:incoming>SequenceFlow_0k2r83n</bpmn:incoming>
<bpmn:incoming>SequenceFlow_0t6xl9i</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_16kyite</bpmn:outgoing>
<bpmn:script>#! CompleteTemplate NEW_DSP_template.docx Study_DataSecurityPlan</bpmn:script>
<bpmn:script>complete_template('NEW_DSP_template.docx','Study_DataSecurityPlan')</bpmn:script>
</bpmn:scriptTask>
<bpmn:manualTask id="Task_0q6ir2l" name="View Instructions">
<bpmn:documentation>##### Instructions

View File

@ -44,7 +44,7 @@
{% endif %}{% endfor %}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:properties>
<camunda:property name="display_name" value="Documents and Approvals" />
<camunda:property name="display_name" value="'Documents and Approvals'" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>Flow_142jtxs</bpmn:incoming>
@ -53,12 +53,13 @@
<bpmn:scriptTask id="Activity_0a14x7j" name="Load Approvals">
<bpmn:incoming>Flow_0c7ryff</bpmn:incoming>
<bpmn:outgoing>Flow_142jtxs</bpmn:outgoing>
<bpmn:script>#! StudyInfo approvals</bpmn:script>
<bpmn:script>StudyInfo['approvals'] = study_info('approvals')</bpmn:script>
</bpmn:scriptTask>
<bpmn:scriptTask id="Activity_1aju60t" name="Load Documents">
<bpmn:incoming>Flow_1k3su2q</bpmn:incoming>
<bpmn:outgoing>Flow_0c7ryff</bpmn:outgoing>
<bpmn:script>#! StudyInfo documents</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['documents'] = study_info('documents')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_142jtxs" sourceRef="Activity_0a14x7j" targetRef="Activity_DisplayDocsAndApprovals" />
<bpmn:sequenceFlow id="Flow_0c7ryff" sourceRef="Activity_1aju60t" targetRef="Activity_0a14x7j" />

View File

@ -36,7 +36,8 @@
<bpmn:scriptTask id="ScriptTask_1fn00ox" name="Load IRB Details">
<bpmn:incoming>SequenceFlow_1dhb8f4</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1uzcl1f</bpmn:outgoing>
<bpmn:script>#! StudyInfo details</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['details'] = study_info('details')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_1uzcl1f" sourceRef="ScriptTask_1fn00ox" targetRef="Task_SupplementIDE" />
<bpmn:exclusiveGateway id="ExclusiveGateway_1fib89p" name="IS_IDE = True and Number Provided?&#10;&#10;">

View File

@ -217,7 +217,8 @@ Protocol Owner: **(need to insert value here)**</bpmn:documentation>
<bpmn:scriptTask id="Activity_LoadDocuments" name="Load Documents">
<bpmn:incoming>SequenceFlow_1dexemq</bpmn:incoming>
<bpmn:outgoing>Flow_1x9d2mo</bpmn:outgoing>
<bpmn:script>#! StudyInfo documents</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['documents'] = study_info('documents')</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">

View File

@ -12,7 +12,8 @@
<bpmn:scriptTask id="ScriptTask_LoadIRBDetails" name="Load IRB Details">
<bpmn:incoming>SequenceFlow_1dhb8f4</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1uzcl1f</bpmn:outgoing>
<bpmn:script>#! StudyInfo details</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['details'] = study_info('details')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_1uzcl1f" sourceRef="ScriptTask_LoadIRBDetails" targetRef="Task_SupplementIDE" />
<bpmn:businessRuleTask id="Task_SupplementIDE" name="Current IND Status" camunda:decisionRef="decision_ind_check">

View File

@ -8,7 +8,8 @@
<bpmn:scriptTask id="ScriptTask_02924vs" name="Load IRB Details">
<bpmn:incoming>SequenceFlow_1fmyo77</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_18nr0gf</bpmn:outgoing>
<bpmn:script>#! StudyInfo details</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['details'] = study_info('details')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_1fmyo77" sourceRef="StartEvent_1" targetRef="ScriptTask_02924vs" />
<bpmn:sequenceFlow id="SequenceFlow_18nr0gf" sourceRef="ScriptTask_02924vs" targetRef="Activity_FromIRB-API" />

View File

@ -7,7 +7,8 @@
<bpmn:scriptTask id="ScriptTask_LoadPersonnel" name="Load IRB Personnel">
<bpmn:incoming>Flow_0kcrx5l</bpmn:incoming>
<bpmn:outgoing>Flow_1dcsioh</bpmn:outgoing>
<bpmn:script>#! StudyInfo investigators</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['investigators'] = study_info('investigators')</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="EndEvent_1qor16n">
<bpmn:documentation>## The following information was gathered:

View File

@ -1,33 +1,116 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_300b2c3" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_300b2c3" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.0.0">
<bpmn:collaboration id="Collaboration_163c7c8">
<bpmn:participant id="Participant_1mnua71" name="team" processRef="Process_cd666f3" />
</bpmn:collaboration>
<bpmn:process id="Process_cd666f3" isExecutable="true">
<bpmn:laneSet id="LaneSet_0ucxzw3">
<bpmn:lane id="Lane_16ml9fk">
<bpmn:flowNodeRef>StartEvent_1</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Activity_1qpy9ra</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Event_1m9fnmv</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Activity_0c5drp3</bpmn:flowNodeRef>
</bpmn:lane>
<bpmn:lane id="Lane_1jw70kl" name="supervisor">
<bpmn:flowNodeRef>Gateway_0ved0t9</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Activity_107ojvq</bpmn:flowNodeRef>
</bpmn:lane>
</bpmn:laneSet>
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0q51aiq</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0q51aiq" sourceRef="StartEvent_1" targetRef="Activity_1qpy9ra" />
<bpmn:userTask id="Activity_1qpy9ra" name="Placeholder" camunda:formKey="Formkey_placeholder">
<bpmn:userTask id="Activity_1qpy9ra" name="Assign Approver" camunda:formKey="form_assign_approver">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="FormField_2t2220o" label="Will this be updated later" type="boolean" />
<camunda:formField id="supervisor" label="Approver UID" type="string" defaultValue="dhf8r">
<camunda:validation>
<camunda:constraint name="required" config="true" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0q51aiq</bpmn:incoming>
<bpmn:outgoing>Flow_0ai4j1x</bpmn:outgoing>
<bpmn:incoming>Flow_1ugh4wn</bpmn:incoming>
<bpmn:outgoing>Flow_0d2snmk</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_0q51aiq" sourceRef="StartEvent_1" targetRef="Activity_1qpy9ra" />
<bpmn:sequenceFlow id="Flow_0d2snmk" sourceRef="Activity_1qpy9ra" targetRef="Activity_107ojvq" />
<bpmn:exclusiveGateway id="Gateway_0ved0t9" name="Approved?">
<bpmn:incoming>Flow_0apr3nj</bpmn:incoming>
<bpmn:outgoing>Flow_0mhtlkt</bpmn:outgoing>
<bpmn:outgoing>Flow_11tnx3n</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_0apr3nj" sourceRef="Activity_107ojvq" targetRef="Gateway_0ved0t9" />
<bpmn:sequenceFlow id="Flow_0mhtlkt" name="Yes" sourceRef="Gateway_0ved0t9" targetRef="Event_1m9fnmv">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">is_study_approved == True</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:endEvent id="Event_1m9fnmv">
<bpmn:incoming>Flow_0ai4j1x</bpmn:incoming>
<bpmn:incoming>Flow_0mhtlkt</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0ai4j1x" sourceRef="Activity_1qpy9ra" targetRef="Event_1m9fnmv" />
<bpmn:sequenceFlow id="Flow_11tnx3n" name="No" sourceRef="Gateway_0ved0t9" targetRef="Activity_0c5drp3">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">is_study_approved == False</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:userTask id="Activity_107ojvq" name="Approve Study" camunda:formKey="form_approve_study">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="is_study_approved" label="Approve this study?" type="boolean">
<camunda:validation>
<camunda:constraint name="required" config="true" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0d2snmk</bpmn:incoming>
<bpmn:outgoing>Flow_0apr3nj</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_1ugh4wn" sourceRef="Activity_0c5drp3" targetRef="Activity_1qpy9ra" />
<bpmn:manualTask id="Activity_0c5drp3" name="Review Feedback">
<bpmn:documentation>Your request was not approved. Try again.</bpmn:documentation>
<bpmn:incoming>Flow_11tnx3n</bpmn:incoming>
<bpmn:outgoing>Flow_1ugh4wn</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_cd666f3">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Collaboration_163c7c8">
<bpmndi:BPMNShape id="Participant_1mnua71_di" bpmnElement="Participant_1mnua71" isHorizontal="true">
<dc:Bounds x="129" y="117" width="600" height="250" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Lane_1jw70kl_di" bpmnElement="Lane_1jw70kl" isHorizontal="true">
<dc:Bounds x="159" y="242" width="570" height="125" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Lane_16ml9fk_di" bpmnElement="Lane_16ml9fk" isHorizontal="true">
<dc:Bounds x="159" y="117" width="570" height="125" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_11tnx3n_di" bpmnElement="Flow_11tnx3n">
<di:waypoint x="460" y="275" />
<di:waypoint x="460" y="217" />
<bpmndi:BPMNLabel>
<dc:Bounds x="468" y="241" width="15" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0mhtlkt_di" bpmnElement="Flow_0mhtlkt">
<di:waypoint x="485" y="300" />
<di:waypoint x="660" y="300" />
<di:waypoint x="660" y="195" />
<bpmndi:BPMNLabel>
<dc:Bounds x="563" y="282" width="19" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0apr3nj_di" bpmnElement="Flow_0apr3nj">
<di:waypoint x="370" y="300" />
<di:waypoint x="435" y="300" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0d2snmk_di" bpmnElement="Flow_0d2snmk">
<di:waypoint x="320" y="217" />
<di:waypoint x="320" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0q51aiq_di" bpmnElement="Flow_0q51aiq">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0ai4j1x_di" bpmnElement="Flow_0ai4j1x">
<bpmndi:BPMNEdge id="Flow_1ugh4wn_di" bpmnElement="Flow_1ugh4wn">
<di:waypoint x="400" y="177" />
<di:waypoint x="370" y="177" />
<di:waypoint x="432" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
@ -35,8 +118,20 @@
<bpmndi:BPMNShape id="Activity_14cpuv6_di" bpmnElement="Activity_1qpy9ra">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0ved0t9_di" bpmnElement="Gateway_0ved0t9" isMarkerVisible="true">
<dc:Bounds x="435" y="275" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="435" y="332" width="54" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1m9fnmv_di" bpmnElement="Event_1m9fnmv">
<dc:Bounds x="432" y="159" width="36" height="36" />
<dc:Bounds x="642" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1ps6jft_di" bpmnElement="Activity_107ojvq">
<dc:Bounds x="270" y="260" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1al86eb_di" bpmnElement="Activity_0c5drp3">
<dc:Bounds x="400" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>

View File

@ -598,7 +598,7 @@ Use the EHS [Lab Safety Plan During COVID 19 template](https://www.google.com/ur
This step is internal to the system and do not require and user interaction</bpmn:documentation>
<bpmn:incoming>Flow_11uqavk</bpmn:incoming>
<bpmn:outgoing>Flow_0aqgwvu</bpmn:outgoing>
<bpmn:script>#! CompleteTemplate ResearchRampUpPlan.docx RESEARCH_RAMPUP</bpmn:script>
<bpmn:script>complete_template('ResearchRampUpPlan.docx','RESEARCH_RAMPUP')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0aqgwvu" sourceRef="Activity_GenerateRRP" targetRef="Activity_AcknowledgePlanReview" />
<bpmn:sequenceFlow id="Flow_0j4rs82" sourceRef="Activity_SubmitPlan" targetRef="Activity_0absozl" />
@ -755,7 +755,7 @@ Notify the Area Monitor for
This step is internal to the system and do not require and user interaction</bpmn:documentation>
<bpmn:incoming>Flow_0j4rs82</bpmn:incoming>
<bpmn:outgoing>Flow_07ge8uf</bpmn:outgoing>
<bpmn:script>#!RequestApproval ApprvlApprvr1 ApprvlApprvr2</bpmn:script>
<bpmn:script>request_approval('ApprvlApprvr1','ApprvlApprvr2')</bpmn:script>
</bpmn:scriptTask>
<bpmn:scriptTask id="Activity_1u58hox" name="Update Request">
<bpmn:documentation>#### Script Task
@ -764,7 +764,7 @@ This step is internal to the system and do not require and user interaction</bpm
This step is internal to the system and do not require and user interaction</bpmn:documentation>
<bpmn:incoming>Flow_16y8glw</bpmn:incoming>
<bpmn:outgoing>Flow_0uc4o6c</bpmn:outgoing>
<bpmn:script>#! UpdateStudy title:PIComputingID.label pi:PIComputingID.value</bpmn:script>
<bpmn:script>update_study('title:PIComputingID.label','pi:PIComputingID.value')</bpmn:script>
</bpmn:scriptTask>
<bpmn:userTask id="PersonnelSchedule" name="Upload Weekly Personnel Schedule(s)" camunda:formKey="Personnel Weekly Schedule">
<bpmn:documentation>#### Weekly Personnel Schedule(s)

View File

@ -116,7 +116,7 @@
</camunda:formField>
</camunda:formData>
<camunda:properties>
<camunda:property name="display_name" value="Select Template Type" />
<camunda:property name="display_name" value="'Select Template Type'" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0schnpa</bpmn:incoming>

View File

@ -11,7 +11,8 @@
<bpmn:scriptTask id="Task_Load_Requirements" name="Load Documents From PB">
<bpmn:incoming>SequenceFlow_1ees8ka</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_17ct47v</bpmn:outgoing>
<bpmn:script>#! StudyInfo documents</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['documents'] = study_info('documents')</bpmn:script>
</bpmn:scriptTask>
<bpmn:businessRuleTask id="Activity_1yqy50i" name="Enter Core Info&#10;" camunda:decisionRef="enter_core_info">
<bpmn:incoming>Flow_1m8285h</bpmn:incoming>
@ -29,7 +30,7 @@
<bpmn:parallelGateway id="Gateway_1nta7st" name="Some Name">
<bpmn:extensionElements>
<camunda:properties>
<camunda:property name="display_name" value="Some Name" />
<camunda:property name="display_name" value="'Some Name'" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_17ct47v</bpmn:incoming>
@ -62,7 +63,8 @@
<bpmn:scriptTask id="Activity_0f295la" name="Load Details from PB">
<bpmn:incoming>Flow_0pwtiqm</bpmn:incoming>
<bpmn:outgoing>Flow_0eq6px2</bpmn:outgoing>
<bpmn:script>#! StudyInfo details</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['details'] = study_info('details')</bpmn:script>
</bpmn:scriptTask>
<bpmn:businessRuleTask id="Activity_0ahlc3u" name="IDE Supplement" camunda:decisionRef="decision_ide_menu_check">
<bpmn:incoming>Flow_14ce1d7</bpmn:incoming>
@ -91,7 +93,8 @@
<bpmn:scriptTask id="Activity_0g3qa1c" name="Load Personnel from PB">
<bpmn:incoming>Flow_1qyrmzn</bpmn:incoming>
<bpmn:outgoing>Flow_0vo6ul1</bpmn:outgoing>
<bpmn:script>#! StudyInfo investigators</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['investigators'] = study_info('investigators')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1ybicki" sourceRef="Activity_13ep6ar" targetRef="Event_135x8jg" />
<bpmn:businessRuleTask id="Activity_13ep6ar" name="Personnel" camunda:decisionRef="personnel">

View File

@ -251,7 +251,6 @@ class ExampleDataLoader:
master_spec=False,
from_tests=True)
def create_spec(self, id, name, display_name="", description="", filepath=None, master_spec=False,
category_id=None, display_order=None, from_tests=False):
"""Assumes that a directory exists in static/bpmn with the same name as the given id.

View File

@ -0,0 +1,36 @@
"""empty message
Revision ID: 1c3f88dbccc3
Revises: 2e7b377cbc7b
Create Date: 2020-07-30 18:51:01.816284
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '1c3f88dbccc3'
down_revision = 'ab06a94e5d4c'
branch_labels = None
depends_on = None
def upgrade():
    """Create the irbstatus/studystatus enum types and move the study table
    from the single protocol_builder_status column to the two new columns.
    """
    # The types are created explicitly up front; the sa.Enum columns added
    # below reference them by name ('irbstatus' / 'studystatus').
    op.execute("CREATE TYPE irbstatus AS ENUM('incomplete_in_protocol_builder', 'completed_in_protocol_builder', 'hsr_assigned')")
    op.execute("CREATE TYPE studystatus AS ENUM('in_progress', 'hold', 'open_for_enrollment', 'abandoned')")
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('study', sa.Column('irb_status', sa.Enum('incomplete_in_protocol_builder', 'completed_in_protocol_builder', 'hsr_assigned', name='irbstatus'), nullable=True))
    op.add_column('study', sa.Column('status', sa.Enum('in_progress', 'hold', 'open_for_enrollment', 'abandoned', name='studystatus'), nullable=True))
    # Existing protocol_builder_status values are discarded here; downgrade()
    # re-creates the column but cannot restore the data.
    op.drop_column('study', 'protocol_builder_status')
    # ### end Alembic commands ###
def downgrade():
    """Drop status/irb_status (and their enum types) and restore the old
    protocol_builder_status column, empty (NULL) for every row.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): assumes the 'protocolbuilderstatus' enum type still exists
    # in the database (upgrade() above never dropped it) — confirm before
    # relying on this downgrade path.
    op.add_column('study', sa.Column('protocol_builder_status', postgresql.ENUM('incomplete', 'active', 'hold', 'open', 'abandoned', name='protocolbuilderstatus'), autoincrement=False, nullable=True))
    op.drop_column('study', 'status')
    op.drop_column('study', 'irb_status')
    # ### end Alembic commands ###
    # Drop the enum types only after the columns that used them are gone.
    op.execute('DROP TYPE studystatus')
    op.execute('DROP TYPE irbstatus')

View File

@ -0,0 +1,32 @@
"""empty message
Revision ID: 2e7b377cbc7b
Revises: c4ddb69e7ef4
Create Date: 2020-07-28 17:03:23.586828
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2e7b377cbc7b'
down_revision = 'c4ddb69e7ef4'
branch_labels = None
depends_on = None
def upgrade():
    """Replace protocolbuilderstatus with a lowercase-valued version and
    reset every study to 'incomplete'.
    """
    # Wipe the column first so the USING cast below cannot hit a value that
    # is absent from the rebuilt type.
    op.execute('update study set protocol_builder_status = NULL;')
    # Rename the old type aside, create the replacement, re-point the column
    # at it, then drop the old type.
    op.execute('ALTER TYPE protocolbuilderstatus RENAME TO pbs_old;')
    op.execute("CREATE TYPE protocolbuilderstatus AS ENUM('incomplete', 'active', 'hold', 'open', 'abandoned')")
    op.execute("ALTER TABLE study ALTER COLUMN protocol_builder_status TYPE protocolbuilderstatus USING protocol_builder_status::text::protocolbuilderstatus;")
    op.execute('DROP TYPE pbs_old;')
    # Prior statuses are not migrated: every study restarts at 'incomplete'.
    op.execute("update study set protocol_builder_status = 'incomplete';")
def downgrade():
    """Rebuild protocolbuilderstatus with the previous UPPERCASE members.

    NOTE(review): unlike upgrade(), no final UPDATE is issued, so every
    study is left with a NULL protocol_builder_status after downgrading.
    """
    op.execute('update study set protocol_builder_status = NULL;')
    op.execute('ALTER TYPE protocolbuilderstatus RENAME TO pbs_old;')
    op.execute("CREATE TYPE protocolbuilderstatus AS ENUM('INCOMPLETE', 'ACTIVE', 'HOLD', 'OPEN', 'ABANDONED')")
    op.execute("ALTER TABLE study ALTER COLUMN protocol_builder_status TYPE protocolbuilderstatus USING protocol_builder_status::text::protocolbuilderstatus;")
    op.execute('DROP TYPE pbs_old;')

View File

@ -0,0 +1,34 @@
"""empty message
Revision ID: ab06a94e5d4c
Revises: 2e7b377cbc7b
Create Date: 2020-07-30 11:23:46.601338
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ab06a94e5d4c'
down_revision = '2e7b377cbc7b'
branch_labels = None
depends_on = None
def upgrade():
    """Create admin_session: integer primary key plus a unique session token."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('admin_session',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('token', sa.String(), nullable=True),
    # presumably the uid of the user an admin is impersonating — verify
    # against the AdminSession model definition.
    sa.Column('admin_impersonate_uid', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('token')
    )
    # ### end Alembic commands ###
def downgrade():
    """Remove the admin_session table added by upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('admin_session')
    # ### end Alembic commands ###

View File

@ -0,0 +1,28 @@
"""empty message
Revision ID: c4ddb69e7ef4
Revises: ffef4661a37d
Create Date: 2020-07-22 09:04:09.769239
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c4ddb69e7ef4'
down_revision = 'ffef4661a37d'
branch_labels = None
depends_on = None
def upgrade():
    """Add a nullable, timezone-aware enrollment_date column to study."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('study', sa.Column('enrollment_date', sa.DateTime(timezone=True), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Drop study.enrollment_date (column data is lost)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('study', 'enrollment_date')
    # ### end Alembic commands ###

View File

@ -1,3 +1,3 @@
from setuptools import setup
setup(setup_requires=["pbr"], pbr=True)
setup(setup_requires=["pbr"], pbr=True, install_requires=['box'])

View File

@ -15,18 +15,19 @@ from crc import app, db, session
from crc.models.api_models import WorkflowApiSchema, MultiInstanceType
from crc.models.approval import ApprovalModel, ApprovalStatus
from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
from crc.models.protocol_builder import ProtocolBuilderStatus
from crc.models.task_event import TaskEventModel
from crc.models.study import StudyModel
from crc.models.study import StudyModel, StudyStatus
from crc.models.user import UserModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecCategoryModel
from crc.services.file_service import FileService
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
from crc.services.workflow_service import WorkflowService
from example_data import ExampleDataLoader
#UNCOMMENT THIS FOR DEBUGGING SQL ALCHEMY QUERIES
# UNCOMMENT THIS FOR DEBUGGING SQL ALCHEMY QUERIES
import logging
logging.basicConfig()
@ -37,22 +38,32 @@ class BaseTest(unittest.TestCase):
if not app.config['TESTING']:
raise (Exception("INVALID TEST CONFIGURATION. This is almost always in import order issue."
"The first class to import in each test should be the base_test.py file."))
"The first class to import in each test should be the base_test.py file."))
auths = {}
test_uid = "dhf8r"
users = [
{
'uid':'dhf8r',
'email_address':'dhf8r@virginia.EDU',
'display_name':'Daniel Harold Funk',
'affiliation':'staff@virginia.edu;member@virginia.edu',
'eppn':'dhf8r@virginia.edu',
'first_name':'Daniel',
'last_name':'Funk',
'title':'SOFTWARE ENGINEER V'
}
'uid': 'dhf8r',
'email_address': 'dhf8r@virginia.EDU',
'display_name': 'Daniel Harold Funk',
'affiliation': 'staff@virginia.edu;member@virginia.edu',
'eppn': 'dhf8r@virginia.edu',
'first_name': 'Daniel',
'last_name': 'Funk',
'title': 'SOFTWARE ENGINEER V'
},
{
'uid': 'lbd3p',
'email_address': 'lbd3p@virginia.EDU',
'display_name': 'Laura Barnes',
'affiliation': 'staff@virginia.edu;member@virginia.edu',
'eppn': 'lbd3p@virginia.edu',
'first_name': 'Laura',
'last_name': 'Barnes',
'title': 'Associate Professor of Systems and Information Engineering'
},
]
studies = [
@ -60,7 +71,7 @@ class BaseTest(unittest.TestCase):
'id':0,
'title':'The impact of fried pickles on beer consumption in bipedal software developers.',
'last_updated':datetime.datetime.now(),
'protocol_builder_status':ProtocolBuilderStatus.ACTIVE,
'status':StudyStatus.in_progress,
'primary_investigator_id':'dhf8r',
'sponsor':'Sartography Pharmaceuticals',
'ind_number':'1234',
@ -70,7 +81,7 @@ class BaseTest(unittest.TestCase):
'id':1,
'title':'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
'last_updated':datetime.datetime.now(),
'protocol_builder_status':ProtocolBuilderStatus.ACTIVE,
'status':StudyStatus.in_progress,
'primary_investigator_id':'dhf8r',
'sponsor':'Makerspace & Co.',
'ind_number':'5678',
@ -78,7 +89,6 @@ class BaseTest(unittest.TestCase):
}
]
@classmethod
def setUpClass(cls):
app.config.from_object('config.testing')
@ -98,7 +108,7 @@ class BaseTest(unittest.TestCase):
def tearDown(self):
ExampleDataLoader.clean_db()
g.user = None
self.logout()
self.auths = {}
def logged_in_headers(self, user=None, redirect_url='http://some/frontend/url'):
@ -118,7 +128,8 @@ class BaseTest(unittest.TestCase):
self.assertIsNotNone(user_model.display_name)
self.assertEqual(user_model.uid, uid)
self.assertTrue('user' in g, 'User should be in Flask globals')
self.assertEqual(uid, g.user.uid, 'Logged in user should match given user uid')
user = UserService.current_user(allow_admin_impersonate=True)
self.assertEqual(uid, user.uid, 'Logged in user should match given user uid')
return dict(Authorization='Bearer ' + user_model.encode_auth_token().decode())
@ -135,16 +146,21 @@ class BaseTest(unittest.TestCase):
else:
ExampleDataLoader().load_test_data()
for user_json in self.users:
db.session.add(UserModel(**user_json))
db.session.commit()
# If in production mode, only add the first user.
if app.config['PRODUCTION']:
session.add(UserModel(**self.users[0]))
else:
for user_json in self.users:
session.add(UserModel(**user_json))
session.commit()
for study_json in self.studies:
study_model = StudyModel(**study_json)
db.session.add(study_model)
session.add(study_model)
StudyService._add_all_workflow_specs_to_study(study_model)
db.session.execute(Sequence(StudyModel.__tablename__ + '_id_seq'))
db.session.commit()
db.session.flush()
session.execute(Sequence(StudyModel.__tablename__ + '_id_seq'))
session.commit()
session.flush()
specs = session.query(WorkflowSpecModel).all()
self.assertIsNotNone(specs)
@ -164,14 +180,21 @@ class BaseTest(unittest.TestCase):
self.assertGreater(len(file_data), 0)
@staticmethod
def load_test_spec(dir_name, master_spec=False, category_id=None):
def load_test_spec(dir_name, display_name=None, master_spec=False, category_id=None):
"""Loads a spec into the database based on a directory in /tests/data"""
if category_id is None:
category = WorkflowSpecCategoryModel(name="test", display_name="Test Workflows", display_order=0)
session.add(category)
session.commit()
category_id = category.id
if session.query(WorkflowSpecModel).filter_by(id=dir_name).count() > 0:
return session.query(WorkflowSpecModel).filter_by(id=dir_name).first()
filepath = os.path.join(app.root_path, '..', 'tests', 'data', dir_name, "*")
if display_name is None:
display_name = dir_name
return ExampleDataLoader().create_spec(id=dir_name, name=dir_name, filepath=filepath, master_spec=master_spec,
category_id=category_id)
display_name=display_name, category_id=category_id)
@staticmethod
def protocol_builder_response(file_name):
@ -210,14 +233,13 @@ class BaseTest(unittest.TestCase):
return '?%s' % '&'.join(query_string_list)
def replace_file(self, name, file_path):
"""Replaces a stored file with the given name with the contents of the file at the given path."""
file_service = FileService()
file = open(file_path, "rb")
data = file.read()
file_model = db.session.query(FileModel).filter(FileModel.name == name).first()
file_model = session.query(FileModel).filter(FileModel.name == name).first()
noise, file_extension = os.path.splitext(file_path)
content_type = CONTENT_TYPES[file_extension[1:]]
file_service.update_file(file_model, data, content_type)
@ -226,18 +248,19 @@ class BaseTest(unittest.TestCase):
user = session.query(UserModel).filter(UserModel.uid == uid).first()
if user is None:
user = UserModel(uid=uid, email_address=email, display_name=display_name)
db.session.add(user)
db.session.commit()
session.add(user)
session.commit()
return user
def create_study(self, uid="dhf8r", title="Beer consumption in the bipedal software engineer", primary_investigator_id="lb3dp"):
def create_study(self, uid="dhf8r", title="Beer consumption in the bipedal software engineer",
primary_investigator_id="lb3dp"):
study = session.query(StudyModel).filter_by(user_uid=uid).filter_by(title=title).first()
if study is None:
user = self.create_user(uid=uid)
study = StudyModel(title=title, protocol_builder_status=ProtocolBuilderStatus.ACTIVE,
study = StudyModel(title=title, status=StudyStatus.in_progress,
user_uid=user.uid, primary_investigator_id=primary_investigator_id)
db.session.add(study)
db.session.commit()
session.add(study)
session.commit()
return study
def _create_study_workflow_approvals(self, user_uid, title, primary_investigator_id, approver_uids, statuses,
@ -263,11 +286,13 @@ class BaseTest(unittest.TestCase):
return full_study
def create_workflow(self, workflow_name, study=None, category_id=None, as_user="dhf8r"):
db.session.flush()
spec = db.session.query(WorkflowSpecModel).filter(WorkflowSpecModel.name == workflow_name).first()
def create_workflow(self, workflow_name, display_name=None, study=None, category_id=None, as_user="dhf8r"):
session.flush()
spec = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.name == workflow_name).first()
if spec is None:
spec = self.load_test_spec(workflow_name, category_id=category_id)
if display_name is None:
display_name = workflow_name
spec = self.load_test_spec(workflow_name, display_name, category_id=category_id)
if study is None:
study = self.create_study(uid=as_user)
workflow_model = StudyService._create_workflow_model(study, spec)
@ -282,29 +307,31 @@ class BaseTest(unittest.TestCase):
file.close()
def create_approval(
self,
study=None,
workflow=None,
approver_uid=None,
status=None,
version=None,
self,
study=None,
workflow=None,
approver_uid=None,
status=None,
version=None,
):
study = study or self.create_study()
workflow = workflow or self.create_workflow()
approver_uid = approver_uid or self.test_uid
status = status or ApprovalStatus.PENDING.value
version = version or 1
approval = ApprovalModel(study=study, workflow=workflow, approver_uid=approver_uid, status=status, version=version)
db.session.add(approval)
db.session.commit()
approval = ApprovalModel(study=study, workflow=workflow, approver_uid=approver_uid, status=status,
version=version)
session.add(approval)
session.commit()
return approval
def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, user_uid="dhf8r"):
def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, do_engine_steps=True, user_uid="dhf8r"):
user = session.query(UserModel).filter_by(uid=user_uid).first()
self.assertIsNotNone(user)
rv = self.app.get('/v1.0/workflow/%i?soft_reset=%s&hard_reset=%s' %
(workflow.id, str(soft_reset), str(hard_reset)),
rv = self.app.get(f'/v1.0/workflow/{workflow.id}'
f'?soft_reset={str(soft_reset)}'
f'&hard_reset={str(hard_reset)}'
f'&do_engine_steps={str(do_engine_steps)}',
headers=self.logged_in_headers(user),
content_type="application/json")
self.assert_success(rv)
@ -313,7 +340,6 @@ class BaseTest(unittest.TestCase):
self.assertEqual(workflow.workflow_spec_id, workflow_api.workflow_spec_id)
return workflow_api
def complete_form(self, workflow_in, task_in, dict_data, error_code=None, terminate_loop=None, user_uid="dhf8r"):
prev_completed_task_count = workflow_in.completed_tasks
if isinstance(task_in, dict):
@ -378,12 +404,18 @@ class BaseTest(unittest.TestCase):
self.assertEqual(task_in.multi_instance_count, event.mi_count)
if task_in.multi_instance_type == 'looping' and not terminate_loop:
self.assertEqual(task_in.multi_instance_index+1, event.mi_index)
self.assertEqual(task_in.multi_instance_index + 1, event.mi_index)
else:
self.assertEqual(task_in.multi_instance_index, event.mi_index)
self.assertEqual(task_in.process_name, event.process_name)
self.assertIsNotNone(event.date)
workflow = WorkflowApiSchema().load(json_data)
return workflow
def logout(self):
if 'user' in g:
del g.user
if 'impersonate_user' in g:
del g.impersonate_user

View File

@ -27,7 +27,7 @@
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_1i7hk1a</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_11c35oq</bpmn:outgoing>
<bpmn:script>#! CompleteTemplate Letter.docx AD_CoCApp</bpmn:script>
<bpmn:script>complete_template('Letter.docx','AD_CoCApp')</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="EndEvent_0evb22x">
<bpmn:incoming>SequenceFlow_11c35oq</bpmn:incoming>

View File

@ -20,7 +20,7 @@ Email content to be delivered to {{ ApprvlApprvr1 }}
---</bpmn:documentation>
<bpmn:incoming>Flow_08n2npe</bpmn:incoming>
<bpmn:outgoing>Flow_1xlrgne</bpmn:outgoing>
<bpmn:script>#! Email "Camunda Email Subject" ApprvlApprvr1 PIComputingID</bpmn:script>
<bpmn:script>email("Camunda Email Subject",'ApprvlApprvr1','PIComputingID')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1synsig" sourceRef="StartEvent_1" targetRef="Activity_1l9vih3" />
<bpmn:sequenceFlow id="Flow_1xlrgne" sourceRef="Activity_0s5v97n" targetRef="Event_0izrcj4" />

View File

@ -11,7 +11,7 @@
<bpmn:scriptTask id="Invalid_Script_Task" name="An Invalid Script Reference">
<bpmn:incoming>SequenceFlow_1pnq3kg</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_12pf6um</bpmn:outgoing>
<bpmn:script>#! NoSuchScript withArg1</bpmn:script>
<bpmn:script>no_such_script('withArg1')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_12pf6um" sourceRef="Invalid_Script_Task" targetRef="EndEvent_063bpg6" />
</bpmn:process>

View File

@ -0,0 +1,70 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_0y2dq4f" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="Process_0tad5ma" name="Set Recipients" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1synsig</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:endEvent id="Event_0izrcj4">
<bpmn:incoming>Flow_11e7jgz</bpmn:incoming>
</bpmn:endEvent>
<bpmn:scriptTask id="Activity_0s5v97n" name="Ldap Replace">
<bpmn:incoming>Flow_08n2npe</bpmn:incoming>
<bpmn:outgoing>Flow_1xlrgne</bpmn:outgoing>
<bpmn:script>Supervisor = ldap(Supervisor)
Investigator = ldap(Investigator)</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1synsig" sourceRef="StartEvent_1" targetRef="Activity_1l9vih3" />
<bpmn:sequenceFlow id="Flow_1xlrgne" sourceRef="Activity_0s5v97n" targetRef="Activity_0f78ek5" />
<bpmn:sequenceFlow id="Flow_08n2npe" sourceRef="Activity_1l9vih3" targetRef="Activity_0s5v97n" />
<bpmn:userTask id="Activity_1l9vih3" name="Set UIDs">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="Supervisor" label="Approver" type="string" />
<camunda:formField id="Investigator" label="Primary Investigator" type="string" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1synsig</bpmn:incoming>
<bpmn:outgoing>Flow_08n2npe</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_11e7jgz" sourceRef="Activity_0f78ek5" targetRef="Event_0izrcj4" />
<bpmn:userTask id="Activity_0f78ek5" name="Read UIDs">
<bpmn:incoming>Flow_1xlrgne</bpmn:incoming>
<bpmn:outgoing>Flow_11e7jgz</bpmn:outgoing>
</bpmn:userTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0tad5ma">
<bpmndi:BPMNEdge id="Flow_11e7jgz_di" bpmnElement="Flow_11e7jgz">
<di:waypoint x="720" y="117" />
<di:waypoint x="802" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_08n2npe_di" bpmnElement="Flow_08n2npe">
<di:waypoint x="370" y="117" />
<di:waypoint x="450" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1xlrgne_di" bpmnElement="Flow_1xlrgne">
<di:waypoint x="550" y="117" />
<di:waypoint x="620" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1synsig_di" bpmnElement="Flow_1synsig">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0izrcj4_di" bpmnElement="Event_0izrcj4">
<dc:Bounds x="802" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_04imfm6_di" bpmnElement="Activity_0s5v97n">
<dc:Bounds x="450" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0xugr62_di" bpmnElement="Activity_1l9vih3">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_17h05g6_di" bpmnElement="Activity_0f78ek5">
<dc:Bounds x="620" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_17fwemw" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_17fwemw" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="MultiInstance" isExecutable="true">
<bpmn:startEvent id="StartEvent_1" name="StartEvent_1">
<bpmn:outgoing>Flow_0t6p1sb</bpmn:outgoing>
@ -29,7 +29,8 @@
<bpmn:scriptTask id="Task_1v0e2zu" name="Load Personnel">
<bpmn:incoming>Flow_0t6p1sb</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1p568pp</bpmn:outgoing>
<bpmn:script>#! StudyInfo investigators</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['investigators'] = study_info('investigators')</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">

View File

@ -11,7 +11,7 @@
<bpmn:sequenceFlow id="Flow_0ugjw69" sourceRef="MultiInstanceTask" targetRef="Event_End" />
<bpmn:userTask id="MultiInstanceTask" name="Gather more information" camunda:formKey="GetEmail">
<bpmn:documentation># Please provide addtional information about:
## Investigator ID: {{investigator.user_id}}
## Investigator ID: {{investigator.user_id}}
## Role: {{investigator.type_full}}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
@ -29,7 +29,8 @@
<bpmn:scriptTask id="Task_1v0e2zu" name="Load Personnel">
<bpmn:incoming>Flow_0t6p1sb</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1p568pp</bpmn:outgoing>
<bpmn:script>#! StudyInfo investigators</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['investigators'] = study_info('investigators')</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">

View File

@ -132,7 +132,7 @@ Autoconverted link https://github.com/nodeca/pica (enable linkify to see)
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0641sh6</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0t29gjo</bpmn:outgoing>
<bpmn:script>#! FactService</bpmn:script>
<bpmn:script>FactService = fact_service()</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="EndEvent_0u1cgrf">
<bpmn:documentation># Great Job!

View File

@ -8,12 +8,19 @@
<bpmn:scriptTask id="Task_Script_Load_Study_Details" name="Load Study Info">
<bpmn:incoming>SequenceFlow_1nfe5m9</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1bqiin0</bpmn:outgoing>
<bpmn:script>#! StudyInfo info</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['info'] = study_info('info')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_1bqiin0" sourceRef="Task_Script_Load_Study_Details" targetRef="EndEvent_171dj09" />
<bpmn:sequenceFlow id="SequenceFlow_1bqiin0" sourceRef="Task_Script_Load_Study_Details" targetRef="Activity_0w91u9s" />
<bpmn:endEvent id="EndEvent_171dj09">
<bpmn:incoming>SequenceFlow_1bqiin0</bpmn:incoming>
<bpmn:incoming>Flow_0ochvmi</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0ochvmi" sourceRef="Activity_0w91u9s" targetRef="EndEvent_171dj09" />
<bpmn:scriptTask id="Activity_0w91u9s" name="StudyInfo as Script">
<bpmn:incoming>SequenceFlow_1bqiin0</bpmn:incoming>
<bpmn:outgoing>Flow_0ochvmi</bpmn:outgoing>
<bpmn:script>study = study_info('info','p')</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0exnnpv">
@ -29,10 +36,17 @@
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1bqiin0_di" bpmnElement="SequenceFlow_1bqiin0">
<di:waypoint x="370" y="117" />
<di:waypoint x="402" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="EndEvent_171dj09_di" bpmnElement="EndEvent_171dj09">
<dc:Bounds x="402" y="99" width="36" height="36" />
<dc:Bounds x="622" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0ochvmi_di" bpmnElement="Flow_0ochvmi">
<di:waypoint x="530" y="117" />
<di:waypoint x="622" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_1wtk4bb_di" bpmnElement="Activity_0w91u9s">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>

View File

@ -11,7 +11,8 @@
<bpmn:scriptTask id="Task_Load_Requirements" name="Load Required Documents From PM">
<bpmn:incoming>SequenceFlow_1ees8ka</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_17ct47v</bpmn:outgoing>
<bpmn:script>#! StudyInfo documents</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['documents'] = study_info('documents')</bpmn:script>
</bpmn:scriptTask>
<bpmn:businessRuleTask id="Activity_1yqy50i" name="Enter Core Info&#10;" camunda:decisionRef="enter_core_info">
<bpmn:incoming>Flow_1m8285h</bpmn:incoming>

View File

@ -1,9 +1,45 @@
from github import UnknownObjectException
from sqlalchemy import desc
from tests.base_test import BaseTest
from unittest.mock import patch, Mock
from crc import db
from crc.models.file import FileDataModel
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor
class FakeGithubCreates(Mock):
    """Stand-in for github.Github where no file ever exists.

    get_contents always raises UnknownObjectException, which drives callers
    down their "create the file" branch; update_file is a silent no-op.
    """

    def get_user(self):
        class FakeUser(Mock):
            def get_repo(self, name):
                class FakeRepo(Mock):
                    def get_contents(self, filename):
                        # Simulate a lookup for a file GitHub has never seen.
                        raise UnknownObjectException(status='Failure', data='Failed data')

                    def update_file(self, path, message, content, sha):
                        pass

                return FakeRepo()

        return FakeUser()
class FakeGithub(Mock):
    """Stand-in for github.Github that returns one canned file for any lookup."""

    def get_user(self):
        class FakeUser(Mock):
            def get_repo(self, name):
                class FakeRepo(Mock):
                    def get_contents(self, filename):
                        # Canned stand-in for a GitHub ContentFile object.
                        stub = Mock()
                        stub.decoded_content = b'Some bytes'
                        stub.path = '/el/path/'
                        stub.data = 'Serious data'
                        stub.sha = 'Sha'
                        return stub

                    def update_file(self, path, message, content, sha):
                        pass

                return FakeRepo()

        return FakeUser()
class TestFileService(BaseTest):
"""Largely tested via the test_file_api, and time is tight, but adding new tests here."""
@ -103,3 +139,62 @@ class TestFileService(BaseTest):
binary_data=b'5678')
file_models = FileService.get_workflow_files(workflow_id=workflow.id)
self.assertEqual(2, len(file_models))
@patch('crc.services.file_service.Github')
def test_update_from_github(self, mock_github):
mock_github.return_value = FakeGithub()
self.load_example_data()
self.create_reference_document()
workflow = self.create_workflow('file_upload_form')
processor = WorkflowProcessor(workflow)
task = processor.next_task()
irb_code = "UVACompl_PRCAppr" # The first file referenced in pb required docs.
file_model = FileService.add_workflow_file(workflow_id=workflow.id,
irb_doc_code=irb_code,
name="anything.png", content_type="text",
binary_data=b'1234')
FileService.update_from_github([file_model.id])
file_model_data = FileDataModel.query.filter_by(
file_model_id=file_model.id
).order_by(
desc(FileDataModel.version)
).first()
self.assertEqual(file_model_data.data, b'Some bytes')
@patch('crc.services.file_service.Github')
def test_publish_to_github_creates(self, mock_github):
mock_github.return_value = FakeGithubCreates()
self.load_example_data()
self.create_reference_document()
workflow = self.create_workflow('file_upload_form')
processor = WorkflowProcessor(workflow)
task = processor.next_task()
irb_code = "UVACompl_PRCAppr" # The first file referenced in pb required docs.
file_model = FileService.add_workflow_file(workflow_id=workflow.id,
irb_doc_code=irb_code,
name="anything.png", content_type="text",
binary_data=b'1234')
result = FileService.publish_to_github([file_model.id])
self.assertEqual(result['created'], True)
@patch('crc.services.file_service.Github')
def test_publish_to_github_updates(self, mock_github):
mock_github.return_value = FakeGithub()
self.load_example_data()
self.create_reference_document()
workflow = self.create_workflow('file_upload_form')
processor = WorkflowProcessor(workflow)
task = processor.next_task()
irb_code = "UVACompl_PRCAppr" # The first file referenced in pb required docs.
file_model = FileService.add_workflow_file(workflow_id=workflow.id,
irb_doc_code=irb_code,
name="anything.png", content_type="text",
binary_data=b'1234')
result = FileService.publish_to_github([file_model.id])
self.assertEqual(result['updated'], True)

View File

@ -72,10 +72,10 @@ class TestFilesApi(BaseTest):
self.assertEqual(file, file2)
def test_add_file_from_task_and_form_errors_on_invalid_form_field_name(self):
self.load_example_data()
self.create_reference_document()
workflow = self.create_workflow('file_upload_form')
processor = WorkflowProcessor(workflow)
processor.do_engine_steps()
task = processor.next_task()
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
correct_name = task.task_spec.form.fields[0].id
@ -96,6 +96,7 @@ class TestFilesApi(BaseTest):
self.create_reference_document()
workflow = self.create_workflow('file_upload_form')
processor = WorkflowProcessor(workflow)
processor.do_engine_steps()
task = processor.next_task()
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
correct_name = task.task_spec.form.fields[0].id

View File

@ -0,0 +1,70 @@
from tests.base_test import BaseTest
from crc.services.workflow_processor import WorkflowProcessor
from crc.scripts.ldap import Ldap
from crc.api.common import ApiError
from crc import db, mail
class TestLdapLookupScript(BaseTest):
    """Exercises the Ldap script directly and through a BPMN workflow task."""

    # Canned LDAP record for the test user 'dhf8r'.
    DHF8R_DETAILS = {
        'display_name': 'Dan Funk',
        'given_name': 'Dan',
        'email_address': 'dhf8r@virginia.edu',
        'telephone_number': '+1 (434) 924-1723',
        'title': "E42:He's a hoopy frood",
        'department': 'E0:EN-Eng Study of Parallel Universes',
        'affiliation': 'faculty',
        'sponsor_type': 'Staff',
        'uid': 'dhf8r',
        'proper_name': 'Dan Funk - (dhf8r)',
    }

    def test_get_existing_user_details(self):
        """Looking up a known uid returns the full LDAP detail dictionary."""
        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('empty_workflow')
        processor = WorkflowProcessor(workflow)
        task = processor.next_task()

        user_details = Ldap().do_task(task, workflow.study_id, workflow.id, "dhf8r")
        for field, value in self.DHF8R_DETAILS.items():
            self.assertEqual(user_details[field], value)

    def test_get_invalid_user_details(self):
        """Looking up a uid that is not in LDAP raises an ApiError."""
        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('empty_workflow')
        processor = WorkflowProcessor(workflow)
        task = processor.next_task()
        task.data = {
            'PIComputingID': 'rec3z'
        }

        with self.assertRaises(ApiError):
            Ldap().do_task(task, workflow.study_id, workflow.id, "PIComputingID")

    def test_bpmn_task_receives_user_details(self):
        """The ldap_replace workflow swaps submitted uids for LDAP detail dicts."""
        workflow = self.create_workflow('ldap_replace')

        task_data = {
            'Supervisor': 'dhf8r',
            'Investigator': 'lb3dp'
        }
        task = self.get_workflow_api(workflow).next_task
        self.complete_form(workflow, task, task_data)

        task = self.get_workflow_api(workflow).next_task
        supervisor = task.data['Supervisor']
        for field, value in self.DHF8R_DETAILS.items():
            self.assertEqual(supervisor[field], value)

View File

@ -1,4 +1,6 @@
import json
from profile import Profile
from tests.base_test import BaseTest
from datetime import datetime, timezone
@ -9,10 +11,11 @@ from crc.models.protocol_builder import ProtocolBuilderStatus, \
ProtocolBuilderStudySchema
from crc.models.approval import ApprovalStatus
from crc.models.task_event import TaskEventModel
from crc.models.study import StudyModel, StudySchema
from crc.models.study import StudyModel, StudySchema, StudyStatus
from crc.models.workflow import WorkflowSpecModel, WorkflowModel
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.workflow_service import WorkflowService
class TestStudyApi(BaseTest):
@ -21,16 +24,17 @@ class TestStudyApi(BaseTest):
"title": "Phase III Trial of Genuine People Personalities (GPP) Autonomous Intelligent Emotional Agents "
"for Interstellar Spacecraft",
"last_updated": datetime.now(tz=timezone.utc),
"protocol_builder_status": ProtocolBuilderStatus.ACTIVE,
"primary_investigator_id": "tmm2x",
"user_uid": "dhf8r",
}
def add_test_study(self):
study_schema = StudySchema().dump(self.TEST_STUDY)
study_schema['status'] = StudyStatus.in_progress.value
rv = self.app.post('/v1.0/study',
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(StudySchema().dump(self.TEST_STUDY)))
data=json.dumps(study_schema))
self.assert_success(rv)
return json.loads(rv.get_data(as_text=True))
@ -132,15 +136,16 @@ class TestStudyApi(BaseTest):
self.load_example_data()
study: StudyModel = session.query(StudyModel).first()
study.title = "Pilot Study of Fjord Placement for Single Fraction Outcomes to Cortisol Susceptibility"
study.protocol_builder_status = ProtocolBuilderStatus.ACTIVE
study_schema = StudySchema().dump(study)
study_schema['status'] = StudyStatus.in_progress.value
rv = self.app.put('/v1.0/study/%i' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
data=json.dumps(StudySchema().dump(study)))
data=json.dumps(study_schema))
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
self.assertEqual(study.title, json_data['title'])
self.assertEqual(study.protocol_builder_status.name, json_data['protocol_builder_status'])
self.assertEqual(study.status.value, json_data['status'])
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_investigators') # mock_studies
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_required_docs') # mock_docs
@ -178,15 +183,15 @@ class TestStudyApi(BaseTest):
num_incomplete = 0
num_abandoned = 0
num_active = 0
num_in_progress = 0
num_open = 0
for study in json_data:
if study['protocol_builder_status'] == 'ABANDONED': # One study does not exist in user_studies.json
if study['status'] == 'abandoned': # One study does not exist in user_studies.json
num_abandoned += 1
if study['protocol_builder_status'] == 'ACTIVE': # One study is marked complete without HSR Number
num_active += 1
if study['protocol_builder_status'] == 'OPEN': # One study is marked complete and has an HSR Number
if study['status'] == 'in_progress': # One study is marked complete without HSR Number
num_in_progress += 1
if study['status'] == 'open_for_enrollment': # One study is marked complete and has an HSR Number
num_open += 1
db_studies_after = session.query(StudyModel).all()
@ -194,10 +199,10 @@ class TestStudyApi(BaseTest):
self.assertGreater(num_db_studies_after, num_db_studies_before)
self.assertEqual(num_abandoned, 1)
self.assertEqual(num_open, 1)
self.assertEqual(num_active, 2)
self.assertEqual(num_in_progress, 2)
self.assertEqual(num_incomplete, 0)
self.assertEqual(len(json_data), num_db_studies_after)
self.assertEqual(num_open + num_active + num_incomplete + num_abandoned, num_db_studies_after)
self.assertEqual(num_open + num_in_progress + num_incomplete + num_abandoned, num_db_studies_after)
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_investigators') # mock_studies
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_required_docs') # mock_docs
@ -225,7 +230,7 @@ class TestStudyApi(BaseTest):
json_data = json.loads(rv.get_data(as_text=True))
self.assertEqual(study.id, json_data['id'])
self.assertEqual(study.title, json_data['title'])
self.assertEqual(study.protocol_builder_status.name, json_data['protocol_builder_status'])
self.assertEqual(study.status.value, json_data['status'])
self.assertEqual(study.primary_investigator_id, json_data['primary_investigator_id'])
self.assertEqual(study.sponsor, json_data['sponsor'])
self.assertEqual(study.ind_number, json_data['ind_number'])

View File

@ -6,7 +6,7 @@ from tests.base_test import BaseTest
from crc import db, app
from crc.models.protocol_builder import ProtocolBuilderStatus
from crc.models.study import StudyModel
from crc.models.study import StudyModel, StudyStatus
from crc.models.user import UserModel
from crc.models.workflow import WorkflowModel, WorkflowStatus, \
WorkflowSpecCategoryModel
@ -27,7 +27,10 @@ class TestStudyService(BaseTest):
# Assure some basic models are in place, This is a damn mess. Our database models need an overhaul to make
# this easier - better relationship modeling is now critical.
self.load_test_spec("top_level_workflow", master_spec=True)
cat = WorkflowSpecCategoryModel(name="approvals", display_name="Approvals", display_order=0)
db.session.add(cat)
db.session.commit()
self.load_test_spec("top_level_workflow", master_spec=True, category_id=cat.id)
user = db.session.query(UserModel).filter(UserModel.uid == "dhf8r").first()
if not user:
user = UserModel(uid="dhf8r", email_address="whatever@stuff.com", display_name="Stayathome Smellalots")
@ -37,13 +40,9 @@ class TestStudyService(BaseTest):
for study in db.session.query(StudyModel).all():
StudyService().delete_study(study.id)
study = StudyModel(title="My title", protocol_builder_status=ProtocolBuilderStatus.ACTIVE, user_uid=user.uid)
study = StudyModel(title="My title", status=StudyStatus.in_progress, user_uid=user.uid)
db.session.add(study)
cat = WorkflowSpecCategoryModel(name="approvals", display_name="Approvals", display_order=0)
db.session.add(cat)
db.session.commit()
self.assertIsNotNone(cat.id)
self.load_test_spec("random_fact", category_id=cat.id)
self.assertIsNotNone(study.id)
@ -80,6 +79,7 @@ class TestStudyService(BaseTest):
# Initialize the Workflow with the workflow processor.
workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.id == workflow.id).first()
processor = WorkflowProcessor(workflow_model)
processor.do_engine_steps()
# Assure the workflow is now started, and knows the total and completed tasks.
studies = StudyService.get_studies_for_user(user)

View File

@ -5,14 +5,16 @@ from datetime import timezone, datetime, timedelta
import jwt
from tests.base_test import BaseTest
from crc import db, app
from crc import app, session
from crc.api.common import ApiError
from crc.models.protocol_builder import ProtocolBuilderStatus
from crc.models.study import StudySchema, StudyModel
from crc.models.study import StudySchema, StudyModel, StudyStatus
from crc.models.user import UserModel
class TestAuthentication(BaseTest):
admin_uid = 'dhf8r'
non_admin_uid = 'lb3dp'
def tearDown(self):
# Assure we set the production flag back to false.
@ -58,9 +60,9 @@ class TestAuthentication(BaseTest):
self.assertTrue(expected_exp_3 - 1000 <= actual_exp_3 <= expected_exp_3 + 1000)
def test_non_production_auth_creates_user(self):
new_uid = 'lb3dp' ## Assure this user id is in the fake responses from ldap.
new_uid = self.non_admin_uid ## Assure this user id is in the fake responses from ldap.
self.load_example_data()
user = db.session.query(UserModel).filter(UserModel.uid == new_uid).first()
user = session.query(UserModel).filter(UserModel.uid == new_uid).first()
self.assertIsNone(user)
user_info = {'uid': new_uid, 'first_name': 'Cordi', 'last_name': 'Nator',
@ -72,7 +74,7 @@ class TestAuthentication(BaseTest):
self.assertTrue(rv_1.status_code == 302)
self.assertTrue(str.startswith(rv_1.location, redirect_url))
user = db.session.query(UserModel).filter(UserModel.uid == new_uid).first()
user = session.query(UserModel).filter(UserModel.uid == new_uid).first()
self.assertIsNotNone(user)
self.assertIsNotNone(user.display_name)
self.assertIsNotNone(user.email_address)
@ -88,21 +90,20 @@ class TestAuthentication(BaseTest):
self.load_example_data()
new_uid = 'lb3dp' # This user is in the test ldap system.
user = db.session.query(UserModel).filter_by(uid=new_uid).first()
# User should not be in the system yet.
user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first()
self.assertIsNone(user)
redirect_url = 'http://worlds.best.website/admin'
headers = dict(Uid=new_uid)
db.session.flush()
rv = self.app.get('v1.0/login', follow_redirects=False, headers=headers)
self.assert_success(rv)
user = db.session.query(UserModel).filter_by(uid=new_uid).first()
self.assertIsNotNone(user)
self.assertEqual(new_uid, user.uid)
self.assertEqual("Laura Barnes", user.display_name)
self.assertEqual("lb3dp@virginia.edu", user.email_address)
self.assertEqual("E0:Associate Professor of Systems and Information Engineering", user.title)
# Log in
non_admin_user = self._login_as_non_admin()
# User should be in the system now.
redirect_url = 'http://worlds.best.website/admin'
rv_user = self.app.get('/v1.0/user', headers=self.logged_in_headers(non_admin_user, redirect_url=redirect_url))
self.assert_success(rv_user)
user_data = json.loads(rv_user.get_data(as_text=True))
self.assertEqual(self.non_admin_uid, user_data['uid'])
self.assertFalse(user_data['is_admin'])
# Switch production mode back off
app.config['PRODUCTION'] = False
@ -119,6 +120,8 @@ class TestAuthentication(BaseTest):
user = UserModel(uid="dhf8r", first_name='Dan', last_name='Funk', email_address='dhf8r@virginia.edu')
rv = self.app.get('/v1.0/user', headers=self.logged_in_headers(user, redirect_url='http://omg.edu/lolwut'))
self.assert_success(rv)
user_data = json.loads(rv.get_data(as_text=True))
self.assertTrue(user_data['is_admin'])
def test_admin_can_access_admin_only_endpoints(self):
# Switch production mode on
@ -126,21 +129,8 @@ class TestAuthentication(BaseTest):
self.load_example_data()
admin_uids = app.config['ADMIN_UIDS']
self.assertGreater(len(admin_uids), 0)
admin_uid = admin_uids[0]
self.assertEqual(admin_uid, 'dhf8r') # This user is in the test ldap system.
admin_headers = dict(Uid=admin_uid)
rv = self.app.get('v1.0/login', follow_redirects=False, headers=admin_headers)
self.assert_success(rv)
admin_user = db.session.query(UserModel).filter(UserModel.uid == admin_uid).first()
self.assertIsNotNone(admin_user)
self.assertEqual(admin_uid, admin_user.uid)
admin_study = self._make_fake_study(admin_uid)
admin_user = self._login_as_admin()
admin_study = self._make_fake_study(admin_user.uid)
admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode())
rv_add_study = self.app.post(
@ -153,7 +143,7 @@ class TestAuthentication(BaseTest):
self.assert_success(rv_add_study, 'Admin user should be able to add a study')
new_admin_study = json.loads(rv_add_study.get_data(as_text=True))
db_admin_study = db.session.query(StudyModel).filter_by(id=new_admin_study['id']).first()
db_admin_study = session.query(StudyModel).filter_by(id=new_admin_study['id']).first()
self.assertIsNotNone(db_admin_study)
rv_del_study = self.app.delete(
@ -173,26 +163,9 @@ class TestAuthentication(BaseTest):
self.load_example_data()
# Non-admin user should not be able to delete a study
non_admin_uid = 'lb3dp'
admin_uids = app.config['ADMIN_UIDS']
self.assertGreater(len(admin_uids), 0)
self.assertNotIn(non_admin_uid, admin_uids)
non_admin_headers = dict(Uid=non_admin_uid)
rv = self.app.get(
'v1.0/login',
follow_redirects=False,
headers=non_admin_headers
)
self.assert_success(rv)
non_admin_user = db.session.query(UserModel).filter_by(uid=non_admin_uid).first()
self.assertIsNotNone(non_admin_user)
non_admin_user = self._login_as_non_admin()
non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode())
non_admin_study = self._make_fake_study(non_admin_uid)
non_admin_study = self._make_fake_study(non_admin_user.uid)
rv_add_study = self.app.post(
'/v1.0/study',
@ -203,7 +176,7 @@ class TestAuthentication(BaseTest):
self.assert_success(rv_add_study, 'Non-admin user should be able to add a study')
new_non_admin_study = json.loads(rv_add_study.get_data(as_text=True))
db_non_admin_study = db.session.query(StudyModel).filter_by(id=new_non_admin_study['id']).first()
db_non_admin_study = session.query(StudyModel).filter_by(id=new_non_admin_study['id']).first()
self.assertIsNotNone(db_non_admin_study)
rv_non_admin_del_study = self.app.delete(
@ -216,11 +189,131 @@ class TestAuthentication(BaseTest):
# Switch production mode back off
app.config['PRODUCTION'] = False
def test_list_all_users(self):
self.load_example_data()
rv = self.app.get('/v1.0/user')
self.assert_failure(rv, 401)
rv = self.app.get('/v1.0/user', headers=self.logged_in_headers())
self.assert_success(rv)
all_users = session.query(UserModel).all()
rv = self.app.get('/v1.0/list_users', headers=self.logged_in_headers())
self.assert_success(rv)
user_data = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(user_data), len(all_users))
    def test_admin_can_impersonate_another_user(self):
        """End-to-end check of the admin_impersonate_uid query parameter.

        Impersonating a uid with no user record yet must fail with a 400;
        once the user has logged in, /user must report the impersonated uid
        and /study must return the impersonated user's studies.
        """
        # Switch production mode on
        app.config['PRODUCTION'] = True

        self.load_example_data()

        admin_user = self._login_as_admin()
        admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode())

        # User should not be in the system yet.
        non_admin_user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first()
        self.assertIsNone(non_admin_user)

        # Admin should not be able to impersonate non-existent user
        rv_1 = self.app.get(
            '/v1.0/user?admin_impersonate_uid=' + self.non_admin_uid,
            content_type="application/json",
            headers=admin_token_headers,
            follow_redirects=False
        )
        self.assert_failure(rv_1, 400)

        # Add the non-admin user now (logging in creates the user record)
        self.logout()
        non_admin_user = self._login_as_non_admin()
        self.assertEqual(non_admin_user.uid, self.non_admin_uid)
        non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode())

        # Add a study for the non-admin user
        non_admin_study = self._make_fake_study(self.non_admin_uid)
        rv_add_study = self.app.post(
            '/v1.0/study',
            content_type="application/json",
            headers=non_admin_token_headers,
            data=json.dumps(StudySchema().dump(non_admin_study))
        )
        self.assert_success(rv_add_study, 'Non-admin user should be able to add a study')
        self.logout()

        # Admin should be able to impersonate user now
        admin_user = self._login_as_admin()
        rv_2 = self.app.get(
            '/v1.0/user?admin_impersonate_uid=' + self.non_admin_uid,
            content_type="application/json",
            headers=admin_token_headers,
            follow_redirects=False
        )
        self.assert_success(rv_2)
        user_data_2 = json.loads(rv_2.get_data(as_text=True))
        self.assertEqual(user_data_2['uid'], self.non_admin_uid, 'Admin user should impersonate non-admin user')

        # Study endpoint should return non-admin user's studies
        rv_study = self.app.get(
            '/v1.0/study',
            content_type="application/json",
            headers=admin_token_headers,
            follow_redirects=False
        )
        self.assert_success(rv_study, 'Admin user should be able to get impersonated user studies')
        study_data = json.loads(rv_study.get_data(as_text=True))
        self.assertGreaterEqual(len(study_data), 1)
        self.assertEqual(study_data[0]['user_uid'], self.non_admin_uid)

        # Switch production mode back off
        app.config['PRODUCTION'] = False
def _make_fake_study(self, uid):
return {
"title": "blah",
"last_updated": datetime.now(tz=timezone.utc),
"protocol_builder_status": ProtocolBuilderStatus.ACTIVE,
"status": StudyStatus.in_progress,
"primary_investigator_id": uid,
"user_uid": uid,
}
def _login_as_admin(self):
admin_uids = app.config['ADMIN_UIDS']
self.assertGreater(len(admin_uids), 0)
self.assertIn(self.admin_uid, admin_uids)
admin_headers = dict(Uid=self.admin_uid)
rv = self.app.get('v1.0/login', follow_redirects=False, headers=admin_headers)
self.assert_success(rv)
admin_user = session.query(UserModel).filter(UserModel.uid == self.admin_uid).first()
self.assertIsNotNone(admin_user)
self.assertEqual(self.admin_uid, admin_user.uid)
self.assertTrue(admin_user.is_admin())
return admin_user
def _login_as_non_admin(self):
    """Log in through the API as the non-admin test user and return their UserModel.

    Sanity-checks that the uid is NOT in ADMIN_UIDS, performs the login
    request, then verifies the LDAP-derived profile fields for the test
    user (lb3dp).
    """
    admin_uids = app.config['ADMIN_UIDS']
    self.assertGreater(len(admin_uids), 0)
    self.assertNotIn(self.non_admin_uid, admin_uids)
    non_admin_headers = dict(Uid=self.non_admin_uid)
    rv = self.app.get(
        'v1.0/login?uid=' + self.non_admin_uid,
        follow_redirects=False,
        headers=non_admin_headers
    )
    self.assert_success(rv)
    user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first()
    # One not-None assertion is enough; the original asserted it twice.
    self.assertIsNotNone(user)
    self.assertFalse(user.is_admin())
    self.assertEqual(self.non_admin_uid, user.uid)
    self.assertEqual("Laura Barnes", user.display_name)
    self.assertEqual("lb3dp@virginia.edu", user.email_address)
    self.assertEqual("E0:Associate Professor of Systems and Information Engineering", user.title)
    return user

43
tests/test_events.py Normal file
View File

@ -0,0 +1,43 @@
import json
from tests.base_test import BaseTest
from crc.models.workflow import WorkflowStatus
from crc import db
from crc.api.common import ApiError
from crc.models.task_event import TaskEventModel, TaskEventSchema
from crc.services.workflow_service import WorkflowService
class TestEvents(BaseTest):

    def _fetch_task_events(self, url):
        """GET the given task_events url and return the deserialized event list."""
        response = self.app.get(url,
                                headers=self.logged_in_headers(),
                                content_type="application/json")
        self.assert_success(response)
        payload = json.loads(response.get_data(as_text=True))
        return TaskEventSchema(many=True).load(payload)

    def test_list_events_by_workflow(self):
        """Task events can be listed across all workflows or scoped to one workflow."""
        workflow_one = self.create_workflow('exclusive_gateway')

        # Advance the first workflow past its opening form.
        first_task = self.get_workflow_api(workflow_one).next_task
        self.complete_form(workflow_one, first_task, {"has_bananas": True})
        workflow_one = self.get_workflow_api(workflow_one)
        self.assertEqual('Task_Num_Bananas', workflow_one.next_task.name)

        # Kick off a second, unrelated workflow.
        workflow_two = self.create_workflow('subprocess')
        workflow_api_two = self.get_workflow_api(workflow_two)

        # An unscoped query sees the ASSIGNMENT events from both workflows.
        all_events = self._fetch_task_events('/v1.0/task_events?action=ASSIGNMENT')
        self.assertEqual(2, len(all_events))

        # Scoping by workflow id narrows the results to that workflow alone.
        scoped_events = self._fetch_task_events(
            f'/v1.0/task_events?action=ASSIGNMENT&workflow={workflow_one.id}')
        self.assertEqual(1, len(scoped_events))

View File

@ -30,4 +30,4 @@ class TestLdapService(BaseTest):
user_info = LdapService.user_info("nosuch")
self.assertFalse(True, "An API error should be raised.")
except ApiError as ae:
self.assertEqual("missing_ldap_record", ae.code)
self.assertEqual("missing_ldap_record", ae.code)

View File

@ -9,6 +9,7 @@ from crc import session, app
from crc.models.api_models import WorkflowApiSchema, MultiInstanceType, TaskSchema
from crc.models.file import FileModelSchema
from crc.models.workflow import WorkflowStatus
from crc.models.task_event import TaskEventModel
class TestTasksApi(BaseTest):
@ -42,6 +43,24 @@ class TestTasksApi(BaseTest):
"""
self.assertTrue(str.startswith(task.documentation, expected_docs))
def test_get_workflow_without_running_engine_steps(self):
# Set up a new workflow
workflow = self.create_workflow('two_forms')
# get the first form in the two form workflow.
workflow_api = self.get_workflow_api(workflow, do_engine_steps=False)
# There should be no task event logs related to the workflow at this point.
task_events = session.query(TaskEventModel).filter(TaskEventModel.workflow_id == workflow.id).all()
self.assertEqual(0, len(task_events))
# Since the workflow was not started, the call to read-only should not execute any engine steps the
# current task should be the start event.
self.assertEqual("Start", workflow_api.next_task.name)
def test_get_form_for_previously_completed_task(self):
"""Assure we can look at previously completed steps without moving the token for the workflow."""
def test_two_forms_task(self):
# Set up a new workflow
self.load_example_data()
@ -69,7 +88,6 @@ class TestTasksApi(BaseTest):
self.assertIsNotNone(val)
def test_error_message_on_bad_gateway_expression(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# get the first form in the two form workflow.
@ -77,7 +95,6 @@ class TestTasksApi(BaseTest):
self.complete_form(workflow, task, {"has_bananas": True})
def test_workflow_with_parallel_forms(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# get the first form in the two form workflow.
@ -89,7 +106,6 @@ class TestTasksApi(BaseTest):
self.assertEqual("Task_Num_Bananas", workflow_api.next_task.name)
def test_navigation_with_parallel_forms(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# get the first form in the two form workflow.
@ -107,7 +123,6 @@ class TestTasksApi(BaseTest):
self.assertEqual("NOOP", nav[3]['state'])
def test_navigation_with_exclusive_gateway(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway_2')
# get the first form in the two form workflow.
@ -124,7 +139,6 @@ class TestTasksApi(BaseTest):
self.assertEqual("Task 3", nav[6]['title'])
def test_document_added_to_workflow_shows_up_in_file_list(self):
self.load_example_data()
self.create_reference_document()
workflow = self.create_workflow('docx')
@ -153,7 +167,6 @@ class TestTasksApi(BaseTest):
def test_get_documentation_populated_in_end(self):
self.load_example_data()
workflow = self.create_workflow('random_fact')
workflow_api = self.get_workflow_api(workflow)
task = workflow_api.next_task
@ -167,9 +180,7 @@ class TestTasksApi(BaseTest):
self.assertTrue("norris" in workflow_api.next_task.documentation)
def test_load_workflow_from_outdated_spec(self):
# Start the basic two_forms workflow and complete a task.
self.load_example_data()
workflow = self.create_workflow('two_forms')
workflow_api = self.get_workflow_api(workflow)
self.complete_form(workflow, workflow_api.next_task, {"color": "blue"})
@ -194,9 +205,7 @@ class TestTasksApi(BaseTest):
self.assertTrue(workflow_api.is_latest_spec)
def test_soft_reset_errors_out_and_next_result_is_on_original_version(self):
# Start the basic two_forms workflow and complete a task.
self.load_example_data()
workflow = self.create_workflow('two_forms')
workflow_api = self.get_workflow_api(workflow)
self.complete_form(workflow, workflow_api.next_task, {"color": "blue"})
@ -221,7 +230,6 @@ class TestTasksApi(BaseTest):
def test_manual_task_with_external_documentation(self):
self.load_example_data()
workflow = self.create_workflow('manual_task_with_external_documentation')
# get the first form in the two form workflow.
@ -235,7 +243,6 @@ class TestTasksApi(BaseTest):
self.assertTrue('Dan' in workflow_api.next_task.documentation)
def test_bpmn_extension_properties_are_populated(self):
self.load_example_data()
workflow = self.create_workflow('manual_task_with_external_documentation')
# get the first form in the two form workflow.
@ -268,9 +275,7 @@ class TestTasksApi(BaseTest):
# Assure that the names for each task are properly updated, so they aren't all the same.
self.assertEqual("Primary Investigator", workflow.next_task.properties['display_name'])
def test_lookup_endpoint_for_task_field_enumerations(self):
self.load_example_data()
workflow = self.create_workflow('enum_options_with_search')
# get the first form in the two form workflow.
workflow = self.get_workflow_api(workflow)
@ -286,7 +291,6 @@ class TestTasksApi(BaseTest):
self.assert_options_populated(results, ['CUSTOMER_NUMBER', 'CUSTOMER_NAME', 'CUSTOMER_CLASS_MEANING'])
def test_lookup_endpoint_for_task_field_using_lookup_entry_id(self):
self.load_example_data()
workflow = self.create_workflow('enum_options_with_search')
# get the first form in the two form workflow.
workflow = self.get_workflow_api(workflow)
@ -316,7 +320,6 @@ class TestTasksApi(BaseTest):
# the key/values from the spreadsheet are added directly to the form and it shows up as
# a dropdown. This tests the case of wanting to get additional data when a user selects
# something from a dropdown.
self.load_example_data()
workflow = self.create_workflow('enum_options_from_file')
# get the first form in the two form workflow.
workflow = self.get_workflow_api(workflow)
@ -334,7 +337,6 @@ class TestTasksApi(BaseTest):
self.assertIsInstance(results[0]['data'], dict)
def test_enum_from_task_data(self):
self.load_example_data()
workflow = self.create_workflow('enum_options_from_task_data')
# get the first form in the two form workflow.
workflow_api = self.get_workflow_api(workflow)
@ -359,7 +361,6 @@ class TestTasksApi(BaseTest):
self.assertEqual('Chesterfield', options[2]['data']['first_name'])
def test_lookup_endpoint_for_task_ldap_field_lookup(self):
self.load_example_data()
workflow = self.create_workflow('ldap_lookup')
# get the first form
workflow = self.get_workflow_api(workflow)
@ -378,7 +379,6 @@ class TestTasksApi(BaseTest):
self.assertEqual(1, len(results))
def test_sub_process(self):
self.load_example_data()
workflow = self.create_workflow('subprocess')
workflow_api = self.get_workflow_api(workflow)
@ -399,7 +399,6 @@ class TestTasksApi(BaseTest):
self.assertEqual(WorkflowStatus.complete, workflow_api.status)
def test_update_task_resets_token(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# Start the workflow.
@ -477,3 +476,5 @@ class TestTasksApi(BaseTest):
workflow = self.get_workflow_api(workflow)
self.assertEqual(WorkflowStatus.complete, workflow.status)

View File

@ -68,7 +68,7 @@ class TestTasksApi(BaseTest):
def test_get_outstanding_tasks_awaiting_current_user(self):
submitter = self.create_user(uid='lje5u')
supervisor = self.create_user(uid='lb3dp')
workflow = self.create_workflow('roles', as_user=submitter.uid)
workflow = self.create_workflow('roles', display_name="Roles", as_user=submitter.uid)
workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)
# User lje5u can complete the first task, and set her supervisor
@ -94,6 +94,7 @@ class TestTasksApi(BaseTest):
self.assertEquals(1, len(tasks))
self.assertEquals(workflow.id, tasks[0]['workflow']['id'])
self.assertEquals(workflow.study.id, tasks[0]['study']['id'])
self.assertEquals("Test Workflows", tasks[0]['workflow']['category_display_name'])
# Assure we can say something sensible like:
# You have a task called "Approval" to be completed in the "Supervisor Approval" workflow
@ -110,6 +111,7 @@ class TestTasksApi(BaseTest):
data['approval'] = True
self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid)
def test_navigation_and_current_task_updates_through_workflow(self):
submitter = self.create_user(uid='lje5u')
@ -178,7 +180,7 @@ class TestTasksApi(BaseTest):
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
nav = workflow_api.navigation
self.assertEquals(5, len(nav))
self.assertEquals('COMPLETED', nav[0]['state']) # We still have some issues here, the navigation will be off when looping back.
self.assertEquals('READY', nav[0]['state']) # When you loop back the task is again in the ready state.
self.assertEquals('LOCKED', nav[1]['state']) # Second item is locked, it is the review and doesn't belong to this user.
self.assertEquals('LOCKED', nav[2]['state']) # third item is a gateway belonging to the supervisor, and is locked.
self.assertEquals('READY', workflow_api.next_task.state)
@ -199,4 +201,67 @@ class TestTasksApi(BaseTest):
workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)
self.assertEquals('COMPLETED', workflow_api.next_task.state)
self.assertEquals('EndEvent', workflow_api.next_task.type) # Are are at the end.
self.assertEquals(WorkflowStatus.complete, workflow_api.status)
self.assertEquals(WorkflowStatus.complete, workflow_api.status)
def get_assignment_task_events(self, uid):
return db.session.query(TaskEventModel). \
filter(TaskEventModel.user_uid == uid). \
filter(TaskEventModel.action == WorkflowService.TASK_ACTION_ASSIGNMENT).all()
def test_workflow_reset_correctly_resets_the_task_events(self):
submitter = self.create_user(uid='lje5u')
supervisor = self.create_user(uid='lb3dp')
workflow = self.create_workflow('roles', display_name="Roles", as_user=submitter.uid)
workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)
# User lje5u can complete the first task, and set her supervisor
data = workflow_api.next_task.data
data['supervisor'] = supervisor.uid
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
# At this point there should be a task_log with an action of ASSIGNMENT on it for
# the supervisor.
self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid)))
# Resetting the workflow at this point should clear the event log.
workflow_api = self.get_workflow_api(workflow, hard_reset=True, user_uid=submitter.uid)
self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid)))
# Re-complete first task, and awaiting tasks should shift to 0 for for submitter, and 1 for supervisor
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid)))
self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid)))
# Complete the supervisor task with rejected approval, and the assignments should switch.
workflow_api = self.get_workflow_api(workflow, user_uid=supervisor.uid)
data = workflow_api.next_task.data
data["approval"] = False
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid)
self.assertEquals(1, len(self.get_assignment_task_events(submitter.uid)))
self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid)))
# Mark the return form review page as complete, and then recomplete the form, and assignments switch yet again.
workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid)))
self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid)))
# Complete the supervisor task, accepting the approval, and the workflow is completed.
# When it is all done, there should be no outstanding assignments.
workflow_api = self.get_workflow_api(workflow, user_uid=supervisor.uid)
data = workflow_api.next_task.data
data["approval"] = True
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid)
self.assertEquals(WorkflowStatus.complete, workflow_api.status)
self.assertEquals('EndEvent', workflow_api.next_task.type) # Are are at the end.
self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid)))
self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid)))
# Sending any subsequent complete forms does not result in a new task event
with self.assertRaises(AssertionError) as _api_error:
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid)))
self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid)))

View File

@ -36,6 +36,7 @@ class TestWorkflowProcessor(BaseTest):
workflow_spec_model = self.load_test_spec("random_fact")
study = session.query(StudyModel).first()
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(study.id, processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY])
self.assertIsNotNone(processor)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
@ -62,6 +63,7 @@ class TestWorkflowProcessor(BaseTest):
files = session.query(FileModel).filter_by(workflow_spec_id='decision_table').all()
self.assertEqual(2, len(files))
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(1, len(next_user_tasks))
@ -86,6 +88,7 @@ class TestWorkflowProcessor(BaseTest):
workflow_spec_model = self.load_test_spec("parallel_tasks")
study = session.query(StudyModel).first()
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
# Complete the first steps of the 4 parallel tasks
@ -127,6 +130,7 @@ class TestWorkflowProcessor(BaseTest):
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("parallel_tasks")
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(4, len(next_user_tasks))
@ -215,6 +219,7 @@ class TestWorkflowProcessor(BaseTest):
self.assertEqual(2, len(files))
workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id="docx").first()
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(1, len(next_user_tasks))
@ -278,6 +283,7 @@ class TestWorkflowProcessor(BaseTest):
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("two_forms")
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(processor.workflow_model.workflow_spec_id, workflow_spec_model.id)
task = processor.next_task()
task.data = {"color": "blue"}

View File

@ -47,6 +47,7 @@ class TestWorkflowProcessorMultiInstance(BaseTest):
workflow_spec_model = self.load_test_spec("multi_instance")
study = session.query(StudyModel).first()
processor = self.get_processor(study, workflow_spec_model)
processor.bpmn_workflow.do_engine_steps()
self.assertEqual(study.id, processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY])
self.assertIsNotNone(processor)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())

View File

@ -89,8 +89,8 @@ class TestWorkflowSpecValidation(BaseTest):
self.load_example_data()
errors = self.validate_workflow("invalid_script")
self.assertEqual(2, len(errors))
self.assertEqual("error_loading_workflow", errors[0]['code'])
self.assertTrue("NoSuchScript" in errors[0]['message'])
self.assertEqual("workflow_validation_exception", errors[0]['code'])
#self.assertTrue("NoSuchScript" in errors[0]['message'])
self.assertEqual("Invalid_Script_Task", errors[0]['task_id'])
self.assertEqual("An Invalid Script Reference", errors[0]['task_name'])
self.assertEqual("invalid_script.bpmn", errors[0]['file_name'])
@ -99,7 +99,7 @@ class TestWorkflowSpecValidation(BaseTest):
self.load_example_data()
errors = self.validate_workflow("invalid_script2")
self.assertEqual(2, len(errors))
self.assertEqual("error_loading_workflow", errors[0]['code'])
self.assertEqual("workflow_validation_exception", errors[0]['code'])
self.assertEqual("Invalid_Script_Task", errors[0]['task_id'])
self.assertEqual("An Invalid Script Reference", errors[0]['task_name'])
self.assertEqual("invalid_script2.bpmn", errors[0]['file_name'])