Merge pull request #168 from sartography/feature/approvals_dashboard_prototype

Feature/approvals dashboard prototype
Aaron Louie 2020-07-31 13:27:43 -04:00 committed by GitHub
commit cb3f07b49a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
31 changed files with 572 additions and 120 deletions

View File

@ -38,9 +38,8 @@ recommonmark = "*"
requests = "*"
sentry-sdk = {extras = ["flask"],version = "==0.14.4"}
sphinx = "*"
spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow.git",ref = "master"}
#spiffworkflow = {editable = true,path="/home/kelly/sartography/SpiffWorkflow/"}
swagger-ui-bundle = "*"
spiffworkflow = {editable = true, git = "https://github.com/sartography/SpiffWorkflow.git", ref = "master"}
webtest = "*"
werkzeug = "*"
xlrd = "*"

206
Pipfile.lock generated
View File

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "45dc348da1f583da4a7c76113456b3f0225736e79a5da05ba2af9ede7f8089e0"
"sha256": "096abf7ce152358489282a004ed634ca64730cb98276f3a513ed2d5b8a6635c6"
},
"pipfile-spec": 6,
"requires": {
@ -30,6 +30,14 @@
"index": "pypi",
"version": "==1.4.2"
},
"amqp": {
"hashes": [
"sha256:70cdb10628468ff14e57ec2f751c7aa9e48e7e3651cfd62d431213c0c4e58f21",
"sha256:aa7f313fb887c91f15474c1229907a04dac0b8135822d6603437803424c0aa59"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==2.6.1"
},
"aniso8601": {
"hashes": [
"sha256:529dcb1f5f26ee0df6c0a1ee84b7b27197c3c50fc3a6321d66c544689237d072",
@ -42,6 +50,7 @@
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==19.3.0"
},
"babel": {
@ -49,6 +58,7 @@
"sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38",
"sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.8.0"
},
"bcrypt": {
@ -72,6 +82,7 @@
"sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7",
"sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==3.1.7"
},
"beautifulsoup4": {
@ -82,12 +93,27 @@
],
"version": "==4.9.1"
},
"billiard": {
"hashes": [
"sha256:bff575450859a6e0fbc2f9877d9b715b0bbc07c3565bb7ed2280526a0cdf5ede",
"sha256:d91725ce6425f33a97dfa72fb6bfef0e47d4652acd98a032bd1a7fbf06d5fa6a"
],
"version": "==3.6.3.0"
},
"blinker": {
"hashes": [
"sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6"
],
"version": "==1.4"
},
"celery": {
"hashes": [
"sha256:ef17d7dffde7fc73ecab3a3b6389d93d3213bac53fa7f28e68e33647ad50b916",
"sha256:fd77e4248bb1b7af5f7922dd8e81156f540306e3a5c4b1c24167c1f5f06025da"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==4.4.6"
},
"certifi": {
"hashes": [
"sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3",
@ -140,6 +166,7 @@
"sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a",
"sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==7.1.2"
},
"clickclick": {
@ -156,6 +183,14 @@
],
"version": "==0.9.1"
},
"configparser": {
"hashes": [
"sha256:2ca44140ee259b5e3d8aaf47c79c36a7ab0d5e94d70bd4105c03ede7a20ea5a1",
"sha256:cffc044844040c7ce04e9acd1838b5f2e5fa3170182f6fda4d2ea8b0099dbadd"
],
"markers": "python_version >= '3.6'",
"version": "==5.0.0"
},
"connexion": {
"extras": [
"swagger-ui"
@ -212,6 +247,7 @@
"sha256:525ba66fb5f90b07169fdd48b6373c18f1ee12728ca277ca44567a367d9d7f74",
"sha256:a766c1dccb30c5f6eb2b203f87edd1d8588847709c78589e1521d769addc8218"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.2.10"
},
"docutils": {
@ -219,6 +255,7 @@
"sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
"sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==0.16"
},
"docxtpl": {
@ -301,8 +338,16 @@
"sha256:05b31d2034dd3f2a685cbbae4cfc4ed906b2a733cff7964ada450fd5e462b84e",
"sha256:bfc7150eaf809b1c283879302f04c42791136060c6eeb12c0c6674fb1291fae5"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.4"
},
"future": {
"hashes": [
"sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.18.2"
},
"gunicorn": {
"hashes": [
"sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626",
@ -323,6 +368,7 @@
"sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
"sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.10"
},
"imagesize": {
@ -330,21 +376,15 @@
"sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1",
"sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.2.0"
},
"importlib-metadata": {
"hashes": [
"sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83",
"sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"
],
"markers": "python_version < '3.8'",
"version": "==1.7.0"
},
"inflection": {
"hashes": [
"sha256:88b101b2668a1d81d6d72d4c2018e53bc6c7fc544c987849da1c7f77545c3bc9",
"sha256:f576e85132d34f5bf7df5183c2c6f94cfb32e528f53065345cf71329ba0b8924"
],
"markers": "python_version >= '3.5'",
"version": "==0.5.0"
},
"itsdangerous": {
@ -352,6 +392,7 @@
"sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19",
"sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.0"
},
"jdcal": {
@ -366,6 +407,7 @@
"sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0",
"sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==2.11.2"
},
"jsonschema": {
@ -375,9 +417,21 @@
],
"version": "==3.2.0"
},
"kombu": {
"hashes": [
"sha256:be48cdffb54a2194d93ad6533d73f69408486483d189fe9f5990ee24255b0e0a",
"sha256:ca1b45faac8c0b18493d02a8571792f3c40291cf2bcf1f55afed3d8f3aa7ba74"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==4.6.11"
},
"ldap3": {
"hashes": [
"sha256:17f04298b70bf7ecaa5db8a7d8622b5a962ef7fc2b245b2eea705ac1c24338c0",
"sha256:298769ab0232b3a3efa1e84881096c24526fe37911c83a11285f222fe4975efd",
"sha256:4fd2db72d0412cc16ee86be01332095e86e361329c3579b314231eb2e56c7871",
"sha256:52ab557b3c4908db4a90bea16731aa714b1b54e039b54fd4c4b83994c6c48c0c",
"sha256:53aaae5bf14f3827c69600ddf4d61b88f49c055bb93060e9702c5bafd206c744",
"sha256:81df4ac8b6df10fb1f05b17c18d0cb8c4c344d5a03083c382824960ed959cf5b"
],
"index": "pypi",
@ -425,6 +479,7 @@
"sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27",
"sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.3"
},
"markdown": {
@ -471,6 +526,7 @@
"sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",
"sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.1"
},
"marshmallow": {
@ -526,6 +582,7 @@
"sha256:ed8a311493cf5480a2ebc597d1e177231984c818a86875126cfd004241a73c3e",
"sha256:ef71a1d4fd4858596ae80ad1ec76404ad29701f8ca7cdcebc50300178db14dfc"
],
"markers": "python_version >= '3.6'",
"version": "==1.19.1"
},
"openapi-spec-validator": {
@ -549,6 +606,7 @@
"sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",
"sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.4"
},
"pandas": {
@ -611,8 +669,19 @@
},
"pyasn1": {
"hashes": [
"sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359",
"sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576",
"sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf",
"sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7",
"sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d",
"sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"
"sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00",
"sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8",
"sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86",
"sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12",
"sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776",
"sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba",
"sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2",
"sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"
],
"version": "==0.4.8"
},
@ -621,6 +690,7 @@
"sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
"sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.20"
},
"pygithub": {
@ -636,6 +706,7 @@
"sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",
"sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"
],
"markers": "python_version >= '3.5'",
"version": "==2.6.1"
},
"pyjwt": {
@ -651,6 +722,7 @@
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
"sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.7"
},
"pyrsistent": {
@ -685,10 +757,67 @@
"hashes": [
"sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d",
"sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b",
"sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"
"sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8",
"sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77",
"sha256:ea87e17f6ec459e780e4221f295411462e0d0810858e055fc514684350a2f522"
],
"version": "==1.0.4"
},
"python-levenshtein-wheels": {
"hashes": [
"sha256:0065529c8aec4c044468286177761857d36981ba6f7fdb62d7d5f7ffd143de5d",
"sha256:016924a59d689f9f47d5f7b26b70f31e309255e8dd72602c91e93ceb752b9f92",
"sha256:089d046ea7727e583233c71fef1046663ed67b96967063ae8ddc9f551e86a4fc",
"sha256:09f9faaaa8f65726f91b44c11d3d622fee0f1780cfbe2bf3f410dd0e7345adcb",
"sha256:0aea217eab612acd45dcc3424a2e8dbd977cc309f80359d0c01971f1e65b9a9b",
"sha256:0beb91ad80b1573829066e5af36b80190c367be6e0a65292f073353b0388c7fc",
"sha256:0ec1bc73f5ed3a1a06e02d13bb3cd22a0b32ebf65a9667bbccba106bfa0546f1",
"sha256:0fa2ca69ef803bc6037a8c919e2e8a17b55e94c9c9ffcb4c21befbb15a1d0f40",
"sha256:11c77d0d74ab7f46f89a58ae9c2d67349ebc1ae3e18636627f9939d810167c31",
"sha256:19a68716a322486ddffc8bf7e5cf44a82f7700b05a10658e6e7fc5c7ae92b13d",
"sha256:19a95a01d28d63b042438ba860c4ace90362906a038fa77962ba33325d377d10",
"sha256:1a61f3a51e00a3608659bbaabb3f27af37c9dbe84d843369061a3e45cf0d5103",
"sha256:1c50aebebab403fb2dd415d70355446ac364dece502b0e2737a1a085bb9a4aa4",
"sha256:1d2390d04f9b673391e5ce1a0b054d0565f2e00ea5d1187a044221dc5c02c3e6",
"sha256:1e51cdc123625a28709662d24ea0cb4cf6f991845e6054d9f803c78da1d6b08f",
"sha256:1eca6dc97dfcf588f53281fe48a6d5c423d4e14bdab658a1aa6efd447acc64e0",
"sha256:1f0056d3216b0fe38f25c6f8ebc84bd9f6d34c55a7a9414341b674fb98961399",
"sha256:228b59460e9a786e498bdfc8011838b89c6054650b115c86c9c819a055a793b0",
"sha256:23020f9ff2cb3457a926dcc470b84f9bd5b7646bd8b8e06b915bdbbc905cb23f",
"sha256:2b7b7cf0f43b677f818aa9a610464abf06106c19a51b9ac35bd051a439f337a5",
"sha256:3b591c9a7e91480f0d7bf2041d325f578b9b9c2f2d593304377cb28862e7f9a2",
"sha256:3ca9c70411ab587d071c1d8fc8b69d0558be8e4aa920f2595e2cb5eb229ccc4c",
"sha256:3e6bcca97a7ff4e720352b57ddc26380c0583dcdd4b791acef7b574ad58468a7",
"sha256:3ed88f9e638da57647149115c34e0e120cae6f3d35eee7d77e22cc9c1d8eced3",
"sha256:445bf7941cb1fa05d6c2a4a502ad4868a5cacd92e8eb77b2bd008cdda9d37c55",
"sha256:4ba5e147d76d7ee884fd6eae461438b080bcc9f2c6eb9b576811e1bcfe8f808e",
"sha256:4bb128b719c30f3b9feacfe71a338ae07d39dbffc077139416f3535c89f12362",
"sha256:4e951907b9b5d40c9f1b611c8bdfe46ff8cf8371877cebbd589bf5840feab662",
"sha256:53c0c9964390368fd64460b690f168221c669766b193b7e80ae3950c2b9551f8",
"sha256:57c4edef81611098d37176278f2b6a3712bf864eed313496d7d80504805896d1",
"sha256:5b36e406937c6463d1c1ef3dd82d3f771d9d845f21351e8a026fe4dd398ea8d0",
"sha256:7d0821dab24b430dfdc2cba70a06e6d7a45cb839d0dd0e6db97bb99e23c3d884",
"sha256:7f7283dfe50eac8a8cd9b777de9eb50b1edf7dbb46fc7cc9d9b0050d0c135021",
"sha256:7f9759095b3fc825464a72b1cae95125e610eba3c70f91557754c32a0bf32ea2",
"sha256:8005a4df455569c0d490ddfd9e5a163f21293477fd0ed4ea9effdd723ddd8eaa",
"sha256:86e865f29ad3dc3bb4733e5247220173d90f05ac8d2ad18e9689a220f90de55f",
"sha256:98727050ba70eb8d318ec8a8203531c20119347fc8f281102b097326812742ab",
"sha256:ac9cdf044dcb9481c7da782db01b50c1f0e7cdd78c8507b963b6d072829c0263",
"sha256:acfad8ffed96891fe7c583d92717cd8ec0c03b59a954c389fd4e26a5cdeac610",
"sha256:ad15f25abff8220e556d64e2a27c646241b08f00faf1bc02313655696cd3edfa",
"sha256:b679f951f842c38665aa54bea4d7403099131f71fac6d8584f893a731fe1266d",
"sha256:b8c183dc4aa4e95dc5c373eedc3d205c176805835611fcfec5d9050736c695c4",
"sha256:c097a6829967c76526a037ed34500a028f78f0d765c8e3dbd1a7717afd09fb92",
"sha256:c2c76f483d05eddec60a5cd89e92385adef565a4f243b1d9a6abe2f6bd2a7c0a",
"sha256:c388baa3c04272a7c585d3da24030c142353eb26eb531dd2681502e6be7d7a26",
"sha256:cb0f2a711db665b5bf8697b5af3b9884bb1139385c5c12c2e472e4bbee62da99",
"sha256:cbac984d7b36e75b440d1c8ff9d3425d778364a0cbc23f8943383d4decd35d5e",
"sha256:f55adf069be2d655f8d668594fe1be1b84d9dc8106d380a9ada06f34941c33c8",
"sha256:f9084ed3b8997ad4353d124b903f2860a9695b9e080663276d9e58c32e293244",
"sha256:fb7df3504222fcb1fa593f76623abbb54d6019eec15aac5d05cd07ad90ac016c"
],
"version": "==0.13.1"
},
"pytz": {
"hashes": [
"sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed",
@ -744,6 +873,7 @@
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.15.0"
},
"snowballstemmer": {
@ -758,6 +888,7 @@
"sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55",
"sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232"
],
"markers": "python_version >= '3.5'",
"version": "==2.0.1"
},
"sphinx": {
@ -773,6 +904,7 @@
"sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a",
"sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.2"
},
"sphinxcontrib-devhelp": {
@ -780,6 +912,7 @@
"sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e",
"sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.2"
},
"sphinxcontrib-htmlhelp": {
@ -787,6 +920,7 @@
"sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f",
"sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.3"
},
"sphinxcontrib-jsmath": {
@ -794,6 +928,7 @@
"sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178",
"sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.1"
},
"sphinxcontrib-qthelp": {
@ -801,6 +936,7 @@
"sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72",
"sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.3"
},
"sphinxcontrib-serializinghtml": {
@ -808,9 +944,11 @@
"sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc",
"sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"
],
"markers": "python_version >= '3.5'",
"version": "==1.1.4"
},
"spiffworkflow": {
"editable": true,
"git": "https://github.com/sartography/SpiffWorkflow.git",
"ref": "7c8d59e7b9a978795bc8d1f354002fdc89540672"
},
@ -845,6 +983,7 @@
"sha256:f57be5673e12763dd400fea568608700a63ce1c6bd5bdbc3cc3a2c5fdb045274",
"sha256:fc728ece3d5c772c196fd338a99798e7efac7a04f9cb6416299a3638ee9a94cd"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.3.18"
},
"swagger-ui-bundle": {
@ -860,13 +999,23 @@
"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a",
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
"version": "==1.25.10"
},
"vine": {
"hashes": [
"sha256:133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87",
"sha256:ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.3.0"
},
"waitress": {
"hashes": [
"sha256:1bb436508a7487ac6cb097ae7a7fe5413aefca610550baf58f0940e51ecfb261",
"sha256:3d633e78149eb83b60a07dfabb35579c29aac2d24bb803c18b26fb2ab1a584db"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==1.4.4"
},
"webob": {
@ -874,6 +1023,7 @@
"sha256:a3c89a8e9ba0aeb17382836cdb73c516d0ecf6630ec40ec28288f3ed459ce87b",
"sha256:aa3a917ed752ba3e0b242234b2a373f9c4e2a75d35291dcbe977649bd21fd108"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.8.6"
},
"webtest": {
@ -920,13 +1070,6 @@
],
"index": "pypi",
"version": "==1.3.0"
},
"zipp": {
"hashes": [
"sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
"sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
],
"version": "==3.1.0"
}
},
"develop": {
@ -935,6 +1078,7 @@
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==19.3.0"
},
"coverage": {
@ -977,25 +1121,19 @@
"index": "pypi",
"version": "==5.2.1"
},
"importlib-metadata": {
"hashes": [
"sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83",
"sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"
],
"markers": "python_version < '3.8'",
"version": "==1.7.0"
},
"iniconfig": {
"hashes": [
"sha256:aa0b40f50a00e72323cb5d41302f9c6165728fd764ac8822aa3fff00a40d56b4"
"sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437",
"sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69"
],
"version": "==1.0.0"
"version": "==1.0.1"
},
"more-itertools": {
"hashes": [
"sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5",
"sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2"
],
"markers": "python_version >= '3.5'",
"version": "==8.4.0"
},
"packaging": {
@ -1003,6 +1141,7 @@
"sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",
"sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.4"
},
"pbr": {
@ -1018,6 +1157,7 @@
"sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.13.1"
},
"py": {
@ -1025,6 +1165,7 @@
"sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2",
"sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.9.0"
},
"pyparsing": {
@ -1032,6 +1173,7 @@
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
"sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.7"
},
"pytest": {
@ -1047,6 +1189,7 @@
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.15.0"
},
"toml": {
@ -1055,13 +1198,6 @@
"sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"
],
"version": "==0.10.1"
},
"zipp": {
"hashes": [
"sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
"sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
],
"version": "==3.1.0"
}
}
}

View File

@ -572,6 +572,18 @@ paths:
description: The type of action the event documents, options include "ASSIGNMENT" for tasks that are waiting on you, "COMPLETE" for things that have completed.
schema:
type: string
- name: workflow
in: query
required: false
description: Restrict results to the given workflow.
schema:
type: number
- name: study
in: query
required: false
description: Restrict results to the given study.
schema:
type: number
get:
operationId: crc.api.workflow.get_task_events
summary: Returns a list of task events related to the current user. Can be filtered by type.
@ -610,6 +622,12 @@ paths:
description: Set this to true to reset the workflow
schema:
type: boolean
- name: do_engine_steps
in: query
required: false
description: Defaults to true; can be set to false if you are just looking at the workflow, not completing it.
schema:
type: boolean
tags:
- Workflows and Tasks
responses:
@ -1046,7 +1064,7 @@ components:
example: dhf8r
protocol_builder_status:
type: string
enum: [INCOMPLETE, ACTIVE, HOLD, OPEN, ABANDONED]
enum: ['incomplete', 'active', 'hold', 'open', 'abandoned']
example: active
sponsor:
type: string
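
A minimal client-side sketch of the new query parameters, assuming a locally running API at http://localhost:5000/v1.0 and a placeholder bearer token (both hypothetical; the paths themselves appear later in this diff's tests):

import requests

base = "http://localhost:5000/v1.0"                      # hypothetical host
headers = {"Authorization": "Bearer <token>"}            # placeholder auth

# Task events for the current user, restricted to one workflow and one study.
events = requests.get(f"{base}/task_events",
                      params={"action": "ASSIGNMENT", "workflow": 42, "study": 7},
                      headers=headers).json()

# Fetch a workflow without advancing it: no engine steps run, no events logged.
workflow = requests.get(f"{base}/workflow/42",
                        params={"do_engine_steps": "false"},
                        headers=headers).json()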

View File

@ -24,6 +24,11 @@ class ApiError(Exception):
instance.task_id = task.task_spec.name or ""
instance.task_name = task.task_spec.description or ""
instance.file_name = task.workflow.spec.file or ""
# Fixme: spiffworkflow is doing something weird where task ends up referenced in the data in some cases.
if "task" in task.data:
task.data.pop("task")
instance.task_data = task.data
app.logger.error(message, exc_info=True)
return instance

View File

@ -21,7 +21,7 @@ def add_study(body):
title=body['title'],
primary_investigator_id=body['primary_investigator_id'],
last_updated=datetime.now(),
protocol_builder_status=ProtocolBuilderStatus.ACTIVE)
protocol_builder_status=ProtocolBuilderStatus.active)
session.add(study_model)
errors = StudyService._add_all_workflow_specs_to_study(study_model)

View File

@ -95,19 +95,30 @@ def delete_workflow_specification(spec_id):
session.commit()
def get_workflow(workflow_id, soft_reset=False, hard_reset=False):
def get_workflow(workflow_id, soft_reset=False, hard_reset=False, do_engine_steps=True):
"""Soft reset will attempt to update to the latest spec without starting over,
Hard reset will update to the latest spec and start from the beginning.
Read Only will return the workflow in a read only state, without running any
engine tasks or logging any events. """
workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset)
if do_engine_steps:
processor.do_engine_steps()
processor.save()
WorkflowService.update_task_assignments(processor)
workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
WorkflowService.update_task_assignments(processor)
return WorkflowApiSchema().dump(workflow_api_model)
def get_task_events(action):
def get_task_events(action = None, workflow = None, study = None):
"""Provides a way to see a history of what has happened, or get a list of tasks that need your attention."""
query = session.query(TaskEventModel).filter(TaskEventModel.user_uid == g.user.uid)
if action:
query = query.filter(TaskEventModel.action == action)
if workflow:
query = query.filter(TaskEventModel.workflow_id == workflow)
if study:
query = query.filter(TaskEventModel.study_id == study)
events = query.all()
# Turn the database records into something a little richer for the UI to use.

View File

@ -143,7 +143,8 @@ class NavigationItemSchema(ma.Schema):
class WorkflowApi(object):
def __init__(self, id, status, next_task, navigation,
spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks, last_updated, title):
spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks,
last_updated, title):
self.id = id
self.status = status
self.next_task = next_task # The next task that requires user input.

View File

@ -22,11 +22,11 @@ class ProtocolBuilderStatus(enum.Enum):
# • Hold: store boolean value in CR Connect (add to Study Model)
# • Open To Enrollment: has start date and HSR number?
# • Abandoned: deleted in PB
INCOMPLETE = 'incomplete' # Found in PB but not ready to start (not q_complete)
ACTIVE = 'active', # found in PB, marked as "q_complete" and no HSR number and not hold
HOLD = 'hold', # CR Connect side, if the Study ias marked as "hold".
OPEN = 'open', # Open To Enrollment: has start date and HSR number?
ABANDONED = 'Abandoned' # Not found in PB
incomplete = 'incomplete' # Found in PB but not ready to start (not q_complete)
active = 'active' # found in PB, marked as "q_complete" and no HSR number and not hold
hold = 'hold' # CR Connect side, if the Study ias marked as "hold".
open = 'open' # Open To Enrollment: has start date and HSR number?
abandoned = 'abandoned' # Not found in PB
#DRAFT = 'draft', # !Q_COMPLETE
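
With the members renamed to lowercase (and the stray trailing commas removed, which previously turned some values into one-element tuples), the enum's string values now line up exactly with the lowercase strings in the OpenAPI enum above. A minimal sketch of what that buys:

from crc.models.protocol_builder import ProtocolBuilderStatus

# Values are plain lowercase strings, so they match the API's enum strings directly.
assert ProtocolBuilderStatus.active.value == 'active'
assert ProtocolBuilderStatus('open') is ProtocolBuilderStatus.open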

View File

@ -25,6 +25,7 @@ class StudyModel(db.Model):
investigator_uids = db.Column(db.ARRAY(db.String), nullable=True)
requirements = db.Column(db.ARRAY(db.Integer), nullable=True)
on_hold = db.Column(db.Boolean, default=False)
enrollment_date = db.Column(db.DateTime(timezone=True), nullable=True)
def update_from_protocol_builder(self, pbs: ProtocolBuilderStudy):
self.hsr_number = pbs.HSRNUMBER
@ -32,16 +33,18 @@ class StudyModel(db.Model):
self.user_uid = pbs.NETBADGEID
self.last_updated = pbs.DATE_MODIFIED
self.protocol_builder_status = ProtocolBuilderStatus.ACTIVE
self.protocol_builder_status = ProtocolBuilderStatus.active
if pbs.HSRNUMBER:
self.protocol_builder_status = ProtocolBuilderStatus.OPEN
self.protocol_builder_status = ProtocolBuilderStatus.open
if self.on_hold:
self.protocol_builder_status = ProtocolBuilderStatus.HOLD
self.protocol_builder_status = ProtocolBuilderStatus.hold
class WorkflowMetadata(object):
def __init__(self, id, name, display_name, description, spec_version, category_id, category_display_name, state: WorkflowState, status: WorkflowStatus,
total_tasks, completed_tasks, display_order):
def __init__(self, id, name = None, display_name = None, description = None, spec_version = None,
category_id = None, category_display_name = None, state: WorkflowState = None,
status: WorkflowStatus = None, total_tasks = None, completed_tasks = None,
display_order = None):
self.id = id
self.name = name
self.display_name = display_name
@ -108,7 +111,7 @@ class Study(object):
id=None,
protocol_builder_status=None,
sponsor="", hsr_number="", ind_number="", categories=[],
files=[], approvals=[], **argsv):
files=[], approvals=[], enrollment_date=None, **argsv):
self.id = id
self.user_uid = user_uid
self.title = title
@ -122,6 +125,7 @@ class Study(object):
self.approvals = approvals
self.warnings = []
self.files = files
self.enrollment_date = enrollment_date
@classmethod
def from_model(cls, study_model: StudyModel):
@ -154,11 +158,12 @@ class StudySchema(ma.Schema):
ind_number = fields.String(allow_none=True)
files = fields.List(fields.Nested(FileSchema), dump_only=True)
approvals = fields.List(fields.Nested('ApprovalSchema'), dump_only=True)
enrollment_date = fields.Date(allow_none=True)
class Meta:
model = Study
additional = ["id", "title", "last_updated", "primary_investigator_id", "user_uid",
"sponsor", "ind_number", "approvals", "files"]
"sponsor", "ind_number", "approvals", "files", "enrollment_date"]
unknown = INCLUDE
@marshmallow.post_load

View File

@ -50,14 +50,16 @@ class TaskEvent(object):
self.task_type = model.task_type
self.task_state = model.task_state
self.task_lane = model.task_lane
self.date = model.date
class TaskEventSchema(ma.Schema):
study = fields.Nested(StudySchema, dump_only=True)
workflow = fields.Nested(WorkflowMetadataSchema, dump_only=True)
task_lane = fields.String(allow_none=True, required=False)
class Meta:
model = TaskEvent
additional = ["id", "user_uid", "action", "task_id", "task_title",
"task_name", "task_type", "task_state", "task_lane"]
"task_name", "task_type", "task_state", "task_lane", "date"]
unknown = INCLUDE

View File

@ -79,7 +79,8 @@ class FileService(object):
""" Opens a reference file (assumes that it is xls file) and returns the data as a
dictionary, each row keyed on the given index_column name. If there are columns
that should be represented as integers, pass these as an array of int_columns, lest
you get '1.0' rather than '1' """
you get '1.0' rather than '1'
fixme: This is stupid stupid slow. Place it in the database and just check if it is up to date."""
data_model = FileService.get_reference_file_data(reference_file_name)
xls = ExcelFile(data_model.data)
df = xls.parse(xls.sheet_names[0])
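
A hedged, self-contained illustration of the keying behavior the docstring above describes, using pandas directly rather than the service's own helpers (the column names here are made up):

import pandas as pd

# Rows keyed on an index column; selected columns forced to int so the
# resulting dictionary holds '1' rather than '1.0'.
df = pd.DataFrame({'code': ['A', 'B'], 'count': [1.0, 2.0]})
df['count'] = df['count'].astype(int)                    # the "int_columns" treatment
data = df.set_index('code').to_dict(orient='index')      # {'A': {'count': 1}, 'B': {'count': 2}}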

View File

@ -1,6 +1,5 @@
from copy import copy
from datetime import datetime
import json
from typing import List
import requests
@ -13,16 +12,15 @@ from crc.api.common import ApiError
from crc.models.file import FileModel, FileModelSchema, File
from crc.models.ldap import LdapSchema
from crc.models.protocol_builder import ProtocolBuilderStudy, ProtocolBuilderStatus
from crc.models.task_event import TaskEventModel
from crc.models.study import StudyModel, Study, Category, WorkflowMetadata
from crc.models.task_event import TaskEventModel, TaskEvent
from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowModel, WorkflowSpecModel, WorkflowState, \
WorkflowStatus
from crc.services.approval_service import ApprovalService
from crc.services.file_service import FileService
from crc.services.ldap_service import LdapService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.approval_service import ApprovalService
from crc.models.approval import Approval
class StudyService(object):
@ -63,11 +61,10 @@ class StudyService(object):
files = (File.from_models(model, FileService.get_file_data(model.id),
FileService.get_doc_dictionary()) for model in files)
study.files = list(files)
# Calling this line repeatedly is very very slow. It creates the
# master spec and runs it. Don't execute this for Abandoned studies, as
# we don't have the information to process them.
if study.protocol_builder_status != ProtocolBuilderStatus.ABANDONED:
if study.protocol_builder_status != ProtocolBuilderStatus.abandoned:
status = StudyService.__get_study_status(study_model)
study.warnings = StudyService.__update_status_of_workflow_meta(workflow_metas, status)
@ -268,7 +265,7 @@ class StudyService(object):
for study in db_studies:
pb_study = next((pbs for pbs in pb_studies if pbs.STUDYID == study.id), None)
if not pb_study:
study.protocol_builder_status = ProtocolBuilderStatus.ABANDONED
study.protocol_builder_status = ProtocolBuilderStatus.abandoned
db.session.commit()

View File

@ -125,7 +125,8 @@ class WorkflowProcessor(object):
STUDY_ID_KEY = "study_id"
VALIDATION_PROCESS_KEY = "validate_only"
def __init__(self, workflow_model: WorkflowModel, soft_reset=False, hard_reset=False, validate_only=False):
def __init__(self, workflow_model: WorkflowModel,
soft_reset=False, hard_reset=False, validate_only=False):
"""Create a Workflow Processor based on the serialized information available in the workflow model.
If soft_reset is set to true, it will try to use the latest version of the workflow specification
without resetting to the beginning of the workflow. This will work for some minor changes to the spec.
@ -189,7 +190,7 @@ class WorkflowProcessor(object):
bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = workflow_model.study_id
bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = validate_only
#try:
bpmn_workflow.do_engine_steps()
# bpmn_workflow.do_engine_steps()
# except WorkflowException as we:
# raise ApiError.from_task_spec("error_loading_workflow", str(we), we.sender)
return bpmn_workflow
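
Since the processor no longer runs engine steps when the workflow is loaded (the call is commented out above), callers decide when to advance it. A minimal sketch of the resulting pattern, mirroring get_workflow earlier in this diff; workflow_model stands in for a WorkflowModel loaded from the session:

do_engine_steps = True                          # e.g. the new query parameter on GET /workflow/{id}
processor = WorkflowProcessor(workflow_model)   # loading no longer advances the workflow
if do_engine_steps:
    processor.do_engine_steps()                 # run engine/script tasks up to the next user task
    processor.save()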

View File

@ -154,10 +154,9 @@ class WorkflowService(object):
if len(field.options) > 0:
random_choice = random.choice(field.options)
if isinstance(random_choice, dict):
choice = random.choice(field.options)
return {
'value': choice['id'],
'label': choice['name']
'value': random_choice['id'],
'label': random_choice['name']
}
else:
# fixme: why it is sometimes an EnumFormFieldOption, and other times not?
@ -471,6 +470,7 @@ class WorkflowService(object):
db.session.query(TaskEventModel). \
filter(TaskEventModel.workflow_id == processor.workflow_model.id). \
filter(TaskEventModel.action == WorkflowService.TASK_ACTION_ASSIGNMENT).delete()
db.session.commit()
for task in processor.get_current_user_tasks():
user_ids = WorkflowService.get_users_assigned_to_task(processor, task)

View File

@ -44,7 +44,7 @@
{% endif %}{% endfor %}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:properties>
<camunda:property name="display_name" value="Documents and Approvals" />
<camunda:property name="display_name" value="'Documents and Approvals'" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>Flow_142jtxs</bpmn:incoming>

View File

@ -1,33 +1,116 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_300b2c3" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_300b2c3" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.0.0">
<bpmn:collaboration id="Collaboration_163c7c8">
<bpmn:participant id="Participant_1mnua71" name="team" processRef="Process_cd666f3" />
</bpmn:collaboration>
<bpmn:process id="Process_cd666f3" isExecutable="true">
<bpmn:laneSet id="LaneSet_0ucxzw3">
<bpmn:lane id="Lane_16ml9fk">
<bpmn:flowNodeRef>StartEvent_1</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Activity_1qpy9ra</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Event_1m9fnmv</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Activity_0c5drp3</bpmn:flowNodeRef>
</bpmn:lane>
<bpmn:lane id="Lane_1jw70kl" name="supervisor">
<bpmn:flowNodeRef>Gateway_0ved0t9</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Activity_107ojvq</bpmn:flowNodeRef>
</bpmn:lane>
</bpmn:laneSet>
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0q51aiq</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0q51aiq" sourceRef="StartEvent_1" targetRef="Activity_1qpy9ra" />
<bpmn:userTask id="Activity_1qpy9ra" name="Placeholder" camunda:formKey="Formkey_placeholder">
<bpmn:userTask id="Activity_1qpy9ra" name="Assign Approver" camunda:formKey="form_assign_approver">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="FormField_2t2220o" label="Will this be updated later" type="boolean" />
<camunda:formField id="supervisor" label="Approver UID" type="string" defaultValue="dhf8r">
<camunda:validation>
<camunda:constraint name="required" config="true" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0q51aiq</bpmn:incoming>
<bpmn:outgoing>Flow_0ai4j1x</bpmn:outgoing>
<bpmn:incoming>Flow_1ugh4wn</bpmn:incoming>
<bpmn:outgoing>Flow_0d2snmk</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_0q51aiq" sourceRef="StartEvent_1" targetRef="Activity_1qpy9ra" />
<bpmn:sequenceFlow id="Flow_0d2snmk" sourceRef="Activity_1qpy9ra" targetRef="Activity_107ojvq" />
<bpmn:exclusiveGateway id="Gateway_0ved0t9" name="Approved?">
<bpmn:incoming>Flow_0apr3nj</bpmn:incoming>
<bpmn:outgoing>Flow_0mhtlkt</bpmn:outgoing>
<bpmn:outgoing>Flow_11tnx3n</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_0apr3nj" sourceRef="Activity_107ojvq" targetRef="Gateway_0ved0t9" />
<bpmn:sequenceFlow id="Flow_0mhtlkt" name="Yes" sourceRef="Gateway_0ved0t9" targetRef="Event_1m9fnmv">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">is_study_approved == True</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:endEvent id="Event_1m9fnmv">
<bpmn:incoming>Flow_0ai4j1x</bpmn:incoming>
<bpmn:incoming>Flow_0mhtlkt</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0ai4j1x" sourceRef="Activity_1qpy9ra" targetRef="Event_1m9fnmv" />
<bpmn:sequenceFlow id="Flow_11tnx3n" name="No" sourceRef="Gateway_0ved0t9" targetRef="Activity_0c5drp3">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">is_study_approved == False</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:userTask id="Activity_107ojvq" name="Approve Study" camunda:formKey="form_approve_study">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="is_study_approved" label="Approve this study?" type="boolean">
<camunda:validation>
<camunda:constraint name="required" config="true" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0d2snmk</bpmn:incoming>
<bpmn:outgoing>Flow_0apr3nj</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_1ugh4wn" sourceRef="Activity_0c5drp3" targetRef="Activity_1qpy9ra" />
<bpmn:manualTask id="Activity_0c5drp3" name="Review Feedback">
<bpmn:documentation>Your request was not approved. Try again.</bpmn:documentation>
<bpmn:incoming>Flow_11tnx3n</bpmn:incoming>
<bpmn:outgoing>Flow_1ugh4wn</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_cd666f3">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Collaboration_163c7c8">
<bpmndi:BPMNShape id="Participant_1mnua71_di" bpmnElement="Participant_1mnua71" isHorizontal="true">
<dc:Bounds x="129" y="117" width="600" height="250" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Lane_1jw70kl_di" bpmnElement="Lane_1jw70kl" isHorizontal="true">
<dc:Bounds x="159" y="242" width="570" height="125" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Lane_16ml9fk_di" bpmnElement="Lane_16ml9fk" isHorizontal="true">
<dc:Bounds x="159" y="117" width="570" height="125" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_11tnx3n_di" bpmnElement="Flow_11tnx3n">
<di:waypoint x="460" y="275" />
<di:waypoint x="460" y="217" />
<bpmndi:BPMNLabel>
<dc:Bounds x="468" y="241" width="15" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0mhtlkt_di" bpmnElement="Flow_0mhtlkt">
<di:waypoint x="485" y="300" />
<di:waypoint x="660" y="300" />
<di:waypoint x="660" y="195" />
<bpmndi:BPMNLabel>
<dc:Bounds x="563" y="282" width="19" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0apr3nj_di" bpmnElement="Flow_0apr3nj">
<di:waypoint x="370" y="300" />
<di:waypoint x="435" y="300" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0d2snmk_di" bpmnElement="Flow_0d2snmk">
<di:waypoint x="320" y="217" />
<di:waypoint x="320" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0q51aiq_di" bpmnElement="Flow_0q51aiq">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0ai4j1x_di" bpmnElement="Flow_0ai4j1x">
<bpmndi:BPMNEdge id="Flow_1ugh4wn_di" bpmnElement="Flow_1ugh4wn">
<di:waypoint x="400" y="177" />
<di:waypoint x="370" y="177" />
<di:waypoint x="432" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
@ -35,8 +118,20 @@
<bpmndi:BPMNShape id="Activity_14cpuv6_di" bpmnElement="Activity_1qpy9ra">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0ved0t9_di" bpmnElement="Gateway_0ved0t9" isMarkerVisible="true">
<dc:Bounds x="435" y="275" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="435" y="332" width="54" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1m9fnmv_di" bpmnElement="Event_1m9fnmv">
<dc:Bounds x="432" y="159" width="36" height="36" />
<dc:Bounds x="642" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1ps6jft_di" bpmnElement="Activity_107ojvq">
<dc:Bounds x="270" y="260" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1al86eb_di" bpmnElement="Activity_0c5drp3">
<dc:Bounds x="400" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>

View File

@ -116,7 +116,7 @@
</camunda:formField>
</camunda:formData>
<camunda:properties>
<camunda:property name="display_name" value="Select Template Type" />
<camunda:property name="display_name" value="'Select Template Type'" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0schnpa</bpmn:incoming>

View File

@ -30,7 +30,7 @@ StudyInfo['documents'] = study_info('documents')</bpmn:script>
<bpmn:parallelGateway id="Gateway_1nta7st" name="Some Name">
<bpmn:extensionElements>
<camunda:properties>
<camunda:property name="display_name" value="Some Name" />
<camunda:property name="display_name" value="'Some Name'" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_17ct47v</bpmn:incoming>

View File

@ -0,0 +1,32 @@
"""empty message
Revision ID: 2e7b377cbc7b
Revises: c4ddb69e7ef4
Create Date: 2020-07-28 17:03:23.586828
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2e7b377cbc7b'
down_revision = 'c4ddb69e7ef4'
branch_labels = None
depends_on = None
def upgrade():
op.execute('update study set protocol_builder_status = NULL;')
op.execute('ALTER TYPE protocolbuilderstatus RENAME TO pbs_old;')
op.execute("CREATE TYPE protocolbuilderstatus AS ENUM('incomplete', 'active', 'hold', 'open', 'abandoned')")
op.execute("ALTER TABLE study ALTER COLUMN protocol_builder_status TYPE protocolbuilderstatus USING protocol_builder_status::text::protocolbuilderstatus;")
op.execute('DROP TYPE pbs_old;')
op.execute("update study set protocol_builder_status = 'incomplete';")
def downgrade():
op.execute('update study set protocol_builder_status = NULL;')
op.execute('ALTER TYPE protocolbuilderstatus RENAME TO pbs_old;')
op.execute("CREATE TYPE protocolbuilderstatus AS ENUM('INCOMPLETE', 'ACTIVE', 'HOLD', 'OPEN', 'ABANDONED')")
op.execute("ALTER TABLE study ALTER COLUMN protocol_builder_status TYPE protocolbuilderstatus USING protocol_builder_status::text::protocolbuilderstatus;")
op.execute('DROP TYPE pbs_old;')
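
A hedged sketch of applying this revision programmatically through Alembic's command API, assuming an alembic.ini at the project root; running `alembic upgrade head` from the shell does the same thing:

from alembic import command
from alembic.config import Config

# Apply all pending revisions, including 2e7b377cbc7b, to the configured database.
command.upgrade(Config("alembic.ini"), "head")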

View File

@ -0,0 +1,28 @@
"""empty message
Revision ID: c4ddb69e7ef4
Revises: ffef4661a37d
Create Date: 2020-07-22 09:04:09.769239
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c4ddb69e7ef4'
down_revision = 'ffef4661a37d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('study', sa.Column('enrollment_date', sa.DateTime(timezone=True), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('study', 'enrollment_date')
# ### end Alembic commands ###

View File

@ -60,7 +60,7 @@ class BaseTest(unittest.TestCase):
'id':0,
'title':'The impact of fried pickles on beer consumption in bipedal software developers.',
'last_updated':datetime.datetime.now(),
'protocol_builder_status':ProtocolBuilderStatus.ACTIVE,
'protocol_builder_status':ProtocolBuilderStatus.active,
'primary_investigator_id':'dhf8r',
'sponsor':'Sartography Pharmaceuticals',
'ind_number':'1234',
@ -70,7 +70,7 @@ class BaseTest(unittest.TestCase):
'id':1,
'title':'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
'last_updated':datetime.datetime.now(),
'protocol_builder_status':ProtocolBuilderStatus.ACTIVE,
'protocol_builder_status':ProtocolBuilderStatus.active,
'primary_investigator_id':'dhf8r',
'sponsor':'Makerspace & Co.',
'ind_number':'5678',
@ -241,7 +241,7 @@ class BaseTest(unittest.TestCase):
study = session.query(StudyModel).filter_by(user_uid=uid).filter_by(title=title).first()
if study is None:
user = self.create_user(uid=uid)
study = StudyModel(title=title, protocol_builder_status=ProtocolBuilderStatus.ACTIVE,
study = StudyModel(title=title, protocol_builder_status=ProtocolBuilderStatus.active,
user_uid=user.uid, primary_investigator_id=primary_investigator_id)
db.session.add(study)
db.session.commit()
@ -308,12 +308,13 @@ class BaseTest(unittest.TestCase):
db.session.commit()
return approval
def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, user_uid="dhf8r"):
def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, do_engine_steps=True, user_uid="dhf8r"):
user = session.query(UserModel).filter_by(uid=user_uid).first()
self.assertIsNotNone(user)
rv = self.app.get('/v1.0/workflow/%i?soft_reset=%s&hard_reset=%s' %
(workflow.id, str(soft_reset), str(hard_reset)),
rv = self.app.get(f'/v1.0/workflow/{workflow.id}'
f'?soft_reset={str(soft_reset)}'
f'&hard_reset={str(hard_reset)}'
f'&do_engine_steps={str(do_engine_steps)}',
headers=self.logged_in_headers(user),
content_type="application/json")
self.assert_success(rv)

View File

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_17fwemw" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_17fwemw" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="MultiInstance" isExecutable="true">
<bpmn:startEvent id="StartEvent_1" name="StartEvent_1">
<bpmn:outgoing>Flow_0t6p1sb</bpmn:outgoing>

View File

@ -72,10 +72,10 @@ class TestFilesApi(BaseTest):
self.assertEqual(file, file2)
def test_add_file_from_task_and_form_errors_on_invalid_form_field_name(self):
self.load_example_data()
self.create_reference_document()
workflow = self.create_workflow('file_upload_form')
processor = WorkflowProcessor(workflow)
processor.do_engine_steps()
task = processor.next_task()
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
correct_name = task.task_spec.form.fields[0].id
@ -96,6 +96,7 @@ class TestFilesApi(BaseTest):
self.create_reference_document()
workflow = self.create_workflow('file_upload_form')
processor = WorkflowProcessor(workflow)
processor.do_engine_steps()
task = processor.next_task()
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
correct_name = task.task_spec.form.fields[0].id

View File

@ -1,4 +1,6 @@
import json
from profile import Profile
from tests.base_test import BaseTest
from datetime import datetime, timezone
@ -13,6 +15,7 @@ from crc.models.study import StudyModel, StudySchema
from crc.models.workflow import WorkflowSpecModel, WorkflowModel
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.workflow_service import WorkflowService
class TestStudyApi(BaseTest):
@ -21,7 +24,7 @@ class TestStudyApi(BaseTest):
"title": "Phase III Trial of Genuine People Personalities (GPP) Autonomous Intelligent Emotional Agents "
"for Interstellar Spacecraft",
"last_updated": datetime.now(tz=timezone.utc),
"protocol_builder_status": ProtocolBuilderStatus.ACTIVE,
"protocol_builder_status": ProtocolBuilderStatus.active,
"primary_investigator_id": "tmm2x",
"user_uid": "dhf8r",
}
@ -132,7 +135,7 @@ class TestStudyApi(BaseTest):
self.load_example_data()
study: StudyModel = session.query(StudyModel).first()
study.title = "Pilot Study of Fjord Placement for Single Fraction Outcomes to Cortisol Susceptibility"
study.protocol_builder_status = ProtocolBuilderStatus.ACTIVE
study.protocol_builder_status = ProtocolBuilderStatus.active
rv = self.app.put('/v1.0/study/%i' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
@ -182,11 +185,11 @@ class TestStudyApi(BaseTest):
num_open = 0
for study in json_data:
if study['protocol_builder_status'] == 'ABANDONED': # One study does not exist in user_studies.json
if study['protocol_builder_status'] == 'abandoned': # One study does not exist in user_studies.json
num_abandoned += 1
if study['protocol_builder_status'] == 'ACTIVE': # One study is marked complete without HSR Number
if study['protocol_builder_status'] == 'active': # One study is marked complete without HSR Number
num_active += 1
if study['protocol_builder_status'] == 'OPEN': # One study is marked complete and has an HSR Number
if study['protocol_builder_status'] == 'open': # One study is marked complete and has an HSR Number
num_open += 1
db_studies_after = session.query(StudyModel).all()

View File

@ -40,7 +40,7 @@ class TestStudyService(BaseTest):
for study in db.session.query(StudyModel).all():
StudyService().delete_study(study.id)
study = StudyModel(title="My title", protocol_builder_status=ProtocolBuilderStatus.ACTIVE, user_uid=user.uid)
study = StudyModel(title="My title", protocol_builder_status=ProtocolBuilderStatus.active, user_uid=user.uid)
db.session.add(study)
self.load_test_spec("random_fact", category_id=cat.id)
@ -79,6 +79,7 @@ class TestStudyService(BaseTest):
# Initialize the Workflow with the workflow processor.
workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.id == workflow.id).first()
processor = WorkflowProcessor(workflow_model)
processor.do_engine_steps()
# Assure the workflow is now started, and knows the total and completed tasks.
studies = StudyService.get_studies_for_user(user)

View File

@ -220,7 +220,7 @@ class TestAuthentication(BaseTest):
return {
"title": "blah",
"last_updated": datetime.now(tz=timezone.utc),
"protocol_builder_status": ProtocolBuilderStatus.ACTIVE,
"protocol_builder_status": ProtocolBuilderStatus.active,
"primary_investigator_id": uid,
"user_uid": uid,
}

43
tests/test_events.py Normal file
View File

@ -0,0 +1,43 @@
import json
from tests.base_test import BaseTest
from crc.models.workflow import WorkflowStatus
from crc import db
from crc.api.common import ApiError
from crc.models.task_event import TaskEventModel, TaskEventSchema
from crc.services.workflow_service import WorkflowService
class TestEvents(BaseTest):
def test_list_events_by_workflow(self):
workflow_one = self.create_workflow('exclusive_gateway')
# Start the workflow.
first_task = self.get_workflow_api(workflow_one).next_task
self.complete_form(workflow_one, first_task, {"has_bananas": True})
workflow_one = self.get_workflow_api(workflow_one)
self.assertEqual('Task_Num_Bananas', workflow_one.next_task.name)
# Start a second workflow
workflow_two = self.create_workflow('subprocess')
workflow_api_two = self.get_workflow_api(workflow_two)
# Get all action events across workflows
rv = self.app.get('/v1.0/task_events?action=ASSIGNMENT',
headers=self.logged_in_headers(),
content_type="application/json")
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
tasks = TaskEventSchema(many=True).load(json_data)
self.assertEqual(2, len(tasks))
# Get action events for a single workflow
rv = self.app.get(f'/v1.0/task_events?action=ASSIGNMENT&workflow={workflow_one.id}',
headers=self.logged_in_headers(),
content_type="application/json")
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
tasks = TaskEventSchema(many=True).load(json_data)
self.assertEqual(1, len(tasks))

View File

@ -9,6 +9,7 @@ from crc import session, app
from crc.models.api_models import WorkflowApiSchema, MultiInstanceType, TaskSchema
from crc.models.file import FileModelSchema
from crc.models.workflow import WorkflowStatus
from crc.models.task_event import TaskEventModel
class TestTasksApi(BaseTest):
@ -42,6 +43,24 @@ class TestTasksApi(BaseTest):
"""
self.assertTrue(str.startswith(task.documentation, expected_docs))
def test_get_workflow_without_running_engine_steps(self):
# Set up a new workflow
workflow = self.create_workflow('two_forms')
# get the first form in the two form workflow.
workflow_api = self.get_workflow_api(workflow, do_engine_steps=False)
# There should be no task event logs related to the workflow at this point.
task_events = session.query(TaskEventModel).filter(TaskEventModel.workflow_id == workflow.id).all()
self.assertEqual(0, len(task_events))
# Since the workflow was not started, the read-only call should not execute any engine steps; the
# current task should be the start event.
self.assertEqual("Start", workflow_api.next_task.name)
def test_get_form_for_previously_completed_task(self):
"""Assure we can look at previously completed steps without moving the token for the workflow."""
def test_two_forms_task(self):
# Set up a new workflow
self.load_example_data()
@ -69,7 +88,6 @@ class TestTasksApi(BaseTest):
self.assertIsNotNone(val)
def test_error_message_on_bad_gateway_expression(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# get the first form in the two form workflow.
@ -77,7 +95,6 @@ class TestTasksApi(BaseTest):
self.complete_form(workflow, task, {"has_bananas": True})
def test_workflow_with_parallel_forms(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# get the first form in the two form workflow.
@ -89,7 +106,6 @@ class TestTasksApi(BaseTest):
self.assertEqual("Task_Num_Bananas", workflow_api.next_task.name)
def test_navigation_with_parallel_forms(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# get the first form in the two form workflow.
@ -107,7 +123,6 @@ class TestTasksApi(BaseTest):
self.assertEqual("NOOP", nav[3]['state'])
def test_navigation_with_exclusive_gateway(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway_2')
# get the first form in the two form workflow.
@ -124,7 +139,6 @@ class TestTasksApi(BaseTest):
self.assertEqual("Task 3", nav[6]['title'])
def test_document_added_to_workflow_shows_up_in_file_list(self):
self.load_example_data()
self.create_reference_document()
workflow = self.create_workflow('docx')
@ -153,7 +167,6 @@ class TestTasksApi(BaseTest):
def test_get_documentation_populated_in_end(self):
self.load_example_data()
workflow = self.create_workflow('random_fact')
workflow_api = self.get_workflow_api(workflow)
task = workflow_api.next_task
@@ -167,9 +180,7 @@ class TestTasksApi(BaseTest):
self.assertTrue("norris" in workflow_api.next_task.documentation)
def test_load_workflow_from_outdated_spec(self):
# Start the basic two_forms workflow and complete a task.
self.load_example_data()
workflow = self.create_workflow('two_forms')
workflow_api = self.get_workflow_api(workflow)
self.complete_form(workflow, workflow_api.next_task, {"color": "blue"})
@@ -194,9 +205,7 @@ class TestTasksApi(BaseTest):
self.assertTrue(workflow_api.is_latest_spec)
def test_soft_reset_errors_out_and_next_result_is_on_original_version(self):
# Start the basic two_forms workflow and complete a task.
self.load_example_data()
workflow = self.create_workflow('two_forms')
workflow_api = self.get_workflow_api(workflow)
self.complete_form(workflow, workflow_api.next_task, {"color": "blue"})
@@ -221,7 +230,6 @@ class TestTasksApi(BaseTest):
def test_manual_task_with_external_documentation(self):
self.load_example_data()
workflow = self.create_workflow('manual_task_with_external_documentation')
# get the first form in the two form workflow.
@@ -235,7 +243,6 @@ class TestTasksApi(BaseTest):
self.assertTrue('Dan' in workflow_api.next_task.documentation)
def test_bpmn_extension_properties_are_populated(self):
self.load_example_data()
workflow = self.create_workflow('manual_task_with_external_documentation')
# get the first form in the two form workflow.
@@ -268,9 +275,7 @@ class TestTasksApi(BaseTest):
# Assure that the names for each task are properly updated, so they aren't all the same.
self.assertEqual("Primary Investigator", workflow.next_task.properties['display_name'])
def test_lookup_endpoint_for_task_field_enumerations(self):
self.load_example_data()
workflow = self.create_workflow('enum_options_with_search')
# get the first form in the two form workflow.
workflow = self.get_workflow_api(workflow)
@@ -286,7 +291,6 @@ class TestTasksApi(BaseTest):
self.assert_options_populated(results, ['CUSTOMER_NUMBER', 'CUSTOMER_NAME', 'CUSTOMER_CLASS_MEANING'])
def test_lookup_endpoint_for_task_field_using_lookup_entry_id(self):
self.load_example_data()
workflow = self.create_workflow('enum_options_with_search')
# get the first form in the two form workflow.
workflow = self.get_workflow_api(workflow)
@@ -316,7 +320,6 @@ class TestTasksApi(BaseTest):
# the key/values from the spreadsheet are added directly to the form and it shows up as
# a dropdown. This tests the case of wanting to get additional data when a user selects
# something from a dropdown.
self.load_example_data()
workflow = self.create_workflow('enum_options_from_file')
# get the first form in the two form workflow.
workflow = self.get_workflow_api(workflow)
@@ -334,7 +337,6 @@ class TestTasksApi(BaseTest):
self.assertIsInstance(results[0]['data'], dict)
def test_enum_from_task_data(self):
self.load_example_data()
workflow = self.create_workflow('enum_options_from_task_data')
# get the first form in the two form workflow.
workflow_api = self.get_workflow_api(workflow)
@@ -359,7 +361,6 @@ class TestTasksApi(BaseTest):
self.assertEqual('Chesterfield', options[2]['data']['first_name'])
def test_lookup_endpoint_for_task_ldap_field_lookup(self):
self.load_example_data()
workflow = self.create_workflow('ldap_lookup')
# get the first form
workflow = self.get_workflow_api(workflow)
@@ -378,7 +379,6 @@ class TestTasksApi(BaseTest):
self.assertEqual(1, len(results))
def test_sub_process(self):
self.load_example_data()
workflow = self.create_workflow('subprocess')
workflow_api = self.get_workflow_api(workflow)
@@ -399,7 +399,6 @@ class TestTasksApi(BaseTest):
self.assertEqual(WorkflowStatus.complete, workflow_api.status)
def test_update_task_resets_token(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# Start the workflow.
@@ -477,3 +476,5 @@ class TestTasksApi(BaseTest):
workflow = self.get_workflow_api(workflow)
self.assertEqual(WorkflowStatus.complete, workflow.status)

View File

@@ -111,6 +111,7 @@ class TestTasksApi(BaseTest):
data['approval'] = True
self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid)
def test_navigation_and_current_task_updates_through_workflow(self):
submitter = self.create_user(uid='lje5u')
@@ -200,4 +201,67 @@ class TestTasksApi(BaseTest):
workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)
self.assertEquals('COMPLETED', workflow_api.next_task.state)
self.assertEquals('EndEvent', workflow_api.next_task.type) # We are at the end.
self.assertEquals(WorkflowStatus.complete, workflow_api.status)
def get_assignment_task_events(self, uid):
return db.session.query(TaskEventModel). \
filter(TaskEventModel.user_uid == uid). \
filter(TaskEventModel.action == WorkflowService.TASK_ACTION_ASSIGNMENT).all()
def test_workflow_reset_correctly_resets_the_task_events(self):
submitter = self.create_user(uid='lje5u')
supervisor = self.create_user(uid='lb3dp')
workflow = self.create_workflow('roles', display_name="Roles", as_user=submitter.uid)
workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)
# User lje5u can complete the first task, and set her supervisor
data = workflow_api.next_task.data
data['supervisor'] = supervisor.uid
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
# At this point there should be a task_log with an action of ASSIGNMENT on it for
# the supervisor.
self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid)))
# Resetting the workflow at this point should clear the event log.
workflow_api = self.get_workflow_api(workflow, hard_reset=True, user_uid=submitter.uid)
self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid)))
# Re-complete the first task; awaiting tasks should shift to 0 for the submitter and 1 for the supervisor
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid)))
self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid)))
# Complete the supervisor task with rejected approval, and the assignments should switch.
workflow_api = self.get_workflow_api(workflow, user_uid=supervisor.uid)
data = workflow_api.next_task.data
data["approval"] = False
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid)
self.assertEquals(1, len(self.get_assignment_task_events(submitter.uid)))
self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid)))
# Complete the returned-form review page, then re-complete the form, and the assignments switch yet again.
workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid)))
self.assertEquals(1, len(self.get_assignment_task_events(supervisor.uid)))
# Complete the supervisor task, accepting the approval, and the workflow is completed.
# When it is all done, there should be no outstanding assignments.
workflow_api = self.get_workflow_api(workflow, user_uid=supervisor.uid)
data = workflow_api.next_task.data
data["approval"] = True
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid)
self.assertEquals(WorkflowStatus.complete, workflow_api.status)
self.assertEquals('EndEvent', workflow_api.next_task.type) # We are at the end.
self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid)))
self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid)))
# Any subsequent attempt to complete a form does not result in a new task event
with self.assertRaises(AssertionError) as _api_error:
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
self.assertEquals(0, len(self.get_assignment_task_events(submitter.uid)))
self.assertEquals(0, len(self.get_assignment_task_events(supervisor.uid)))
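
The assertions above imply that a hard reset clears the workflow's task-event log before engine steps re-create the current assignments. A minimal sketch of that assumed cleanup follows; the actual WorkflowService code may differ.

# Assumed cleanup during a hard reset (illustrative, not the real service code):
# drop this workflow's task events so stale ASSIGNMENT rows do not linger, then
# let the re-run of engine steps re-create the current assignments.
db.session.query(TaskEventModel) \
    .filter(TaskEventModel.workflow_id == workflow.id) \
    .delete()
db.session.commit()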

View File

@@ -36,6 +36,7 @@ class TestWorkflowProcessor(BaseTest):
workflow_spec_model = self.load_test_spec("random_fact")
study = session.query(StudyModel).first()
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(study.id, processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY])
self.assertIsNotNone(processor)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
@@ -62,6 +63,7 @@ class TestWorkflowProcessor(BaseTest):
files = session.query(FileModel).filter_by(workflow_spec_id='decision_table').all()
self.assertEqual(2, len(files))
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(1, len(next_user_tasks))
@@ -86,6 +88,7 @@ class TestWorkflowProcessor(BaseTest):
workflow_spec_model = self.load_test_spec("parallel_tasks")
study = session.query(StudyModel).first()
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
# Complete the first steps of the 4 parallel tasks
@@ -127,6 +130,7 @@ class TestWorkflowProcessor(BaseTest):
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("parallel_tasks")
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(4, len(next_user_tasks))
@@ -215,6 +219,7 @@ class TestWorkflowProcessor(BaseTest):
self.assertEqual(2, len(files))
workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id="docx").first()
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(1, len(next_user_tasks))
@@ -278,6 +283,7 @@ class TestWorkflowProcessor(BaseTest):
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("two_forms")
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(processor.workflow_model.workflow_spec_id, workflow_spec_model.id)
task = processor.next_task()
task.data = {"color": "blue"}

View File

@@ -47,6 +47,7 @@ class TestWorkflowProcessorMultiInstance(BaseTest):
workflow_spec_model = self.load_test_spec("multi_instance")
study = session.query(StudyModel).first()
processor = self.get_processor(study, workflow_spec_model)
processor.bpmn_workflow.do_engine_steps()
self.assertEqual(study.id, processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY])
self.assertIsNotNone(processor)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())