Merge pull request #173 from sartography/dev

Dev --> Testing
This commit is contained in:
Aaron Louie 2020-07-31 15:30:04 -04:00 committed by GitHub
commit e21884f6d3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
37 changed files with 1084 additions and 281 deletions

View File

@ -38,14 +38,14 @@ recommonmark = "*"
requests = "*"
sentry-sdk = {extras = ["flask"],version = "==0.14.4"}
sphinx = "*"
spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow.git",ref = "master"}
#spiffworkflow = {editable = true,path="/home/kelly/sartography/SpiffWorkflow/"}
swagger-ui-bundle = "*"
spiffworkflow = {editable = true, git = "https://github.com/sartography/SpiffWorkflow.git", ref = "master"}
webtest = "*"
werkzeug = "*"
xlrd = "*"
xlsxwriter = "*"
pygithub = "*"
python-box = "*"
[requires]
python_version = "3.7"

214
Pipfile.lock generated
View File

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "381d29428eb328ad6167774b510b9d818bd1505b95f50454a19f1564782326cc"
"sha256": "096abf7ce152358489282a004ed634ca64730cb98276f3a513ed2d5b8a6635c6"
},
"pipfile-spec": 6,
"requires": {
@ -30,6 +30,14 @@
"index": "pypi",
"version": "==1.4.2"
},
"amqp": {
"hashes": [
"sha256:70cdb10628468ff14e57ec2f751c7aa9e48e7e3651cfd62d431213c0c4e58f21",
"sha256:aa7f313fb887c91f15474c1229907a04dac0b8135822d6603437803424c0aa59"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==2.6.1"
},
"aniso8601": {
"hashes": [
"sha256:529dcb1f5f26ee0df6c0a1ee84b7b27197c3c50fc3a6321d66c544689237d072",
@ -42,6 +50,7 @@
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==19.3.0"
},
"babel": {
@ -49,6 +58,7 @@
"sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38",
"sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.8.0"
},
"bcrypt": {
@ -72,6 +82,7 @@
"sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7",
"sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==3.1.7"
},
"beautifulsoup4": {
@ -82,12 +93,27 @@
],
"version": "==4.9.1"
},
"billiard": {
"hashes": [
"sha256:bff575450859a6e0fbc2f9877d9b715b0bbc07c3565bb7ed2280526a0cdf5ede",
"sha256:d91725ce6425f33a97dfa72fb6bfef0e47d4652acd98a032bd1a7fbf06d5fa6a"
],
"version": "==3.6.3.0"
},
"blinker": {
"hashes": [
"sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6"
],
"version": "==1.4"
},
"celery": {
"hashes": [
"sha256:ef17d7dffde7fc73ecab3a3b6389d93d3213bac53fa7f28e68e33647ad50b916",
"sha256:fd77e4248bb1b7af5f7922dd8e81156f540306e3a5c4b1c24167c1f5f06025da"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==4.4.6"
},
"certifi": {
"hashes": [
"sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3",
@ -140,6 +166,7 @@
"sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a",
"sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==7.1.2"
},
"clickclick": {
@ -156,6 +183,14 @@
],
"version": "==0.9.1"
},
"configparser": {
"hashes": [
"sha256:2ca44140ee259b5e3d8aaf47c79c36a7ab0d5e94d70bd4105c03ede7a20ea5a1",
"sha256:cffc044844040c7ce04e9acd1838b5f2e5fa3170182f6fda4d2ea8b0099dbadd"
],
"markers": "python_version >= '3.6'",
"version": "==5.0.0"
},
"connexion": {
"extras": [
"swagger-ui"
@ -212,6 +247,7 @@
"sha256:525ba66fb5f90b07169fdd48b6373c18f1ee12728ca277ca44567a367d9d7f74",
"sha256:a766c1dccb30c5f6eb2b203f87edd1d8588847709c78589e1521d769addc8218"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.2.10"
},
"docutils": {
@ -219,6 +255,7 @@
"sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
"sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==0.16"
},
"docxtpl": {
@ -301,8 +338,16 @@
"sha256:05b31d2034dd3f2a685cbbae4cfc4ed906b2a733cff7964ada450fd5e462b84e",
"sha256:bfc7150eaf809b1c283879302f04c42791136060c6eeb12c0c6674fb1291fae5"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.4"
},
"future": {
"hashes": [
"sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.18.2"
},
"gunicorn": {
"hashes": [
"sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626",
@ -323,6 +368,7 @@
"sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
"sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.10"
},
"imagesize": {
@ -330,21 +376,15 @@
"sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1",
"sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.2.0"
},
"importlib-metadata": {
"hashes": [
"sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83",
"sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"
],
"markers": "python_version < '3.8'",
"version": "==1.7.0"
},
"inflection": {
"hashes": [
"sha256:88b101b2668a1d81d6d72d4c2018e53bc6c7fc544c987849da1c7f77545c3bc9",
"sha256:f576e85132d34f5bf7df5183c2c6f94cfb32e528f53065345cf71329ba0b8924"
],
"markers": "python_version >= '3.5'",
"version": "==0.5.0"
},
"itsdangerous": {
@ -352,6 +392,7 @@
"sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19",
"sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.0"
},
"jdcal": {
@ -366,6 +407,7 @@
"sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0",
"sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==2.11.2"
},
"jsonschema": {
@ -375,9 +417,21 @@
],
"version": "==3.2.0"
},
"kombu": {
"hashes": [
"sha256:be48cdffb54a2194d93ad6533d73f69408486483d189fe9f5990ee24255b0e0a",
"sha256:ca1b45faac8c0b18493d02a8571792f3c40291cf2bcf1f55afed3d8f3aa7ba74"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==4.6.11"
},
"ldap3": {
"hashes": [
"sha256:17f04298b70bf7ecaa5db8a7d8622b5a962ef7fc2b245b2eea705ac1c24338c0",
"sha256:298769ab0232b3a3efa1e84881096c24526fe37911c83a11285f222fe4975efd",
"sha256:4fd2db72d0412cc16ee86be01332095e86e361329c3579b314231eb2e56c7871",
"sha256:52ab557b3c4908db4a90bea16731aa714b1b54e039b54fd4c4b83994c6c48c0c",
"sha256:53aaae5bf14f3827c69600ddf4d61b88f49c055bb93060e9702c5bafd206c744",
"sha256:81df4ac8b6df10fb1f05b17c18d0cb8c4c344d5a03083c382824960ed959cf5b"
],
"index": "pypi",
@ -425,6 +479,7 @@
"sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27",
"sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.3"
},
"markdown": {
@ -471,6 +526,7 @@
"sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",
"sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.1"
},
"marshmallow": {
@ -526,6 +582,7 @@
"sha256:ed8a311493cf5480a2ebc597d1e177231984c818a86875126cfd004241a73c3e",
"sha256:ef71a1d4fd4858596ae80ad1ec76404ad29701f8ca7cdcebc50300178db14dfc"
],
"markers": "python_version >= '3.6'",
"version": "==1.19.1"
},
"openapi-spec-validator": {
@ -549,6 +606,7 @@
"sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",
"sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.4"
},
"pandas": {
@ -611,8 +669,19 @@
},
"pyasn1": {
"hashes": [
"sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359",
"sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576",
"sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf",
"sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7",
"sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d",
"sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"
"sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00",
"sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8",
"sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86",
"sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12",
"sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776",
"sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba",
"sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2",
"sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"
],
"version": "==0.4.8"
},
@ -621,6 +690,7 @@
"sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
"sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.20"
},
"pygithub": {
@ -636,6 +706,7 @@
"sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",
"sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"
],
"markers": "python_version >= '3.5'",
"version": "==2.6.1"
},
"pyjwt": {
@ -651,6 +722,7 @@
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
"sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.7"
},
"pyrsistent": {
@ -659,6 +731,14 @@
],
"version": "==0.16.0"
},
"python-box": {
"hashes": [
"sha256:bcb057e8960f4d888a4caf8f668eeca3c5c61ad349d8d81c4339414984fa9454",
"sha256:f02e059a299cac0515687aafec7543d401b12759d6578e53fae74154e0cbaa79"
],
"index": "pypi",
"version": "==5.1.0"
},
"python-dateutil": {
"hashes": [
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
@ -677,10 +757,67 @@
"hashes": [
"sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d",
"sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b",
"sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"
"sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8",
"sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77",
"sha256:ea87e17f6ec459e780e4221f295411462e0d0810858e055fc514684350a2f522"
],
"version": "==1.0.4"
},
"python-levenshtein-wheels": {
"hashes": [
"sha256:0065529c8aec4c044468286177761857d36981ba6f7fdb62d7d5f7ffd143de5d",
"sha256:016924a59d689f9f47d5f7b26b70f31e309255e8dd72602c91e93ceb752b9f92",
"sha256:089d046ea7727e583233c71fef1046663ed67b96967063ae8ddc9f551e86a4fc",
"sha256:09f9faaaa8f65726f91b44c11d3d622fee0f1780cfbe2bf3f410dd0e7345adcb",
"sha256:0aea217eab612acd45dcc3424a2e8dbd977cc309f80359d0c01971f1e65b9a9b",
"sha256:0beb91ad80b1573829066e5af36b80190c367be6e0a65292f073353b0388c7fc",
"sha256:0ec1bc73f5ed3a1a06e02d13bb3cd22a0b32ebf65a9667bbccba106bfa0546f1",
"sha256:0fa2ca69ef803bc6037a8c919e2e8a17b55e94c9c9ffcb4c21befbb15a1d0f40",
"sha256:11c77d0d74ab7f46f89a58ae9c2d67349ebc1ae3e18636627f9939d810167c31",
"sha256:19a68716a322486ddffc8bf7e5cf44a82f7700b05a10658e6e7fc5c7ae92b13d",
"sha256:19a95a01d28d63b042438ba860c4ace90362906a038fa77962ba33325d377d10",
"sha256:1a61f3a51e00a3608659bbaabb3f27af37c9dbe84d843369061a3e45cf0d5103",
"sha256:1c50aebebab403fb2dd415d70355446ac364dece502b0e2737a1a085bb9a4aa4",
"sha256:1d2390d04f9b673391e5ce1a0b054d0565f2e00ea5d1187a044221dc5c02c3e6",
"sha256:1e51cdc123625a28709662d24ea0cb4cf6f991845e6054d9f803c78da1d6b08f",
"sha256:1eca6dc97dfcf588f53281fe48a6d5c423d4e14bdab658a1aa6efd447acc64e0",
"sha256:1f0056d3216b0fe38f25c6f8ebc84bd9f6d34c55a7a9414341b674fb98961399",
"sha256:228b59460e9a786e498bdfc8011838b89c6054650b115c86c9c819a055a793b0",
"sha256:23020f9ff2cb3457a926dcc470b84f9bd5b7646bd8b8e06b915bdbbc905cb23f",
"sha256:2b7b7cf0f43b677f818aa9a610464abf06106c19a51b9ac35bd051a439f337a5",
"sha256:3b591c9a7e91480f0d7bf2041d325f578b9b9c2f2d593304377cb28862e7f9a2",
"sha256:3ca9c70411ab587d071c1d8fc8b69d0558be8e4aa920f2595e2cb5eb229ccc4c",
"sha256:3e6bcca97a7ff4e720352b57ddc26380c0583dcdd4b791acef7b574ad58468a7",
"sha256:3ed88f9e638da57647149115c34e0e120cae6f3d35eee7d77e22cc9c1d8eced3",
"sha256:445bf7941cb1fa05d6c2a4a502ad4868a5cacd92e8eb77b2bd008cdda9d37c55",
"sha256:4ba5e147d76d7ee884fd6eae461438b080bcc9f2c6eb9b576811e1bcfe8f808e",
"sha256:4bb128b719c30f3b9feacfe71a338ae07d39dbffc077139416f3535c89f12362",
"sha256:4e951907b9b5d40c9f1b611c8bdfe46ff8cf8371877cebbd589bf5840feab662",
"sha256:53c0c9964390368fd64460b690f168221c669766b193b7e80ae3950c2b9551f8",
"sha256:57c4edef81611098d37176278f2b6a3712bf864eed313496d7d80504805896d1",
"sha256:5b36e406937c6463d1c1ef3dd82d3f771d9d845f21351e8a026fe4dd398ea8d0",
"sha256:7d0821dab24b430dfdc2cba70a06e6d7a45cb839d0dd0e6db97bb99e23c3d884",
"sha256:7f7283dfe50eac8a8cd9b777de9eb50b1edf7dbb46fc7cc9d9b0050d0c135021",
"sha256:7f9759095b3fc825464a72b1cae95125e610eba3c70f91557754c32a0bf32ea2",
"sha256:8005a4df455569c0d490ddfd9e5a163f21293477fd0ed4ea9effdd723ddd8eaa",
"sha256:86e865f29ad3dc3bb4733e5247220173d90f05ac8d2ad18e9689a220f90de55f",
"sha256:98727050ba70eb8d318ec8a8203531c20119347fc8f281102b097326812742ab",
"sha256:ac9cdf044dcb9481c7da782db01b50c1f0e7cdd78c8507b963b6d072829c0263",
"sha256:acfad8ffed96891fe7c583d92717cd8ec0c03b59a954c389fd4e26a5cdeac610",
"sha256:ad15f25abff8220e556d64e2a27c646241b08f00faf1bc02313655696cd3edfa",
"sha256:b679f951f842c38665aa54bea4d7403099131f71fac6d8584f893a731fe1266d",
"sha256:b8c183dc4aa4e95dc5c373eedc3d205c176805835611fcfec5d9050736c695c4",
"sha256:c097a6829967c76526a037ed34500a028f78f0d765c8e3dbd1a7717afd09fb92",
"sha256:c2c76f483d05eddec60a5cd89e92385adef565a4f243b1d9a6abe2f6bd2a7c0a",
"sha256:c388baa3c04272a7c585d3da24030c142353eb26eb531dd2681502e6be7d7a26",
"sha256:cb0f2a711db665b5bf8697b5af3b9884bb1139385c5c12c2e472e4bbee62da99",
"sha256:cbac984d7b36e75b440d1c8ff9d3425d778364a0cbc23f8943383d4decd35d5e",
"sha256:f55adf069be2d655f8d668594fe1be1b84d9dc8106d380a9ada06f34941c33c8",
"sha256:f9084ed3b8997ad4353d124b903f2860a9695b9e080663276d9e58c32e293244",
"sha256:fb7df3504222fcb1fa593f76623abbb54d6019eec15aac5d05cd07ad90ac016c"
],
"version": "==0.13.1"
},
"pytz": {
"hashes": [
"sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed",
@ -736,6 +873,7 @@
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.15.0"
},
"snowballstemmer": {
@ -750,6 +888,7 @@
"sha256:1634eea42ab371d3d346309b93df7870a88610f0725d47528be902a0d95ecc55",
"sha256:a59dc181727e95d25f781f0eb4fd1825ff45590ec8ff49eadfd7f1a537cc0232"
],
"markers": "python_version >= '3.5'",
"version": "==2.0.1"
},
"sphinx": {
@ -765,6 +904,7 @@
"sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a",
"sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.2"
},
"sphinxcontrib-devhelp": {
@ -772,6 +912,7 @@
"sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e",
"sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.2"
},
"sphinxcontrib-htmlhelp": {
@ -779,6 +920,7 @@
"sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f",
"sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.3"
},
"sphinxcontrib-jsmath": {
@ -786,6 +928,7 @@
"sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178",
"sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.1"
},
"sphinxcontrib-qthelp": {
@ -793,6 +936,7 @@
"sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72",
"sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.3"
},
"sphinxcontrib-serializinghtml": {
@ -800,9 +944,11 @@
"sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc",
"sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"
],
"markers": "python_version >= '3.5'",
"version": "==1.1.4"
},
"spiffworkflow": {
"editable": true,
"git": "https://github.com/sartography/SpiffWorkflow.git",
"ref": "7c8d59e7b9a978795bc8d1f354002fdc89540672"
},
@ -837,6 +983,7 @@
"sha256:f57be5673e12763dd400fea568608700a63ce1c6bd5bdbc3cc3a2c5fdb045274",
"sha256:fc728ece3d5c772c196fd338a99798e7efac7a04f9cb6416299a3638ee9a94cd"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.3.18"
},
"swagger-ui-bundle": {
@ -852,13 +999,23 @@
"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a",
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
"version": "==1.25.10"
},
"vine": {
"hashes": [
"sha256:133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87",
"sha256:ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.3.0"
},
"waitress": {
"hashes": [
"sha256:1bb436508a7487ac6cb097ae7a7fe5413aefca610550baf58f0940e51ecfb261",
"sha256:3d633e78149eb83b60a07dfabb35579c29aac2d24bb803c18b26fb2ab1a584db"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==1.4.4"
},
"webob": {
@ -866,6 +1023,7 @@
"sha256:a3c89a8e9ba0aeb17382836cdb73c516d0ecf6630ec40ec28288f3ed459ce87b",
"sha256:aa3a917ed752ba3e0b242234b2a373f9c4e2a75d35291dcbe977649bd21fd108"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.8.6"
},
"webtest": {
@ -912,13 +1070,6 @@
],
"index": "pypi",
"version": "==1.3.0"
},
"zipp": {
"hashes": [
"sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
"sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
],
"version": "==3.1.0"
}
},
"develop": {
@ -927,6 +1078,7 @@
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==19.3.0"
},
"coverage": {
@ -969,25 +1121,19 @@
"index": "pypi",
"version": "==5.2.1"
},
"importlib-metadata": {
"hashes": [
"sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83",
"sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"
],
"markers": "python_version < '3.8'",
"version": "==1.7.0"
},
"iniconfig": {
"hashes": [
"sha256:aa0b40f50a00e72323cb5d41302f9c6165728fd764ac8822aa3fff00a40d56b4"
"sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437",
"sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69"
],
"version": "==1.0.0"
"version": "==1.0.1"
},
"more-itertools": {
"hashes": [
"sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5",
"sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2"
],
"markers": "python_version >= '3.5'",
"version": "==8.4.0"
},
"packaging": {
@ -995,6 +1141,7 @@
"sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",
"sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.4"
},
"pbr": {
@ -1010,6 +1157,7 @@
"sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.13.1"
},
"py": {
@ -1017,6 +1165,7 @@
"sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2",
"sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.9.0"
},
"pyparsing": {
@ -1024,6 +1173,7 @@
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
"sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.7"
},
"pytest": {
@ -1039,6 +1189,7 @@
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.15.0"
},
"toml": {
@ -1047,13 +1198,6 @@
"sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"
],
"version": "==0.10.1"
},
"zipp": {
"hashes": [
"sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
"sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
],
"version": "==3.1.0"
}
}
}

View File

@ -18,6 +18,9 @@ Make sure all of the following are properly installed on your system:
- [Install pipenv](https://pipenv-es.readthedocs.io/es/stable/)
- [Add ${HOME}/.local/bin to your PATH](https://github.com/pypa/pipenv/issues/2122#issue-319600584)
### Running Postgres
### Project Initialization
1. Clone this repository.
2. In PyCharm:

View File

@ -31,6 +31,13 @@ paths:
'304':
description: Redirection to the hosted frontend with an auth_token header.
/user:
parameters:
- name: admin_impersonate_uid
in: query
required: false
description: For admins, the unique uid of an existing user to impersonate.
schema:
type: string
get:
operationId: crc.api.user.get_current_user
summary: Returns the current user.
@ -38,11 +45,27 @@ paths:
- Users
responses:
'200':
description: The currently authenticated user.
description: The currently-authenticated user, or, if the current user is an admin and admin_impersonate_uid is provided, this will be the user with the given uid.
content:
application/json:
schema:
$ref: "#/components/schemas/User"
/list_users:
get:
operationId: crc.api.user.get_all_users
security:
- auth_admin: ['secret']
summary: Returns a list of all users in the database.
tags:
- Users
responses:
'200':
description: All users in the database.
content:
application/json:
schema:
type: array
$ref: "#/components/schemas/User"
# /v1.0/study
/study:
get:
@ -56,6 +79,7 @@ paths:
content:
application/json:
schema:
type: array
$ref: "#/components/schemas/Study"
post:
operationId: crc.api.study.add_study
@ -572,6 +596,18 @@ paths:
description: The type of action the event documents. Options include "ASSIGNMENT" for tasks that are waiting on you, and "COMPLETE" for things that have completed.
schema:
type: string
- name: workflow
in: query
required: false
description: Restrict results to the given workflow.
schema:
type: number
- name: study
in: query
required: false
description: Restrict results to the given study.
schema:
type: number
get:
operationId: crc.api.workflow.get_task_events
summary: Returns a list of task events related to the current user. Can be filtered by type.
@ -610,6 +646,12 @@ paths:
description: Set this to true to reset the workflow
schema:
type: boolean
- name: do_engine_steps
in: query
required: false
description: Defaults to true; can be set to false if you are just looking at the workflow, not completing it.
schema:
type: boolean
tags:
- Workflows and Tasks
responses:
@ -1046,7 +1088,7 @@ components:
example: dhf8r
protocol_builder_status:
type: string
enum: [INCOMPLETE, ACTIVE, HOLD, OPEN, ABANDONED]
enum: ['incomplete', 'active', 'hold', 'open', 'abandoned']
example: done
sponsor:
type: string

View File

@ -1,5 +1,6 @@
from SpiffWorkflow import WorkflowException
from SpiffWorkflow.exceptions import WorkflowTaskExecException
from flask import g
from crc import ma, app
@ -24,6 +25,11 @@ class ApiError(Exception):
instance.task_id = task.task_spec.name or ""
instance.task_name = task.task_spec.description or ""
instance.file_name = task.workflow.spec.file or ""
# Fixme: spiffworkflow is doing something weird where task ends up referenced in the data in some cases.
if "task" in task.data:
task.data.pop("task")
instance.task_data = task.data
app.logger.error(message, exc_info=True)
return instance
@ -60,3 +66,5 @@ class ApiErrorSchema(ma.Schema):
def handle_invalid_usage(error):
response = ApiErrorSchema().dump(error)
return response, error.status_code

View File

@ -8,6 +8,7 @@ from crc.api.common import ApiError, ApiErrorSchema
from crc.models.protocol_builder import ProtocolBuilderStatus
from crc.models.study import StudySchema, StudyModel, Study
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
def add_study(body):
@ -17,11 +18,11 @@ def add_study(body):
if 'title' not in body:
raise ApiError("missing_title", "Can't create a new study without a title.")
study_model = StudyModel(user_uid=g.user.uid,
study_model = StudyModel(user_uid=UserService.current_user().uid,
title=body['title'],
primary_investigator_id=body['primary_investigator_id'],
last_updated=datetime.now(),
protocol_builder_status=ProtocolBuilderStatus.ACTIVE)
protocol_builder_status=ProtocolBuilderStatus.active)
session.add(study_model)
errors = StudyService._add_all_workflow_specs_to_study(study_model)
@ -65,8 +66,9 @@ def delete_study(study_id):
def user_studies():
"""Returns all the studies associated with the current user. """
StudyService.synch_with_protocol_builder_if_enabled(g.user)
studies = StudyService.get_studies_for_user(g.user)
user = UserService.current_user(allow_admin_impersonate=True)
StudyService.synch_with_protocol_builder_if_enabled(user)
studies = StudyService.get_studies_for_user(user)
results = StudySchema(many=True).dump(studies)
return results

View File

@ -1,10 +1,11 @@
import flask
from flask import g, request
from crc import app, db
from crc import app, session
from crc.api.common import ApiError
from crc.models.user import UserModel, UserModelSchema
from crc.services.ldap_service import LdapService, LdapModel
from crc.services.user_service import UserService
"""
.. module:: crc.api.user
@ -35,6 +36,10 @@ def verify_token(token=None):
try:
token_info = UserModel.decode_auth_token(token)
g.user = UserModel.query.filter_by(uid=token_info['sub']).first()
# If the user is valid, store the token for this session
if g.user:
g.token = token
except:
raise failure_error
if g.user is not None:
@ -49,27 +54,31 @@ def verify_token(token=None):
if uid is not None:
db_user = UserModel.query.filter_by(uid=uid).first()
# If the user is valid, store the user and token for this session
if db_user is not None:
g.user = db_user
token = g.user.encode_auth_token().decode()
g.token = token
token_info = UserModel.decode_auth_token(token)
return token_info
else:
raise ApiError("no_user", "User not found. Please login via the frontend app before accessing this feature.",
status_code=403)
raise ApiError("no_user",
"User not found. Please login via the frontend app before accessing this feature.",
status_code=403)
else:
# Fall back to a default user if this is not production.
g.user = UserModel.query.first()
token = g.user.encode_auth_token()
token_info = UserModel.decode_auth_token(token)
return token_info
def verify_token_admin(token=None):
"""
Verifies the token for the user (if provided) in non-production environment. If in production environment,
checks that the user is in the list of authorized admins
Verifies the token for the user (if provided) in non-production environment.
If in production environment, checks that the user is in the list of authorized admins
Args:
token: Optional[str]
@ -77,21 +86,44 @@ def verify_token_admin(token=None):
Returns:
token: str
"""
# If this is production, check that the user is in the list of admins
if _is_production():
uid = _get_request_uid(request)
if uid is not None and uid in app.config['ADMIN_UIDS']:
return verify_token()
# If we're not in production, just use the normal verify_token method
else:
return verify_token(token)
verify_token(token)
if "user" in g and g.user.is_admin():
token = g.user.encode_auth_token()
token_info = UserModel.decode_auth_token(token)
return token_info
def get_current_user():
return UserModelSchema().dump(g.user)
def start_impersonating(uid):
if uid is not None and UserService.user_is_admin():
UserService.start_impersonating(uid)
user = UserService.current_user(allow_admin_impersonate=True)
return UserModelSchema().dump(user)
def stop_impersonating():
if UserService.user_is_admin():
UserService.stop_impersonating()
user = UserService.current_user(allow_admin_impersonate=False)
return UserModelSchema().dump(user)
def get_current_user(admin_impersonate_uid=None):
if UserService.user_is_admin():
if admin_impersonate_uid is not None:
UserService.start_impersonating(admin_impersonate_uid)
else:
UserService.stop_impersonating()
user = UserService.current_user(UserService.user_is_admin() and UserService.admin_is_impersonating())
return UserModelSchema().dump(user)
def get_all_users():
if "user" in g and g.user.is_admin():
all_users = session.query(UserModel).all()
return UserModelSchema(many=True).dump(all_users)
def login(
@ -134,7 +166,6 @@ def login(
# X-Forwarded-Server: dev.crconnect.uvadcos.io
# Connection: Keep-Alive
# If we're in production, override any uid with the uid from the SSO request headers
if _is_production():
uid = _get_request_uid(request)
@ -182,6 +213,8 @@ def _handle_login(user_info: LdapModel, redirect_url=None):
# Return the frontend auth callback URL, with auth token appended.
auth_token = user.encode_auth_token().decode()
g.token = auth_token
if redirect_url is not None:
if redirect_url.find("http://") != 0 and redirect_url.find("https://") != 0:
redirect_url = "http://" + redirect_url
@ -194,13 +227,13 @@ def _handle_login(user_info: LdapModel, redirect_url=None):
def _upsert_user(user_info):
user = db.session.query(UserModel).filter(UserModel.uid == user_info.uid).first()
user = session.query(UserModel).filter(UserModel.uid == user_info.uid).first()
if user is None:
# Add new user
user = UserModel()
else:
user = db.session.query(UserModel).filter(UserModel.uid == user_info.uid).with_for_update().first()
user = session.query(UserModel).filter(UserModel.uid == user_info.uid).with_for_update().first()
user.uid = user_info.uid
user.display_name = user_info.display_name
@ -208,8 +241,8 @@ def _upsert_user(user_info):
user.affiliation = user_info.affiliation
user.title = user_info.title
db.session.add(user)
db.session.commit()
session.add(user)
session.commit()
return user

View File

@ -13,6 +13,7 @@ from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, Workflow
from crc.services.file_service import FileService
from crc.services.lookup_service import LookupService
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.workflow_service import WorkflowService
@ -95,19 +96,30 @@ def delete_workflow_specification(spec_id):
session.commit()
def get_workflow(workflow_id, soft_reset=False, hard_reset=False):
def get_workflow(workflow_id, soft_reset=False, hard_reset=False, do_engine_steps=True):
"""Soft reset will attempt to update to the latest spec without starting over,
Hard reset will update to the latest spec and start from the beginning.
Read Only will return the workflow in a read only state, without running any
engine tasks or logging any events. """
workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset)
if do_engine_steps:
processor.do_engine_steps()
processor.save()
WorkflowService.update_task_assignments(processor)
workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
WorkflowService.update_task_assignments(processor)
return WorkflowApiSchema().dump(workflow_api_model)
def get_task_events(action):
def get_task_events(action = None, workflow = None, study = None):
"""Provides a way to see a history of what has happened, or get a list of tasks that need your attention."""
query = session.query(TaskEventModel).filter(TaskEventModel.user_uid == g.user.uid)
if action:
query = query.filter(TaskEventModel.action == action)
if workflow:
query = query.filter(TaskEventModel.workflow_id == workflow)
if study:
query = query.filter(TaskEventModel.study_id == study)
events = query.all()
# Turn the database records into something a little richer for the UI to use.
@ -130,7 +142,7 @@ def set_current_task(workflow_id, task_id):
task_id = uuid.UUID(task_id)
spiff_task = processor.bpmn_workflow.get_task(task_id)
_verify_user_and_role(processor, spiff_task)
user_uid = g.user.uid
user_uid = UserService.current_user(allow_admin_impersonate=True).uid
if spiff_task.state != spiff_task.COMPLETED and spiff_task.state != spiff_task.READY:
raise ApiError("invalid_state", "You may not move the token to a task who's state is not "
"currently set to COMPLETE or READY.")
@ -173,7 +185,8 @@ def update_task(workflow_id, task_id, body, terminate_loop=None):
processor.save()
# Log the action, and any pending task assignments in the event of lanes in the workflow.
WorkflowService.log_task_action(g.user.uid, processor, spiff_task, WorkflowService.TASK_ACTION_COMPLETE)
user = UserService.current_user(allow_admin_impersonate=False) # Always log as the real user.
WorkflowService.log_task_action(user.uid, processor, spiff_task, WorkflowService.TASK_ACTION_COMPLETE)
WorkflowService.update_task_assignments(processor)
workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
@ -233,19 +246,11 @@ def lookup(workflow_id, field_id, query=None, value=None, limit=10):
def _verify_user_and_role(processor, spiff_task):
"""Assures the currently logged in user can access the given workflow and task, or
raises an error.
Allow administrators to modify tasks, otherwise assure that the current user
is allowed to edit or update the task. Will raise the appropriate error if user
is not authorized. """
if 'user' not in g:
raise ApiError("logged_out", "You are no longer logged in.", status_code=401)
if g.user.uid in app.config['ADMIN_UIDS']:
return g.user.uid
raises an error. """
user = UserService.current_user(allow_admin_impersonate=True)
allowed_users = WorkflowService.get_users_assigned_to_task(processor, spiff_task)
if g.user.uid not in allowed_users:
if user.uid not in allowed_users:
raise ApiError.from_task("permission_denied",
f"This task must be completed by '{allowed_users}', "
f"but you are {g.user.uid}", spiff_task)
f"but you are {user.uid}", spiff_task)

View File

@ -143,7 +143,8 @@ class NavigationItemSchema(ma.Schema):
class WorkflowApi(object):
def __init__(self, id, status, next_task, navigation,
spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks, last_updated, title):
spec_version, is_latest_spec, workflow_spec_id, total_tasks, completed_tasks,
last_updated, title):
self.id = id
self.status = status
self.next_task = next_task # The next task that requires user input.

View File

@ -22,11 +22,11 @@ class ProtocolBuilderStatus(enum.Enum):
# • Hold: store boolean value in CR Connect (add to Study Model)
# • Open To Enrollment: has start date and HSR number?
# • Abandoned: deleted in PB
INCOMPLETE = 'incomplete' # Found in PB but not ready to start (not q_complete)
ACTIVE = 'active', # found in PB, marked as "q_complete" and no HSR number and not hold
HOLD = 'hold', # CR Connect side, if the Study ias marked as "hold".
OPEN = 'open', # Open To Enrollment: has start date and HSR number?
ABANDONED = 'Abandoned' # Not found in PB
incomplete = 'incomplete' # Found in PB but not ready to start (not q_complete)
active = 'active' # found in PB, marked as "q_complete" and no HSR number and not hold
hold = 'hold' # CR Connect side, if the Study ias marked as "hold".
open = 'open' # Open To Enrollment: has start date and HSR number?
abandoned = 'abandoned' # Not found in PB
#DRAFT = 'draft', # !Q_COMPLETE

View File

@ -25,6 +25,7 @@ class StudyModel(db.Model):
investigator_uids = db.Column(db.ARRAY(db.String), nullable=True)
requirements = db.Column(db.ARRAY(db.Integer), nullable=True)
on_hold = db.Column(db.Boolean, default=False)
enrollment_date = db.Column(db.DateTime(timezone=True), nullable=True)
def update_from_protocol_builder(self, pbs: ProtocolBuilderStudy):
self.hsr_number = pbs.HSRNUMBER
@ -32,16 +33,18 @@ class StudyModel(db.Model):
self.user_uid = pbs.NETBADGEID
self.last_updated = pbs.DATE_MODIFIED
self.protocol_builder_status = ProtocolBuilderStatus.ACTIVE
self.protocol_builder_status = ProtocolBuilderStatus.active
if pbs.HSRNUMBER:
self.protocol_builder_status = ProtocolBuilderStatus.OPEN
self.protocol_builder_status = ProtocolBuilderStatus.open
if self.on_hold:
self.protocol_builder_status = ProtocolBuilderStatus.HOLD
self.protocol_builder_status = ProtocolBuilderStatus.hold
class WorkflowMetadata(object):
def __init__(self, id, name, display_name, description, spec_version, category_id, category_display_name, state: WorkflowState, status: WorkflowStatus,
total_tasks, completed_tasks, display_order):
def __init__(self, id, name = None, display_name = None, description = None, spec_version = None,
category_id = None, category_display_name = None, state: WorkflowState = None,
status: WorkflowStatus = None, total_tasks = None, completed_tasks = None,
display_order = None):
self.id = id
self.name = name
self.display_name = display_name
@ -108,7 +111,7 @@ class Study(object):
id=None,
protocol_builder_status=None,
sponsor="", hsr_number="", ind_number="", categories=[],
files=[], approvals=[], **argsv):
files=[], approvals=[], enrollment_date=None, **argsv):
self.id = id
self.user_uid = user_uid
self.title = title
@ -122,6 +125,7 @@ class Study(object):
self.approvals = approvals
self.warnings = []
self.files = files
self.enrollment_date = enrollment_date
@classmethod
def from_model(cls, study_model: StudyModel):
@ -154,11 +158,12 @@ class StudySchema(ma.Schema):
ind_number = fields.String(allow_none=True)
files = fields.List(fields.Nested(FileSchema), dump_only=True)
approvals = fields.List(fields.Nested('ApprovalSchema'), dump_only=True)
enrollment_date = fields.Date(allow_none=True)
class Meta:
model = Study
additional = ["id", "title", "last_updated", "primary_investigator_id", "user_uid",
"sponsor", "ind_number", "approvals", "files"]
"sponsor", "ind_number", "approvals", "files", "enrollment_date"]
unknown = INCLUDE
@marshmallow.post_load

View File

@ -50,14 +50,16 @@ class TaskEvent(object):
self.task_type = model.task_type
self.task_state = model.task_state
self.task_lane = model.task_lane
self.date = model.date
class TaskEventSchema(ma.Schema):
study = fields.Nested(StudySchema, dump_only=True)
workflow = fields.Nested(WorkflowMetadataSchema, dump_only=True)
task_lane = fields.String(allow_none=True, required=False)
class Meta:
model = TaskEvent
additional = ["id", "user_uid", "action", "task_id", "task_title",
"task_name", "task_type", "task_state", "task_lane"]
"task_name", "task_type", "task_state", "task_lane", "date"]
unknown = INCLUDE

View File

@ -1,6 +1,7 @@
import datetime
import jwt
from marshmallow import fields
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from crc import db, app
@ -21,6 +22,10 @@ class UserModel(db.Model):
# TODO: Add Department and School
def is_admin(self):
# Currently admin abilities are set in the configuration, but this
# may change in the future.
return self.uid in app.config['ADMIN_UIDS']
def encode_auth_token(self):
"""
@ -60,4 +65,14 @@ class UserModelSchema(SQLAlchemyAutoSchema):
model = UserModel
load_instance = True
include_relationships = True
is_admin = fields.Method('get_is_admin', dump_only=True)
def get_is_admin(self, obj):
return obj.is_admin()
class AdminSessionModel(db.Model):
__tablename__ = 'admin_session'
id = db.Column(db.Integer, primary_key=True)
token = db.Column(db.String, unique=True)
admin_impersonate_uid = db.Column(db.String)

View File

@ -79,7 +79,8 @@ class FileService(object):
""" Opens a reference file (assumes that it is xls file) and returns the data as a
dictionary, each row keyed on the given index_column name. If there are columns
that should be represented as integers, pass these as an array of int_columns, lest
you get '1.0' rather than '1' """
you get '1.0' rather than '1'
fixme: This is stupid stupid slow. Place it in the database and just check if it is up to date."""
data_model = FileService.get_reference_file_data(reference_file_name)
xls = ExcelFile(data_model.data)
df = xls.parse(xls.sheet_names[0])

View File

@ -1,6 +1,5 @@
from copy import copy
from datetime import datetime
import json
from typing import List
import requests
@ -13,16 +12,15 @@ from crc.api.common import ApiError
from crc.models.file import FileModel, FileModelSchema, File
from crc.models.ldap import LdapSchema
from crc.models.protocol_builder import ProtocolBuilderStudy, ProtocolBuilderStatus
from crc.models.task_event import TaskEventModel
from crc.models.study import StudyModel, Study, Category, WorkflowMetadata
from crc.models.task_event import TaskEventModel, TaskEvent
from crc.models.workflow import WorkflowSpecCategoryModel, WorkflowModel, WorkflowSpecModel, WorkflowState, \
WorkflowStatus
from crc.services.approval_service import ApprovalService
from crc.services.file_service import FileService
from crc.services.ldap_service import LdapService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.approval_service import ApprovalService
from crc.models.approval import Approval
class StudyService(object):
@ -63,11 +61,10 @@ class StudyService(object):
files = (File.from_models(model, FileService.get_file_data(model.id),
FileService.get_doc_dictionary()) for model in files)
study.files = list(files)
# Calling this line repeatedly is very very slow. It creates the
# master spec and runs it. Don't execute this for Abandoned studies, as
# we don't have the information to process them.
if study.protocol_builder_status != ProtocolBuilderStatus.ABANDONED:
if study.protocol_builder_status != ProtocolBuilderStatus.abandoned:
status = StudyService.__get_study_status(study_model)
study.warnings = StudyService.__update_status_of_workflow_meta(workflow_metas, status)
@ -268,7 +265,7 @@ class StudyService(object):
for study in db_studies:
pb_study = next((pbs for pbs in pb_studies if pbs.STUDYID == study.id), None)
if not pb_study:
study.protocol_builder_status = ProtocolBuilderStatus.ABANDONED
study.protocol_builder_status = ProtocolBuilderStatus.abandoned
db.session.commit()

View File

@ -0,0 +1,117 @@
from flask import g
from crc import session
from crc.api.common import ApiError
from crc.models.user import UserModel, AdminSessionModel
class UserService(object):
    """Provides common tools for working with users.

    Includes admin "impersonation": an admin may temporarily act on behalf of
    another user. The impersonation state is stored per-session-token in the
    admin_session table, and the impersonated user is cached on flask.g.
    """

    @staticmethod
    def has_user():
        """Return True if a user is currently logged in (token and user set on g)."""
        return 'token' in g and \
               bool(g.token) and \
               'user' in g and \
               bool(g.user)

    @staticmethod
    def user_is_admin():
        """Return True if the current user is logged in and is an admin."""
        return UserService.has_user() and g.user.is_admin()

    @staticmethod
    def admin_is_impersonating():
        """Return True if the current admin user is impersonating another user.

        Raises:
            ApiError: 403 if the current user is not an admin.
        """
        if UserService.user_is_admin():
            admin_session: AdminSessionModel = UserService.get_admin_session()
            return admin_session is not None
        else:
            raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)

    @staticmethod
    def is_different_user(uid):
        """Return True if the given uid differs from the current user's uid.

        Compares by value (!=), not identity: equal strings are not guaranteed
        to be the same object, so `is not` would give false positives here.
        """
        return UserService.has_user() and uid is not None and uid != g.user.uid

    @staticmethod
    def current_user(allow_admin_impersonate=False) -> UserModel:
        """Return the effective current user.

        When ``allow_admin_impersonate`` is True and an admin has an active
        impersonation, the impersonated user is returned; otherwise the real
        logged-in user.

        Raises:
            ApiError: 401 if no user is logged in.
        """
        if not UserService.has_user():
            raise ApiError("logged_out", "You are no longer logged in.", status_code=401)

        # Admins can pretend to be different users and act on a user's behalf in
        # some circumstances.
        if UserService.user_is_admin() and allow_admin_impersonate and UserService.admin_is_impersonating():
            return UserService.get_admin_session_user()
        else:
            return g.user

    # Admins can pretend to be different users and act on a user's behalf in some circumstances.
    # This method allows an admin user to start impersonating another user with the given uid.
    # Stops impersonating if the uid is None or invalid.
    @staticmethod
    def start_impersonating(uid=None):
        """Begin impersonating the user with the given uid (admins only).

        Silently does nothing if an impersonation is already active or the uid
        is the admin's own uid.

        Raises:
            ApiError: 401 if logged out, 403 if not an admin,
                "invalid_uid" if uid is missing or matches no user.
        """
        if not UserService.has_user():
            raise ApiError("logged_out", "You are no longer logged in.", status_code=401)

        if not UserService.user_is_admin():
            raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)

        if uid is None:
            raise ApiError("invalid_uid", "Please provide a valid user uid.")

        if not UserService.admin_is_impersonating() and UserService.is_different_user(uid):
            # Impersonate the user if the given uid is valid.
            impersonate_user = session.query(UserModel).filter(UserModel.uid == uid).first()

            if impersonate_user is not None:
                g.impersonate_user = impersonate_user

                # Store the uid and user session token.
                session.add(AdminSessionModel(token=g.token, admin_impersonate_uid=uid))
                session.commit()
            else:
                raise ApiError("invalid_uid", "The uid provided is not valid.")

    @staticmethod
    def stop_impersonating():
        """End any active impersonation for the current session token.

        Raises:
            ApiError: 401 if no user is logged in.
        """
        if not UserService.has_user():
            raise ApiError("logged_out", "You are no longer logged in.", status_code=401)

        # Clear out the current impersonating user.
        if 'impersonate_user' in g:
            del g.impersonate_user

        admin_session: AdminSessionModel = UserService.get_admin_session()
        if admin_session:
            session.delete(admin_session)
            session.commit()

    @staticmethod
    def in_list(uids, allow_admin_impersonate=False):
        """Returns true if the current user's id is in the given list of ids. False if there
        is no user, or the user is not in the list."""
        if UserService.has_user():  # If someone is logged in, lock tasks that don't belong to them.
            user = UserService.current_user(allow_admin_impersonate)
            if user.uid in uids:
                return True
        return False

    @staticmethod
    def get_admin_session() -> AdminSessionModel:
        """Return the AdminSessionModel for the current session token, or None.

        Raises:
            ApiError: 403 if the current user is not an admin.
        """
        if UserService.user_is_admin():
            return session.query(AdminSessionModel).filter(AdminSessionModel.token == g.token).first()
        else:
            raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)

    @staticmethod
    def get_admin_session_user() -> UserModel:
        """Return the impersonated user for the current admin session.

        Returns None implicitly if there is no active admin session.

        Raises:
            ApiError: 403 if the current user is not an admin.
        """
        if UserService.user_is_admin():
            admin_session = UserService.get_admin_session()

            if admin_session is not None:
                return session.query(UserModel).filter(UserModel.uid == admin_session.admin_impersonate_uid).first()
        else:
            raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)

View File

@ -125,7 +125,8 @@ class WorkflowProcessor(object):
STUDY_ID_KEY = "study_id"
VALIDATION_PROCESS_KEY = "validate_only"
def __init__(self, workflow_model: WorkflowModel, soft_reset=False, hard_reset=False, validate_only=False):
def __init__(self, workflow_model: WorkflowModel,
soft_reset=False, hard_reset=False, validate_only=False):
"""Create a Workflow Processor based on the serialized information available in the workflow model.
If soft_reset is set to true, it will try to use the latest version of the workflow specification
without resetting to the beginning of the workflow. This will work for some minor changes to the spec.
@ -188,10 +189,10 @@ class WorkflowProcessor(object):
bpmn_workflow = BpmnWorkflow(spec, script_engine=self._script_engine)
bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = workflow_model.study_id
bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = validate_only
#try:
bpmn_workflow.do_engine_steps()
# except WorkflowException as we:
# raise ApiError.from_task_spec("error_loading_workflow", str(we), we.sender)
# try:
# bpmn_workflow.do_engine_steps()
# except WorkflowException as we:
# raise ApiError.from_task_spec("error_loading_workflow", str(we), we.sender)
return bpmn_workflow
def save(self):

View File

@ -30,6 +30,7 @@ from crc.models.workflow import WorkflowModel, WorkflowStatus, WorkflowSpecModel
from crc.services.file_service import FileService
from crc.services.lookup_service import LookupService
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
from crc.services.workflow_processor import WorkflowProcessor
@ -154,10 +155,9 @@ class WorkflowService(object):
if len(field.options) > 0:
random_choice = random.choice(field.options)
if isinstance(random_choice, dict):
choice = random.choice(field.options)
return {
'value': choice['id'],
'label': choice['name']
'value': random_choice['id'],
'label': random_choice['name']
}
else:
# fixme: why it is sometimes an EnumFormFieldOption, and other times not?
@ -239,7 +239,7 @@ class WorkflowService(object):
nav_item['title'] = nav_item['task'].title # Prefer the task title.
user_uids = WorkflowService.get_users_assigned_to_task(processor, spiff_task)
if 'user' not in g or not g.user or g.user.uid not in user_uids:
if not UserService.in_list(user_uids, allow_admin_impersonate=True):
nav_item['state'] = WorkflowService.TASK_STATE_LOCKED
else:
@ -272,7 +272,7 @@ class WorkflowService(object):
workflow_api.next_task = WorkflowService.spiff_task_to_api_task(next_task, add_docs_and_forms=True)
# Update the state of the task to locked if the current user does not own the task.
user_uids = WorkflowService.get_users_assigned_to_task(processor, next_task)
if 'user' not in g or not g.user or g.user.uid not in user_uids:
if not UserService.in_list(user_uids, allow_admin_impersonate=True):
workflow_api.next_task.state = WorkflowService.TASK_STATE_LOCKED
return workflow_api
@ -471,6 +471,7 @@ class WorkflowService(object):
db.session.query(TaskEventModel). \
filter(TaskEventModel.workflow_id == processor.workflow_model.id). \
filter(TaskEventModel.action == WorkflowService.TASK_ACTION_ASSIGNMENT).delete()
db.session.commit()
for task in processor.get_current_user_tasks():
user_ids = WorkflowService.get_users_assigned_to_task(processor, task)

View File

@ -44,7 +44,7 @@
{% endif %}{% endfor %}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:properties>
<camunda:property name="display_name" value="Documents and Approvals" />
<camunda:property name="display_name" value="'Documents and Approvals'" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>Flow_142jtxs</bpmn:incoming>

View File

@ -1,33 +1,116 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_300b2c3" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_300b2c3" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.0.0">
<bpmn:collaboration id="Collaboration_163c7c8">
<bpmn:participant id="Participant_1mnua71" name="team" processRef="Process_cd666f3" />
</bpmn:collaboration>
<bpmn:process id="Process_cd666f3" isExecutable="true">
<bpmn:laneSet id="LaneSet_0ucxzw3">
<bpmn:lane id="Lane_16ml9fk">
<bpmn:flowNodeRef>StartEvent_1</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Activity_1qpy9ra</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Event_1m9fnmv</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Activity_0c5drp3</bpmn:flowNodeRef>
</bpmn:lane>
<bpmn:lane id="Lane_1jw70kl" name="supervisor">
<bpmn:flowNodeRef>Gateway_0ved0t9</bpmn:flowNodeRef>
<bpmn:flowNodeRef>Activity_107ojvq</bpmn:flowNodeRef>
</bpmn:lane>
</bpmn:laneSet>
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0q51aiq</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0q51aiq" sourceRef="StartEvent_1" targetRef="Activity_1qpy9ra" />
<bpmn:userTask id="Activity_1qpy9ra" name="Placeholder" camunda:formKey="Formkey_placeholder">
<bpmn:userTask id="Activity_1qpy9ra" name="Assign Approver" camunda:formKey="form_assign_approver">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="FormField_2t2220o" label="Will this be updated later" type="boolean" />
<camunda:formField id="supervisor" label="Approver UID" type="string" defaultValue="dhf8r">
<camunda:validation>
<camunda:constraint name="required" config="true" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0q51aiq</bpmn:incoming>
<bpmn:outgoing>Flow_0ai4j1x</bpmn:outgoing>
<bpmn:incoming>Flow_1ugh4wn</bpmn:incoming>
<bpmn:outgoing>Flow_0d2snmk</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_0q51aiq" sourceRef="StartEvent_1" targetRef="Activity_1qpy9ra" />
<bpmn:sequenceFlow id="Flow_0d2snmk" sourceRef="Activity_1qpy9ra" targetRef="Activity_107ojvq" />
<bpmn:exclusiveGateway id="Gateway_0ved0t9" name="Approved?">
<bpmn:incoming>Flow_0apr3nj</bpmn:incoming>
<bpmn:outgoing>Flow_0mhtlkt</bpmn:outgoing>
<bpmn:outgoing>Flow_11tnx3n</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_0apr3nj" sourceRef="Activity_107ojvq" targetRef="Gateway_0ved0t9" />
<bpmn:sequenceFlow id="Flow_0mhtlkt" name="Yes" sourceRef="Gateway_0ved0t9" targetRef="Event_1m9fnmv">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">is_study_approved == True</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:endEvent id="Event_1m9fnmv">
<bpmn:incoming>Flow_0ai4j1x</bpmn:incoming>
<bpmn:incoming>Flow_0mhtlkt</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0ai4j1x" sourceRef="Activity_1qpy9ra" targetRef="Event_1m9fnmv" />
<bpmn:sequenceFlow id="Flow_11tnx3n" name="No" sourceRef="Gateway_0ved0t9" targetRef="Activity_0c5drp3">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">is_study_approved == False</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:userTask id="Activity_107ojvq" name="Approve Study" camunda:formKey="form_approve_study">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="is_study_approved" label="Approve this study?" type="boolean">
<camunda:validation>
<camunda:constraint name="required" config="true" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0d2snmk</bpmn:incoming>
<bpmn:outgoing>Flow_0apr3nj</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_1ugh4wn" sourceRef="Activity_0c5drp3" targetRef="Activity_1qpy9ra" />
<bpmn:manualTask id="Activity_0c5drp3" name="Review Feedback">
<bpmn:documentation>Your request was not approved. Try again.</bpmn:documentation>
<bpmn:incoming>Flow_11tnx3n</bpmn:incoming>
<bpmn:outgoing>Flow_1ugh4wn</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_cd666f3">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Collaboration_163c7c8">
<bpmndi:BPMNShape id="Participant_1mnua71_di" bpmnElement="Participant_1mnua71" isHorizontal="true">
<dc:Bounds x="129" y="117" width="600" height="250" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Lane_1jw70kl_di" bpmnElement="Lane_1jw70kl" isHorizontal="true">
<dc:Bounds x="159" y="242" width="570" height="125" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Lane_16ml9fk_di" bpmnElement="Lane_16ml9fk" isHorizontal="true">
<dc:Bounds x="159" y="117" width="570" height="125" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_11tnx3n_di" bpmnElement="Flow_11tnx3n">
<di:waypoint x="460" y="275" />
<di:waypoint x="460" y="217" />
<bpmndi:BPMNLabel>
<dc:Bounds x="468" y="241" width="15" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0mhtlkt_di" bpmnElement="Flow_0mhtlkt">
<di:waypoint x="485" y="300" />
<di:waypoint x="660" y="300" />
<di:waypoint x="660" y="195" />
<bpmndi:BPMNLabel>
<dc:Bounds x="563" y="282" width="19" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0apr3nj_di" bpmnElement="Flow_0apr3nj">
<di:waypoint x="370" y="300" />
<di:waypoint x="435" y="300" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0d2snmk_di" bpmnElement="Flow_0d2snmk">
<di:waypoint x="320" y="217" />
<di:waypoint x="320" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0q51aiq_di" bpmnElement="Flow_0q51aiq">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0ai4j1x_di" bpmnElement="Flow_0ai4j1x">
<bpmndi:BPMNEdge id="Flow_1ugh4wn_di" bpmnElement="Flow_1ugh4wn">
<di:waypoint x="400" y="177" />
<di:waypoint x="370" y="177" />
<di:waypoint x="432" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
@ -35,8 +118,20 @@
<bpmndi:BPMNShape id="Activity_14cpuv6_di" bpmnElement="Activity_1qpy9ra">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0ved0t9_di" bpmnElement="Gateway_0ved0t9" isMarkerVisible="true">
<dc:Bounds x="435" y="275" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="435" y="332" width="54" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1m9fnmv_di" bpmnElement="Event_1m9fnmv">
<dc:Bounds x="432" y="159" width="36" height="36" />
<dc:Bounds x="642" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1ps6jft_di" bpmnElement="Activity_107ojvq">
<dc:Bounds x="270" y="260" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1al86eb_di" bpmnElement="Activity_0c5drp3">
<dc:Bounds x="400" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>

View File

@ -116,7 +116,7 @@
</camunda:formField>
</camunda:formData>
<camunda:properties>
<camunda:property name="display_name" value="Select Template Type" />
<camunda:property name="display_name" value="'Select Template Type'" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0schnpa</bpmn:incoming>

View File

@ -30,7 +30,7 @@ StudyInfo['documents'] = study_info('documents')</bpmn:script>
<bpmn:parallelGateway id="Gateway_1nta7st" name="Some Name">
<bpmn:extensionElements>
<camunda:properties>
<camunda:property name="display_name" value="Some Name" />
<camunda:property name="display_name" value="'Some Name'" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_17ct47v</bpmn:incoming>

View File

@ -0,0 +1,32 @@
"""empty message
Revision ID: 2e7b377cbc7b
Revises: c4ddb69e7ef4
Create Date: 2020-07-28 17:03:23.586828
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2e7b377cbc7b'
down_revision = 'c4ddb69e7ef4'
branch_labels = None
depends_on = None
def upgrade():
    """Rename the protocolbuilderstatus enum values from upper- to lower-case.

    Postgres cannot rename enum members in place, so the old type is renamed
    aside, a new type with the lower-case labels is created and swapped into
    the study table, and the old type is dropped. Statement order matters.
    """
    # Existing upper-case values would fail the text cast into the new enum,
    # so clear the column first; all studies are reset to 'incomplete' below.
    op.execute('update study set protocol_builder_status = NULL;')
    op.execute('ALTER TYPE protocolbuilderstatus RENAME TO pbs_old;')
    op.execute("CREATE TYPE protocolbuilderstatus AS ENUM('incomplete', 'active', 'hold', 'open', 'abandoned')")
    op.execute("ALTER TABLE study ALTER COLUMN protocol_builder_status TYPE protocolbuilderstatus USING protocol_builder_status::text::protocolbuilderstatus;")
    op.execute('DROP TYPE pbs_old;')
    op.execute("update study set protocol_builder_status = 'incomplete';")
def downgrade():
    """Restore the upper-case protocolbuilderstatus enum values.

    Mirrors upgrade(): swap the enum type back to the upper-case labels.
    Note that the column is left NULL afterwards — the pre-migration
    statuses are not recoverable.
    """
    op.execute('update study set protocol_builder_status = NULL;')
    op.execute('ALTER TYPE protocolbuilderstatus RENAME TO pbs_old;')
    op.execute("CREATE TYPE protocolbuilderstatus AS ENUM('INCOMPLETE', 'ACTIVE', 'HOLD', 'OPEN', 'ABANDONED')")
    op.execute("ALTER TABLE study ALTER COLUMN protocol_builder_status TYPE protocolbuilderstatus USING protocol_builder_status::text::protocolbuilderstatus;")
    op.execute('DROP TYPE pbs_old;')

View File

@ -0,0 +1,34 @@
"""empty message
Revision ID: ab06a94e5d4c
Revises: 2e7b377cbc7b
Create Date: 2020-07-30 11:23:46.601338
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ab06a94e5d4c'
down_revision = '2e7b377cbc7b'
branch_labels = None
depends_on = None
def upgrade():
    """Create the admin_session table used for admin user impersonation.

    Each row maps a session token (unique) to the uid of the user that an
    admin is impersonating under that token.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('admin_session',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('token', sa.String(), nullable=True),
    sa.Column('admin_impersonate_uid', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('token')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the admin_session table, discarding any active impersonations."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('admin_session')
    # ### end Alembic commands ###

View File

@ -0,0 +1,28 @@
"""empty message
Revision ID: c4ddb69e7ef4
Revises: ffef4661a37d
Create Date: 2020-07-22 09:04:09.769239
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c4ddb69e7ef4'
down_revision = 'ffef4661a37d'
branch_labels = None
depends_on = None
def upgrade():
    """Add the nullable, timezone-aware enrollment_date column to study."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('study', sa.Column('enrollment_date', sa.DateTime(timezone=True), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Remove the enrollment_date column from study (data is lost)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('study', 'enrollment_date')
    # ### end Alembic commands ###

View File

@ -1,3 +1,3 @@
from setuptools import setup
setup(setup_requires=["pbr"], pbr=True)
setup(setup_requires=["pbr"], pbr=True, install_requires=['box'])

View File

@ -16,17 +16,19 @@ from crc.models.api_models import WorkflowApiSchema, MultiInstanceType
from crc.models.approval import ApprovalModel, ApprovalStatus
from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
from crc.models.protocol_builder import ProtocolBuilderStatus
from crc.models.task_event import TaskEventModel
from crc.models.study import StudyModel
from crc.models.task_event import TaskEventModel
from crc.models.user import UserModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecCategoryModel
from crc.services.file_service import FileService
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
from crc.services.workflow_service import WorkflowService
from example_data import ExampleDataLoader
#UNCOMMENT THIS FOR DEBUGGING SQL ALCHEMY QUERIES
# UNCOMMENT THIS FOR DEBUGGING SQL ALCHEMY QUERIES
import logging
logging.basicConfig()
@ -37,48 +39,57 @@ class BaseTest(unittest.TestCase):
if not app.config['TESTING']:
raise (Exception("INVALID TEST CONFIGURATION. This is almost always in import order issue."
"The first class to import in each test should be the base_test.py file."))
"The first class to import in each test should be the base_test.py file."))
auths = {}
test_uid = "dhf8r"
users = [
{
'uid':'dhf8r',
'email_address':'dhf8r@virginia.EDU',
'display_name':'Daniel Harold Funk',
'affiliation':'staff@virginia.edu;member@virginia.edu',
'eppn':'dhf8r@virginia.edu',
'first_name':'Daniel',
'last_name':'Funk',
'title':'SOFTWARE ENGINEER V'
}
'uid': 'dhf8r',
'email_address': 'dhf8r@virginia.EDU',
'display_name': 'Daniel Harold Funk',
'affiliation': 'staff@virginia.edu;member@virginia.edu',
'eppn': 'dhf8r@virginia.edu',
'first_name': 'Daniel',
'last_name': 'Funk',
'title': 'SOFTWARE ENGINEER V'
},
{
'uid': 'lbd3p',
'email_address': 'lbd3p@virginia.EDU',
'display_name': 'Laura Barnes',
'affiliation': 'staff@virginia.edu;member@virginia.edu',
'eppn': 'lbd3p@virginia.edu',
'first_name': 'Laura',
'last_name': 'Barnes',
'title': 'Associate Professor of Systems and Information Engineering'
},
]
studies = [
{
'id':0,
'title':'The impact of fried pickles on beer consumption in bipedal software developers.',
'last_updated':datetime.datetime.now(),
'protocol_builder_status':ProtocolBuilderStatus.ACTIVE,
'primary_investigator_id':'dhf8r',
'sponsor':'Sartography Pharmaceuticals',
'ind_number':'1234',
'user_uid':'dhf8r'
'id': 0,
'title': 'The impact of fried pickles on beer consumption in bipedal software developers.',
'last_updated': datetime.datetime.now(),
'protocol_builder_status': ProtocolBuilderStatus.active,
'primary_investigator_id': 'dhf8r',
'sponsor': 'Sartography Pharmaceuticals',
'ind_number': '1234',
'user_uid': 'dhf8r'
},
{
'id':1,
'title':'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
'last_updated':datetime.datetime.now(),
'protocol_builder_status':ProtocolBuilderStatus.ACTIVE,
'primary_investigator_id':'dhf8r',
'sponsor':'Makerspace & Co.',
'ind_number':'5678',
'user_uid':'dhf8r'
'id': 1,
'title': 'Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
'last_updated': datetime.datetime.now(),
'protocol_builder_status': ProtocolBuilderStatus.active,
'primary_investigator_id': 'dhf8r',
'sponsor': 'Makerspace & Co.',
'ind_number': '5678',
'user_uid': 'dhf8r'
}
]
@classmethod
def setUpClass(cls):
app.config.from_object('config.testing')
@ -98,7 +109,7 @@ class BaseTest(unittest.TestCase):
def tearDown(self):
ExampleDataLoader.clean_db()
g.user = None
self.logout()
self.auths = {}
def logged_in_headers(self, user=None, redirect_url='http://some/frontend/url'):
@ -118,7 +129,8 @@ class BaseTest(unittest.TestCase):
self.assertIsNotNone(user_model.display_name)
self.assertEqual(user_model.uid, uid)
self.assertTrue('user' in g, 'User should be in Flask globals')
self.assertEqual(uid, g.user.uid, 'Logged in user should match given user uid')
user = UserService.current_user(allow_admin_impersonate=True)
self.assertEqual(uid, user.uid, 'Logged in user should match given user uid')
return dict(Authorization='Bearer ' + user_model.encode_auth_token().decode())
@ -135,16 +147,21 @@ class BaseTest(unittest.TestCase):
else:
ExampleDataLoader().load_test_data()
for user_json in self.users:
db.session.add(UserModel(**user_json))
db.session.commit()
# If in production mode, only add the first user.
if app.config['PRODUCTION']:
session.add(UserModel(**self.users[0]))
else:
for user_json in self.users:
session.add(UserModel(**user_json))
session.commit()
for study_json in self.studies:
study_model = StudyModel(**study_json)
db.session.add(study_model)
session.add(study_model)
StudyService._add_all_workflow_specs_to_study(study_model)
db.session.execute(Sequence(StudyModel.__tablename__ + '_id_seq'))
db.session.commit()
db.session.flush()
session.execute(Sequence(StudyModel.__tablename__ + '_id_seq'))
session.commit()
session.flush()
specs = session.query(WorkflowSpecModel).all()
self.assertIsNotNone(specs)
@ -168,8 +185,8 @@ class BaseTest(unittest.TestCase):
"""Loads a spec into the database based on a directory in /tests/data"""
if category_id is None:
category = WorkflowSpecCategoryModel(name="test", display_name="Test Workflows", display_order=0)
db.session.add(category)
db.session.commit()
session.add(category)
session.commit()
category_id = category.id
if session.query(WorkflowSpecModel).filter_by(id=dir_name).count() > 0:
@ -217,14 +234,13 @@ class BaseTest(unittest.TestCase):
return '?%s' % '&'.join(query_string_list)
def replace_file(self, name, file_path):
"""Replaces a stored file with the given name with the contents of the file at the given path."""
file_service = FileService()
file = open(file_path, "rb")
data = file.read()
file_model = db.session.query(FileModel).filter(FileModel.name == name).first()
file_model = session.query(FileModel).filter(FileModel.name == name).first()
noise, file_extension = os.path.splitext(file_path)
content_type = CONTENT_TYPES[file_extension[1:]]
file_service.update_file(file_model, data, content_type)
@ -233,18 +249,19 @@ class BaseTest(unittest.TestCase):
user = session.query(UserModel).filter(UserModel.uid == uid).first()
if user is None:
user = UserModel(uid=uid, email_address=email, display_name=display_name)
db.session.add(user)
db.session.commit()
session.add(user)
session.commit()
return user
def create_study(self, uid="dhf8r", title="Beer consumption in the bipedal software engineer", primary_investigator_id="lb3dp"):
def create_study(self, uid="dhf8r", title="Beer consumption in the bipedal software engineer",
primary_investigator_id="lb3dp"):
study = session.query(StudyModel).filter_by(user_uid=uid).filter_by(title=title).first()
if study is None:
user = self.create_user(uid=uid)
study = StudyModel(title=title, protocol_builder_status=ProtocolBuilderStatus.ACTIVE,
study = StudyModel(title=title, protocol_builder_status=ProtocolBuilderStatus.active,
user_uid=user.uid, primary_investigator_id=primary_investigator_id)
db.session.add(study)
db.session.commit()
session.add(study)
session.commit()
return study
def _create_study_workflow_approvals(self, user_uid, title, primary_investigator_id, approver_uids, statuses,
@ -271,8 +288,8 @@ class BaseTest(unittest.TestCase):
return full_study
def create_workflow(self, workflow_name, display_name=None, study=None, category_id=None, as_user="dhf8r"):
db.session.flush()
spec = db.session.query(WorkflowSpecModel).filter(WorkflowSpecModel.name == workflow_name).first()
session.flush()
spec = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.name == workflow_name).first()
if spec is None:
if display_name is None:
display_name = workflow_name
@ -291,29 +308,31 @@ class BaseTest(unittest.TestCase):
file.close()
def create_approval(
self,
study=None,
workflow=None,
approver_uid=None,
status=None,
version=None,
self,
study=None,
workflow=None,
approver_uid=None,
status=None,
version=None,
):
study = study or self.create_study()
workflow = workflow or self.create_workflow()
approver_uid = approver_uid or self.test_uid
status = status or ApprovalStatus.PENDING.value
version = version or 1
approval = ApprovalModel(study=study, workflow=workflow, approver_uid=approver_uid, status=status, version=version)
db.session.add(approval)
db.session.commit()
approval = ApprovalModel(study=study, workflow=workflow, approver_uid=approver_uid, status=status,
version=version)
session.add(approval)
session.commit()
return approval
def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, user_uid="dhf8r"):
def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, do_engine_steps=True, user_uid="dhf8r"):
user = session.query(UserModel).filter_by(uid=user_uid).first()
self.assertIsNotNone(user)
rv = self.app.get('/v1.0/workflow/%i?soft_reset=%s&hard_reset=%s' %
(workflow.id, str(soft_reset), str(hard_reset)),
rv = self.app.get(f'/v1.0/workflow/{workflow.id}'
f'?soft_reset={str(soft_reset)}'
f'&hard_reset={str(hard_reset)}'
f'&do_engine_steps={str(do_engine_steps)}',
headers=self.logged_in_headers(user),
content_type="application/json")
self.assert_success(rv)
@ -322,7 +341,6 @@ class BaseTest(unittest.TestCase):
self.assertEqual(workflow.workflow_spec_id, workflow_api.workflow_spec_id)
return workflow_api
def complete_form(self, workflow_in, task_in, dict_data, error_code=None, terminate_loop=None, user_uid="dhf8r"):
prev_completed_task_count = workflow_in.completed_tasks
if isinstance(task_in, dict):
@ -387,12 +405,18 @@ class BaseTest(unittest.TestCase):
self.assertEqual(task_in.multi_instance_count, event.mi_count)
if task_in.multi_instance_type == 'looping' and not terminate_loop:
self.assertEqual(task_in.multi_instance_index+1, event.mi_index)
self.assertEqual(task_in.multi_instance_index + 1, event.mi_index)
else:
self.assertEqual(task_in.multi_instance_index, event.mi_index)
self.assertEqual(task_in.process_name, event.process_name)
self.assertIsNotNone(event.date)
workflow = WorkflowApiSchema().load(json_data)
return workflow
def logout(self):
if 'user' in g:
del g.user
if 'impersonate_user' in g:
del g.impersonate_user

View File

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_17fwemw" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_17fwemw" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="MultiInstance" isExecutable="true">
<bpmn:startEvent id="StartEvent_1" name="StartEvent_1">
<bpmn:outgoing>Flow_0t6p1sb</bpmn:outgoing>

View File

@ -72,10 +72,10 @@ class TestFilesApi(BaseTest):
self.assertEqual(file, file2)
def test_add_file_from_task_and_form_errors_on_invalid_form_field_name(self):
self.load_example_data()
self.create_reference_document()
workflow = self.create_workflow('file_upload_form')
processor = WorkflowProcessor(workflow)
processor.do_engine_steps()
task = processor.next_task()
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
correct_name = task.task_spec.form.fields[0].id
@ -96,6 +96,7 @@ class TestFilesApi(BaseTest):
self.create_reference_document()
workflow = self.create_workflow('file_upload_form')
processor = WorkflowProcessor(workflow)
processor.do_engine_steps()
task = processor.next_task()
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
correct_name = task.task_spec.form.fields[0].id

View File

@ -1,4 +1,6 @@
import json
from profile import Profile
from tests.base_test import BaseTest
from datetime import datetime, timezone
@ -13,6 +15,7 @@ from crc.models.study import StudyModel, StudySchema
from crc.models.workflow import WorkflowSpecModel, WorkflowModel
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.workflow_service import WorkflowService
class TestStudyApi(BaseTest):
@ -21,7 +24,7 @@ class TestStudyApi(BaseTest):
"title": "Phase III Trial of Genuine People Personalities (GPP) Autonomous Intelligent Emotional Agents "
"for Interstellar Spacecraft",
"last_updated": datetime.now(tz=timezone.utc),
"protocol_builder_status": ProtocolBuilderStatus.ACTIVE,
"protocol_builder_status": ProtocolBuilderStatus.active,
"primary_investigator_id": "tmm2x",
"user_uid": "dhf8r",
}
@ -132,7 +135,7 @@ class TestStudyApi(BaseTest):
self.load_example_data()
study: StudyModel = session.query(StudyModel).first()
study.title = "Pilot Study of Fjord Placement for Single Fraction Outcomes to Cortisol Susceptibility"
study.protocol_builder_status = ProtocolBuilderStatus.ACTIVE
study.protocol_builder_status = ProtocolBuilderStatus.active
rv = self.app.put('/v1.0/study/%i' % study.id,
content_type="application/json",
headers=self.logged_in_headers(),
@ -182,11 +185,11 @@ class TestStudyApi(BaseTest):
num_open = 0
for study in json_data:
if study['protocol_builder_status'] == 'ABANDONED': # One study does not exist in user_studies.json
if study['protocol_builder_status'] == 'abandoned': # One study does not exist in user_studies.json
num_abandoned += 1
if study['protocol_builder_status'] == 'ACTIVE': # One study is marked complete without HSR Number
if study['protocol_builder_status'] == 'active': # One study is marked complete without HSR Number
num_active += 1
if study['protocol_builder_status'] == 'OPEN': # One study is marked complete and has an HSR Number
if study['protocol_builder_status'] == 'open': # One study is marked complete and has an HSR Number
num_open += 1
db_studies_after = session.query(StudyModel).all()

View File

@ -40,7 +40,7 @@ class TestStudyService(BaseTest):
for study in db.session.query(StudyModel).all():
StudyService().delete_study(study.id)
study = StudyModel(title="My title", protocol_builder_status=ProtocolBuilderStatus.ACTIVE, user_uid=user.uid)
study = StudyModel(title="My title", protocol_builder_status=ProtocolBuilderStatus.active, user_uid=user.uid)
db.session.add(study)
self.load_test_spec("random_fact", category_id=cat.id)
@ -79,6 +79,7 @@ class TestStudyService(BaseTest):
# Initialize the Workflow with the workflow processor.
workflow_model = db.session.query(WorkflowModel).filter(WorkflowModel.id == workflow.id).first()
processor = WorkflowProcessor(workflow_model)
processor.do_engine_steps()
# Assure the workflow is now started, and knows the total and completed tasks.
studies = StudyService.get_studies_for_user(user)

View File

@ -5,7 +5,7 @@ from datetime import timezone, datetime, timedelta
import jwt
from tests.base_test import BaseTest
from crc import db, app
from crc import app, session
from crc.api.common import ApiError
from crc.models.protocol_builder import ProtocolBuilderStatus
from crc.models.study import StudySchema, StudyModel
@ -13,6 +13,8 @@ from crc.models.user import UserModel
class TestAuthentication(BaseTest):
admin_uid = 'dhf8r'
non_admin_uid = 'lb3dp'
def tearDown(self):
# Assure we set the production flag back to false.
@ -58,9 +60,9 @@ class TestAuthentication(BaseTest):
self.assertTrue(expected_exp_3 - 1000 <= actual_exp_3 <= expected_exp_3 + 1000)
def test_non_production_auth_creates_user(self):
new_uid = 'lb3dp' ## Assure this user id is in the fake responses from ldap.
new_uid = self.non_admin_uid ## Assure this user id is in the fake responses from ldap.
self.load_example_data()
user = db.session.query(UserModel).filter(UserModel.uid == new_uid).first()
user = session.query(UserModel).filter(UserModel.uid == new_uid).first()
self.assertIsNone(user)
user_info = {'uid': new_uid, 'first_name': 'Cordi', 'last_name': 'Nator',
@ -72,7 +74,7 @@ class TestAuthentication(BaseTest):
self.assertTrue(rv_1.status_code == 302)
self.assertTrue(str.startswith(rv_1.location, redirect_url))
user = db.session.query(UserModel).filter(UserModel.uid == new_uid).first()
user = session.query(UserModel).filter(UserModel.uid == new_uid).first()
self.assertIsNotNone(user)
self.assertIsNotNone(user.display_name)
self.assertIsNotNone(user.email_address)
@ -88,21 +90,20 @@ class TestAuthentication(BaseTest):
self.load_example_data()
new_uid = 'lb3dp' # This user is in the test ldap system.
user = db.session.query(UserModel).filter_by(uid=new_uid).first()
# User should not be in the system yet.
user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first()
self.assertIsNone(user)
redirect_url = 'http://worlds.best.website/admin'
headers = dict(Uid=new_uid)
db.session.flush()
rv = self.app.get('v1.0/login', follow_redirects=False, headers=headers)
self.assert_success(rv)
user = db.session.query(UserModel).filter_by(uid=new_uid).first()
self.assertIsNotNone(user)
self.assertEqual(new_uid, user.uid)
self.assertEqual("Laura Barnes", user.display_name)
self.assertEqual("lb3dp@virginia.edu", user.email_address)
self.assertEqual("E0:Associate Professor of Systems and Information Engineering", user.title)
# Log in
non_admin_user = self._login_as_non_admin()
# User should be in the system now.
redirect_url = 'http://worlds.best.website/admin'
rv_user = self.app.get('/v1.0/user', headers=self.logged_in_headers(non_admin_user, redirect_url=redirect_url))
self.assert_success(rv_user)
user_data = json.loads(rv_user.get_data(as_text=True))
self.assertEqual(self.non_admin_uid, user_data['uid'])
self.assertFalse(user_data['is_admin'])
# Switch production mode back off
app.config['PRODUCTION'] = False
@ -119,6 +120,8 @@ class TestAuthentication(BaseTest):
user = UserModel(uid="dhf8r", first_name='Dan', last_name='Funk', email_address='dhf8r@virginia.edu')
rv = self.app.get('/v1.0/user', headers=self.logged_in_headers(user, redirect_url='http://omg.edu/lolwut'))
self.assert_success(rv)
user_data = json.loads(rv.get_data(as_text=True))
self.assertTrue(user_data['is_admin'])
def test_admin_can_access_admin_only_endpoints(self):
# Switch production mode on
@ -126,21 +129,8 @@ class TestAuthentication(BaseTest):
self.load_example_data()
admin_uids = app.config['ADMIN_UIDS']
self.assertGreater(len(admin_uids), 0)
admin_uid = admin_uids[0]
self.assertEqual(admin_uid, 'dhf8r') # This user is in the test ldap system.
admin_headers = dict(Uid=admin_uid)
rv = self.app.get('v1.0/login', follow_redirects=False, headers=admin_headers)
self.assert_success(rv)
admin_user = db.session.query(UserModel).filter(UserModel.uid == admin_uid).first()
self.assertIsNotNone(admin_user)
self.assertEqual(admin_uid, admin_user.uid)
admin_study = self._make_fake_study(admin_uid)
admin_user = self._login_as_admin()
admin_study = self._make_fake_study(admin_user.uid)
admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode())
rv_add_study = self.app.post(
@ -153,7 +143,7 @@ class TestAuthentication(BaseTest):
self.assert_success(rv_add_study, 'Admin user should be able to add a study')
new_admin_study = json.loads(rv_add_study.get_data(as_text=True))
db_admin_study = db.session.query(StudyModel).filter_by(id=new_admin_study['id']).first()
db_admin_study = session.query(StudyModel).filter_by(id=new_admin_study['id']).first()
self.assertIsNotNone(db_admin_study)
rv_del_study = self.app.delete(
@ -173,26 +163,9 @@ class TestAuthentication(BaseTest):
self.load_example_data()
# Non-admin user should not be able to delete a study
non_admin_uid = 'lb3dp'
admin_uids = app.config['ADMIN_UIDS']
self.assertGreater(len(admin_uids), 0)
self.assertNotIn(non_admin_uid, admin_uids)
non_admin_headers = dict(Uid=non_admin_uid)
rv = self.app.get(
'v1.0/login',
follow_redirects=False,
headers=non_admin_headers
)
self.assert_success(rv)
non_admin_user = db.session.query(UserModel).filter_by(uid=non_admin_uid).first()
self.assertIsNotNone(non_admin_user)
non_admin_user = self._login_as_non_admin()
non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode())
non_admin_study = self._make_fake_study(non_admin_uid)
non_admin_study = self._make_fake_study(non_admin_user.uid)
rv_add_study = self.app.post(
'/v1.0/study',
@ -203,7 +176,7 @@ class TestAuthentication(BaseTest):
self.assert_success(rv_add_study, 'Non-admin user should be able to add a study')
new_non_admin_study = json.loads(rv_add_study.get_data(as_text=True))
db_non_admin_study = db.session.query(StudyModel).filter_by(id=new_non_admin_study['id']).first()
db_non_admin_study = session.query(StudyModel).filter_by(id=new_non_admin_study['id']).first()
self.assertIsNotNone(db_non_admin_study)
rv_non_admin_del_study = self.app.delete(
@ -216,11 +189,131 @@ class TestAuthentication(BaseTest):
# Switch production mode back off
app.config['PRODUCTION'] = False
def test_list_all_users(self):
self.load_example_data()
rv = self.app.get('/v1.0/user')
self.assert_failure(rv, 401)
rv = self.app.get('/v1.0/user', headers=self.logged_in_headers())
self.assert_success(rv)
all_users = session.query(UserModel).all()
rv = self.app.get('/v1.0/list_users', headers=self.logged_in_headers())
self.assert_success(rv)
user_data = json.loads(rv.get_data(as_text=True))
self.assertEqual(len(user_data), len(all_users))
def test_admin_can_impersonate_another_user(self):
# Switch production mode on
app.config['PRODUCTION'] = True
self.load_example_data()
admin_user = self._login_as_admin()
admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode())
# User should not be in the system yet.
non_admin_user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first()
self.assertIsNone(non_admin_user)
# Admin should not be able to impersonate non-existent user
rv_1 = self.app.get(
'/v1.0/user?admin_impersonate_uid=' + self.non_admin_uid,
content_type="application/json",
headers=admin_token_headers,
follow_redirects=False
)
self.assert_failure(rv_1, 400)
# Add the non-admin user now
self.logout()
non_admin_user = self._login_as_non_admin()
self.assertEqual(non_admin_user.uid, self.non_admin_uid)
non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode())
# Add a study for the non-admin user
non_admin_study = self._make_fake_study(self.non_admin_uid)
rv_add_study = self.app.post(
'/v1.0/study',
content_type="application/json",
headers=non_admin_token_headers,
data=json.dumps(StudySchema().dump(non_admin_study))
)
self.assert_success(rv_add_study, 'Non-admin user should be able to add a study')
self.logout()
# Admin should be able to impersonate user now
admin_user = self._login_as_admin()
rv_2 = self.app.get(
'/v1.0/user?admin_impersonate_uid=' + self.non_admin_uid,
content_type="application/json",
headers=admin_token_headers,
follow_redirects=False
)
self.assert_success(rv_2)
user_data_2 = json.loads(rv_2.get_data(as_text=True))
self.assertEqual(user_data_2['uid'], self.non_admin_uid, 'Admin user should impersonate non-admin user')
# Study endpoint should return non-admin user's studies
rv_study = self.app.get(
'/v1.0/study',
content_type="application/json",
headers=admin_token_headers,
follow_redirects=False
)
self.assert_success(rv_study, 'Admin user should be able to get impersonated user studies')
study_data = json.loads(rv_study.get_data(as_text=True))
self.assertGreaterEqual(len(study_data), 1)
self.assertEqual(study_data[0]['user_uid'], self.non_admin_uid)
# Switch production mode back off
app.config['PRODUCTION'] = False
def _make_fake_study(self, uid):
return {
"title": "blah",
"last_updated": datetime.now(tz=timezone.utc),
"protocol_builder_status": ProtocolBuilderStatus.ACTIVE,
"protocol_builder_status": ProtocolBuilderStatus.active,
"primary_investigator_id": uid,
"user_uid": uid,
}
def _login_as_admin(self):
admin_uids = app.config['ADMIN_UIDS']
self.assertGreater(len(admin_uids), 0)
self.assertIn(self.admin_uid, admin_uids)
admin_headers = dict(Uid=self.admin_uid)
rv = self.app.get('v1.0/login', follow_redirects=False, headers=admin_headers)
self.assert_success(rv)
admin_user = session.query(UserModel).filter(UserModel.uid == self.admin_uid).first()
self.assertIsNotNone(admin_user)
self.assertEqual(self.admin_uid, admin_user.uid)
self.assertTrue(admin_user.is_admin())
return admin_user
def _login_as_non_admin(self):
admin_uids = app.config['ADMIN_UIDS']
self.assertGreater(len(admin_uids), 0)
self.assertNotIn(self.non_admin_uid, admin_uids)
non_admin_headers = dict(Uid=self.non_admin_uid)
rv = self.app.get(
'v1.0/login?uid=' + self.non_admin_uid,
follow_redirects=False,
headers=non_admin_headers
)
self.assert_success(rv)
user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first()
self.assertIsNotNone(user)
self.assertFalse(user.is_admin())
self.assertIsNotNone(user)
self.assertEqual(self.non_admin_uid, user.uid)
self.assertEqual("Laura Barnes", user.display_name)
self.assertEqual("lb3dp@virginia.edu", user.email_address)
self.assertEqual("E0:Associate Professor of Systems and Information Engineering", user.title)
return user

43
tests/test_events.py Normal file
View File

@ -0,0 +1,43 @@
import json
from tests.base_test import BaseTest
from crc.models.workflow import WorkflowStatus
from crc import db
from crc.api.common import ApiError
from crc.models.task_event import TaskEventModel, TaskEventSchema
from crc.services.workflow_service import WorkflowService
class TestEvents(BaseTest):
def test_list_events_by_workflow(self):
workflow_one = self.create_workflow('exclusive_gateway')
# Start a the workflow.
first_task = self.get_workflow_api(workflow_one).next_task
self.complete_form(workflow_one, first_task, {"has_bananas": True})
workflow_one = self.get_workflow_api(workflow_one)
self.assertEqual('Task_Num_Bananas', workflow_one.next_task.name)
# Start a second workflow
workflow_two = self.create_workflow('subprocess')
workflow_api_two = self.get_workflow_api(workflow_two)
# Get all action events across workflows
rv = self.app.get('/v1.0/task_events?action=ASSIGNMENT',
headers=self.logged_in_headers(),
content_type="application/json")
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
tasks = TaskEventSchema(many=True).load(json_data)
self.assertEqual(2, len(tasks))
# Get action events for a single workflow
rv = self.app.get(f'/v1.0/task_events?action=ASSIGNMENT&workflow={workflow_one.id}',
headers=self.logged_in_headers(),
content_type="application/json")
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
tasks = TaskEventSchema(many=True).load(json_data)
self.assertEqual(1, len(tasks))

View File

@ -9,6 +9,7 @@ from crc import session, app
from crc.models.api_models import WorkflowApiSchema, MultiInstanceType, TaskSchema
from crc.models.file import FileModelSchema
from crc.models.workflow import WorkflowStatus
from crc.models.task_event import TaskEventModel
class TestTasksApi(BaseTest):
@ -42,6 +43,24 @@ class TestTasksApi(BaseTest):
"""
self.assertTrue(str.startswith(task.documentation, expected_docs))
def test_get_workflow_without_running_engine_steps(self):
# Set up a new workflow
workflow = self.create_workflow('two_forms')
# get the first form in the two form workflow.
workflow_api = self.get_workflow_api(workflow, do_engine_steps=False)
# There should be no task event logs related to the workflow at this point.
task_events = session.query(TaskEventModel).filter(TaskEventModel.workflow_id == workflow.id).all()
self.assertEqual(0, len(task_events))
# Since the workflow was not started, the call to read-only should not execute any engine steps the
# current task should be the start event.
self.assertEqual("Start", workflow_api.next_task.name)
def test_get_form_for_previously_completed_task(self):
"""Assure we can look at previously completed steps without moving the token for the workflow."""
def test_two_forms_task(self):
# Set up a new workflow
self.load_example_data()
@ -69,7 +88,6 @@ class TestTasksApi(BaseTest):
self.assertIsNotNone(val)
def test_error_message_on_bad_gateway_expression(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# get the first form in the two form workflow.
@ -77,7 +95,6 @@ class TestTasksApi(BaseTest):
self.complete_form(workflow, task, {"has_bananas": True})
def test_workflow_with_parallel_forms(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# get the first form in the two form workflow.
@ -89,7 +106,6 @@ class TestTasksApi(BaseTest):
self.assertEqual("Task_Num_Bananas", workflow_api.next_task.name)
def test_navigation_with_parallel_forms(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# get the first form in the two form workflow.
@ -107,7 +123,6 @@ class TestTasksApi(BaseTest):
self.assertEqual("NOOP", nav[3]['state'])
def test_navigation_with_exclusive_gateway(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway_2')
# get the first form in the two form workflow.
@ -124,7 +139,6 @@ class TestTasksApi(BaseTest):
self.assertEqual("Task 3", nav[6]['title'])
def test_document_added_to_workflow_shows_up_in_file_list(self):
self.load_example_data()
self.create_reference_document()
workflow = self.create_workflow('docx')
@ -153,7 +167,6 @@ class TestTasksApi(BaseTest):
def test_get_documentation_populated_in_end(self):
self.load_example_data()
workflow = self.create_workflow('random_fact')
workflow_api = self.get_workflow_api(workflow)
task = workflow_api.next_task
@ -167,9 +180,7 @@ class TestTasksApi(BaseTest):
self.assertTrue("norris" in workflow_api.next_task.documentation)
def test_load_workflow_from_outdated_spec(self):
# Start the basic two_forms workflow and complete a task.
self.load_example_data()
workflow = self.create_workflow('two_forms')
workflow_api = self.get_workflow_api(workflow)
self.complete_form(workflow, workflow_api.next_task, {"color": "blue"})
@ -194,9 +205,7 @@ class TestTasksApi(BaseTest):
self.assertTrue(workflow_api.is_latest_spec)
def test_soft_reset_errors_out_and_next_result_is_on_original_version(self):
# Start the basic two_forms workflow and complete a task.
self.load_example_data()
workflow = self.create_workflow('two_forms')
workflow_api = self.get_workflow_api(workflow)
self.complete_form(workflow, workflow_api.next_task, {"color": "blue"})
@ -221,7 +230,6 @@ class TestTasksApi(BaseTest):
def test_manual_task_with_external_documentation(self):
self.load_example_data()
workflow = self.create_workflow('manual_task_with_external_documentation')
# get the first form in the two form workflow.
@ -235,7 +243,6 @@ class TestTasksApi(BaseTest):
self.assertTrue('Dan' in workflow_api.next_task.documentation)
def test_bpmn_extension_properties_are_populated(self):
self.load_example_data()
workflow = self.create_workflow('manual_task_with_external_documentation')
# get the first form in the two form workflow.
@ -268,9 +275,7 @@ class TestTasksApi(BaseTest):
# Assure that the names for each task are properly updated, so they aren't all the same.
self.assertEqual("Primary Investigator", workflow.next_task.properties['display_name'])
def test_lookup_endpoint_for_task_field_enumerations(self):
self.load_example_data()
workflow = self.create_workflow('enum_options_with_search')
# get the first form in the two form workflow.
workflow = self.get_workflow_api(workflow)
@ -286,7 +291,6 @@ class TestTasksApi(BaseTest):
self.assert_options_populated(results, ['CUSTOMER_NUMBER', 'CUSTOMER_NAME', 'CUSTOMER_CLASS_MEANING'])
def test_lookup_endpoint_for_task_field_using_lookup_entry_id(self):
self.load_example_data()
workflow = self.create_workflow('enum_options_with_search')
# get the first form in the two form workflow.
workflow = self.get_workflow_api(workflow)
@ -316,7 +320,6 @@ class TestTasksApi(BaseTest):
# the key/values from the spreadsheet are added directly to the form and it shows up as
# a dropdown. This tests the case of wanting to get additional data when a user selects
# something from a dropdown.
self.load_example_data()
workflow = self.create_workflow('enum_options_from_file')
# get the first form in the two form workflow.
workflow = self.get_workflow_api(workflow)
@ -334,7 +337,6 @@ class TestTasksApi(BaseTest):
self.assertIsInstance(results[0]['data'], dict)
def test_enum_from_task_data(self):
self.load_example_data()
workflow = self.create_workflow('enum_options_from_task_data')
# get the first form in the two form workflow.
workflow_api = self.get_workflow_api(workflow)
@ -359,7 +361,6 @@ class TestTasksApi(BaseTest):
self.assertEqual('Chesterfield', options[2]['data']['first_name'])
def test_lookup_endpoint_for_task_ldap_field_lookup(self):
self.load_example_data()
workflow = self.create_workflow('ldap_lookup')
# get the first form
workflow = self.get_workflow_api(workflow)
@ -378,7 +379,6 @@ class TestTasksApi(BaseTest):
self.assertEqual(1, len(results))
def test_sub_process(self):
self.load_example_data()
workflow = self.create_workflow('subprocess')
workflow_api = self.get_workflow_api(workflow)
@ -399,7 +399,6 @@ class TestTasksApi(BaseTest):
self.assertEqual(WorkflowStatus.complete, workflow_api.status)
def test_update_task_resets_token(self):
self.load_example_data()
workflow = self.create_workflow('exclusive_gateway')
# Start the workflow.
@ -477,3 +476,5 @@ class TestTasksApi(BaseTest):
workflow = self.get_workflow_api(workflow)
self.assertEqual(WorkflowStatus.complete, workflow.status)

View File

@ -111,6 +111,7 @@ class TestTasksApi(BaseTest):
data['approval'] = True
self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid)
def test_navigation_and_current_task_updates_through_workflow(self):
submitter = self.create_user(uid='lje5u')
@ -200,4 +201,67 @@ class TestTasksApi(BaseTest):
workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)
self.assertEquals('COMPLETED', workflow_api.next_task.state)
self.assertEquals('EndEvent', workflow_api.next_task.type) # Are are at the end.
self.assertEquals(WorkflowStatus.complete, workflow_api.status)
self.assertEquals(WorkflowStatus.complete, workflow_api.status)
def get_assignment_task_events(self, uid):
    """Return every TaskEventModel row recording an ASSIGNMENT action for the given user uid."""
    query = db.session.query(TaskEventModel)
    query = query.filter(TaskEventModel.user_uid == uid)
    query = query.filter(TaskEventModel.action == WorkflowService.TASK_ACTION_ASSIGNMENT)
    return query.all()
def test_workflow_reset_correctly_resets_the_task_events(self):
    """Walk a two-role approval workflow and verify that ASSIGNMENT task events
    always point at the user currently responsible for the next task, and that
    a hard reset of the workflow clears the event log entirely.

    Uses the deprecated-alias-free assertEqual (assertEquals was removed in
    Python 3.12).
    """
    submitter = self.create_user(uid='lje5u')
    supervisor = self.create_user(uid='lb3dp')
    workflow = self.create_workflow('roles', display_name="Roles", as_user=submitter.uid)
    workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)

    # User lje5u can complete the first task, and set her supervisor
    data = workflow_api.next_task.data
    data['supervisor'] = supervisor.uid
    workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)

    # At this point there should be a task_log with an action of ASSIGNMENT on it for
    # the supervisor.
    self.assertEqual(1, len(self.get_assignment_task_events(supervisor.uid)))

    # Resetting the workflow at this point should clear the event log.
    workflow_api = self.get_workflow_api(workflow, hard_reset=True, user_uid=submitter.uid)
    self.assertEqual(0, len(self.get_assignment_task_events(supervisor.uid)))

    # Re-complete first task, and awaiting tasks should shift to 0 for submitter, and 1 for supervisor
    workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
    self.assertEqual(0, len(self.get_assignment_task_events(submitter.uid)))
    self.assertEqual(1, len(self.get_assignment_task_events(supervisor.uid)))

    # Complete the supervisor task with rejected approval, and the assignments should switch.
    workflow_api = self.get_workflow_api(workflow, user_uid=supervisor.uid)
    data = workflow_api.next_task.data
    data["approval"] = False
    workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid)
    self.assertEqual(1, len(self.get_assignment_task_events(submitter.uid)))
    self.assertEqual(0, len(self.get_assignment_task_events(supervisor.uid)))

    # Mark the return form review page as complete, and then recomplete the form, and assignments switch yet again.
    workflow_api = self.get_workflow_api(workflow, user_uid=submitter.uid)
    workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
    workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
    self.assertEqual(0, len(self.get_assignment_task_events(submitter.uid)))
    self.assertEqual(1, len(self.get_assignment_task_events(supervisor.uid)))

    # Complete the supervisor task, accepting the approval, and the workflow is completed.
    # When it is all done, there should be no outstanding assignments.
    workflow_api = self.get_workflow_api(workflow, user_uid=supervisor.uid)
    data = workflow_api.next_task.data
    data["approval"] = True
    workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=supervisor.uid)
    self.assertEqual(WorkflowStatus.complete, workflow_api.status)
    self.assertEqual('EndEvent', workflow_api.next_task.type)  # We are at the end.
    self.assertEqual(0, len(self.get_assignment_task_events(submitter.uid)))
    self.assertEqual(0, len(self.get_assignment_task_events(supervisor.uid)))

    # Sending any subsequent complete forms does not result in a new task event
    with self.assertRaises(AssertionError) as _api_error:
        workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)
    self.assertEqual(0, len(self.get_assignment_task_events(submitter.uid)))
    self.assertEqual(0, len(self.get_assignment_task_events(supervisor.uid)))

View File

@ -36,6 +36,7 @@ class TestWorkflowProcessor(BaseTest):
workflow_spec_model = self.load_test_spec("random_fact")
study = session.query(StudyModel).first()
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(study.id, processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY])
self.assertIsNotNone(processor)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
@ -62,6 +63,7 @@ class TestWorkflowProcessor(BaseTest):
files = session.query(FileModel).filter_by(workflow_spec_id='decision_table').all()
self.assertEqual(2, len(files))
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(1, len(next_user_tasks))
@ -86,6 +88,7 @@ class TestWorkflowProcessor(BaseTest):
workflow_spec_model = self.load_test_spec("parallel_tasks")
study = session.query(StudyModel).first()
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
# Complete the first steps of the 4 parallel tasks
@ -127,6 +130,7 @@ class TestWorkflowProcessor(BaseTest):
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("parallel_tasks")
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(4, len(next_user_tasks))
@ -215,6 +219,7 @@ class TestWorkflowProcessor(BaseTest):
self.assertEqual(2, len(files))
workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id="docx").first()
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(1, len(next_user_tasks))
@ -278,6 +283,7 @@ class TestWorkflowProcessor(BaseTest):
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("two_forms")
processor = self.get_processor(study, workflow_spec_model)
processor.do_engine_steps()
self.assertEqual(processor.workflow_model.workflow_spec_id, workflow_spec_model.id)
task = processor.next_task()
task.data = {"color": "blue"}

View File

@ -47,6 +47,7 @@ class TestWorkflowProcessorMultiInstance(BaseTest):
workflow_spec_model = self.load_test_spec("multi_instance")
study = session.query(StudyModel).first()
processor = self.get_processor(study, workflow_spec_model)
processor.bpmn_workflow.do_engine_steps()
self.assertEqual(study.id, processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY])
self.assertIsNotNone(processor)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())