Mirror of https://github.com/sartography/cr-connect-workflow.git (synced 2025-02-23 05:08:32 +00:00)

Merge pull request #24 from sartography/feature/reference_files
Feature/reference files

Commit e2331330e5
Pipfile — 2 lines changed

@@ -34,6 +34,8 @@ psycopg2-binary = "*"
docxtpl = "*"
flask-sso = "*"
python-dateutil = "*"
pandas = "*"
xlrd = "*"

[requires]
python_version = "3.7"
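The two new dependencies support the reference-file work below: pandas (with the xlrd engine) is used to parse the irb_documents.xlsx spreadsheet. A minimal sketch of that kind of read, mirroring FileService.get_file_reference_dictionary later in this change set; the column names come from that code, the file path is illustrative:

from pandas import ExcelFile

xls = ExcelFile("irb_documents.xlsx")   # xlrd is the Excel engine pandas 1.0.x uses here
df = xls.parse(xls.sheet_names[0])      # first sheet -> DataFrame with 'Id' and 'Code' columns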
Pipfile.lock (generated) — 266 lines changed

The auto-generated lockfile was re-resolved for the new Pipfile entries. Its "_meta" hash changed from
  sha256:60e48d05048f627878a5c81377318bc2ff2a94b2574441c166fda3a523c789df
to
  sha256:51f307cebb352f07cb4241ed716ff7abff66581033169ce0fcf936e0b8e16b2f
and the per-package sha256 hash lists were replaced to match the following version changes:

default packages:
  alembic                        ==1.4.0   -> ==1.4.2
  celery                         ==4.4.0   -> ==4.4.2
  click                          ==7.0     -> ==7.1.1
  coverage                       ==5.0.3   -> ==5.0.4
  flask-migrate                  ==2.5.2   -> ==2.5.3
  kombu                          ==4.6.7   -> ==4.6.8
  mako                           ==1.1.1   -> ==1.1.2
  marshmallow                    ==3.5.0   -> ==3.5.1
  marshmallow-sqlalchemy         ==0.22.2  -> ==0.22.3
  numpy                          (new)        ==1.18.2
  packaging                      ==20.1    -> ==20.3
  pandas                         (new)        ==1.0.3
  pycparser                      ==2.19    -> ==2.20
  pygments                       ==2.5.2   -> ==2.6.1
  pyyaml                         ==5.3     -> ==5.3.1
  sphinx                         ==2.4.3   -> ==2.4.4
  sphinxcontrib-applehelp        ==1.0.1   -> ==1.0.2
  sphinxcontrib-devhelp          ==1.0.1   -> ==1.0.2
  sphinxcontrib-qthelp           ==1.0.2   -> ==1.0.3
  sphinxcontrib-serializinghtml  ==1.1.3   -> ==1.1.4
  sqlalchemy                     ==1.3.13  -> ==1.3.15
  xlrd                           (new)        ==1.2.0
  zipp                           ==3.0.0   -> ==3.1.0

develop packages:
  packaging                      ==20.1    -> ==20.3
  pytest                         ==5.3.5   -> ==5.4.1
  zipp                           ==3.0.0   -> ==3.1.0

The editable spiffworkflow entry is unchanged.
crc/api.yml — 65 lines changed

@@ -405,7 +405,7 @@ paths:
      - name: form_field_key
        in: query
        required: false
        description: The unique key of a workflow task form field
        description: The unique key of a workflow task form field. Make sure this matches a document in the irb_documents.xlsx reference sheet.
        schema:
          type: string
    get:
@@ -538,6 +538,67 @@ paths:
                format: binary
              example: '<?xml version="1.0" encoding="UTF-8"?><bpmn:definitions></bpmn:definitions>'
  # /v1.0/workflow/0
  /reference_file:
    get:
      operationId: crc.api.file.get_reference_files
      summary: Provides a list of existing reference files that are available in the system.
      tags:
        - Files
      responses:
        '200':
          description: An array of file descriptions (not the file content)
          content:
            application/json:
              schema:
                type: array
                items:
                  $ref: "#/components/schemas/File"
  /reference_file/{name}:
    parameters:
      - name: name
        in: path
        required: true
        description: The special name of the reference file.
        schema:
          type: string
    get:
      operationId: crc.api.file.get_reference_file
      summary: Reference files are called by name rather than by id.
      tags:
        - Files
      responses:
        '200':
          description: Returns the actual file
          content:
            application/octet-stream:
              schema:
                type: string
                format: binary
              example: '<?xml version="1.0" encoding="UTF-8"?><bpmn:definitions></bpmn:definitions>'
    put:
      operationId: crc.api.file.set_reference_file
      summary: Update the contents of a named reference file.
      tags:
        - Files
      requestBody:
        content:
          multipart/form-data:
            schema:
              type: object
              properties:
                file:
                  type: string
                  format: binary
      responses:
        '200':
          description: Returns the actual file
          content:
            application/octet-stream:
              schema:
                type: string
                format: binary
              example: '<?xml version="1.0" encoding="UTF-8"?><bpmn:definitions></bpmn:definitions>'
  # /v1.0/workflow/0
  /workflow/{workflow_id}:
    parameters:
      - name: workflow_id
@@ -836,7 +897,6 @@ components:
          example: "2019-12-25T09:12:33.001Z"
        type:
          type: string
          enum: ['bpmn','svg', 'dmn']
        primary:
          type: boolean
        content_type:
@@ -845,6 +905,7 @@ components:
        workflow_spec_id:
          type: string
          example: "random_fact"
          x-nullable: true
        file:
          type: file
          format: binary
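For orientation, a rough client-side sketch of the three new operations; the host, port, and lack of auth handling are assumptions, while the paths and payload shape come from the spec above:

import requests

base = "http://localhost:5000/v1.0"                                    # assumed local dev server
files = requests.get(base + "/reference_file").json()                   # list reference file descriptions
content = requests.get(base + "/reference_file/irb_documents.xlsx")     # download one by name
with open("irb_documents.xlsx", "rb") as fh:                            # replace its contents
    requests.put(base + "/reference_file/irb_documents.xlsx", files={"file": fh})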
@@ -1,13 +1,11 @@
import io
import os
from datetime import datetime

import connexion
from flask import send_file

from crc import session
from crc.api.common import ApiErrorSchema, ApiError
from crc.models.file import FileModelSchema, FileModel, FileDataModel, FileType
from crc.models.file import FileModelSchema, FileModel, FileDataModel
from crc.models.workflow import WorkflowSpecModel
from crc.services.file_service import FileService

@@ -21,6 +19,10 @@ def get_files(workflow_spec_id=None, study_id=None, workflow_id=None, task_id=None, form_field_key=None):
    results = FileService.get_files(workflow_spec_id, study_id, workflow_id, task_id, form_field_key)
    return FileModelSchema(many=True).dump(results)


def get_reference_files():
    results = FileService.get_files(is_reference=True)
    return FileModelSchema(many=True).dump(results)


def add_file(workflow_spec_id=None, study_id=None, workflow_id=None, task_id=None, form_field_key=None):
    all_none = all(v is None for v in [workflow_spec_id, study_id, workflow_id, task_id, form_field_key])
@@ -28,7 +30,7 @@ def add_file(workflow_spec_id=None, study_id=None, workflow_id=None, task_id=None, form_field_key=None):
    if all_none or missing_some:
        return ApiErrorSchema().dump(ApiError('missing_parameter',
                                              'Please specify either a workflow_spec_id or all 3 of study_id, '
                                              'workflow_id, and task_id for this file in the HTTP parameters')), 404
                                              'workflow_id, task_id and field_id for this file in the HTTP parameters')), 404
    if 'file' not in connexion.request.files:
        return ApiErrorSchema().dump(ApiError('invalid_file',
                                              'Expected a file named "file" in the multipart form request')), 404
@@ -43,6 +45,42 @@ def add_file(workflow_spec_id=None, study_id=None, workflow_id=None, task_id=None, form_field_key=None):
    return FileModelSchema().dump(file_model)


def get_reference_file(name):
    file_data = FileService.get_reference_file_data(name)
    return send_file(
        io.BytesIO(file_data.data),
        attachment_filename=file_data.file_model.name,
        mimetype=file_data.file_model.content_type,
        cache_timeout=-1  # Don't cache these files on the browser.
    )


def set_reference_file(name):
    """Uses the file service to manage reference-files. They will be used in script tasks to compute values."""
    if 'file' not in connexion.request.files:
        raise ApiError('invalid_file',
                       'Expected a file named "file" in the multipart form request', status_code=400)

    file = connexion.request.files['file']

    name_extension = FileService.get_extension(name)
    file_extension = FileService.get_extension(file.filename)
    if name_extension != file_extension:
        raise ApiError('invalid_file_type',
                       "The file you uploaded has an extension '%s', but it should have an extension of '%s' " %
                       (file_extension, name_extension))

    file_models = FileService.get_files(name=name, is_reference=True)
    if len(file_models) == 0:
        file_model = FileService.add_reference_file(name, file.content_type, file.stream.read())
    else:
        file_model = file_models[0]
        FileService.update_file(file_models[0], file.stream.read(), file.content_type)

    return FileModelSchema().dump(file_model)


def update_file_data(file_id):
    file_model = session.query(FileModel).filter_by(id=file_id).with_for_update().first()
    file = connexion.request.files['file']
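A quick illustration of the extension rule set_reference_file enforces, using FileService.get_extension as defined later in this change set; the file names are examples:

from crc.services.file_service import FileService

FileService.get_extension("irb_documents.xlsx")    # -> 'xlsx'
FileService.get_extension("does_not_matter.ppt")   # -> 'ppt'
# A PUT to /reference_file/irb_documents.xlsx with a .ppt upload therefore raises
# ApiError('invalid_file_type'), which is what test_set_reference_file_bad_extension
# asserts at the end of this change set.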
@@ -42,14 +42,11 @@ def get_workflow_specification(spec_id):
@auth.login_required
def update_workflow_specification(spec_id, body):
    if spec_id is None:
        error = ApiError('unknown_spec', 'Please provide a valid Workflow Spec ID.')
        return ApiErrorSchema.dump(error), 404
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Spec ID.')
    spec = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()

    if spec is None:
        error = ApiError('unknown_study', 'The spec "' + spec_id + '" is not recognized.')
        return ApiErrorSchema.dump(error), 404
        raise ApiError('unknown_study', 'The spec "' + spec_id + '" is not recognized.')

    schema = WorkflowSpecModelSchema()
    spec = schema.load(body, session=session, instance=spec, partial=True)
@@ -169,14 +166,12 @@ def add_workflow_spec_category(body):
@auth.login_required
def update_workflow_spec_category(cat_id, body):
    if cat_id is None:
        error = ApiError('unknown_category', 'Please provide a valid Workflow Spec Category ID.')
        return ApiErrorSchema.dump(error), 404
        raise ApiError('unknown_category', 'Please provide a valid Workflow Spec Category ID.')

    category = session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).first()

    if category is None:
        error = ApiError('unknown_category', 'The category "' + cat_id + '" is not recognized.')
        return ApiErrorSchema.dump(error), 404
        raise ApiError('unknown_category', 'The category "' + cat_id + '" is not recognized.')

    schema = WorkflowSpecCategoryModelSchema()
    category = schema.load(body, session=session, instance=category, partial=True)
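A sketch of why these handlers can now simply raise: a Flask/connexion application can register an error handler that converts a raised ApiError into a JSON error response. The helper below is an illustration only (an assumption), not the actual implementation in crc.api.common:

from flask import jsonify
from crc.api.common import ApiError

def register_api_error_handler(app):      # hypothetical helper name
    @app.errorhandler(ApiError)
    def handle_api_error(error):
        response = jsonify({'code': error.code, 'message': error.message})
        response.status_code = getattr(error, 'status_code', 400)
        return response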
@@ -107,6 +107,7 @@ class WorkflowApi(object):
        self.is_latest_spec = is_latest_spec
        self.is_active = is_active


class WorkflowApiSchema(ma.Schema):
    class Meta:
        model = WorkflowApi
@@ -70,13 +70,15 @@ class FileModel(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String)
    type = db.Column(db.Enum(FileType))
    primary = db.Column(db.Boolean)
    is_status = db.Column(db.Boolean)
    content_type = db.Column(db.String)
    is_reference = db.Column(db.Boolean, nullable=False, default=False)  # A global reference file.
    primary = db.Column(db.Boolean, nullable=False, default=False)  # Is this the primary BPMN in a workflow?
    workflow_spec_id = db.Column(db.String, db.ForeignKey('workflow_spec.id'), nullable=True)
    workflow_id = db.Column(db.Integer, db.ForeignKey('workflow.id'), nullable=True)
    study_id = db.Column(db.Integer, db.ForeignKey('study.id'), nullable=True)
    task_id = db.Column(db.String, nullable=True)
    irb_doc_code = db.Column(db.String, nullable=True)  # Code reference to the irb_documents.xlsx reference file.
    form_field_key = db.Column(db.String, nullable=True)
    latest_version = db.Column(db.Integer, default=0)
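A minimal sketch (not from this change set) of how the two new columns can be queried with the session used throughout crc; the study id and document code are examples (the code appears in the BPMN fixtures below):

from crc import session
from crc.models.file import FileModel

reference_files = session.query(FileModel).filter_by(is_reference=True).all()
coc_uploads = session.query(FileModel).filter_by(
    study_id=1,                                            # example study id
    irb_doc_code="AncillaryDocument.CoCApplication").count()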
@@ -59,41 +59,6 @@ class ProtocolBuilderInvestigatorSchema(ma.Schema):


class ProtocolBuilderRequiredDocument(object):
    DOC_TYPES = {
        1: "Investigators Brochure",
        6: "Cancer Center's PRC Approval Form",
        8: "SOM CTO IND/IDE Review Letter",
        9: "HIRE Approval",
        10: "Cancer Center's PRC Approval Waiver",
        12: "Certificate of Confidentiality Application",
        14: "Institutional Biosafety Committee Approval",
        18: "SOM CTO Approval Letter - UVA PI Multisite Trial",
        20: "IRB Approval or Letter of Approval from Administration: Study Conducted at non- UVA Facilities ",
        21: "New Medical Device Form",
        22: "SOM CTO Review regarding need for IDE",
        23: "SOM CTO Review regarding need for IND",
        24: "InfoSec Approval",
        25: "Scientific Pre-review Documentation",
        26: "IBC Number",
        32: "IDS - Investigational Drug Service Approval",
        36: "RDRC Approval ",
        40: "SBS/IRB Approval-FERPA",
        41: "HIRE Standard Radiation Language",
        42: "COI Management Plan ",
        43: "SOM CTO Approval Letter-Non UVA, Non Industry PI MultiSite Study",
        44: "GRIME Approval",
        45: "GMEC Approval",
        46: "IRB Reliance Agreement Request Form- IRB-HSR is IRB of Record",
        47: "Non UVA IRB Approval - Initial and Last Continuation",
        48: "MR Physicist Approval- Use of Gadolinium",
        49: "SOM CTO Approval- Non- UVA Academia PI of IDE",
        51: "IDS Waiver",
        52: "Package Inserts",
        53: "IRB Reliance Agreement Request Form- IRB-HSR Not IRB of Record",
        54: "ESCRO Approval",
        57: "Laser Safety Officer Approval",
    }

    def __init__(self, AUXDOCID: str, AUXDOC: str):
        self.AUXDOCID = AUXDOCID
        self.AUXDOC = AUXDOC
@@ -17,16 +17,25 @@ from crc.services.workflow_processor import WorkflowProcessor
class CompleteTemplate(Script):

    def get_description(self):
        return """Takes one argument, which is the name of a MS Word docx file to use as a template.
        All data currently collected up to this Task will be available for use in the template."""
        return """
Using the Jinja template engine, takes data available in the current task, and uses it to populate
a Word document that contains Jinja markup.  Please see https://docxtpl.readthedocs.io/en/latest/
for more information on exact syntax.
Takes two arguments:
1. The name of a MS Word docx file to use as a template.
2. The 'code' of the IRB Document as set in the irb_documents.xlsx file.
"""

    def do_task(self, task, study_id, *args, **kwargs):
        """Entry point, mostly worried about wiring it all up."""
        if len(args) != 1:
        if len(args) != 2:
            raise ApiError(code="missing_argument",
                           message="The CompleteTask script requires a single argument with "
                                   "the name of the docx template to use.")
                           message="The CompleteTemplate script requires 2 arguments.  The first argument is "
                                   "the name of the docx template to use.  The second "
                                   "argument is a code for the document, as "
                                   "set in the reference document %s. " % FileService.IRB_PRO_CATEGORIES_FILE)
        file_name = args[0]
        irb_doc_code = args[1]
        workflow_spec_model = self.find_spec_model_in_db(task.workflow)
        task_study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]

@@ -53,7 +62,8 @@ class CompleteTemplate(Script):
        FileService.add_task_file(study_id=study_id, workflow_id=workflow_id, task_id=task.id,
                                  name=file_name,
                                  content_type=CONTENT_TYPES['docx'],
                                  binary_data=final_document_stream.read())
                                  binary_data=final_document_stream.read(),
                                  irb_doc_code=irb_doc_code)

        print("Complete Task was called with %s" % str(args))
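For context, the docxtpl pattern CompleteTemplate relies on looks roughly like the sketch below (see https://docxtpl.readthedocs.io/en/latest/); the template name matches the BPMN script task later in this change set, while the context dict is illustrative:

from io import BytesIO
from docxtpl import DocxTemplate

doc = DocxTemplate("Letter.docx")    # the template named in the script task
doc.render({"title": "My Study"})    # collected task data becomes the Jinja context
stream = BytesIO()
doc.save(stream)                     # the resulting bytes are stored via FileService.add_task_file
stream.seek(0)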
crc/scripts/required_docs.py — new file, 80 lines

from pandas import ExcelFile

from crc import session, ma
from crc.api.common import ApiError
from crc.models.study import StudyModel, StudyModelSchema
from crc.scripts.script import Script, ScriptValidationError
from crc.services.file_service import FileService
from crc.services.protocol_builder import ProtocolBuilderService


class RequiredDocs(Script):
    """Provides information about the documents required by Protocol Builder."""
    pb = ProtocolBuilderService()
    type_options = ['info', 'investigators', 'required_docs', 'details']

    def get_description(self):
        return """
Provides detailed information about the documents required by the Protocol Builder.
Makes an immediate call to the IRB Protocol Builder API to get a list of currently required
documents.  It then collects all the information in a reference file called 'irb_documents.xlsx';
if the id from Protocol Builder matches an id in this table, all data available in that row
is also provided.

This places a dictionary of values in the current task, where the key is the numeric id.

For example:
``` "required_docs" :
     {
        6: {
            "name": "Cancer Center's PRC Approval Form",
            "category1": "UVA Compliance",
            "category2": "PRC Approval",
            "category3": "",
            "Who Uploads?": "CRC",
            "required": True,
            "upload_count": 0
        },
        24: { ...
     }
```
"""

    def do_task(self, task, study_id, *args, **kwargs):
        """Takes data from the protocol builder, and merges it with data from the IRB Pro Categories
        spreadsheet to return pertinent details about the required documents."""
        self.get_required_docs(study_id)
        task.data["required_docs"] = self.get_required_docs(study_id)

    def get_required_docs(self, study_id):
        """Takes data from the protocol builder, and merges it with data from the IRB Pro Categories
        spreadsheet to return pertinent details about the required documents."""
        pb_docs = self.pb.get_required_docs(study_id)
        doc_dictionary = FileService.get_file_reference_dictionary()
        required_docs = []
        for doc in pb_docs:
            id = int(doc['AUXDOCID'])
            required_doc = {'id': id, 'name': doc['AUXDOC'], 'required': True,
                            'count': 0}
            if id in doc_dictionary:
                required_doc = {**required_doc, **doc_dictionary[id]}
                required_doc['count'] = self.get_count(study_id, doc_dictionary[id]["Code"])
            required_docs.append(required_doc)
        return required_docs

    def get_count(self, study_id, irb_doc_code):
        """Returns the total number of documents that have been uploaded that match
        the given document code."""
        return len(FileService.get_files(study_id=study_id, irb_doc_code=irb_doc_code))

    # Verifies that information is available for this script task to function
    # correctly.  Returns a list of validation errors.
    @staticmethod
    def validate():
        errors = []
        try:
            dict = FileService.get_file_reference_dictionary()
        except ApiError as ae:
            errors.append(ScriptValidationError.from_api_error(ae))
        return errors
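A tiny illustration of the merge get_required_docs performs: Protocol Builder returns AUXDOCID/AUXDOC pairs, and the spreadsheet row for the same numeric id contributes its remaining columns. The values below are examples drawn from the docstring above and the BPMN fixtures, not real Protocol Builder output:

pb_doc = {'AUXDOCID': '6', 'AUXDOC': "Cancer Center's PRC Approval Form"}
sheet_row = {'Code': 'UVACompliance.PRCApproval', 'category1': 'UVA Compliance', 'category2': 'PRC Approval'}

required_doc = {'id': int(pb_doc['AUXDOCID']), 'name': pb_doc['AUXDOC'], 'required': True, 'count': 0}
required_doc = {**required_doc, **sheet_row}
# -> {'id': 6, 'name': "Cancer Center's PRC Approval Form", 'required': True, 'count': 0,
#     'Code': 'UVACompliance.PRCApproval', 'category1': 'UVA Compliance', 'category2': 'PRC Approval'}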
@@ -18,6 +18,13 @@ class Script:
                       "This is an internal error. The script you are trying to execute " +
                       "does not properly implement the do_task function.")

    def validate(self):
        """Override this method to perform an early check that the script has access to
        everything it needs to properly process requests.
        Should return an array of ScriptValidationErrors.
        """
        return []

    @staticmethod
    def get_all_subclasses():
        return Script._get_all_subclasses(Script)
@@ -38,4 +45,15 @@ class Script:
            all_subclasses.append(subclass)
            all_subclasses.extend(Script._get_all_subclasses(subclass))

        return all_subclasses
        return all_subclasses


class ScriptValidationError:

    def __init__(self, code, message):
        self.code = code
        self.message = message

    @classmethod
    def from_api_error(cls, api_error: ApiError):
        return cls(api_error.code, api_error.message)
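A short sketch (not part of this change set) of how a concrete script can override validate() and report problems; the class name and error text are hypothetical:

from crc.scripts.script import Script, ScriptValidationError

class MyScript(Script):                                   # hypothetical example script
    def do_task(self, task, study_id, *args, **kwargs):
        pass

    def validate(self):
        errors = []
        errors.append(ScriptValidationError(
            "missing_reference",
            "The irb_documents.xlsx reference file has not been loaded."))
        return errors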
@@ -8,10 +8,10 @@ from crc.services.protocol_builder import ProtocolBuilderService
class StudyInfo(Script):
    """Just your basic class that can pull in data from a few api endpoints and do a basic task."""
    pb = ProtocolBuilderService()
    type_options = ['info', 'investigators', 'required_docs', 'details']
    type_options = ['info', 'investigators', 'details']

    def get_description(self):
        return """StudyInfo [TYPE], where TYPE is one of 'info', 'investigators','required_docs', or 'details'
        return """StudyInfo [TYPE], where TYPE is one of 'info', 'investigators', or 'details'
        Adds details about the current study to the Task Data.  The type of information required should be
        provided as an argument.  Basic returns the basic information such as the title.  Investigators provides
        detailed information about each investigator in the study.  Details provides a large number
@@ -36,23 +36,6 @@ class StudyInfo(Script):
            study_info["info"] = schema.dump(study)
        if cmd == 'investigators':
            study_info["investigators"] = self.pb.get_investigators(study_id)
        if cmd == 'required_docs':
            study_info["required_docs"] = self.pb.get_required_docs(study_id)
        if cmd == 'details':
            study_info["details"] = self.pb.get_study_details(study_id)
        task.data["study"] = study_info


    def get_required_docs(self, study_id):
        required_docs = self.pb.get_required_docs(study_id)
        return required_docs
@@ -3,9 +3,11 @@ from datetime import datetime
from uuid import UUID
from xml.etree import ElementTree

from pandas import ExcelFile

from crc import session
from crc.api.common import ApiErrorSchema, ApiError
from crc.models.file import FileType, FileDataModel, FileModelSchema, FileModel, CONTENT_TYPES
from crc.api.common import ApiError
from crc.models.file import FileType, FileDataModel, FileModel
from crc.models.workflow import WorkflowSpecModel
from crc.services.workflow_processor import WorkflowProcessor
import hashlib
@@ -13,6 +15,7 @@ import hashlib

class FileService(object):
    """Provides consistent management and rules for storing, retrieving and processing files."""
    IRB_PRO_CATEGORIES_FILE = "irb_documents.xlsx"

    @staticmethod
    def add_workflow_spec_file(workflow_spec: WorkflowSpecModel,
@@ -33,28 +36,76 @@ class FileService(object):

    @staticmethod
    def add_form_field_file(study_id, workflow_id, task_id, form_field_key, name, content_type, binary_data):
        """Create a new file and associate it with a user task form field within a workflow."""
        """Create a new file and associate it with a user task form field within a workflow.
        Please note that the form_field_key MUST be a known file in the irb_documents.xlsx reference document."""
        if not FileService.irb_document_reference_exists(form_field_key):
            raise ApiError("invalid_form_field_key",
                           "When uploading files, the form field id must match a known document in the "
                           "irb_documents.xlsx reference file. This code is not found in that file '%s'" % form_field_key)

        file_model = FileModel(
            version=0,
            study_id=study_id,
            workflow_id=workflow_id,
            task_id=task_id,
            name=name,
            form_field_key=form_field_key
            form_field_key=form_field_key,
            irb_doc_code=form_field_key
        )
        return FileService.update_file(file_model, binary_data, content_type)

    @staticmethod
    def add_task_file(study_id, workflow_id, task_id, name, content_type, binary_data):
    def irb_document_reference_exists(code):
        data_model = FileService.get_reference_file_data(FileService.IRB_PRO_CATEGORIES_FILE)
        xls = ExcelFile(data_model.data)
        df = xls.parse(xls.sheet_names[0])
        return code in df['Code'].values

    @staticmethod
    def get_file_reference_dictionary():
        """Loads up the xlsx file that contains the IRB Pro Categories and converts it to
        a pandas data frame for processing."""
        data_model = FileService.get_reference_file_data(FileService.IRB_PRO_CATEGORIES_FILE)
        xls = ExcelFile(data_model.data)
        df = xls.parse(xls.sheet_names[0])
        # Pandas is lovely, but weird.  Here we drop records without an Id, and convert it to an integer.
        df = df.drop_duplicates(subset='Id').astype({'Id': 'Int64'})
        # Now we index on the Id column and convert to a dictionary, where the key is the id, and the value
        # is a dictionary with all the remaining data in it.  It's kinda pretty really.
        all_dict = df.set_index('Id').to_dict('index')
        return all_dict

    @staticmethod
    def add_task_file(study_id, workflow_id, task_id, name, content_type, binary_data,
                      irb_doc_code=None):
        """Create a new file and associate it with an executing task within a workflow."""
        file_model = FileModel(
            study_id=study_id,
            workflow_id=workflow_id,
            task_id=task_id,
            name=name,
            irb_doc_code=irb_doc_code
        )
        return FileService.update_file(file_model, binary_data, content_type)

    @staticmethod
    def add_reference_file(name, content_type, binary_data):
        """Create a file with the given name, but not associated with a spec or workflow.
        Only one file with the given reference name can exist."""
        file_model = session.query(FileModel). \
            filter(FileModel.is_reference == True). \
            filter(FileModel.name == name).first()
        if not file_model:
            file_model = FileModel(
                name=name,
                is_reference=True
            )
        return FileService.update_file(file_model, binary_data, content_type)

    @staticmethod
    def get_extension(file_name):
        basename, file_extension = os.path.splitext(file_name)
        return file_extension.lower().strip()[1:]

    @staticmethod
    def update_file(file_model, binary_data, content_type):

@@ -68,12 +119,11 @@ class FileService(object):
            return file_model

        # Verify the extension
        basename, file_extension = os.path.splitext(file_model.name)
        file_extension = file_extension.lower().strip()[1:]
        file_extension = FileService.get_extension(file_model.name)
        if file_extension not in FileType._member_names_:
            return ApiErrorSchema().dump(ApiError('unknown_extension',
                                                  'The file you provided does not have an accepted extension:' +
                                                  file_extension)), 404
            raise ApiError('unknown_extension',
                           'The file you provided does not have an accepted extension:' +
                           file_extension, status_code=404)
        else:
            file_model.type = FileType[file_extension]
            file_model.content_type = content_type
@@ -93,8 +143,10 @@ class FileService(object):
        return file_model

    @staticmethod
    def get_files(workflow_spec_id=None, study_id=None, workflow_id=None, task_id=None, form_field_key=None):
        query = session.query(FileModel)
    def get_files(workflow_spec_id=None,
                  study_id=None, workflow_id=None, task_id=None, form_field_key=None,
                  name=None, is_reference=False, irb_doc_code=None):
        query = session.query(FileModel).filter_by(is_reference=is_reference)
        if workflow_spec_id:
            query = query.filter_by(workflow_spec_id=workflow_spec_id)
        if study_id:
@@ -105,15 +157,30 @@ class FileService(object):
            query = query.filter_by(task_id=str(task_id))
        if form_field_key:
            query = query.filter_by(form_field_key=form_field_key)
        if name:
            query = query.filter_by(name=name)
        if irb_doc_code:
            query = query.filter_by(irb_doc_code=irb_doc_code)

        results = query.all()
        return results

    @staticmethod
    def get_file_data(file_id):
        """Returns the file_data that is associated with the file model id"""
        file_model = session.query(FileModel).filter(FileModel.id == file_id).first()
        return session.query(FileDataModel)\
            .filter(FileDataModel.file_model_id == file_id)\
            .filter(FileDataModel.version == file_model.latest_version)\
    def get_file_data(file_id, file_model=None):
        """Returns the file_data that is associated with the file model id, if an actual file_model
        is provided, uses that rather than looking it up again."""
        if file_model is None:
            file_model = session.query(FileModel).filter(FileModel.id == file_id).first()
        return session.query(FileDataModel) \
            .filter(FileDataModel.file_model_id == file_id) \
            .filter(FileDataModel.version == file_model.latest_version) \
            .first()

    @staticmethod
    def get_reference_file_data(file_name):
        file_model = session.query(FileModel). \
            filter(FileModel.is_reference == True). \
            filter(FileModel.name == file_name).first()
        if not file_model:
            raise ApiError("file_not_found", "There is no reference file with the name '%s'" % file_name)
        return FileService.get_file_data(file_model.id, file_model)
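A small, self-contained illustration of what the drop_duplicates / set_index('Id').to_dict('index') chain in get_file_reference_dictionary produces; the rows are made up, with the Code value taken from the BPMN fixture below:

import pandas as pd

rows = [
    {'Id': 6, 'Code': 'UVACompliance.PRCApproval', 'category1': 'UVA Compliance', 'category2': 'PRC Approval'},
    {'Id': 6, 'Code': 'UVACompliance.PRCApproval', 'category1': 'UVA Compliance', 'category2': 'PRC Approval'},  # duplicate Id rows are dropped
]
df = pd.DataFrame(rows).drop_duplicates(subset='Id').astype({'Id': 'Int64'})
lookup = df.set_index('Id').to_dict('index')
# lookup[6] == {'Code': 'UVACompliance.PRCApproval', 'category1': 'UVA Compliance', 'category2': 'PRC Approval'}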
crc/static/reference/irb_documents.xlsx — new binary file (not shown)
@@ -22,6 +22,9 @@ class ExampleDataLoader:
        session.flush()

    def load_all(self):

        self.load_reference_documents()

        users = [
            UserModel(
                uid='dhf8r',
@@ -111,3 +114,11 @@ class ExampleDataLoader:
            if file:
                file.close()
        return spec

    def load_reference_documents(self):
        file_path = os.path.join(app.root_path, 'static', 'reference', 'irb_documents.xlsx')
        file = open(file_path, "rb")
        FileService.add_reference_file(FileService.IRB_PRO_CATEGORIES_FILE,
                                       binary_data=file.read(),
                                       content_type=CONTENT_TYPES['xls'])
        file.close()
migrations/versions/ddd5fc9ea75b_.py — new file, 36 lines

"""empty message

Revision ID: ddd5fc9ea75b
Revises: 65f3fce6031a
Create Date: 2020-03-20 11:19:01.825283

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'ddd5fc9ea75b'
down_revision = '65f3fce6031a'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('file', sa.Column('irb_doc_code', sa.String(), nullable=True))
    op.add_column('file', sa.Column('is_reference', sa.Boolean(), nullable=False))
    op.alter_column('file', 'primary',
                    existing_type=sa.BOOLEAN(),
                    nullable=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('file', 'primary',
                    existing_type=sa.BOOLEAN(),
                    nullable=True)
    op.drop_column('file', 'is_reference')
    op.drop_column('file', 'irb_doc_code')
    # ### end Alembic commands ###
@@ -5,12 +5,14 @@ import os
import unittest
import urllib.parse

from crc.models.study import StudyModel
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor

os.environ["TESTING"] = "true"

from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
from crc.models.workflow import WorkflowSpecModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel
from crc.models.user import UserModel

from crc import app, db, session
@@ -144,3 +146,24 @@ class BaseTest(unittest.TestCase):
        noise, file_extension = os.path.splitext(file_path)
        content_type = CONTENT_TYPES[file_extension[1:]]
        file_service.update_file(file_model, data, content_type)

    def create_workflow(self, workflow_name):
        study = session.query(StudyModel).first()
        spec = self.load_test_spec(workflow_name)
        processor = WorkflowProcessor.create(study.id, spec.id)
        rv = self.app.post(
            '/v1.0/study/%i/workflows' % study.id,
            headers=self.logged_in_headers(),
            content_type="application/json",
            data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
        self.assert_success(rv)
        workflow = session.query(WorkflowModel).filter_by(study_id=study.id, workflow_spec_id=workflow_name).first()
        return workflow

    def create_reference_document(self):
        file_path = os.path.join(app.root_path, '..', 'tests', 'data', 'reference', 'irb_documents.xlsx')
        file = open(file_path, "rb")
        FileService.add_reference_file(FileService.IRB_PRO_CATEGORIES_FILE,
                                       binary_data=file.read(),
                                       content_type=CONTENT_TYPES['xls'])
        file.close()
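A sketch of how a test can lean on the two new BaseTest helpers; the test class and assertions are illustrative, not one of the tests added in this change set:

from tests.base_test import BaseTest
from crc.services.file_service import FileService

class TestReferenceSetup(BaseTest):          # hypothetical test class
    def test_reference_sheet_is_available(self):
        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('file_upload_form')
        self.assertIsNotNone(workflow.id)
        self.assertGreater(len(FileService.get_file_reference_dictionary()), 0)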
@@ -27,7 +27,7 @@
      </bpmn:extensionElements>
      <bpmn:incoming>SequenceFlow_1i7hk1a</bpmn:incoming>
      <bpmn:outgoing>SequenceFlow_11c35oq</bpmn:outgoing>
      <bpmn:script>CompleteTemplate Letter.docx</bpmn:script>
      <bpmn:script>CompleteTemplate Letter.docx AncillaryDocument.CoCApplication</bpmn:script>
    </bpmn:scriptTask>
    <bpmn:endEvent id="EndEvent_0evb22x">
      <bpmn:incoming>SequenceFlow_11c35oq</bpmn:incoming>
55
tests/data/file_upload_form/file_upload_form.bpmn
Normal file
55
tests/data/file_upload_form/file_upload_form.bpmn
Normal file
@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1wrlvk8" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<bpmn:process id="Finance" isExecutable="true">
<bpmn:startEvent id="StartEvent_1p6s47e">
<bpmn:outgoing>SequenceFlow_0ea9hvd</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:endEvent id="EndEvent_14p904o">
<bpmn:incoming>SequenceFlow_1h0d349</bpmn:incoming>
</bpmn:endEvent>
<bpmn:userTask id="Task_112migv" name="Upload Executed Non-Funded" camunda:formKey="FormKey_ExecutedNonFunded">
<bpmn:documentation>#### Non-Funded Executed Agreement


#### Process:
OGC will upload the Non-Funded Executed Agreement after it has been negotiated by OSP contract negotiator.</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="UVACompliance.PRCApproval" label="Non-Funded Executed Agreement" type="file">
<camunda:properties>
<camunda:property id="group" value="upload" />
<camunda:property id="repeat" value="upload" />
<camunda:property id="Property_02h2d61" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0ea9hvd</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1h0d349</bpmn:outgoing>
<bpmn:standardLoopCharacteristics />
</bpmn:userTask>
<bpmn:sequenceFlow id="SequenceFlow_0ea9hvd" sourceRef="StartEvent_1p6s47e" targetRef="Task_112migv" />
<bpmn:sequenceFlow id="SequenceFlow_1h0d349" sourceRef="Task_112migv" targetRef="EndEvent_14p904o" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Finance">
<bpmndi:BPMNShape id="StartEvent_1p6s47e_di" bpmnElement="StartEvent_1p6s47e">
<dc:Bounds x="112" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_14p904o_di" bpmnElement="EndEvent_14p904o">
<dc:Bounds x="682" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="UserTask_1peopdt_di" bpmnElement="Task_112migv">
<dc:Bounds x="350" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0ea9hvd_di" bpmnElement="SequenceFlow_0ea9hvd">
<di:waypoint x="148" y="117" />
<di:waypoint x="350" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1h0d349_di" bpmnElement="SequenceFlow_1h0d349">
<di:waypoint x="450" y="117" />
<di:waypoint x="682" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>
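In the form above, the camunda:formField id (UVACompliance.PRCApproval) doubles as the IRB document code, and the group/repeat properties mark it as a repeating upload. The tests added later in this diff post the uploaded file with that id as the form_field_key query parameter; the sketch below mirrors those tests. It assumes the same BaseTest fixtures and imports as the test file shown further down; the test name, payload, and file name are placeholders.

# Hypothetical test method; mirrors test_add_file_from_task_and_form_... below.
def test_upload_to_file_form_field(self):
    self.load_example_data()
    self.create_reference_document()
    workflow = self.create_workflow('file_upload_form')
    processor = WorkflowProcessor(workflow)
    task = processor.next_task()
    data = {'file': (io.BytesIO(b"abcdef"), 'non_funded_agreement.pdf')}
    rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_id=%i&form_field_key=%s' %
                       (workflow.study_id, workflow.id, task.id, 'UVACompliance.PRCApproval'),
                       data=data, follow_redirects=True, content_type='multipart/form-data')
    self.assert_success(rv)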
BIN
tests/data/reference/irb_documents.xlsx
Normal file
Binary file not shown.
@ -27,6 +27,7 @@
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_00p5po6</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_17ggqu2</bpmn:outgoing>
<bpmn:multiInstanceLoopCharacteristics />
</bpmn:userTask>
<bpmn:textAnnotation id="TextAnnotation_1haj11l">
<bpmn:text>We have a test that replaces two_forms with this file, which adds a new step to the process. A breaking change.</bpmn:text>
@ -5,6 +5,8 @@ from datetime import datetime
from crc import session
from crc.models.file import FileModel, FileType, FileModelSchema, FileDataModel
from crc.models.workflow import WorkflowSpecModel
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor
from tests.base_test import BaseTest


@ -58,6 +60,73 @@ class TestFilesApi(BaseTest):
file2 = FileModelSchema().load(json_data, session=session)
self.assertEqual(file, file2)

def test_add_file_from_task_and_form_errors_on_invalid_form_field_name(self):
self.load_example_data()
self.create_reference_document()
workflow = self.create_workflow('file_upload_form')
processor = WorkflowProcessor(workflow)
task = processor.next_task()
data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
correct_name = task.task_spec.form.fields[0].id

rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_id=%i&form_field_key=%s' %
(workflow.study_id, workflow.id, task.id, "not_a_known_file"), data=data, follow_redirects=True,
content_type='multipart/form-data')
self.assert_failure(rv, error_code="invalid_form_field_key")

data = {'file': (io.BytesIO(b"abcdef"), 'random_fact.svg')}
rv = self.app.post('/v1.0/file?study_id=%i&workflow_id=%s&task_id=%i&form_field_key=%s' %
(workflow.study_id, workflow.id, task.id, correct_name), data=data, follow_redirects=True,
content_type='multipart/form-data')
self.assert_success(rv)


def test_set_reference_file(self):
file_name = "irb_document_types.xls"
data = {'file': (io.BytesIO(b"abcdef"), "does_not_matter.xls")}
rv = self.app.put('/v1.0/reference_file/%s' % file_name, data=data, follow_redirects=True,
content_type='multipart/form-data')
self.assert_success(rv)
self.assertIsNotNone(rv.get_data())
json_data = json.loads(rv.get_data(as_text=True))
file = FileModelSchema().load(json_data, session=session)
self.assertEqual(FileType.xls, file.type)
self.assertTrue(file.is_reference)
self.assertEqual("application/vnd.ms-excel", file.content_type)

def test_set_reference_file_bad_extension(self):
file_name = FileService.IRB_PRO_CATEGORIES_FILE
data = {'file': (io.BytesIO(b"abcdef"), "does_not_matter.ppt")}
rv = self.app.put('/v1.0/reference_file/%s' % file_name, data=data, follow_redirects=True,
content_type='multipart/form-data')
self.assert_failure(rv, error_code="invalid_file_type")

def test_get_reference_file(self):
file_name = "irb_document_types.xls"
data = {'file': (io.BytesIO(b"abcdef"), "some crazy thing do not care.xls")}
rv = self.app.put('/v1.0/reference_file/%s' % file_name, data=data, follow_redirects=True,
content_type='multipart/form-data')
rv = self.app.get('/v1.0/reference_file/%s' % file_name)
self.assert_success(rv)
data_out = rv.get_data()
self.assertEqual(b"abcdef", data_out)

def test_list_reference_files(self):
file_name = FileService.IRB_PRO_CATEGORIES_FILE
data = {'file': (io.BytesIO(b"abcdef"), file_name)}
rv = self.app.put('/v1.0/reference_file/%s' % file_name, data=data, follow_redirects=True,
content_type='multipart/form-data')

rv = self.app.get('/v1.0/reference_file',
follow_redirects=True,
content_type="application/json")
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
self.assertEqual(1, len(json_data))
file = FileModelSchema(many=True).load(json_data, session=session)
self.assertEqual(file_name, file[0].name)
self.assertTrue(file[0].is_reference)

def test_update_file_info(self):
self.load_example_data()
file: FileModel = session.query(FileModel).first()
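The tests above drive the new reference-file endpoints through the Flask test client; the same routes can be called over plain HTTP. A hedged sketch using the requests library follows. The base URL, port, and local spreadsheet path are assumptions; only the /v1.0/reference_file routes come from the tests in this diff, and the real server-side file name is whatever FileService.IRB_PRO_CATEGORIES_FILE resolves to.

# Sketch only: base_url and the local spreadsheet path are placeholders.
import requests

base_url = 'http://localhost:5000/v1.0'
file_name = 'irb_documents.xlsx'  # assumed value of FileService.IRB_PRO_CATEGORIES_FILE

# Replace the reference spreadsheet...
with open(file_name, 'rb') as spreadsheet:
    rv = requests.put('%s/reference_file/%s' % (base_url, file_name),
                      files={'file': spreadsheet})
    rv.raise_for_status()

# ...then read it back and list all reference files.
content = requests.get('%s/reference_file/%s' % (base_url, file_name)).content
listing = requests.get('%s/reference_file' % base_url).json()
print(len(content), listing)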
90
tests/test_required_docs_script.py
Normal file
@ -0,0 +1,90 @@
import io
import os
from unittest.mock import patch

from crc import app, db
from crc.models.file import CONTENT_TYPES, FileDataModel, FileModel
from crc.models.study import StudyModel
from crc.models.workflow import WorkflowSpecModel, WorkflowModel
from crc.scripts.required_docs import RequiredDocs
from crc.scripts.study_info import StudyInfo
from crc.services.file_service import FileService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.workflow_processor import WorkflowProcessor
from tests.base_test import BaseTest


class TestRequiredDocsScript(BaseTest):
test_uid = "dhf8r"
test_study_id = 1

"""
1. Get a list of only the required documents for the study.
2. For this study, is this document required according to the protocol builder?
3. For ALL uploaded documents, what is the total number of files that were uploaded, per instance of this document naming
convention that we are implementing for the IRB.
"""

def test_validate_returns_error_if_reference_files_do_not_exist(self):
file_model = db.session.query(FileModel). \
filter(FileModel.is_reference == True). \
filter(FileModel.name == FileService.IRB_PRO_CATEGORIES_FILE).first()
if file_model:
db.session.query(FileDataModel).filter(FileDataModel.file_model_id == file_model.id).delete()
db.session.query(FileModel).filter(FileModel.id == file_model.id).delete()
db.session.commit()
db.session.flush()
errors = RequiredDocs.validate()
self.assertTrue(len(errors) > 0)
self.assertEquals("file_not_found", errors[0].code)

def test_no_validation_error_when_correct_file_exists(self):
self.create_reference_document()
errors = RequiredDocs.validate()
self.assertTrue(len(errors) == 0)

def test_load_lookup_data(self):
self.create_reference_document()
dict = FileService.get_file_reference_dictionary()
self.assertIsNotNone(dict)

@patch('crc.services.protocol_builder.requests.get')
def test_get_required_docs(self, mock_get):
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('required_docs.json')
self.create_reference_document()
script = RequiredDocs()
required_docs = script.get_required_docs(12)
self.assertIsNotNone(required_docs)
self.assertTrue(len(required_docs) == 5)
self.assertEquals(6, required_docs[0]['id'])
self.assertEquals("Cancer Center's PRC Approval Form", required_docs[0]['name'])
self.assertEquals("UVA Compliance", required_docs[0]['category1'])
self.assertEquals("PRC Approval", required_docs[0]['category2'])
self.assertEquals("CRC", required_docs[0]['Who Uploads?'])
self.assertEquals(0, required_docs[0]['count'])

@patch('crc.services.protocol_builder.requests.get')
def test_get_required_docs_has_correct_count_when_a_file_exists(self, mock_get):

self.load_example_data()

# Mock out the protocol builder.
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('required_docs.json')

# Make sure the xlsx reference document is in place.
self.create_reference_document()
script = RequiredDocs()

# Add a document to the study with the correct code.
workflow = self.create_workflow('docx')
irb_code = "UVACompliance.PRCApproval" # The first file referenced in pb required docs.
FileService.add_task_file(study_id = workflow.study_id, workflow_id = workflow.id,
task_id ="fakingthisout",
name="anything.png", content_type="text",
binary_data=b'1234', irb_doc_code=irb_code)

required_docs = script.get_required_docs(workflow.study_id)
self.assertIsNotNone(required_docs)
self.assertEquals(1, required_docs[0]['count'])
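The assertions above imply that RequiredDocs merges the protocol builder's required-document list with the reference spreadsheet and then counts uploaded files per IRB document code. A rough sketch of that merge follows; it is not the actual RequiredDocs implementation, the field names are taken from the assertions above, and the way the IRB code is built from the two categories is an inference from "UVA Compliance"/"PRC Approval" mapping to UVACompliance.PRCApproval.

# Rough sketch of the merge/count the tests above imply; not the real RequiredDocs code.
def merge_required_docs(pb_docs, reference_rows, uploaded_files):
    """pb_docs: required docs from the protocol builder (each with an 'id').
    reference_rows: spreadsheet rows keyed by document id, with 'name',
    'category1', 'category2', and 'Who Uploads?' columns.
    uploaded_files: file records carrying an irb_doc_code attribute."""
    results = []
    for doc in pb_docs:
        row = reference_rows[doc['id']]
        # Assumed code format: categories with spaces removed, joined by a dot.
        code = "%s.%s" % (row['category1'].replace(" ", ""), row['category2'].replace(" ", ""))
        results.append({
            'id': doc['id'],
            'name': row['name'],
            'category1': row['category1'],
            'category2': row['category2'],
            'Who Uploads?': row['Who Uploads?'],
            'count': sum(1 for f in uploaded_files if f.irb_doc_code == code),
        })
    return results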
@ -13,18 +13,6 @@ from tests.base_test import BaseTest
class TestTasksApi(BaseTest):

def create_workflow(self, workflow_name):
study = session.query(StudyModel).first()
spec = self.load_test_spec(workflow_name)
processor = WorkflowProcessor.create(study.id, spec.id)
rv = self.app.post(
'/v1.0/study/%i/workflows' % study.id,
headers=self.logged_in_headers(),
content_type="application/json",
data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
self.assert_success(rv)
workflow = session.query(WorkflowModel).filter_by(study_id=study.id, workflow_spec_id=workflow_name).first()
return workflow

def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False):
rv = self.app.get('/v1.0/workflow/%i?soft_reset=%s&hard_reset=%s' %

@ -60,6 +48,7 @@ class TestTasksApi(BaseTest):
workflow = WorkflowApiSchema().load(json_data)
return workflow


def test_get_current_user_tasks(self):
self.load_example_data()
workflow = self.create_workflow('random_fact')

@ -145,8 +134,10 @@ class TestTasksApi(BaseTest):
self.assertIsNotNone(workflow_api.last_task)
self.assertIsNotNone(workflow_api.next_task)


def test_document_added_to_workflow_shows_up_in_file_list(self):
self.load_example_data()
self.create_reference_document()
workflow = self.create_workflow('docx')
# get the first form in the two form workflow.
tasks = self.get_workflow_api(workflow).user_tasks
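This hunk removes the create_workflow helper from TestTasksApi, while the other tests in this PR still call self.create_workflow and self.create_reference_document; presumably the helper was promoted to the shared BaseTest. A sketch of that relocated helper is given below, reusing the body deleted here; its exact placement and the BaseTest base class are assumptions, not confirmed by this diff.

# Assumed new home for the helper removed above (sketch only).
class BaseTest(unittest.TestCase):

    def create_workflow(self, workflow_name):
        # Body reuses the code deleted from TestTasksApi in this hunk.
        study = session.query(StudyModel).first()
        spec = self.load_test_spec(workflow_name)
        processor = WorkflowProcessor.create(study.id, spec.id)
        rv = self.app.post(
            '/v1.0/study/%i/workflows' % study.id,
            headers=self.logged_in_headers(),
            content_type="application/json",
            data=json.dumps(WorkflowSpecModelSchema().dump(spec)))
        self.assert_success(rv)
        return session.query(WorkflowModel).filter_by(
            study_id=study.id, workflow_spec_id=workflow_name).first()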