Merge branch 'dev'

This commit is contained in:
Dan 2022-02-17 13:16:26 -05:00
commit 82b0cb453a
30 changed files with 584 additions and 277 deletions

309
Pipfile.lock generated
View File

@ -25,11 +25,11 @@
},
"alembic": {
"hashes": [
"sha256:7c328694a2e68f03ee971e63c3bd885846470373a5b532cf2c9f1601c413b153",
"sha256:a9dde941534e3d7573d9644e8ea62a2953541e27bc1793e166f60b777ae098b4"
"sha256:6c0c05e9768a896d804387e20b299880fe01bc56484246b0dffe8075d6d3d847",
"sha256:ad842f2c3ab5c5d4861232730779c05e33db4ba880a08b85eb505e87c01095bc"
],
"index": "pypi",
"version": "==1.7.5"
"version": "==1.7.6"
},
"amqp": {
"hashes": [
@ -201,11 +201,11 @@
},
"charset-normalizer": {
"hashes": [
"sha256:2842d8f5e82a1f6aa437380934d5e1cd4fcf2003b06fed6940769c164a480a45",
"sha256:98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"
"sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597",
"sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"
],
"markers": "python_version >= '3'",
"version": "==2.0.11"
"version": "==2.0.12"
},
"click": {
"hashes": [
@ -242,61 +242,58 @@
"swagger-ui"
],
"hashes": [
"sha256:b10df8b67dde1b9d61ff0676f354d02ab838bed9818eb89d582504b3a3acbd71",
"sha256:de0cab04ff548e392ba206228ded9d46620765a4b27f0d0c8ab3999ad20b53dc"
"sha256:66620b10b2c03eab6af981f8489d0ff7ada19f66710274effc71258fb8221419",
"sha256:b5e5ba236894a02b8da4d10face412f471abb6ff77de10dad32fa88cb894acf7"
],
"index": "pypi",
"version": "==2.10.0"
"version": "==2.11.1"
},
"coverage": {
"hashes": [
"sha256:012157499ec4f135fc36cd2177e3d1a1840af9b236cbe80e9a5ccfc83d912a69",
"sha256:0a34d313105cdd0d3644c56df2d743fe467270d6ab93b5d4a347eb9fec8924d6",
"sha256:11e61c5548ecf74ea1f8b059730b049871f0e32b74f88bd0d670c20c819ad749",
"sha256:152cc2624381df4e4e604e21bd8e95eb8059535f7b768c1fb8b8ae0b26f47ab0",
"sha256:1b4285fde5286b946835a1a53bba3ad41ef74285ba9e8013e14b5ea93deaeafc",
"sha256:27a94db5dc098c25048b0aca155f5fac674f2cf1b1736c5272ba28ead2fc267e",
"sha256:27ac7cb84538e278e07569ceaaa6f807a029dc194b1c819a9820b9bb5dbf63ab",
"sha256:2a491e159294d756e7fc8462f98175e2d2225e4dbe062cca7d3e0d5a75ba6260",
"sha256:2bc85664b06ba42d14bb74d6ddf19d8bfc520cb660561d2d9ce5786ae72f71b5",
"sha256:32168001f33025fd756884d56d01adebb34e6c8c0b3395ca8584cdcee9c7c9d2",
"sha256:3c4ce3b647bd1792d4394f5690d9df6dc035b00bcdbc5595099c01282a59ae01",
"sha256:433b99f7b0613bdcdc0b00cc3d39ed6d756797e3b078d2c43f8a38288520aec6",
"sha256:4578728c36de2801c1deb1c6b760d31883e62e33f33c7ba8f982e609dc95167d",
"sha256:509c68c3e2015022aeda03b003dd68fa19987cdcf64e9d4edc98db41cfc45d30",
"sha256:51372e24b1f7143ee2df6b45cff6a721f3abe93b1e506196f3ffa4155c2497f7",
"sha256:5d008e0f67ac800b0ca04d7914b8501312c8c6c00ad8c7ba17754609fae1231a",
"sha256:649df3641eb351cdfd0d5533c92fc9df507b6b2bf48a7ef8c71ab63cbc7b5c3c",
"sha256:6e78b1e25e5c5695dea012be473e442f7094d066925604be20b30713dbd47f89",
"sha256:72d9d186508325a456475dd05b1756f9a204c7086b07fffb227ef8cee03b1dc2",
"sha256:7d82c610a2e10372e128023c5baf9ce3d270f3029fe7274ff5bc2897c68f1318",
"sha256:7ee317486593193e066fc5e98ac0ce712178c21529a85c07b7cb978171f25d53",
"sha256:7eed8459a2b81848cafb3280b39d7d49950d5f98e403677941c752e7e7ee47cb",
"sha256:823f9325283dc9565ba0aa2d240471a93ca8999861779b2b6c7aded45b58ee0f",
"sha256:85c5fc9029043cf8b07f73fbb0a7ab6d3b717510c3b5642b77058ea55d7cacde",
"sha256:86c91c511853dfda81c2cf2360502cb72783f4b7cebabef27869f00cbe1db07d",
"sha256:8e0c3525b1a182c8ffc9bca7e56b521e0c2b8b3e82f033c8e16d6d721f1b54d6",
"sha256:987a84ff98a309994ca77ed3cc4b92424f824278e48e4bf7d1bb79a63cfe2099",
"sha256:9ed3244b415725f08ca3bdf02ed681089fd95e9465099a21c8e2d9c5d6ca2606",
"sha256:a189036c50dcd56100746139a459f0d27540fef95b09aba03e786540b8feaa5f",
"sha256:a4748349734110fd32d46ff8897b561e6300d8989a494ad5a0a2e4f0ca974fc7",
"sha256:a5d79c9af3f410a2b5acad91258b4ae179ee9c83897eb9de69151b179b0227f5",
"sha256:a7596aa2f2b8fa5604129cfc9a27ad9beec0a96f18078cb424d029fdd707468d",
"sha256:ab4fc4b866b279740e0d917402f0e9a08683e002f43fa408e9655818ed392196",
"sha256:bde4aeabc0d1b2e52c4036c54440b1ad05beeca8113f47aceb4998bb7471e2c2",
"sha256:c72bb4679283c6737f452eeb9b2a0e570acaef2197ad255fb20162adc80bea76",
"sha256:c8582e9280f8d0f38114fe95a92ae8d0790b56b099d728cc4f8a2e14b1c4a18c",
"sha256:ca29c352389ea27a24c79acd117abdd8a865c6eb01576b6f0990cd9a4e9c9f48",
"sha256:ce443a3e6df90d692c38762f108fc4c88314bf477689f04de76b3f252e7a351c",
"sha256:d1675db48490e5fa0b300f6329ecb8a9a37c29b9ab64fa9c964d34111788ca2d",
"sha256:da1a428bdbe71f9a8c270c7baab29e9552ac9d0e0cba5e7e9a4c9ee6465d258d",
"sha256:e4ff163602c5c77e7bb4ea81ba5d3b793b4419f8acd296aae149370902cf4e92",
"sha256:e67ccd53da5958ea1ec833a160b96357f90859c220a00150de011b787c27b98d",
"sha256:e8071e7d9ba9f457fc674afc3de054450be2c9b195c470147fbbc082468d8ff7",
"sha256:fff16a30fdf57b214778eff86391301c4509e327a65b877862f7c929f10a4253"
"sha256:1245ab82e8554fa88c4b2ab1e098ae051faac5af829efdcf2ce6b34dccd5567c",
"sha256:1bc6d709939ff262fd1432f03f080c5042dc6508b6e0d3d20e61dd045456a1a0",
"sha256:25e73d4c81efa8ea3785274a2f7f3bfbbeccb6fcba2a0bdd3be9223371c37554",
"sha256:276b13cc085474e482566c477c25ed66a097b44c6e77132f3304ac0b039f83eb",
"sha256:2aed4761809640f02e44e16b8b32c1a5dee5e80ea30a0ff0912158bde9c501f2",
"sha256:2dd70a167843b4b4b2630c0c56f1b586fe965b4f8ac5da05b6690344fd065c6b",
"sha256:352c68e233409c31048a3725c446a9e48bbff36e39db92774d4f2380d630d8f8",
"sha256:3f2b05757c92ad96b33dbf8e8ec8d4ccb9af6ae3c9e9bd141c7cc44d20c6bcba",
"sha256:448d7bde7ceb6c69e08474c2ddbc5b4cd13c9e4aa4a717467f716b5fc938a734",
"sha256:463e52616ea687fd323888e86bf25e864a3cc6335a043fad6bbb037dbf49bbe2",
"sha256:482fb42eea6164894ff82abbcf33d526362de5d1a7ed25af7ecbdddd28fc124f",
"sha256:56c4a409381ddd7bbff134e9756077860d4e8a583d310a6f38a2315b9ce301d0",
"sha256:56d296cbc8254a7dffdd7bcc2eb70be5a233aae7c01856d2d936f5ac4e8ac1f1",
"sha256:5e15d424b8153756b7c903bde6d4610be0c3daca3986173c18dd5c1a1625e4cd",
"sha256:618eeba986cea7f621d8607ee378ecc8c2504b98b3fdc4952b30fe3578304687",
"sha256:61d47a897c1e91f33f177c21de897267b38fbb45f2cd8e22a710bcef1df09ac1",
"sha256:621f6ea7260ea2ffdaec64fe5cb521669984f567b66f62f81445221d4754df4c",
"sha256:6a5cdc3adb4f8bb8d8f5e64c2e9e282bc12980ef055ec6da59db562ee9bdfefa",
"sha256:6c3f6158b02ac403868eea390930ae64e9a9a2a5bbfafefbb920d29258d9f2f8",
"sha256:704f89b87c4f4737da2860695a18c852b78ec7279b24eedacab10b29067d3a38",
"sha256:72128176fea72012063200b7b395ed8a57849282b207321124d7ff14e26988e8",
"sha256:78fbb2be068a13a5d99dce9e1e7d168db880870f7bc73f876152130575bd6167",
"sha256:7bff3a98f63b47464480de1b5bdd80c8fade0ba2832c9381253c9b74c4153c27",
"sha256:84f2436d6742c01136dd940ee158bfc7cf5ced3da7e4c949662b8703b5cd8145",
"sha256:9976fb0a5709988778ac9bc44f3d50fccd989987876dfd7716dee28beed0a9fa",
"sha256:9ad0a117b8dc2061ce9461ea4c1b4799e55edceb236522c5b8f958ce9ed8fa9a",
"sha256:9e3dd806f34de38d4c01416344e98eab2437ac450b3ae39c62a0ede2f8b5e4ed",
"sha256:9eb494070aa060ceba6e4bbf44c1bc5fa97bfb883a0d9b0c9049415f9e944793",
"sha256:9fde6b90889522c220dd56a670102ceef24955d994ff7af2cb786b4ba8fe11e4",
"sha256:9fff3ff052922cb99f9e52f63f985d4f7a54f6b94287463bc66b7cdf3eb41217",
"sha256:a06c358f4aed05fa1099c39decc8022261bb07dfadc127c08cfbd1391b09689e",
"sha256:a4f923b9ab265136e57cc14794a15b9dcea07a9c578609cd5dbbfff28a0d15e6",
"sha256:c5b81fb37db76ebea79aa963b76d96ff854e7662921ce742293463635a87a78d",
"sha256:d5ed164af5c9078596cfc40b078c3b337911190d3faeac830c3f1274f26b8320",
"sha256:d651fde74a4d3122e5562705824507e2f5b2d3d57557f1916c4b27635f8fbe3f",
"sha256:de73fca6fb403dd72d4da517cfc49fcf791f74eee697d3219f6be29adf5af6ce",
"sha256:e647a0be741edbb529a72644e999acb09f2ad60465f80757da183528941ff975",
"sha256:e92c7a5f7d62edff50f60a045dc9542bf939758c95b2fcd686175dd10ce0ed10",
"sha256:eeffd96882d8c06d31b65dddcf51db7c612547babc1c4c5db6a011abe9798525",
"sha256:f5a4551dfd09c3bd12fca8144d47fe7745275adf3229b7223c2f9e29a975ebda",
"sha256:fac0bcc5b7e8169bffa87f0dcc24435446d329cbc2b5486d155c2e0f3b493ae1"
],
"index": "pypi",
"version": "==6.3"
"version": "==6.3.1"
},
"dateparser": {
"hashes": [
@ -530,11 +527,11 @@
},
"importlib-metadata": {
"hashes": [
"sha256:899e2a40a8c4a1aec681feef45733de8a6c58f3f6a0dbed2eb6574b4387a77b6",
"sha256:951f0d8a5b7260e9db5e41d429285b5f451e928479f19d80818878527d36e95e"
"sha256:175f4ee440a0317f6e8d81b7f8d4869f93316170a65ad2b007d2929186c8052c",
"sha256:e0bc84ff355328a4adfc5240c4f211e0ab386f80aa640d1b11f0618a1d282094"
],
"markers": "python_version < '3.10'",
"version": "==4.10.1"
"version": "==4.11.1"
},
"importlib-resources": {
"hashes": [
@ -784,31 +781,28 @@
},
"numpy": {
"hashes": [
"sha256:0d245a2bf79188d3f361137608c3cd12ed79076badd743dc660750a9f3074f7c",
"sha256:26b4018a19d2ad9606ce9089f3d52206a41b23de5dfe8dc947d2ec49ce45d015",
"sha256:2db01d9838a497ba2aa9a87515aeaf458f42351d72d4e7f3b8ddbd1eba9479f2",
"sha256:3d62d6b0870b53799204515145935608cdeb4cebb95a26800b6750e48884cc5b",
"sha256:45a7dfbf9ed8d68fd39763940591db7637cf8817c5bce1a44f7b56c97cbe211e",
"sha256:4ac4d7c9f8ea2a79d721ebfcce81705fc3cd61a10b731354f1049eb8c99521e8",
"sha256:60f19c61b589d44fbbab8ff126640ae712e163299c2dd422bfe4edc7ec51aa9b",
"sha256:632e062569b0fe05654b15ef0e91a53c0a95d08ffe698b66f6ba0f927ad267c2",
"sha256:65f5e257987601fdfc63f1d02fca4d1c44a2b85b802f03bd6abc2b0b14648dd2",
"sha256:69958735d5e01f7b38226a6c6e7187d72b7e4d42b6b496aca5860b611ca0c193",
"sha256:78bfbdf809fc236490e7e65715bbd98377b122f329457fffde206299e163e7f3",
"sha256:7e957ca8112c689b728037cea9c9567c27cf912741fabda9efc2c7d33d29dfa1",
"sha256:800dfeaffb2219d49377da1371d710d7952c9533b57f3d51b15e61c4269a1b5b",
"sha256:831f2df87bd3afdfc77829bc94bd997a7c212663889d56518359c827d7113b1f",
"sha256:88d54b7b516f0ca38a69590557814de2dd638d7d4ed04864826acaac5ebb8f01",
"sha256:8d1563060e77096367952fb44fca595f2b2f477156de389ce7c0ade3aef29e21",
"sha256:b5ec9a5eaf391761c61fd873363ef3560a3614e9b4ead17347e4deda4358bca4",
"sha256:bcd19dab43b852b03868796f533b5f5561e6c0e3048415e675bec8d2e9d286c1",
"sha256:c51124df17f012c3b757380782ae46eee85213a3215e51477e559739f57d9bf6",
"sha256:e348ccf5bc5235fc405ab19d53bec215bb373300e5523c7b476cc0da8a5e9973",
"sha256:e60ef82c358ded965fdd3132b5738eade055f48067ac8a5a8ac75acc00cad31f",
"sha256:f8ad59e6e341f38266f1549c7c2ec70ea0e3d1effb62a44e5c3dba41c55f0187"
"sha256:03ae5850619abb34a879d5f2d4bb4dcd025d6d8fb72f5e461dae84edccfe129f",
"sha256:076aee5a3763d41da6bef9565fdf3cb987606f567cd8b104aded2b38b7b47abf",
"sha256:0b536b6840e84c1c6a410f3a5aa727821e6108f3454d81a5cd5900999ef04f89",
"sha256:15efb7b93806d438e3bc590ca8ef2f953b0ce4f86f337ef4559d31ec6cf9d7dd",
"sha256:168259b1b184aa83a514f307352c25c56af111c269ffc109d9704e81f72e764b",
"sha256:2638389562bda1635b564490d76713695ff497242a83d9b684d27bb4a6cc9d7a",
"sha256:3556c5550de40027d3121ebbb170f61bbe19eb639c7ad0c7b482cd9b560cd23b",
"sha256:4a176959b6e7e00b5a0d6f549a479f869829bfd8150282c590deee6d099bbb6e",
"sha256:515a8b6edbb904594685da6e176ac9fbea8f73a5ebae947281de6613e27f1956",
"sha256:55535c7c2f61e2b2fc817c5cbe1af7cb907c7f011e46ae0a52caa4be1f19afe2",
"sha256:59153979d60f5bfe9e4c00e401e24dfe0469ef8da6d68247439d3278f30a180f",
"sha256:60cb8e5933193a3cc2912ee29ca331e9c15b2da034f76159b7abc520b3d1233a",
"sha256:6767ad399e9327bfdbaa40871be4254d1995f4a3ca3806127f10cec778bd9896",
"sha256:76a4f9bce0278becc2da7da3b8ef854bed41a991f4226911a24a9711baad672c",
"sha256:8cf33634b60c9cef346663a222d9841d3bbbc0a2f00221d6bcfd0d993d5543f6",
"sha256:94dd11d9f13ea1be17bac39c1942f527cbf7065f94953cf62dfe805653da2f8f",
"sha256:aafa46b5a39a27aca566198d3312fb3bde95ce9677085efd02c86f7ef6be4ec7",
"sha256:badca914580eb46385e7f7e4e426fea6de0a37b9e06bec252e481ae7ec287082",
"sha256:d76a26c5118c4d96e264acc9e3242d72e1a2b92e739807b3b69d8d47684b6677"
],
"markers": "python_version < '3.10' and platform_machine != 'aarch64' and platform_machine != 'arm64'",
"version": "==1.22.1"
"version": "==1.22.2"
},
"openapi-schema-validator": {
"hashes": [
@ -845,30 +839,30 @@
},
"pandas": {
"hashes": [
"sha256:0f19504f2783526fb5b4de675ea69d68974e21c1624f4b92295d057a31d5ec5f",
"sha256:156aac90dd7b303bf0b91bae96c0503212777f86c731e41929c571125d26c8e9",
"sha256:1d59c958d6b8f96fdf850c7821571782168d5acfe75ccf78cd8d1ac15fb921df",
"sha256:1f3b74335390dda49f5d5089fab71958812bf56f42aa27663ee4c16d19f4f1c5",
"sha256:23c04dab11f3c6359cfa7afa83d3d054a8f8c283d773451184d98119ef54da97",
"sha256:2dad075089e17a72391de33021ad93720aff258c3c4b68c78e1cafce7e447045",
"sha256:46a18572f3e1cb75db59d9461940e9ba7ee38967fa48dd58f4139197f6e32280",
"sha256:4a8d5a200f8685e7ea562b2f022c77ab7cb82c1ca5b240e6965faa6f84e5c1e9",
"sha256:51e5da3802aaee1aa4254108ffaf1129a15fb3810b7ce8da1ec217c655b418f5",
"sha256:5229c95db3a907451dacebc551492db6f7d01743e49bbc862f4a6010c227d187",
"sha256:5280d057ddae06fe4a3cd6aa79040b8c205cd6dd21743004cf8635f39ed01712",
"sha256:55ec0e192eefa26d823fc25a1f213d6c304a3592915f368e360652994cdb8d9a",
"sha256:73f7da2ccc38cc988b74e5400b430b7905db5f2c413ff215506bea034eaf832d",
"sha256:784cca3f69cfd7f6bd7c7fdb44f2bbab17e6de55725e9ff36d6f382510dfefb5",
"sha256:b5af258c7b090cca7b742cf2bd67ad1919aa9e4e681007366c9edad2d6a3d42b",
"sha256:cdd76254c7f0a1583bd4e4781fb450d0ebf392e10d3f12e92c95575942e37df5",
"sha256:de62cf699122dcef175988f0714678e59c453dc234c5b47b7136bfd7641e3c8c",
"sha256:de8f8999864399529e8514a2e6bfe00fd161f0a667903655552ed12e583ae3cb",
"sha256:f045bb5c6bfaba536089573bf97d6b8ccc7159d951fe63904c395a5e486fbe14",
"sha256:f103a5cdcd66cb18882ccdc18a130c31c3cfe3529732e7f10a8ab3559164819c",
"sha256:fe454180ad31bbbe1e5d111b44443258730467f035e26b4e354655ab59405871"
"sha256:0259cd11e7e6125aaea3af823b80444f3adad6149ff4c97fef760093598b3e34",
"sha256:04dd15d9db538470900c851498e532ef28d4e56bfe72c9523acb32042de43dfb",
"sha256:0b1a13f647e4209ed7dbb5da3497891d0045da9785327530ab696417ef478f84",
"sha256:19f7c632436b1b4f84615c3b127bbd7bc603db95e3d4332ed259dc815c9aaa26",
"sha256:1b384516dbb4e6aae30e3464c2e77c563da5980440fbdfbd0968e3942f8f9d70",
"sha256:1d85d5f6be66dfd6d1d8d13b9535e342a2214260f1852654b19fa4d7b8d1218b",
"sha256:2e5a7a1e0ecaac652326af627a3eca84886da9e667d68286866d4e33f6547caf",
"sha256:3129a35d9dad1d80c234dd78f8f03141b914395d23f97cf92a366dcd19f8f8bf",
"sha256:358b0bc98a5ff067132d23bf7a2242ee95db9ea5b7bbc401cf79205f11502fd3",
"sha256:3dfb32ed50122fe8c5e7f2b8d97387edd742cc78f9ec36f007ee126cd3720907",
"sha256:4e1176f45981c8ccc8161bc036916c004ca51037a7ed73f2d2a9857e6dbe654f",
"sha256:508c99debccd15790d526ce6b1624b97a5e1e4ca5b871319fb0ebfd46b8f4dad",
"sha256:6105af6533f8b63a43ea9f08a2ede04e8f43e49daef0209ab0d30352bcf08bee",
"sha256:6d6ad1da00c7cc7d8dd1559a6ba59ba3973be6b15722d49738b2be0977eb8a0c",
"sha256:7ea47ba1d6f359680130bd29af497333be6110de8f4c35b9211eec5a5a9630fa",
"sha256:8db93ec98ac7cb5f8ac1420c10f5e3c43533153f253fe7fb6d891cf5aa2b80d2",
"sha256:96e9ece5759f9b47ae43794b6359bbc54805d76e573b161ae770c1ea59393106",
"sha256:bbb15ad79050e8b8d39ec40dd96a30cd09b886a2ae8848d0df1abba4d5502a67",
"sha256:c614001129b2a5add5e3677c3a213a9e6fd376204cb8d17c04e84ff7dfc02a73",
"sha256:e6a7bbbb7950063bfc942f8794bc3e31697c020a14f1cd8905fc1d28ec674a01",
"sha256:f02e85e6d832be37d7f16cf6ac8bb26b519ace3e5f3235564a91c7f658ab2a43"
],
"index": "pypi",
"version": "==1.4.0"
"version": "==1.4.1"
},
"psycopg2-binary": {
"hashes": [
@ -1272,7 +1266,7 @@
},
"spiffworkflow": {
"git": "https://github.com/sartography/SpiffWorkflow",
"ref": "c9f2af14888bc12e22e2cdf6c20bc624c6ddb681"
"ref": "747b0a9cafeb2900264dbc5235c01c2386c55bd1"
},
"sqlalchemy": {
"hashes": [
@ -1489,53 +1483,50 @@
},
"coverage": {
"hashes": [
"sha256:012157499ec4f135fc36cd2177e3d1a1840af9b236cbe80e9a5ccfc83d912a69",
"sha256:0a34d313105cdd0d3644c56df2d743fe467270d6ab93b5d4a347eb9fec8924d6",
"sha256:11e61c5548ecf74ea1f8b059730b049871f0e32b74f88bd0d670c20c819ad749",
"sha256:152cc2624381df4e4e604e21bd8e95eb8059535f7b768c1fb8b8ae0b26f47ab0",
"sha256:1b4285fde5286b946835a1a53bba3ad41ef74285ba9e8013e14b5ea93deaeafc",
"sha256:27a94db5dc098c25048b0aca155f5fac674f2cf1b1736c5272ba28ead2fc267e",
"sha256:27ac7cb84538e278e07569ceaaa6f807a029dc194b1c819a9820b9bb5dbf63ab",
"sha256:2a491e159294d756e7fc8462f98175e2d2225e4dbe062cca7d3e0d5a75ba6260",
"sha256:2bc85664b06ba42d14bb74d6ddf19d8bfc520cb660561d2d9ce5786ae72f71b5",
"sha256:32168001f33025fd756884d56d01adebb34e6c8c0b3395ca8584cdcee9c7c9d2",
"sha256:3c4ce3b647bd1792d4394f5690d9df6dc035b00bcdbc5595099c01282a59ae01",
"sha256:433b99f7b0613bdcdc0b00cc3d39ed6d756797e3b078d2c43f8a38288520aec6",
"sha256:4578728c36de2801c1deb1c6b760d31883e62e33f33c7ba8f982e609dc95167d",
"sha256:509c68c3e2015022aeda03b003dd68fa19987cdcf64e9d4edc98db41cfc45d30",
"sha256:51372e24b1f7143ee2df6b45cff6a721f3abe93b1e506196f3ffa4155c2497f7",
"sha256:5d008e0f67ac800b0ca04d7914b8501312c8c6c00ad8c7ba17754609fae1231a",
"sha256:649df3641eb351cdfd0d5533c92fc9df507b6b2bf48a7ef8c71ab63cbc7b5c3c",
"sha256:6e78b1e25e5c5695dea012be473e442f7094d066925604be20b30713dbd47f89",
"sha256:72d9d186508325a456475dd05b1756f9a204c7086b07fffb227ef8cee03b1dc2",
"sha256:7d82c610a2e10372e128023c5baf9ce3d270f3029fe7274ff5bc2897c68f1318",
"sha256:7ee317486593193e066fc5e98ac0ce712178c21529a85c07b7cb978171f25d53",
"sha256:7eed8459a2b81848cafb3280b39d7d49950d5f98e403677941c752e7e7ee47cb",
"sha256:823f9325283dc9565ba0aa2d240471a93ca8999861779b2b6c7aded45b58ee0f",
"sha256:85c5fc9029043cf8b07f73fbb0a7ab6d3b717510c3b5642b77058ea55d7cacde",
"sha256:86c91c511853dfda81c2cf2360502cb72783f4b7cebabef27869f00cbe1db07d",
"sha256:8e0c3525b1a182c8ffc9bca7e56b521e0c2b8b3e82f033c8e16d6d721f1b54d6",
"sha256:987a84ff98a309994ca77ed3cc4b92424f824278e48e4bf7d1bb79a63cfe2099",
"sha256:9ed3244b415725f08ca3bdf02ed681089fd95e9465099a21c8e2d9c5d6ca2606",
"sha256:a189036c50dcd56100746139a459f0d27540fef95b09aba03e786540b8feaa5f",
"sha256:a4748349734110fd32d46ff8897b561e6300d8989a494ad5a0a2e4f0ca974fc7",
"sha256:a5d79c9af3f410a2b5acad91258b4ae179ee9c83897eb9de69151b179b0227f5",
"sha256:a7596aa2f2b8fa5604129cfc9a27ad9beec0a96f18078cb424d029fdd707468d",
"sha256:ab4fc4b866b279740e0d917402f0e9a08683e002f43fa408e9655818ed392196",
"sha256:bde4aeabc0d1b2e52c4036c54440b1ad05beeca8113f47aceb4998bb7471e2c2",
"sha256:c72bb4679283c6737f452eeb9b2a0e570acaef2197ad255fb20162adc80bea76",
"sha256:c8582e9280f8d0f38114fe95a92ae8d0790b56b099d728cc4f8a2e14b1c4a18c",
"sha256:ca29c352389ea27a24c79acd117abdd8a865c6eb01576b6f0990cd9a4e9c9f48",
"sha256:ce443a3e6df90d692c38762f108fc4c88314bf477689f04de76b3f252e7a351c",
"sha256:d1675db48490e5fa0b300f6329ecb8a9a37c29b9ab64fa9c964d34111788ca2d",
"sha256:da1a428bdbe71f9a8c270c7baab29e9552ac9d0e0cba5e7e9a4c9ee6465d258d",
"sha256:e4ff163602c5c77e7bb4ea81ba5d3b793b4419f8acd296aae149370902cf4e92",
"sha256:e67ccd53da5958ea1ec833a160b96357f90859c220a00150de011b787c27b98d",
"sha256:e8071e7d9ba9f457fc674afc3de054450be2c9b195c470147fbbc082468d8ff7",
"sha256:fff16a30fdf57b214778eff86391301c4509e327a65b877862f7c929f10a4253"
"sha256:1245ab82e8554fa88c4b2ab1e098ae051faac5af829efdcf2ce6b34dccd5567c",
"sha256:1bc6d709939ff262fd1432f03f080c5042dc6508b6e0d3d20e61dd045456a1a0",
"sha256:25e73d4c81efa8ea3785274a2f7f3bfbbeccb6fcba2a0bdd3be9223371c37554",
"sha256:276b13cc085474e482566c477c25ed66a097b44c6e77132f3304ac0b039f83eb",
"sha256:2aed4761809640f02e44e16b8b32c1a5dee5e80ea30a0ff0912158bde9c501f2",
"sha256:2dd70a167843b4b4b2630c0c56f1b586fe965b4f8ac5da05b6690344fd065c6b",
"sha256:352c68e233409c31048a3725c446a9e48bbff36e39db92774d4f2380d630d8f8",
"sha256:3f2b05757c92ad96b33dbf8e8ec8d4ccb9af6ae3c9e9bd141c7cc44d20c6bcba",
"sha256:448d7bde7ceb6c69e08474c2ddbc5b4cd13c9e4aa4a717467f716b5fc938a734",
"sha256:463e52616ea687fd323888e86bf25e864a3cc6335a043fad6bbb037dbf49bbe2",
"sha256:482fb42eea6164894ff82abbcf33d526362de5d1a7ed25af7ecbdddd28fc124f",
"sha256:56c4a409381ddd7bbff134e9756077860d4e8a583d310a6f38a2315b9ce301d0",
"sha256:56d296cbc8254a7dffdd7bcc2eb70be5a233aae7c01856d2d936f5ac4e8ac1f1",
"sha256:5e15d424b8153756b7c903bde6d4610be0c3daca3986173c18dd5c1a1625e4cd",
"sha256:618eeba986cea7f621d8607ee378ecc8c2504b98b3fdc4952b30fe3578304687",
"sha256:61d47a897c1e91f33f177c21de897267b38fbb45f2cd8e22a710bcef1df09ac1",
"sha256:621f6ea7260ea2ffdaec64fe5cb521669984f567b66f62f81445221d4754df4c",
"sha256:6a5cdc3adb4f8bb8d8f5e64c2e9e282bc12980ef055ec6da59db562ee9bdfefa",
"sha256:6c3f6158b02ac403868eea390930ae64e9a9a2a5bbfafefbb920d29258d9f2f8",
"sha256:704f89b87c4f4737da2860695a18c852b78ec7279b24eedacab10b29067d3a38",
"sha256:72128176fea72012063200b7b395ed8a57849282b207321124d7ff14e26988e8",
"sha256:78fbb2be068a13a5d99dce9e1e7d168db880870f7bc73f876152130575bd6167",
"sha256:7bff3a98f63b47464480de1b5bdd80c8fade0ba2832c9381253c9b74c4153c27",
"sha256:84f2436d6742c01136dd940ee158bfc7cf5ced3da7e4c949662b8703b5cd8145",
"sha256:9976fb0a5709988778ac9bc44f3d50fccd989987876dfd7716dee28beed0a9fa",
"sha256:9ad0a117b8dc2061ce9461ea4c1b4799e55edceb236522c5b8f958ce9ed8fa9a",
"sha256:9e3dd806f34de38d4c01416344e98eab2437ac450b3ae39c62a0ede2f8b5e4ed",
"sha256:9eb494070aa060ceba6e4bbf44c1bc5fa97bfb883a0d9b0c9049415f9e944793",
"sha256:9fde6b90889522c220dd56a670102ceef24955d994ff7af2cb786b4ba8fe11e4",
"sha256:9fff3ff052922cb99f9e52f63f985d4f7a54f6b94287463bc66b7cdf3eb41217",
"sha256:a06c358f4aed05fa1099c39decc8022261bb07dfadc127c08cfbd1391b09689e",
"sha256:a4f923b9ab265136e57cc14794a15b9dcea07a9c578609cd5dbbfff28a0d15e6",
"sha256:c5b81fb37db76ebea79aa963b76d96ff854e7662921ce742293463635a87a78d",
"sha256:d5ed164af5c9078596cfc40b078c3b337911190d3faeac830c3f1274f26b8320",
"sha256:d651fde74a4d3122e5562705824507e2f5b2d3d57557f1916c4b27635f8fbe3f",
"sha256:de73fca6fb403dd72d4da517cfc49fcf791f74eee697d3219f6be29adf5af6ce",
"sha256:e647a0be741edbb529a72644e999acb09f2ad60465f80757da183528941ff975",
"sha256:e92c7a5f7d62edff50f60a045dc9542bf939758c95b2fcd686175dd10ce0ed10",
"sha256:eeffd96882d8c06d31b65dddcf51db7c612547babc1c4c5db6a011abe9798525",
"sha256:f5a4551dfd09c3bd12fca8144d47fe7745275adf3229b7223c2f9e29a975ebda",
"sha256:fac0bcc5b7e8169bffa87f0dcc24435446d329cbc2b5486d155c2e0f3b493ae1"
],
"index": "pypi",
"version": "==6.3"
"version": "==6.3.1"
},
"iniconfig": {
"hashes": [
@ -1554,11 +1545,11 @@
},
"pbr": {
"hashes": [
"sha256:176e8560eaf61e127817ef93d8a844803abb27a4d4637f0ff3bb783129be2e0a",
"sha256:672d8ebee84921862110f23fcec2acea191ef58543d34dfe9ef3d9f13c31cddf"
"sha256:27108648368782d07bbf1cb468ad2e2eeef29086affd14087a6d04b7de8af4ec",
"sha256:66bc5a34912f408bb3925bf21231cb6f59206267b7f63f3503ef865c1a292e25"
],
"index": "pypi",
"version": "==5.8.0"
"version": "==5.8.1"
},
"pluggy": {
"hashes": [
@ -1586,19 +1577,19 @@
},
"pytest": {
"hashes": [
"sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89",
"sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"
"sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db",
"sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171"
],
"index": "pypi",
"version": "==6.2.5"
"version": "==7.0.1"
},
"toml": {
"tomli": {
"hashes": [
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
"sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.10.2"
"markers": "python_version >= '3.7'",
"version": "==2.0.1"
}
}
}

View File

@ -79,11 +79,11 @@ GITHUB_REPO = environ.get('GITHUB_REPO', None)
TARGET_BRANCH = environ.get('TARGET_BRANCH', None)
# Git settings, used by git_service
# The above Github settings--used in file_service, will likely be deprecated
# You can override these settings in instance/config
GIT_REMOTE_PATH = environ.get('GIT_REMOTE_PATH', None)
GIT_BRANCH = environ.get('GIT_BRANCH', None)
GIT_MERGE_BRANCH = environ.get('GIT_MERGE_BRANCH', None) # Developers can set this to 'all' in instance.config
# Among other things, we use these to build a remote URL like https://username:password@host/path.git
GIT_REMOTE_SERVER = environ.get('GIT_REMOTE_SERVER', None) # example: 'github.com'
GIT_REMOTE_PATH = environ.get('GIT_REMOTE_PATH', None) # example: 'sartography/crconnect-workflow-specs
GIT_BRANCH = environ.get('GIT_BRANCH', None) # example: 'main'
GIT_MERGE_BRANCH = environ.get('GIT_MERGE_BRANCH', None) # Example: 'staging'
# Email configuration
DEFAULT_SENDER = 'uvacrconnect@virginia.edu'
@ -98,3 +98,6 @@ MAIL_PASSWORD = environ.get('MAIL_PASSWORD', default='')
# Local file path
SYNC_FILE_ROOT = environ.get('SYNC_FILE_ROOT', default='tests/data/IMPORT_TEST')
# Turn on/off processing waiting tasks
PROCESS_WAITING_TASKS = environ.get('PROCESS_WAITING_TASKS', default='true')

View File

@ -65,9 +65,10 @@ def process_waiting_tasks():
@app.before_first_request
def init_scheduler():
scheduler.add_job(process_waiting_tasks, 'interval', minutes=1)
# scheduler.add_job(FileService.cleanup_file_data, 'interval', minutes=1440) # once a day
scheduler.start()
if app.config['PROCESS_WAITING_TASKS']:
scheduler.add_job(process_waiting_tasks, 'interval', minutes=1)
scheduler.add_job(WorkflowService().process_erroring_workflows, 'interval', minutes=1440)
scheduler.start()
# Convert list of allowed origins to list of regexes
@ -121,8 +122,8 @@ def validate_all(study_id, category=None, spec_id=None):
"""Step through all the local workflows and validate them, returning any errors. This make take forever.
Please provide a real study id to use for validation, an optional category can be specified to only validate
that category, and you can further specify a specific spec, if needed."""
from crc.models.workflow import WorkflowSpecModel
from crc.services.workflow_service import WorkflowService
from crc.services.workflow_spec_service import WorkflowSpecService
from crc.api.common import ApiError
from crc.models.study import StudyModel
from crc.models.user import UserModel
@ -131,7 +132,7 @@ def validate_all(study_id, category=None, spec_id=None):
study = session.query(StudyModel).filter(StudyModel.id == study_id).first()
g.user = session.query(UserModel).filter(UserModel.uid == study.user_uid).first()
g.token = "anything_is_fine_just_need_something."
specs = session.query(WorkflowSpecModel).all()
specs = WorkflowSpecService.get_specs()
for spec in specs:
if spec_id and spec_id != spec.id:
continue

View File

@ -2381,10 +2381,9 @@ components:
merge_branch:
type: string
example: staging
# status:
# type: string
# example: staging
changes:
type: array
example: ['file_1.txt', 'file_2.txt']
untracked:
type: array
example: ['a_file.txt', 'b_file.txt']

View File

@ -214,8 +214,12 @@ def restart_workflow(workflow_id, clear_data=False, delete_files=False):
"""Restart a workflow with the latest spec.
Clear data allows user to restart the workflow without previous data."""
workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
WorkflowProcessor.reset(workflow_model, clear_data=clear_data, delete_files=delete_files)
return get_workflow(workflow_model.id)
processor = WorkflowProcessor.reset(workflow_model, clear_data=clear_data, delete_files=delete_files)
processor.do_engine_steps()
processor.save()
WorkflowService.update_task_assignments(processor)
workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
return WorkflowApiSchema().dump(workflow_api_model)
def get_task_events(action = None, workflow = None, study = None):
@ -254,6 +258,9 @@ def set_current_task(workflow_id, task_id):
processor = WorkflowProcessor(workflow_model)
task_id = uuid.UUID(task_id)
spiff_task = processor.bpmn_workflow.get_task(task_id)
if not spiff_task:
# An invalid task_id was requested.
raise ApiError("invalid_task", "The Task you requested no longer exists as a part of this workflow.")
_verify_user_and_role(processor, spiff_task)
user_uid = UserService.current_user(allow_admin_impersonate=True).uid
if spiff_task.state != spiff_task.COMPLETED and spiff_task.state != spiff_task.READY:

View File

@ -182,8 +182,8 @@ class LookupFileModel(db.Model):
task_spec_id = db.Column(db.String)
field_id = db.Column(db.String)
file_name = db.Column(db.String)
file_timestamp = db.Column(db.FLOAT) #The file systems time stamp, to check for changes to the file.
is_ldap = db.Column(db.Boolean) # Allows us to run an ldap query instead of a db lookup.
last_updated = db.Column(db.DateTime(timezone=True))
dependencies = db.relationship("LookupDataModel", lazy="select", backref="lookup_file_model",
cascade="all, delete, delete-orphan")

View File

@ -0,0 +1,22 @@
from crc.api.common import ApiError
from crc.models.workflow import WorkflowSpecInfoSchema
from crc.scripts.script import Script
from crc.services.workflow_spec_service import WorkflowSpecService
class ScriptTemplate(Script):
def get_description(self):
return """Get workflow spec information from a workflow spec id"""
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
return self.do_task(task, study_id, workflow_id, *args, **kwargs)
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
if len(args) < 1:
raise ApiError(code='missing_spec_id',
message='The get_spec_from_id script requires a spec_id.')
spec_id = args[0]
workflow_spec = WorkflowSpecService().get_spec(spec_id)
return WorkflowSpecInfoSchema().dump(workflow_spec)

View File

@ -63,7 +63,6 @@ class DocumentService(object):
expand = file.workflow_id == int(workflow_id)
else:
expand = False
print(expand)
categories = [x for x in [doc_code['category1'], doc_code['category2'], doc_code['category3'], file] if x]
DocumentService.ensure_exists(directory, categories, expanded=expand)
return directory
@ -94,8 +93,6 @@ class DocumentService(object):
new_level.expanded = expanded
output.append(new_level)
DocumentService.ensure_exists(new_level.children, categories[1:], expanded)
else:
print("Found it")
else:
new_level = DocumentDirectory(file=current_item)
new_level.expanded = expanded

View File

@ -2,6 +2,8 @@ import datetime
import os
from typing import List
import pytz
from crc import app
from crc.api.common import ApiError
from crc.models.file import FileType, CONTENT_TYPES, File
@ -82,11 +84,17 @@ class FileSystemService(object):
'The file you provided does not have an accepted extension:' +
file_extension, status_code=404)
@staticmethod
def _timestamp(file_path: str):
return os.path.getmtime(file_path)
@staticmethod
def _last_modified(file_path: str):
# Returns the last modified date of the given file.
timestamp = os.path.getmtime(file_path)
return datetime.datetime.fromtimestamp(timestamp)
utc_dt = datetime.datetime.utcfromtimestamp(timestamp)
aware_utc_dt = utc_dt.replace(tzinfo=pytz.utc)
return aware_utc_dt
@staticmethod
def file_type(file_name):
@ -100,8 +108,10 @@ class FileSystemService(object):
items = os.scandir(file_path)
for item in items:
if item.is_file():
if item.name.startswith('.'):
continue # Ignore hidden files
if item.name == FileSystemService.WF_JSON_FILE:
continue # Ignore the json files.
continue # Ignore the json files.
if file_name is not None and item.name != file_name:
continue
file = FileSystemService.to_file_object_from_dir_entry(item)
@ -127,6 +137,6 @@ class FileSystemService(object):
raise ApiError("invalid_type", "Invalid File Type: %s, for file %s" % (extension, item.name))
stats = item.stat()
file_size = stats.st_size
last_modified = datetime.datetime.fromtimestamp(stats.st_mtime)
last_modified = FileSystemService._last_modified(item.path)
return File.from_file_system(item.name, file_type, content_type, last_modified, file_size)

View File

@ -29,11 +29,10 @@ class GitService(object):
@staticmethod
def get_remote_url(remote_path):
# we use github
# Note that the 'password' is a token generated by github, not the site password
host = app.config['GIT_REMOTE_SERVER']
username = app.config["GIT_USER_NAME"]
password = app.config["GIT_USER_PASS"]
remote_url = f"https://{username}:{password}@github.com/{remote_path}.git"
remote_url = f"https://{username}:{password}@{host}/{remote_path}.git"
return remote_url
@staticmethod

View File

@ -52,14 +52,16 @@ class LookupService(object):
@staticmethod
def get_lookup_model_for_reference(file_name, value_column, label_column):
timestamp = ReferenceFileService().timestamp(file_name)
lookup_model = db.session.query(LookupFileModel).\
filter(LookupFileModel.file_name == file_name). \
filter(LookupFileModel.workflow_spec_id == None).\
filter(LookupFileModel.file_timestamp == timestamp).\
first() # use "==" not "is none" which does NOT work, and makes this constantly expensive.
if not lookup_model:
logging.warning("!!!! Making a very expensive call to update the lookup model.")
file_data = ReferenceFileService().get_data(file_name)
lookup_model = LookupService.build_lookup_table(file_name, file_data, value_column, label_column)
lookup_model = LookupService.build_lookup_table(file_name, file_data, timestamp, value_column, label_column)
return lookup_model
@staticmethod
@ -76,10 +78,12 @@ class LookupService(object):
if lookup_model:
if lookup_model.is_ldap: # LDAP is always current
is_current = True
else:
elif lookup_model.file_name is not None and lookup_model.file_timestamp is not None:
# In some legacy cases, the lookup model might exist, but not have a file name, in which case we need
# to rebuild.
workflow_spec = WorkflowSpecService().get_spec(workflow.workflow_spec_id)
current_date = SpecFileService.last_modified(workflow_spec, lookup_model.file_name)
is_current = current_date == lookup_model.last_updated
timestamp = SpecFileService.timestamp(workflow_spec, lookup_model.file_name)
is_current = timestamp == lookup_model.file_timestamp
if not is_current:
# Very very very expensive, but we don't know need this till we do.
@ -144,8 +148,9 @@ class LookupService(object):
file = latest_files[0]
file_data = SpecFileService().get_data(workflow_spec, file_name)
timestamp = SpecFileService.timestamp(workflow_spec, file_name)
lookup_model = LookupService.build_lookup_table(file_name, file_data, value_column, label_column,
lookup_model = LookupService.build_lookup_table(file_name, file_data, timestamp, value_column, label_column,
workflow_model.workflow_spec_id, task_spec_id, field_id)
# Use the results of an LDAP request to populate enum field options
@ -164,7 +169,7 @@ class LookupService(object):
return lookup_model
@staticmethod
def build_lookup_table(file_name, file_data, value_column, label_column,
def build_lookup_table(file_name, file_data, timestamp, value_column, label_column,
workflow_spec_id=None, task_spec_id=None, field_id=None):
""" In some cases the lookup table can be very large. This method will add all values to the database
in a way that can be searched and returned via an api call - rather than sending the full set of
@ -200,6 +205,7 @@ class LookupService(object):
field_id=field_id,
task_spec_id=task_spec_id,
file_name=file_name,
file_timestamp=timestamp,
is_ldap=False)
db.session.add(lookup_model)

View File

@ -73,3 +73,11 @@ class ReferenceFileService(FileSystemService):
def delete(file_name):
    """Remove the named reference file from disk."""
    os.remove(ReferenceFileService.file_path(file_name))
@staticmethod
def last_modified(file_name):
    """Return the last-modified datetime of the named reference file."""
    path = ReferenceFileService.file_path(file_name)
    return FileSystemService._last_modified(path)
@staticmethod
def timestamp(file_name):
    """Return the raw mtime (float) of the named reference file."""
    path = ReferenceFileService.file_path(file_name)
    return FileSystemService._timestamp(path)

View File

@ -76,6 +76,11 @@ class SpecFileService(FileSystemService):
path = SpecFileService.file_path(spec, file_name)
return FileSystemService._last_modified(path)
@staticmethod
def timestamp(spec: WorkflowSpecInfo, file_name: str):
    """Return the raw mtime (float) of *file_name* within the given spec."""
    return FileSystemService._timestamp(SpecFileService.file_path(spec, file_name))
@staticmethod
def delete_file(spec, file_name):
# Fixme: Remember to remove the lookup files when the spec file is removed.

View File

@ -195,66 +195,6 @@ class UserFileService(object):
app.logger.info("Failed to delete file, so archiving it instead. %i, due to %s" % (file_id, str(ie)))
raise ApiError('Delete Failed', "Unable to delete file. ")
@staticmethod
def get_repo_branches():
    """List the branch names of the configured GitHub repository."""
    client = Github(app.config['GITHUB_TOKEN'])
    repo = client.get_user().get_repo(app.config['GITHUB_REPO'])
    return [b.name for b in repo.get_branches()]
@staticmethod
def update_from_github(file_ids, source_target=GithubObject.NotSet):
    """Overwrite the newest FileDataModel row of each given file id with the
    file's current content from the configured GitHub repository.

    :param file_ids: ids of FileModel rows to refresh.
    :param source_target: branch/ref to read from; GithubObject.NotSet means
        the repository default branch.
    :returns: an error dict if a file is missing from the repository,
        otherwise None after committing the updates.
    """
    gh_token = app.config['GITHUB_TOKEN']
    github_repo = app.config['GITHUB_REPO']
    _github = Github(gh_token)
    repo = _github.get_user().get_repo(github_repo)
    for file_id in file_ids:
        # Highest-version row is the current content for this file.
        file_data_model = FileDataModel.query.filter_by(
            file_model_id=file_id
        ).order_by(
            desc(FileDataModel.version)
        ).first()
        try:
            repo_file = repo.get_contents(file_data_model.file_model.name, ref=source_target)
        except UnknownObjectException:
            # NOTE(review): aborts the whole batch on the first missing file,
            # without committing updates staged earlier in this loop — confirm
            # that partial-batch rollback is the intended behavior.
            return {'error': 'Attempted to update from repository but file was not present'}
        else:
            file_data_model.data = repo_file.decoded_content
            session.add(file_data_model)
    session.commit()
@staticmethod
def publish_to_github(file_ids):
    """Publish the stored content of each file id to the configured GitHub
    repository, creating the file on the target branch if it does not exist
    or updating it if it does.

    :param file_ids: ids of FileModel rows to publish.
    :returns: {'created': True} or {'updated': True} for the first file
        processed (see NOTE below).
    """
    target_branch = app.config['TARGET_BRANCH'] if app.config['TARGET_BRANCH'] else GithubObject.NotSet
    gh_token = app.config['GITHUB_TOKEN']
    github_repo = app.config['GITHUB_REPO']
    _github = Github(gh_token)
    repo = _github.get_user().get_repo(github_repo)
    for file_id in file_ids:
        file_data_model = FileDataModel.query.filter_by(file_model_id=file_id).first()
        try:
            repo_file = repo.get_contents(file_data_model.file_model.name, ref=target_branch)
        except UnknownObjectException:
            # File is not on the branch yet -> create it.
            repo.create_file(
                path=file_data_model.file_model.name,
                message=f'Creating {file_data_model.file_model.name}',
                content=file_data_model.data,
                branch=target_branch
            )
            return {'created': True}
        else:
            # Fixed: stray debug bytes (+ b'brah-model') were appended to the
            # published content, corrupting every updated file; the stored
            # data is now published unmodified.
            updated = repo.update_file(
                path=repo_file.path,
                message=f'Updating {file_data_model.file_model.name}',
                content=file_data_model.data,
                sha=repo_file.sha,
                branch=target_branch
            )
            # NOTE(review): returning inside the loop means only the first
            # file id is ever published — confirm whether batch publishing
            # was intended before changing the return contract.
            return {'updated': True}
@staticmethod
def dmn_from_spreadsheet(ss_data):

View File

@ -1,6 +1,7 @@
from typing import List
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.specs.events import EndEvent, CancelEventDefinition
from SpiffWorkflow.serializer.exceptions import MissingSpecError
from SpiffWorkflow.util.metrics import timeit, firsttime, sincetime
from lxml import etree
@ -9,7 +10,6 @@ from datetime import datetime
from SpiffWorkflow import Task as SpiffTask, WorkflowException, Task
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
@ -145,12 +145,11 @@ class WorkflowProcessor(object):
@staticmethod
def reset(workflow_model, clear_data=False, delete_files=False):
print('WorkflowProcessor: reset: ')
# Try to execute a cancel notify
try:
wp = WorkflowProcessor(workflow_model)
wp.cancel_notify() # The executes a notification to all endpoints that
wp.cancel_notify() # The executes a notification to all endpoints that
except Exception as e:
app.logger.error(f"Unable to send a cancel notify for workflow %s during a reset."
f" Continuing with the reset anyway so we don't get in an unresolvable"
@ -277,8 +276,10 @@ class WorkflowProcessor(object):
def cancel_notify(self):
try:
self.bpmn_workflow.signal('cancel') # generate a cancel signal.
self.bpmn_workflow.cancel_notify() # call cancel_notify in
# A little hackly, but make the bpmn_workflow catch a cancel event.
self.bpmn_workflow.signal('cancel') # generate a cancel signal.
self.bpmn_workflow.catch(CancelEventDefinition())
self.bpmn_workflow.do_engine_steps()
except WorkflowTaskExecException as we:
raise ApiError.from_workflow_exception("task_error", str(we), we)

View File

@ -11,11 +11,10 @@ from typing import List
import jinja2
from SpiffWorkflow import Task as SpiffTask, WorkflowException, NavItem
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask
from SpiffWorkflow.bpmn.specs.ScriptTask import ScriptTask
from SpiffWorkflow.bpmn.specs.StartEvent import StartEvent
from SpiffWorkflow.bpmn.specs.UserTask import UserTask
from SpiffWorkflow.bpmn.specs.events import EndEvent, StartEvent
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
from SpiffWorkflow.specs import CancelTask, StartTask
from SpiffWorkflow.util.deep_merge import DeepMerge
@ -42,6 +41,9 @@ from crc.services.user_service import UserService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.workflow_spec_service import WorkflowSpecService
from flask import request
from sentry_sdk import capture_message, push_scope
class WorkflowService(object):
TASK_ACTION_COMPLETE = "COMPLETE"
@ -125,6 +127,53 @@ class WorkflowService(object):
workflow_model.study_id,
str(e)))
@staticmethod
def get_erroring_workflows():
    """Return every workflow currently in the 'erroring' state."""
    return session.query(WorkflowModel).filter(
        WorkflowModel.status == WorkflowStatus.erroring).all()
@staticmethod
def get_workflow_url(workflow):
    """Build the frontend URL at which the given workflow can be restarted."""
    frontend_host = app.config['FRONTEND']
    return f'https://{frontend_host}/workflow/{workflow.id}'
def process_erroring_workflows(self):
    """Report workflows stuck in an error state to Sentry.

    Builds a human-readable message listing restart URLs, sends it via
    sentry_sdk.capture_message, and returns the message text so tests can
    inspect it.  Returns None when no workflows are erroring.
    """
    workflows = self.get_erroring_workflows()
    if not workflows:
        return
    workflow_urls = [self.get_workflow_url(wf) for wf in workflows]
    if len(workflows) == 1:
        message = 'There is one workflow in an error state.'
        message += f'\n You can restart the workflow at {workflow_urls[0]}.'
    else:
        message = f'There are {len(workflows)} workflows in an error state.'
        message += '\nYou can restart the workflows at these URLs:'
        for url in workflow_urls:
            message += f'\n{url}'
    with push_scope() as scope:
        scope.user = {"urls": workflow_urls}
        scope.set_extra("workflow_urls", workflow_urls)
        # Deliver the report through Sentry.
        capture_message(message)
    return message
@staticmethod
def raise_if_disabled(spec_id, study_id):
    """Raise an ApiError if the workflow spec is disabled for this study."""
    if study_id is None:
        return
    study_model = session.query(StudyModel).filter(StudyModel.id == study_id).first()
    spec_model = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.id == spec_id).first()
    status = StudyService._get_study_status(study_model)
    spec_status = status.get(spec_model.id) if spec_model.id in status else None
    if spec_status is not None and spec_status['status'] == 'disabled':
        raise ApiError(code='disabled_workflow', message=f"This workflow is disabled. {spec_status['message']}")
@staticmethod
@timeit
def test_spec(spec_id, validate_study_id=None, test_until=None, required_only=False):
@ -542,6 +591,8 @@ class WorkflowService(object):
return FileSchema().dump(file)
elif field.type == 'files':
return random.randrange(1, 100)
elif field.type == 'date':
return datetime.utcnow()
else:
return WorkflowService._random_string()

View File

@ -44,7 +44,7 @@ markupsafe==1.1.1
marshmallow==3.9.1
marshmallow-enum==1.5.1
marshmallow-sqlalchemy==0.24.1
numpy==1.19.4
numpy==1.21.0
openapi-spec-validator==0.2.9
openpyxl==3.0.5
packaging==20.4

View File

@ -0,0 +1,30 @@
"""empty message
Revision ID: 3c56c894ff5c
Revises: 29bad12c9945
Create Date: 2022-02-17 11:52:52.335700
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '3c56c894ff5c'
down_revision = '29bad12c9945'
branch_labels = None
depends_on = None
def upgrade():
    """Replace lookup_file.last_updated (TIMESTAMP) with file_timestamp,
    a nullable FLOAT holding the source file's raw mtime."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('lookup_file', sa.Column('file_timestamp', sa.FLOAT(), nullable=True))
    op.drop_column('lookup_file', 'last_updated')
    # ### end Alembic commands ###
def downgrade():
    """Revert upgrade(): restore lookup_file.last_updated (TIMESTAMP) and
    drop the file_timestamp FLOAT column.  Note the original timestamp
    values are not recoverable."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('lookup_file', sa.Column('last_updated', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
    op.drop_column('lookup_file', 'file_timestamp')
    # ### end Alembic commands ###

View File

@ -0,0 +1,82 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_19xdwix" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.2.0">
<bpmn:process id="Process_1wfi0e5" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_09e6w2a</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_09e6w2a" sourceRef="StartEvent_1" targetRef="Activity_1lg09if" />
<bpmn:sequenceFlow id="Flow_0cbbsi7" sourceRef="Activity_1lg09if" targetRef="Activity_05yevzg" />
<bpmn:sequenceFlow id="Flow_0dvxkh6" sourceRef="Activity_05yevzg" targetRef="Activity_0phz7ks" />
<bpmn:endEvent id="Event_1ieukoa">
<bpmn:incoming>Flow_19hbirj</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_19hbirj" sourceRef="Activity_0phz7ks" targetRef="Event_1ieukoa" />
<bpmn:userTask id="Activity_1lg09if" name="Get Date" camunda:formKey="DateForm">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="a_date" label="A Date" type="date">
<camunda:validation>
<camunda:constraint name="required" config="True" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_09e6w2a</bpmn:incoming>
<bpmn:outgoing>Flow_0cbbsi7</bpmn:outgoing>
</bpmn:userTask>
<bpmn:scriptTask id="Activity_05yevzg" name="Modify Date">
<bpmn:incoming>Flow_0cbbsi7</bpmn:incoming>
<bpmn:outgoing>Flow_0dvxkh6</bpmn:outgoing>
<bpmn:script>delta1 = timedelta(hours=2)
format = '%Y-%m-%dT%H:%M:%S.%fZ'
the_date = datetime.datetime.strptime(a_date, format)
modified_date = the_date + delta1
del(delta1)</bpmn:script>
</bpmn:scriptTask>
<bpmn:manualTask id="Activity_0phz7ks" name="Display Dates">
<bpmn:documentation># Dates
## A Date
{{ a_date }}
</bpmn:documentation>
<bpmn:incoming>Flow_0dvxkh6</bpmn:incoming>
<bpmn:outgoing>Flow_19hbirj</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1wfi0e5">
<bpmndi:BPMNEdge id="Flow_19hbirj_di" bpmnElement="Flow_19hbirj">
<di:waypoint x="690" y="117" />
<di:waypoint x="752" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0dvxkh6_di" bpmnElement="Flow_0dvxkh6">
<di:waypoint x="530" y="117" />
<di:waypoint x="590" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0cbbsi7_di" bpmnElement="Flow_0cbbsi7">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_09e6w2a_di" bpmnElement="Flow_09e6w2a">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1ieukoa_di" bpmnElement="Event_1ieukoa">
<dc:Bounds x="752" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_05be3cs_di" bpmnElement="Activity_1lg09if">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1s4snzz_di" bpmnElement="Activity_05yevzg">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_15nchcr_di" bpmnElement="Activity_0phz7ks">
<dc:Bounds x="590" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,75 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_daa6ad0" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_GetSpecFromID" name="Get Spec From ID" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_08i8lxh</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_08i8lxh" sourceRef="StartEvent_1" targetRef="Activity_GetSpecID" />
<bpmn:sequenceFlow id="Flow_1fhu6em" sourceRef="Activity_GetSpecID" targetRef="Activity_GetSpec" />
<bpmn:sequenceFlow id="Flow_06e4nx2" sourceRef="Activity_GetSpec" targetRef="Activity_DisplaySpec" />
<bpmn:endEvent id="Event_1o8wpdp">
<bpmn:incoming>Flow_0saprky</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0saprky" sourceRef="Activity_DisplaySpec" targetRef="Event_1o8wpdp" />
<bpmn:userTask id="Activity_GetSpecID" name="Get Spec ID" camunda:formKey="SpecIDForm">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="spec_id" label="Spec ID" type="string">
<camunda:validation>
<camunda:constraint name="required" config="True" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_08i8lxh</bpmn:incoming>
<bpmn:outgoing>Flow_1fhu6em</bpmn:outgoing>
</bpmn:userTask>
<bpmn:scriptTask id="Activity_GetSpec" name="Get Spec">
<bpmn:incoming>Flow_1fhu6em</bpmn:incoming>
<bpmn:outgoing>Flow_06e4nx2</bpmn:outgoing>
<bpmn:script>spec = get_spec_from_id(spec_id)</bpmn:script>
</bpmn:scriptTask>
<bpmn:manualTask id="Activity_DisplaySpec" name="Display Spec">
<bpmn:documentation>## Spec
{{ spec }}
</bpmn:documentation>
<bpmn:incoming>Flow_06e4nx2</bpmn:incoming>
<bpmn:outgoing>Flow_0saprky</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_GetSpecFromID">
<bpmndi:BPMNEdge id="Flow_0saprky_di" bpmnElement="Flow_0saprky">
<di:waypoint x="690" y="177" />
<di:waypoint x="752" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_06e4nx2_di" bpmnElement="Flow_06e4nx2">
<di:waypoint x="530" y="177" />
<di:waypoint x="590" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1fhu6em_di" bpmnElement="Flow_1fhu6em">
<di:waypoint x="370" y="177" />
<di:waypoint x="430" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_08i8lxh_di" bpmnElement="Flow_08i8lxh">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1o8wpdp_di" bpmnElement="Event_1o8wpdp">
<dc:Bounds x="752" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0dq9ius_di" bpmnElement="Activity_GetSpecID">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0ux2vk8_di" bpmnElement="Activity_GetSpec">
<dc:Bounds x="430" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0fzla1m_di" bpmnElement="Activity_DisplaySpec">
<dc:Bounds x="590" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -167,4 +167,4 @@ print('Second Title')</bpmn:script>
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>
</bpmn:definitions>

View File

@ -0,0 +1,15 @@
from tests.base_test import BaseTest
class TestSpecFromWorkflowID(BaseTest):

    def test_get_spec_from_workflow_id(self):
        """The get_spec_from_id script should return the spec whose id the
        user entered in the form."""
        workflow = self.create_workflow('spec_from_id')
        spec_id = workflow.workflow_spec_id
        first_task = self.get_workflow_api(workflow).next_task
        workflow_api = self.complete_form(workflow, first_task, {'spec_id': spec_id})
        spec_data = workflow_api.next_task.data['spec']
        self.assertEqual('spec_from_id', spec_data['id'])
        self.assertEqual('spec_from_id', spec_data['display_name'])

View File

@ -1,6 +1,6 @@
from tests.base_test import BaseTest
from crc import session
from crc import session, db
from crc.models.study import StudyModel, StudySchema
import json
@ -37,7 +37,6 @@ class TestStudyCancellations(BaseTest):
return study_result
def load_workflow(self):
workflow = self.create_workflow('study_cancellations')
study_id = workflow.study_id
return workflow, study_id
@ -66,9 +65,13 @@ class TestStudyCancellations(BaseTest):
mock_details.return_value = json.loads(details_response)
workflow, study_id = self.load_workflow()
self.get_first_task(workflow)
self.get_first_task(workflow) # Asserts we are on the first task in the workflow.
study = db.session.query(StudyModel).filter(StudyModel.id == study_id).first()
self.assertEqual('Beer consumption in the bipedal software engineer', study.title)
study_result = self.put_study_on_hold(study_id)
self.get_first_task(workflow) # Asserts we are on the first task in the workflow.
self.assertEqual('Beer consumption in the bipedal software engineer', study_result.title)
@patch('crc.services.protocol_builder.ProtocolBuilderService.get_study_details') # mock_details
@ -96,6 +99,7 @@ class TestStudyCancellations(BaseTest):
workflow_api, next_task = self.get_second_task(workflow)
self.complete_form(workflow, next_task, {'how_many': 3})
workflow_api, next_task = self.get_third_task(workflow)
study_result = self.put_study_on_hold(study_id)
self.assertEqual('Second Title', study_result.title)

View File

@ -135,6 +135,7 @@ class TestStudyDetailsDocumentsScript(BaseTest):
@patch('crc.services.protocol_builder.requests.get')
def test_file_data_set_invalid_irb_code_fails(self, mock_get):
self.create_reference_document()
mock_get.return_value.ok = True
mock_get.return_value.text = self.protocol_builder_response('required_docs.json')
self.add_studies()

View File

@ -1,5 +1,6 @@
from tests.base_test import BaseTest
from crc import app
from crc.services.git_service import GitService
from unittest.mock import patch, Mock, call
@ -57,6 +58,10 @@ class TestGitService(BaseTest):
self.assertIn(call.index.commit('This is my comment'), method_calls)
self.assertIn(call.remotes.origin.push(), method_calls)
# def test_pull_from_remote(self):
# result = GitService.pull_from_remote()
# print(result)
def test_get_remote_url(self):
app.config['GIT_REMOTE_SERVER'] = 'test_server.com'
app.config['GIT_USER_NAME'] = 'test_username'
app.config['GIT_USER_PASS'] = 'test_pass'
result = GitService.get_remote_url('my_test_path')
self.assertEqual('https://test_username:test_pass@test_server.com/my_test_path.git', result)

View File

@ -6,9 +6,9 @@ from crc.api.common import ApiError
from crc import session, app
from crc.models.file import FileDataModel, FileModel, LookupFileModel, LookupDataModel, CONTENT_TYPES
from crc.services.lookup_service import LookupService
from crc.services.reference_file_service import ReferenceFileService
from crc.services.spec_file_service import SpecFileService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.document_service import DocumentService
class TestLookupService(BaseTest):
@ -24,8 +24,13 @@ class TestLookupService(BaseTest):
def test_lookup_table_is_not_created_more_than_once(self):
spec = self.load_test_spec('enum_options_with_search')
workflow = self.create_workflow('enum_options_with_search')
self.assertEqual(0, session.query(LookupFileModel).count())
LookupService.lookup(workflow, "Task_Enum_Lookup", "sponsor", "sam", limit=10)
self.assertEqual(1, session.query(LookupFileModel).count())
lookup_table_orig = session.query(LookupFileModel).first()
LookupService.lookup(workflow, "Task_Enum_Lookup", "sponsor", "something", limit=10)
lookup_table = session.query(LookupFileModel).first()
self.assertEqual(lookup_table_orig, lookup_table)
LookupService.lookup(workflow, "Task_Enum_Lookup", "sponsor", "blah", limit=10)
lookup_records = session.query(LookupFileModel).all()
self.assertIsNotNone(lookup_records)
@ -182,14 +187,24 @@ class TestLookupService(BaseTest):
# Using an old xls file should raise an error
file_data_xls = SpecFileService().get_data(spec, 'sponsors.xls')
timestamp = SpecFileService().timestamp(spec, 'sponsors.xls')
with self.assertRaises(ApiError) as ae:
LookupService.build_lookup_table('sponsors.xls', file_data_xls, 'CUSTOMER_NUMBER', 'CUSTOMER_NAME')
LookupService.build_lookup_table('sponsors.xls', file_data_xls, timestamp, 'CUSTOMER_NUMBER', 'CUSTOMER_NAME')
self.assertIn('Error opening excel file', ae.exception.args[0])
# Using an xlsx file should work
file_data_xlsx = SpecFileService().get_data(spec, 'sponsors.xlsx')
lookup_model = LookupService.build_lookup_table('sponsors.xlsx', file_data_xlsx,
timestamp = SpecFileService().timestamp(spec, 'sponsors.xlsx')
lookup_model = LookupService.build_lookup_table('sponsors.xlsx', file_data_xlsx, timestamp,
'CUSTOMER_NUMBER', 'CUSTOMER_NAME')
self.assertEqual(28, len(lookup_model.dependencies))
self.assertIn('CUSTOMER_NAME', lookup_model.dependencies[0].data.keys())
self.assertIn('CUSTOMER_NUMBER', lookup_model.dependencies[0].data.keys())
def test_lookup_for_reference_caches_properly(self):
self.create_reference_document()
lookup_model_1 = LookupService.get_lookup_model_for_reference(DocumentService.DOCUMENT_LIST,
'code', 'description')
lookup_model_2 = LookupService.get_lookup_model_for_reference(DocumentService.DOCUMENT_LIST,
'code', 'description')
self.assertEqual(lookup_model_1, lookup_model_2)

View File

@ -0,0 +1,22 @@
from tests.base_test import BaseTest
from crc import session
from crc.models.workflow import WorkflowStatus
from crc.services.workflow_service import WorkflowService
class TestErrorWorkflows(BaseTest):

    def test_error_workflows(self):
        """We only test whether we have good information in the message.
        We do not test whether the message was sent by Sentry."""
        erroring = []
        for _ in range(2):
            wf = self.create_workflow('random_fact')
            wf.status = WorkflowStatus.erroring
            erroring.append(wf)
        session.commit()
        message = WorkflowService().process_erroring_workflows()
        self.assertIn('There are 2 workflows in an error state.', message)
        for wf in erroring:
            self.assertIn(f'workflow/{wf.id}', message)

View File

@ -162,10 +162,13 @@ class TestUserRoles(BaseTest):
self.assertEqual('LOCKED', nav[1].state) # First item belongs to the submitter, and is locked.
self.assertEqual('COMPLETED', nav[2].state) # Second item is locked, it is the review and doesn't belong to this user.
self.assertEqual('READY', nav[3].state) # Gateway is ready, and should be unfolded
self.assertEqual(None, nav[3].children[0].state) # sequence flow for approved is none - we aren't going this way.
self.assertEqual('READY', nav[3].children[1].state) # sequence flow for denied is ready
self.assertEqual('LOCKED', nav[3].children[1].children[0].state) # Feedback is locked, it belongs to submitter
self.assertEqual('LOCKED', nav[3].children[1].children[0].state) # Approval is locked, it belongs to the submitter
# order of these is unclear ...
approved = list(filter(lambda child: child.name == 'approved', nav[3].children))[0]
rejected = list(filter(lambda child: child.name == 'rejected', nav[3].children))[0]
self.assertEqual(None, approved.state) # sequence flow for approved is none - we aren't going this way.
self.assertEqual('READY', rejected.state) # sequence flow for denied is ready
self.assertEqual('LOCKED', rejected.children[0].state) # Feedback is locked, it belongs to submitter
self.assertEqual('LOCKED', rejected.children[0].state) # Approval is locked, it belongs to the submitter
self.assertEqual('LOCKED', workflow_api.next_task.state)
# Navigation as Submitter, coming back in to a rejected workflow to view the rejection message.
@ -175,10 +178,12 @@ class TestUserRoles(BaseTest):
self.assertEqual('COMPLETED', nav[1].state) # First item belongs to the submitter, and is locked.
self.assertEqual('LOCKED', nav[2].state) # Second item is locked, it is the review and doesn't belong to this user.
self.assertEqual('READY', nav[3].state)
self.assertEqual(None, nav[3].children[0].state) # sequence flow for approved is none - we aren't going this way.
self.assertEqual('READY', nav[3].children[1].state) # sequence flow for denied is ready
self.assertEqual('READY', nav[3].children[1].children[0].state) # Feedback is locked, it belongs to submitter
self.assertEqual('READY', nav[3].children[1].children[0].state) # Approval is locked, it belongs to the submitter
# order of these is unclear ...
approved = list(filter(lambda child: child.name == 'approved', nav[3].children))[0]
rejected = list(filter(lambda child: child.name == 'rejected', nav[3].children))[0]
self.assertEqual(None, approved.state) # sequence flow for approved is none - we aren't going this way.
self.assertEqual('READY', rejected.state) # sequence flow for denied is ready
self.assertEqual('READY', rejected.children[0].state) # Feedback is locked, it belongs to submitter
# Navigation as Submitter, re-completing the original request a second time, and sending it for review.
workflow_api = self.complete_form(workflow, workflow_api.next_task, data, user_uid=submitter.uid)

View File

@ -0,0 +1,12 @@
from tests.base_test import BaseTest
class TestDateValidation(BaseTest):

    def test_date_validation(self):
        """We were not instantiating date fields correctly during validation.
        Make sure validation seeds an actual date in date fields instead of
        a random string (no validation errors come back)."""
        spec_model = self.load_test_spec('date_validation')
        url = '/v1.0/workflow-specification/%s/validate' % spec_model.id
        response = self.app.get(url, headers=self.logged_in_headers())
        self.assertEqual([], response.json)

View File

@ -3,9 +3,10 @@ import logging
import os
from unittest.mock import patch
from SpiffWorkflow.bpmn.specs.events import EndEvent
from tests.base_test import BaseTest
from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
from SpiffWorkflow.camunda.specs.UserTask import FormField
from crc import session, db, app