Pi migration (#1892)

* some initial code to migrate a process instance w/ burnettk essweine

* the migration test is working now w/ burnettk essweine

* use the persist method from the pi migration method w/ burnettk

* updated spiffworkflow w/ burnettk

* added api to migrate a process instance w/ burnettk

* fixed tests w/ burnettk

* added api to check if a process instance can be migrated w/ burnettk

* return error if pi is not suspended when attempting to migrate w/ burnettk

* return error if pi is not suspended when attempting to migrate w/ burnettk

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
Co-authored-by: Kevin Burnett <18027+burnettk@users.noreply.github.com>
jasquat 2024-07-09 14:35:14 -04:00 committed by GitHub
parent 738446147e
commit ec21ffb735
15 changed files with 750 additions and 28 deletions


@ -1789,8 +1789,6 @@ files = [
{file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"},
{file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"},
{file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"},
{file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"},
{file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"},
{file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"},
{file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"},
{file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"},
@ -2115,7 +2113,6 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@ -2492,51 +2489,37 @@ python-versions = ">=3.6"
files = [
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"},
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"},
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"},
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"},
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"},
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"},
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"},
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"},
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"},
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"},
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"},
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"},
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"},
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"},
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"},
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"},
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"},
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"},
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"},
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"},
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"},
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"},
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"},
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"},
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"},
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"},
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"},
{file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"},
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"},
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"},
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"},
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"},
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"},
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"},
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"},
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"},
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"},
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"},
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"},
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"},
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"},
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"},
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"},
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"},
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"},
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"},
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"},
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"},
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"},
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"},
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"},
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"},
{file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"},
@ -2850,7 +2833,7 @@ doc = ["sphinx", "sphinx_rtd_theme"]
type = "git" type = "git"
url = "https://github.com/sartography/SpiffWorkflow" url = "https://github.com/sartography/SpiffWorkflow"
reference = "main" reference = "main"
resolved_reference = "7e15a0b68f926161bfa88e1306f3145ee028fad4" resolved_reference = "a7ddab83831eb98371a26d94ecc6978287b965c5"
[[package]] [[package]]
name = "spiffworkflow-connector-command" name = "spiffworkflow-connector-command"


@ -1330,6 +1330,39 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
/process-instances/{modified_process_model_identifier}/{process_instance_id}/check-can-migrate:
parameters:
- name: modified_process_model_identifier
in: path
required: true
description: The unique id of an existing process model
schema:
type: string
- name: process_instance_id
in: path
required: true
description: The unique id of an existing process instance.
schema:
type: integer
get:
tags:
- Process Instances
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_check_can_migrate
summary: Checks if a given process instance can be migrated to the newest version of the process model.
responses:
"200":
description: The result
content:
application/json:
schema:
properties:
can_migrate:
type: boolean
description: True if it can migrate and false if not.
process_instance_id:
type: integer
description: Process instance id.
/process-instances/{modified_process_model_identifier}/{process_instance_id}/run:
parameters:
- name: process_instance_id
@ -1462,6 +1495,33 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
/process-instance-migrate/{modified_process_model_identifier}/{process_instance_id}:
parameters:
- name: modified_process_model_identifier
in: path
required: true
description: The modified process model id
schema:
type: string
- name: process_instance_id
in: path
required: true
description: The unique id of an existing process instance.
schema:
type: integer
post:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_migrate
summary: Migrate a process instance to the new version of its process model.
tags:
- Process Instances
responses:
"200":
description: Empty ok true response on successful reset.
content:
application/json:
schema:
$ref: "#/components/schemas/OkTrue"
/process-instances/reports:
parameters:
- name: page
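
As a rough illustration of how a client might exercise the two endpoints added above, here is a hedged sketch using the Flask test client and BaseTest helpers that the tests later in this commit rely on; the model identifier and instance id are invented for illustration only.

# Sketch only: assumes the BaseTest context (client, self.logged_in_headers, user).
model_id = "some-group:migration-test"  # modified model identifier ("/" replaced with ":")
instance_id = 42
# Ask whether the instance can be migrated to the newest model version.
response = client.get(
    f"/v1.0/process-instances/{model_id}/{instance_id}/check-can-migrate",
    headers=self.logged_in_headers(user),
)
assert response.json == {"can_migrate": True, "process_instance_id": instance_id}
# Perform the migration (the instance must be suspended first; see the controller below).
response = client.post(
    f"/v1.0/process-instance-migrate/{model_id}/{instance_id}",
    headers=self.logged_in_headers(user),
)
assert response.status_code == 200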


@ -6,7 +6,12 @@ class RefreshTokenStorageError(Exception):
pass
class ProcessInstanceMigrationNotSafeError(Exception):
pass
class ProcessInstanceMigrationError(Exception):
pass
# These could be either 'id' OR 'access' tokens and we can't always know which
class TokenExpiredError(Exception):


@ -16,6 +16,7 @@ from spiffworkflow_backend.models.user import UserModel
class ProcessInstanceEventType(SpiffEnum):
process_instance_error = "process_instance_error"
process_instance_force_run = "process_instance_force_run"
process_instance_migrated = "process_instance_migrated"
process_instance_resumed = "process_instance_resumed"
process_instance_rewound_to_task = "process_instance_rewound_to_task"
process_instance_suspended = "process_instance_suspended"


@ -1,3 +1,4 @@
from spiffworkflow_backend.exceptions.error import ProcessInstanceMigrationError, ProcessInstanceMigrationNotSafeError
from spiffworkflow_backend.helpers.spiff_enum import ProcessInstanceExecutionMode
# black and ruff are in competition with each other in import formatting so ignore ruff
@ -546,6 +547,36 @@ def process_instance_reset(
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_instance_check_can_migrate(
process_instance_id: int,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
can_migrate = True
try:
ProcessInstanceService.check_process_instance_can_be_migrated(process_instance)
except ProcessInstanceMigrationNotSafeError:
can_migrate = False
return Response(
json.dumps({"can_migrate": can_migrate, "process_instance_id": process_instance.id}),
status=200,
mimetype="application/json",
)
def process_instance_migrate(
process_instance_id: int,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
if process_instance.status != "suspended":
raise ProcessInstanceMigrationError(
f"The process instance needs to be suspended to migrate it. It is currently: {process_instance.status}"
)
ProcessInstanceService.migrate_process_instance_to_newest_model_version(process_instance, user=g.user)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_instance_find_by_id(
process_instance_id: int,
) -> flask.wrappers.Response:
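
One thing worth calling out from the handler above: migration is only permitted while the instance is suspended, so an API client is expected to suspend first. A hedged sketch of that ordering (the suspend endpoint already exists in this API; identifiers are invented):

# Hypothetical client-side call order.
client.post(f"/v1.0/process-instance-suspend/{model_id}/{instance_id}", headers=headers)
client.post(f"/v1.0/process-instance-migrate/{model_id}/{instance_id}", headers=headers)
# Calling migrate on a non-suspended instance raises ProcessInstanceMigrationError instead.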


@ -70,6 +70,7 @@ PATH_SEGMENTS_FOR_PERMISSION_ALL = [
},
{"path": "/process-data", "relevant_permissions": ["read"]},
{"path": "/process-data-file-download", "relevant_permissions": ["read"]},
{"path": "/process-instance-migrate", "relevant_permissions": ["create"]},
{"path": "/process-instance-suspend", "relevant_permissions": ["create"]}, {"path": "/process-instance-suspend", "relevant_permissions": ["create"]},
{"path": "/process-instance-terminate", "relevant_permissions": ["create"]}, {"path": "/process-instance-terminate", "relevant_permissions": ["create"]},
{"path": "/process-model-natural-language", "relevant_permissions": ["create"]}, {"path": "/process-model-natural-language", "relevant_permissions": ["create"]},
@ -637,7 +638,7 @@ class AuthorizationService:
def set_support_permissions(cls) -> list[PermissionToAssign]:
"""Just like elevated permissions minus access to secrets."""
permissions_to_assign = cls.set_basic_permissions()
for process_instance_action in ["resume", "terminate", "suspend", "reset"]:
for process_instance_action in ["migrate", "resume", "terminate", "suspend", "reset"]:
permissions_to_assign.append(
PermissionToAssign(permission="create", target_uri=f"/process-instance-{process_instance_action}/*")
)
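
For reference, a small sketch of what the amended loop now yields for support users; the only new entry compared to before is the migrate target:

actions = ["migrate", "resume", "terminate", "suspend", "reset"]
print([f"/process-instance-{action}/*" for action in actions])
# ['/process-instance-migrate/*', '/process-instance-resume/*', '/process-instance-terminate/*',
#  '/process-instance-suspend/*', '/process-instance-reset/*']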


@ -19,6 +19,7 @@ from typing import Any
from typing import NewType
from typing import TypedDict
from uuid import UUID
from uuid import uuid4
import dateparser
import pytz
@ -36,6 +37,7 @@ from SpiffWorkflow.bpmn.serializer.default.task_spec import EventConverter # ty
from SpiffWorkflow.bpmn.serializer.helpers.registry import DefaultRegistry  # type: ignore
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
from SpiffWorkflow.bpmn.specs.bpmn_process_spec import BpmnProcessSpec  # type: ignore
from SpiffWorkflow.bpmn.util.diff import WorkflowDiff  # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
@ -414,6 +416,7 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
IdToBpmnProcessSpecMapping = NewType("IdToBpmnProcessSpecMapping", dict[str, BpmnProcessSpec])
SubprocessUuidToWorkflowDiffMapping = NewType("SubprocessUuidToWorkflowDiffMapping", dict[UUID, WorkflowDiff])
class ProcessInstanceProcessor:
@ -470,7 +473,7 @@ class ProcessInstanceProcessor:
self.bpmn_subprocess_mapping: dict[str, BpmnProcessModel] = {}
# this caches the bpmn_process_definition_identifier and task_identifier back to the bpmn_process_id
# in the database. This is to cut down on database queries while adding new tasks to the database.
# Structure:
# { "[[BPMN_PROCESS_DEFINITION_IDENTIFIER]]": {
# "[[TASK_IDENTIFIER]]": [[TASK_DEFINITION]],
@ -524,6 +527,7 @@ class ProcessInstanceProcessor:
bpmn_definition_to_task_definitions_mappings: dict,
process_instance_model: ProcessInstanceModel,
store_process_instance_events: bool = True,
bpmn_process_instance: BpmnWorkflow | None = None,
) -> None:
cls._add_bpmn_process_definitions(
bpmn_process_dict,
@ -531,7 +535,10 @@
process_instance_model=process_instance_model,
force_update=True,
)
bpmn_process_instance = cls.initialize_bpmn_process_instance(bpmn_process_dict)
if bpmn_process_instance is None:
bpmn_process_instance = cls.initialize_bpmn_process_instance(bpmn_process_dict)
task_model_mapping, bpmn_subprocess_mapping = cls.get_db_mappings_from_bpmn_process_dict(bpmn_process_dict)
task_service = TaskService(
@ -1338,6 +1345,42 @@ class ProcessInstanceProcessor:
processor.save()
processor.suspend()
@classmethod
def update_guids_on_tasks(cls, bpmn_process_instance_dict: dict) -> None:
# old -> new
guid_map = {}
def get_guid_map(proc_dict: dict) -> None:
for guid in proc_dict["tasks"].keys():
guid_map[guid] = str(uuid4())
def update_guids(proc_dict: dict) -> None:
new_tasks = {}
for task_guid, task_dict in proc_dict["tasks"].items():
new_guid = guid_map[task_guid]
new_tasks[new_guid] = task_dict
if task_dict["parent"] is not None:
new_tasks[new_guid]["parent"] = guid_map[task_dict["parent"]]
new_children_guids = [guid_map[cg] for cg in task_dict["children"]]
new_tasks[new_guid]["children"] = new_children_guids
new_tasks[new_guid]["id"] = guid_map[task_dict["id"]]
proc_dict["tasks"] = new_tasks
proc_dict["root"] = guid_map[proc_dict["root"]]
proc_dict["last_task"] = guid_map[proc_dict["last_task"]]
get_guid_map(bpmn_process_instance_dict)
for subproc_dict in bpmn_process_instance_dict["subprocesses"].values():
get_guid_map(subproc_dict)
update_guids(bpmn_process_instance_dict)
new_subprocesses = {}
for subproc_guid, subproc_dict in bpmn_process_instance_dict["subprocesses"].items():
new_guid = guid_map[subproc_guid]
new_subprocesses[new_guid] = subproc_dict
new_subprocesses[new_guid]["parent_task_id"] = guid_map[subproc_dict["parent_task_id"]]
update_guids(new_subprocesses[new_guid])
bpmn_process_instance_dict["subprocesses"] = new_subprocesses
@staticmethod
def get_parser() -> MyCustomParser:
parser = MyCustomParser()
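
To make update_guids_on_tasks above concrete, here is a minimal sketch of the serialized dict shape it rewrites. The guid strings are invented; a real serialization uses uuid4 values and typically carries subprocess entries as well.

bpmn_process_dict = {
    "tasks": {
        "guid-root": {"id": "guid-root", "parent": None, "children": ["guid-child"]},
        "guid-child": {"id": "guid-child", "parent": "guid-root", "children": []},
    },
    "root": "guid-root",
    "last_task": "guid-child",
    "subprocesses": {},
}
ProcessInstanceProcessor.update_guids_on_tasks(bpmn_process_dict)
# Every task guid is now a fresh uuid4 string while root/last_task/parent/children still
# point at the same tasks, so the dict can be persisted as a brand new process instance.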


@ -7,14 +7,20 @@ from datetime import datetime
from datetime import timezone
from typing import Any
from urllib.parse import unquote
from uuid import UUID
import sentry_sdk
from flask import current_app
from flask import g
from SpiffWorkflow.bpmn.specs.bpmn_process_spec import BpmnProcessSpec  # type: ignore
from SpiffWorkflow.bpmn.specs.control import BoundaryEventSplit  # type: ignore
from SpiffWorkflow.bpmn.specs.defaults import BoundaryEvent  # type: ignore
from SpiffWorkflow.bpmn.specs.event_definitions.timer import TimerEventDefinition  # type: ignore
from SpiffWorkflow.bpmn.util import PendingBpmnEvent  # type: ignore
from SpiffWorkflow.bpmn.util.diff import WorkflowDiff  # type: ignore
from SpiffWorkflow.bpmn.util.diff import diff_workflow
from SpiffWorkflow.bpmn.util.diff import filter_tasks
from SpiffWorkflow.bpmn.util.diff import migrate_workflow
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
from SpiffWorkflow.util.task import TaskState  # type: ignore
@ -27,6 +33,7 @@ from spiffworkflow_backend.data_migrations.process_instance_migrator import Proc
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.exceptions.error import HumanTaskAlreadyCompletedError
from spiffworkflow_backend.exceptions.error import HumanTaskNotFoundError
from spiffworkflow_backend.exceptions.error import ProcessInstanceMigrationNotSafeError
from spiffworkflow_backend.exceptions.error import UserDoesNotHaveAccessToTaskError
from spiffworkflow_backend.helpers.spiff_enum import ProcessInstanceExecutionMode
from spiffworkflow_backend.models.db import db
@ -41,6 +48,7 @@ from spiffworkflow_backend.models.process_instance_file_data import ProcessInsta
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model_cycle import ProcessModelCycleModel
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
@ -48,7 +56,9 @@ from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.jinja_service import JinjaService
from spiffworkflow_backend.services.process_instance_processor import CustomBpmnScriptEngine
from spiffworkflow_backend.services.process_instance_processor import IdToBpmnProcessSpecMapping
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
from spiffworkflow_backend.services.process_instance_processor import SubprocessUuidToWorkflowDiffMapping
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsNotEnqueuedError
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
@ -134,6 +144,82 @@ class ProcessInstanceService:
ProcessInstanceQueueService.enqueue_new_process_instance(process_instance_model, run_at_in_seconds)
return (process_instance_model, start_configuration)
@classmethod
def check_process_instance_can_be_migrated(
cls, process_instance: ProcessInstanceModel
) -> tuple[
ProcessInstanceProcessor, BpmnProcessSpec, IdToBpmnProcessSpecMapping, WorkflowDiff, SubprocessUuidToWorkflowDiffMapping
]:
(target_bpmn_process_spec, target_subprocess_specs) = ProcessInstanceProcessor.get_process_model_and_subprocesses(
process_instance.process_model_identifier,
)
processor = ProcessInstanceProcessor(
process_instance, include_task_data_for_completed_tasks=True, include_completed_subprocesses=True
)
# tasks that were in the old workflow and are in the new one as well
top_level_bpmn_process_diff, subprocesses_diffs = diff_workflow(
processor._serializer.registry, processor.bpmn_process_instance, target_bpmn_process_spec, target_subprocess_specs
)
if not cls.can_migrate(top_level_bpmn_process_diff, subprocesses_diffs):
raise ProcessInstanceMigrationNotSafeError(
f"It is not safe to migrate process instance {process_instance.id} to "
f"new version of '{process_instance.process_model_identifier}'"
)
return (
processor,
target_bpmn_process_spec,
target_subprocess_specs,
top_level_bpmn_process_diff,
subprocesses_diffs,
)
@classmethod
def migrate_process_instance_to_newest_model_version(
cls, process_instance: ProcessInstanceModel, user: UserModel, preserve_old_process_instance: bool = False
) -> None:
(
processor,
target_bpmn_process_spec,
target_subprocess_specs,
top_level_bpmn_process_diff,
subprocesses_diffs,
) = cls.check_process_instance_can_be_migrated(process_instance)
ProcessInstanceTmpService.add_event_to_process_instance(
process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value
)
migrate_workflow(top_level_bpmn_process_diff, processor.bpmn_process_instance, target_bpmn_process_spec)
for sp_id, sp in processor.bpmn_process_instance.subprocesses.items():
migrate_workflow(subprocesses_diffs[sp_id], sp, target_subprocess_specs.get(sp.spec.name))
processor.bpmn_process_instance.subprocess_specs = target_subprocess_specs
if preserve_old_process_instance:
# TODO: write tests for this code path - no one has a requirement for it yet
bpmn_process_dict = processor.serialize()
ProcessInstanceProcessor.update_guids_on_tasks(bpmn_process_dict)
new_process_instance, _ = cls.create_process_instance_from_process_model_identifier(
process_instance.process_model_identifier, user
)
ProcessInstanceProcessor.persist_bpmn_process_dict(
bpmn_process_dict, bpmn_definition_to_task_definitions_mappings={}, process_instance_model=new_process_instance
)
else:
future_tasks = TaskModel.query.filter(
TaskModel.process_instance_id == process_instance.id,
TaskModel.state.in_(["FUTURE", "MAYBE", "LIKELY"]), # type: ignore
).all()
for ft in future_tasks:
db.session.delete(ft)
db.session.commit()
bpmn_process_dict = processor.serialize()
ProcessInstanceProcessor.persist_bpmn_process_dict(
bpmn_process_dict,
bpmn_definition_to_task_definitions_mappings={},
process_instance_model=process_instance,
bpmn_process_instance=processor.bpmn_process_instance,
)
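
Putting the two classmethods above together, a minimal sketch of driving a migration from code, assuming the process model files have already been updated and the instance is suspended:

# Raises ProcessInstanceMigrationNotSafeError if a changed or removed task already ran.
ProcessInstanceService.check_process_instance_can_be_migrated(process_instance)
# Default path: rewrite the existing instance in place to the newest model version.
# preserve_old_process_instance=True would instead copy the state into a new instance.
ProcessInstanceService.migrate_process_instance_to_newest_model_version(
    process_instance, user=initiator_user
)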
@classmethod
def create_process_instance_from_process_model_identifier(
cls,
@ -642,3 +728,15 @@ class ProcessInstanceService:
) from e
raise e
return processor
@classmethod
def can_migrate(cls, top_level_bpmn_process_diff: WorkflowDiff, subprocesses_diffs: dict[UUID, WorkflowDiff]) -> bool:
def safe(result: WorkflowDiff) -> bool:
mask = TaskState.COMPLETED | TaskState.STARTED
tasks = result.changed + result.removed
return len(filter_tasks(tasks, state=mask)) == 0
for diff in subprocesses_diffs.values():
if diff is None or not safe(diff):
return False
return safe(top_level_bpmn_process_diff)
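
A hedged sketch of the safety rule encoded in can_migrate: a diff is treated as unsafe whenever any changed or removed task has already been started or completed. A plain bitmask check stands in here for SpiffWorkflow's filter_tasks helper.

from SpiffWorkflow.util.task import TaskState  # type: ignore

mask = TaskState.COMPLETED | TaskState.STARTED

def diff_is_safe(states_of_changed_or_removed_tasks: list[int]) -> bool:
    # Mirrors len(filter_tasks(tasks, state=mask)) == 0 in the method above.
    return not any(state & mask for state in states_of_changed_or_removed_tasks)

print(diff_is_safe([TaskState.FUTURE, TaskState.LIKELY]))  # True: nothing executed was touched
print(diff_is_safe([TaskState.COMPLETED]))                 # False: a completed task changed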


@ -0,0 +1,71 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_migration_test_wlm607w" isExecutable="true">
<bpmn:startEvent id="StartEvent_top">
<bpmn:outgoing>Flow_17db3yp</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_17db3yp" sourceRef="StartEvent_top" targetRef="subprocess_one" />
<bpmn:endEvent id="EndEvent_1">
<bpmn:incoming>Flow_0m0he21</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0m0he21" sourceRef="subprocess_one" targetRef="EndEvent_1" />
<bpmn:subProcess id="subprocess_one">
<bpmn:incoming>Flow_17db3yp</bpmn:incoming>
<bpmn:outgoing>Flow_0m0he21</bpmn:outgoing>
<bpmn:startEvent id="StartEvent_sub">
<bpmn:outgoing>Flow_01eckoj</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_01eckoj" sourceRef="StartEvent_sub" targetRef="manual_task_one" />
<bpmn:endEvent id="Event_0fx2psf">
<bpmn:incoming>Flow_0s7769x</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0s7769x" sourceRef="manual_task_one" targetRef="Event_0fx2psf" />
<bpmn:manualTask id="manual_task_one">
<bpmn:incoming>Flow_01eckoj</bpmn:incoming>
<bpmn:outgoing>Flow_0s7769x</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:subProcess>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_migration_test_wlm607w">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_top">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_14za570_di" bpmnElement="EndEvent_1">
<dc:Bounds x="462" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_115q6jv_di" bpmnElement="subprocess_one">
<dc:Bounds x="280" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_17db3yp_di" bpmnElement="Flow_17db3yp">
<di:waypoint x="215" y="177" />
<di:waypoint x="280" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0m0he21_di" bpmnElement="Flow_0m0he21">
<di:waypoint x="380" y="177" />
<di:waypoint x="462" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
<bpmndi:BPMNDiagram id="BPMNDiagram_0fjhef4">
<bpmndi:BPMNPlane id="BPMNPlane_1rqwee3" bpmnElement="subprocess_one">
<bpmndi:BPMNShape id="Event_0bneyqp_di" bpmnElement="StartEvent_sub">
<dc:Bounds x="452" y="302" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0fx2psf_di" bpmnElement="Event_0fx2psf">
<dc:Bounds x="692" y="302" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1lv4tyw_di" bpmnElement="manual_task_one">
<dc:Bounds x="540" y="280" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_01eckoj_di" bpmnElement="Flow_01eckoj">
<di:waypoint x="488" y="320" />
<di:waypoint x="540" y="320" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0s7769x_di" bpmnElement="Flow_0s7769x">
<di:waypoint x="640" y="320" />
<di:waypoint x="692" y="320" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>


@ -0,0 +1,83 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_migration_test_wlm607w" isExecutable="true">
<bpmn:startEvent id="StartEvent_top">
<bpmn:outgoing>Flow_17db3yp</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_17db3yp" sourceRef="StartEvent_top" targetRef="subprocess_one" />
<bpmn:endEvent id="EndEvent_1">
<bpmn:incoming>Flow_0m0he21</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0m0he21" sourceRef="subprocess_one" targetRef="EndEvent_1" />
<bpmn:subProcess id="subprocess_one">
<bpmn:incoming>Flow_17db3yp</bpmn:incoming>
<bpmn:outgoing>Flow_0m0he21</bpmn:outgoing>
<bpmn:startEvent id="StartEvent_sub">
<bpmn:outgoing>Flow_01eckoj</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_01eckoj" sourceRef="StartEvent_sub" targetRef="manual_task_one" />
<bpmn:endEvent id="Event_0fx2psf">
<bpmn:incoming>Flow_17fvyk2</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0s7769x" sourceRef="manual_task_one" targetRef="manual_task_two" />
<bpmn:manualTask id="manual_task_one">
<bpmn:incoming>Flow_01eckoj</bpmn:incoming>
<bpmn:outgoing>Flow_0s7769x</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:sequenceFlow id="Flow_17fvyk2" sourceRef="manual_task_two" targetRef="Event_0fx2psf" />
<bpmn:manualTask id="manual_task_two">
<bpmn:incoming>Flow_0s7769x</bpmn:incoming>
<bpmn:outgoing>Flow_17fvyk2</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:subProcess>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_migration_test_wlm607w">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_top">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_14za570_di" bpmnElement="EndEvent_1">
<dc:Bounds x="462" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_115q6jv_di" bpmnElement="subprocess_one">
<dc:Bounds x="280" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_17db3yp_di" bpmnElement="Flow_17db3yp">
<di:waypoint x="215" y="177" />
<di:waypoint x="280" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0m0he21_di" bpmnElement="Flow_0m0he21">
<di:waypoint x="380" y="177" />
<di:waypoint x="462" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
<bpmndi:BPMNDiagram id="BPMNDiagram_0fjhef4">
<bpmndi:BPMNPlane id="BPMNPlane_1rqwee3" bpmnElement="subprocess_one">
<bpmndi:BPMNShape id="Event_0bneyqp_di" bpmnElement="StartEvent_sub">
<dc:Bounds x="452" y="302" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1lv4tyw_di" bpmnElement="manual_task_one">
<dc:Bounds x="540" y="280" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0fx2psf_di" bpmnElement="Event_0fx2psf">
<dc:Bounds x="812" y="302" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0938qt9_di" bpmnElement="manual_task_two">
<dc:Bounds x="670" y="280" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_01eckoj_di" bpmnElement="Flow_01eckoj">
<di:waypoint x="488" y="320" />
<di:waypoint x="540" y="320" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0s7769x_di" bpmnElement="Flow_0s7769x">
<di:waypoint x="640" y="320" />
<di:waypoint x="670" y="320" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_17fvyk2_di" bpmnElement="Flow_17fvyk2">
<di:waypoint x="770" y="320" />
<di:waypoint x="812" y="320" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>


@ -100,6 +100,38 @@ class BaseTest:
return process_model
def create_and_run_process_instance(
self,
client: FlaskClient,
user: UserModel,
process_group_id: str | None = "test_group",
process_model_id: str | None = "random_fact",
bpmn_file_name: str | None = None,
bpmn_file_location: str | None = None,
) -> tuple[ProcessModelInfo, int]:
process_model = self.create_group_and_model_with_bpmn(
client=client,
user=user,
process_group_id=process_group_id,
process_model_id=process_model_id,
bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
)
headers = self.logged_in_headers(user)
response = self.create_process_instance_from_process_model_id_with_api(client, process_model.id, headers)
assert response.json is not None
process_instance_id = response.json["id"]
response = client.post(
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance_id}/run",
headers=self.logged_in_headers(user),
)
assert response.status_code == 200
assert response.json is not None
return (process_model, int(process_instance_id))
def create_process_group(
self,
process_group_id: str,


@ -1,6 +1,12 @@
import os
from flask.app import Flask
from flask.testing import FlaskClient
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@ -49,3 +55,110 @@ class TestProcessInstancesController(BaseTest):
assert "process_instance" in response.json assert "process_instance" in response.json
assert response.json["process_instance"]["id"] == process_instance.id assert response.json["process_instance"]["id"] == process_instance.id
assert response.json["uri_type"] is None assert response.json["uri_type"] is None
def test_process_instance_migrate(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
process_model, process_instance_id = self.create_and_run_process_instance(
client=client,
user=with_super_admin_user,
process_model_id="migration-test-with-subprocess",
bpmn_file_name="migration-initial.bpmn",
)
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
assert process_instance is not None
processor = ProcessInstanceProcessor(process_instance)
assert "manual_task_two" not in processor.bpmn_process_instance.spec.task_specs
human_task_one = process_instance.active_human_tasks[0]
assert human_task_one.task_model.task_definition.bpmn_identifier == "manual_task_one"
new_file_path = os.path.join(
app.instance_path,
"..",
"..",
"tests",
"data",
"migration-test-with-subprocess",
"migration-new.bpmn",
)
with open(new_file_path) as f:
new_contents = f.read().encode()
SpecFileService.update_file(
process_model_info=process_model,
file_name="migration-initial.bpmn",
binary_data=new_contents,
update_process_cache_only=True,
)
processor.suspend()
response = client.post(
f"/v1.0/process-instance-migrate/{self.modify_process_identifier_for_path_param(process_instance.process_model_identifier)}/{process_instance_id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
processor = ProcessInstanceProcessor(process_instance)
human_task_one = process_instance.active_human_tasks[0]
assert human_task_one.task_model.task_definition.bpmn_identifier == "manual_task_one"
self.complete_next_manual_task(processor)
human_task_one = process_instance.active_human_tasks[0]
assert human_task_one.task_model.task_definition.bpmn_identifier == "manual_task_two"
self.complete_next_manual_task(processor)
assert process_instance.status == ProcessInstanceStatus.complete.value
def test_process_instance_check_can_migrate(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
process_model, process_instance_id = self.create_and_run_process_instance(
client=client,
user=with_super_admin_user,
process_model_id="migration-test-with-subprocess",
bpmn_file_name="migration-initial.bpmn",
)
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
assert process_instance is not None
processor = ProcessInstanceProcessor(process_instance)
assert "manual_task_two" not in processor.bpmn_process_instance.spec.task_specs
human_task_one = process_instance.active_human_tasks[0]
assert human_task_one.task_model.task_definition.bpmn_identifier == "manual_task_one"
new_file_path = os.path.join(
app.instance_path,
"..",
"..",
"tests",
"data",
"migration-test-with-subprocess",
"migration-new.bpmn",
)
with open(new_file_path) as f:
new_contents = f.read().encode()
SpecFileService.update_file(
process_model_info=process_model,
file_name="migration-initial.bpmn",
binary_data=new_contents,
update_process_cache_only=True,
)
response = client.get(
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_instance.process_model_identifier)}/{process_instance_id}/check-can-migrate",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
assert response.json == {"can_migrate": True, "process_instance_id": process_instance.id}


@ -108,6 +108,10 @@ class TestAuthorizationService(BaseTest):
("/process-groups/some-process-group:some-process-model:*", "delete"), ("/process-groups/some-process-group:some-process-model:*", "delete"),
("/process-groups/some-process-group:some-process-model:*", "read"), ("/process-groups/some-process-group:some-process-model:*", "read"),
("/process-groups/some-process-group:some-process-model:*", "update"), ("/process-groups/some-process-group:some-process-model:*", "update"),
(
"/process-instance-migrate/some-process-group:some-process-model:*",
"create",
),
( (
"/process-instance-suspend/some-process-group:some-process-model:*", "/process-instance-suspend/some-process-group:some-process-model:*",
"create", "create",
@ -192,6 +196,10 @@ class TestAuthorizationService(BaseTest):
("/logs/typeahead-filter-values/some-process-group:some-process-model/*", "read"), ("/logs/typeahead-filter-values/some-process-group:some-process-model/*", "read"),
("/message-models/some-process-group:some-process-model/*", "read"), ("/message-models/some-process-group:some-process-model/*", "read"),
("/process-data/some-process-group:some-process-model/*", "read"), ("/process-data/some-process-group:some-process-model/*", "read"),
(
"/process-instance-migrate/some-process-group:some-process-model/*",
"create",
),
( (
"/process-instance-suspend/some-process-group:some-process-model/*", "/process-instance-suspend/some-process-group:some-process-model/*",
"create", "create",
@ -530,6 +538,7 @@ class TestAuthorizationService(BaseTest):
("/messages/*", "create"), ("/messages/*", "create"),
("/process-data-file-download/*", "read"), ("/process-data-file-download/*", "read"),
("/process-data/*", "read"), ("/process-data/*", "read"),
("/process-instance-migrate/*", "create"),
("/process-instance-reset/*", "create"), ("/process-instance-reset/*", "create"),
("/process-instance-resume/*", "create"), ("/process-instance-resume/*", "create"),
("/process-instance-suspend/*", "create"), ("/process-instance-suspend/*", "create"),


@ -1077,6 +1077,53 @@ class TestProcessInstanceProcessor(BaseTest):
# mypy thinks this is unreachable but it is reachable. summary can be str | None
assert len(process_instance.summary) == 255  # type: ignore
def test_it_can_update_guids_in_bpmn_process_dict(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
initiator_user = self.find_or_create_user("initiator_user")
process_model = load_test_spec(
process_model_id="test_group/loopback_to_subprocess",
process_model_source_directory="loopback_to_subprocess",
)
process_instance = self.create_process_instance_from_process_model(process_model=process_model, user=initiator_user)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
assert len(process_instance.active_human_tasks) == 1
assert len(process_instance.human_tasks) == 1
human_task_one = process_instance.active_human_tasks[0]
spiff_task = processor.get_task_by_guid(human_task_one.task_id)
ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_one)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
assert len(process_instance.active_human_tasks) == 1
assert len(process_instance.human_tasks) == 2
human_task_two = process_instance.active_human_tasks[0]
spiff_task = processor.get_task_by_guid(human_task_two.task_id)
ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_two)
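# serialize the instance, remap the task and subprocess guids, then confirm a workflow
# rebuilt from the updated dict still contains the same task specs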
old_tasks = processor.bpmn_process_instance.get_tasks()
old_task_names = [t.task_spec.name for t in old_tasks]
bpmn_process_dict = processor.serialize()
task_one_guid = sorted(bpmn_process_dict["tasks"].keys())[0]
subprocess_one_guid = sorted(bpmn_process_dict["subprocesses"].keys())[0]
ProcessInstanceProcessor.update_guids_on_tasks(bpmn_process_dict)
task_two_guid = sorted(bpmn_process_dict["tasks"].keys())[0]
subprocess_two_guid = sorted(bpmn_process_dict["subprocesses"].keys())[0]
assert task_one_guid != task_two_guid
assert subprocess_one_guid != subprocess_two_guid
new_bpmn_process_instance = ProcessInstanceProcessor.initialize_bpmn_process_instance(bpmn_process_dict)
new_tasks = new_bpmn_process_instance.get_tasks()
new_task_names = [t.task_spec.name for t in new_tasks]
assert old_task_names == new_task_names
# # To test processing times with multiinstance subprocesses
# def test_large_multiinstance(
# self,

View File

@@ -1,5 +1,6 @@
import base64
import hashlib
import os
from datetime import datetime
from datetime import timezone
from typing import Any
@@ -7,9 +8,15 @@ from typing import Any
import pytest
from flask.app import Flask
from SpiffWorkflow.bpmn.util import PendingBpmnEvent # type: ignore
from spiffworkflow_backend.exceptions.error import ProcessInstanceMigrationNotSafeError
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
def _file_content(i: int) -> bytes:
@@ -146,3 +153,141 @@ class TestProcessInstanceService(BaseTest):
datetime.fromisoformat("2023-04-27T20:15:10.626656+00:00"),
)
)
def test_it_can_migrate_a_process_instance(
self,
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
initiator_user = self.find_or_create_user("initiator_user")
process_model = load_test_spec(
process_model_id="test_group/migration-test-with-subprocess",
process_model_source_directory="migration-test-with-subprocess",
bpmn_file_name="migration-initial.bpmn",
)
process_instance = self.create_process_instance_from_process_model(process_model=process_model, user=initiator_user)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
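# capture the tasks that exist before migration; the initial model version does not yet define manual_task_two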
initial_tasks = processor.bpmn_process_instance.get_tasks()
assert "manual_task_two" not in processor.bpmn_process_instance.spec.task_specs
new_file_path = os.path.join(
app.instance_path,
"..",
"..",
"tests",
"data",
"migration-test-with-subprocess",
"migration-new.bpmn",
)
with open(new_file_path) as f:
new_contents = f.read().encode()
SpecFileService.update_file(
process_model_info=process_model,
file_name="migration-initial.bpmn",
binary_data=new_contents,
update_process_cache_only=True,
)
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
ProcessInstanceService.migrate_process_instance_to_newest_model_version(process_instance, user=initiator_user)
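# every task from the original workflow should survive migration with its last_state_change untouched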
for initial_task in initial_tasks:
new_task = processor.bpmn_process_instance.get_task_from_id(initial_task.id)
assert new_task is not None
assert new_task.last_state_change == initial_task.last_state_change
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
human_task_one = process_instance.active_human_tasks[0]
assert human_task_one.task_model.task_definition.bpmn_identifier == "manual_task_one"
self.complete_next_manual_task(processor)
human_task_one = process_instance.active_human_tasks[0]
assert human_task_one.task_model.task_definition.bpmn_identifier == "manual_task_two"
self.complete_next_manual_task(processor)
assert process_instance.status == ProcessInstanceStatus.complete.value
def test_it_can_check_if_a_process_instance_can_be_migrated(
self,
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
initiator_user = self.find_or_create_user("initiator_user")
process_model = load_test_spec(
process_model_id="test_group/migration-test-with-subprocess",
process_model_source_directory="migration-test-with-subprocess",
bpmn_file_name="migration-initial.bpmn",
)
process_instance = self.create_process_instance_from_process_model(process_model=process_model, user=initiator_user)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
new_file_path = os.path.join(
app.instance_path,
"..",
"..",
"tests",
"data",
"migration-test-with-subprocess",
"migration-new.bpmn",
)
with open(new_file_path) as f:
new_contents = f.read().encode()
SpecFileService.update_file(
process_model_info=process_model,
file_name="migration-initial.bpmn",
binary_data=new_contents,
update_process_cache_only=True,
)
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
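# should not raise; a ProcessInstanceMigrationNotSafeError would indicate the instance cannot be migrated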
ProcessInstanceService.check_process_instance_can_be_migrated(process_instance)
def test_it_raises_if_a_process_instance_cannot_be_migrated_to_new_process_model_version(
self,
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
initiator_user = self.find_or_create_user("initiator_user")
process_model = load_test_spec(
process_model_id="test_group/migration-test-with-subprocess",
process_model_source_directory="migration-test-with-subprocess",
bpmn_file_name="migration-initial.bpmn",
)
process_instance = self.create_process_instance_from_process_model(process_model=process_model, user=initiator_user)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True, execution_strategy_name="greedy")
human_task_one = process_instance.active_human_tasks[0]
assert human_task_one.task_model.task_definition.bpmn_identifier == "manual_task_one"
self.complete_next_manual_task(processor)
assert process_instance.status == ProcessInstanceStatus.complete.value
new_file_path = os.path.join(
app.instance_path,
"..",
"..",
"tests",
"data",
"migration-test-with-subprocess",
"migration-new.bpmn",
)
with open(new_file_path) as f:
new_contents = f.read().encode()
SpecFileService.update_file(
process_model_info=process_model,
file_name="migration-initial.bpmn",
binary_data=new_contents,
update_process_cache_only=True,
)
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
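# the instance has already run to completion, so migrating it to the newer model version
# is presumably not considered safe and the check should raise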
with pytest.raises(ProcessInstanceMigrationNotSafeError):
ProcessInstanceService.check_process_instance_can_be_migrated(process_instance)