Adding file management. Files are stored in the database along with basic metadata.
This commit is contained in:
parent bb6ab4b8e8
commit 0b71833946
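The new endpoints accept a multipart form with a workflow_spec_id field and a binary "file" part, and they return only the file metadata; the raw content is served separately from /file/{file_id}/data. A minimal client-side sketch of that flow is shown below; the base URL, the requests library, and the local file path are illustrative assumptions, not part of this commit.

# Illustrative only: exercises the /v1.0/file endpoints added in this commit.
# Assumes the API is running locally; adjust BASE for your deployment.
import requests

BASE = "http://localhost:5000/v1.0"

# Upload a file for a workflow specification; the response is File metadata only.
with open("random_fact.bpmn", "rb") as fh:
    resp = requests.post(
        BASE + "/file",
        data={"workflow_spec_id": "random_fact"},
        files={"file": ("random_fact.bpmn", fh, "application/xml")},
    )
resp.raise_for_status()
meta = resp.json()

# List metadata for all files attached to a specification.
listing = requests.get(BASE + "/file", params={"spec_id": "random_fact"}).json()

# Fetch the raw stored bytes (served as application/octet-stream).
content = requests.get(BASE + "/file/" + str(meta["id"]) + "/data").content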
@@ -62,11 +62,11 @@
 },
 "beautifulsoup4": {
 "hashes": [
-"sha256:5279c36b4b2ec2cb4298d723791467e3000e5384a43ea0cdf5d45207c7e97169",
-"sha256:6135db2ba678168c07950f9a16c4031822c6f4aec75a65e0a97bc5ca09789931",
-"sha256:dcdef580e18a76d54002088602eba453eec38ebbcafafeaabd8cab12b6155d57"
+"sha256:05fd825eb01c290877657a56df4c6e4c311b3965bda790c613a3d6fb01a5462a",
+"sha256:9fbb4d6e48ecd30bcacc5b63b94088192dcda178513b2ae3c394229f8911b887",
+"sha256:e1505eeed31b0f4ce2dbb3bc8eb256c04cc2b3b72af7d551a4ab6efd5cbe5dae"
 ],
-"version": "==4.8.1"
+"version": "==4.8.2"
 },
 "certifi": {
 "hashes": [
@@ -146,48 +146,48 @@
 "swagger-ui"
 ],
 "hashes": [
-"sha256:0fa5776a44b32668f20d59e6e478f15a1dc19def8e4d07d837e10d837379c2ba",
-"sha256:4b643821a775927b2ec6220c427779b6d9c3a83ddf43662d69e68dcdad4be603"
+"sha256:3f6c2cb8a3f24af281ff561eebaaa926700b24f4c430f4d945c8ace0384c220c",
+"sha256:56854c65334281b303120a1097df09a79ffa44bdaecfc4400e7194821e17566d"
 ],
 "index": "pypi",
-"version": "==2.5.0"
+"version": "==2.5.1"
 },
 "coverage": {
 "hashes": [
-"sha256:0cd13a6e98c37b510a2d34c8281d5e1a226aaf9b65b7d770ef03c63169965351",
-"sha256:1a4b6b6a2a3a6612e6361130c2cc3dc4378d8c221752b96167ccbad94b47f3cd",
-"sha256:2ee55e6dba516ddf6f484aa83ccabbb0adf45a18892204c23486938d12258cde",
-"sha256:3be5338a2eb4ef03c57f20917e1d12a1fd10e3853fed060b6d6b677cb3745898",
-"sha256:44b783b02db03c4777d8cf71bae19eadc171a6f2a96777d916b2c30a1eb3d070",
-"sha256:475bf7c4252af0a56e1abba9606f1e54127cdf122063095c75ab04f6f99cf45e",
-"sha256:47c81ee687eafc2f1db7f03fbe99aab81330565ebc62fb3b61edfc2216a550c8",
-"sha256:4a7f8e72b18f2aca288ff02255ce32cc830bc04d993efbc87abf6beddc9e56c0",
-"sha256:50197163a22fd17f79086e087a787883b3ec9280a509807daf158dfc2a7ded02",
-"sha256:56b13000acf891f700f5067512b804d1ec8c301d627486c678b903859d07f798",
-"sha256:79388ae29c896299b3567965dbcd93255f175c17c6c7bca38614d12718c47466",
-"sha256:79fd5d3d62238c4f583b75d48d53cdae759fe04d4fb18fe8b371d88ad2b6f8be",
-"sha256:7fe3e2fde2bf1d7ce25ebcd2d3de3650b8d60d9a73ce6dcef36e20191291613d",
-"sha256:81042a24f67b96e4287774014fa27220d8a4d91af1043389e4d73892efc89ac6",
-"sha256:81326f1095c53111f8afc95da281e1414185f4a538609a77ca50bdfa39a6c207",
-"sha256:8873dc0d8f42142ea9f20c27bbdc485190fff93823c6795be661703369e5877d",
-"sha256:88d2cbcb0a112f47eef71eb95460b6995da18e6f8ca50c264585abc2c473154b",
-"sha256:91f2491aeab9599956c45a77c5666d323efdec790bfe23fcceafcd91105d585a",
-"sha256:979daa8655ae5a51e8e7a24e7d34e250ae8309fd9719490df92cbb2fe2b0422b",
-"sha256:9c871b006c878a890c6e44a5b2f3c6291335324b298c904dc0402ee92ee1f0be",
-"sha256:a6d092545e5af53e960465f652e00efbf5357adad177b2630d63978d85e46a72",
-"sha256:b5ed7837b923d1d71c4f587ae1539ccd96bfd6be9788f507dbe94dab5febbb5d",
-"sha256:ba259f68250f16d2444cbbfaddaa0bb20e1560a4fdaad50bece25c199e6af864",
-"sha256:be1d89614c6b6c36d7578496dc8625123bda2ff44f224cf8b1c45b810ee7383f",
-"sha256:c1b030a79749aa8d1f1486885040114ee56933b15ccfc90049ba266e4aa2139f",
-"sha256:c95bb147fab76f2ecde332d972d8f4138b8f2daee6c466af4ff3b4f29bd4c19e",
-"sha256:d52c1c2d7e856cecc05aa0526453cb14574f821b7f413cc279b9514750d795c1",
-"sha256:d609a6d564ad3d327e9509846c2c47f170456344521462b469e5cb39e48ba31c",
-"sha256:e1bad043c12fb58e8c7d92b3d7f2f49977dcb80a08a6d1e7a5114a11bf819fca",
-"sha256:e5a675f6829c53c87d79117a8eb656cc4a5f8918185a32fc93ba09778e90f6db",
-"sha256:fec32646b98baf4a22fdceb08703965bd16dea09051fbeb31a04b5b6e72b846c"
+"sha256:0101888bd1592a20ccadae081ba10e8b204d20235d18d05c6f7d5e904a38fc10",
+"sha256:04b961862334687549eb91cd5178a6fbe977ad365bddc7c60f2227f2f9880cf4",
+"sha256:1ca43dbd739c0fc30b0a3637a003a0d2c7edc1dd618359d58cc1e211742f8bd1",
+"sha256:1cbb88b34187bdb841f2599770b7e6ff8e259dc3bb64fc7893acf44998acf5f8",
+"sha256:232f0b52a5b978288f0bbc282a6c03fe48cd19a04202df44309919c142b3bb9c",
+"sha256:24bcfa86fd9ce86b73a8368383c39d919c497a06eebb888b6f0c12f13e920b1a",
+"sha256:25b8f60b5c7da71e64c18888f3067d5b6f1334b9681876b2fb41eea26de881ae",
+"sha256:2714160a63da18aed9340c70ed514973971ee7e665e6b336917ff4cca81a25b1",
+"sha256:2ca2cd5264e84b2cafc73f0045437f70c6378c0d7dbcddc9ee3fe192c1e29e5d",
+"sha256:2cc707fc9aad2592fc686d63ef72dc0031fc98b6fb921d2f5395d9ab84fbc3ef",
+"sha256:348630edea485f4228233c2f310a598abf8afa5f8c716c02a9698089687b6085",
+"sha256:40fbfd6b044c9db13aeec1daf5887d322c710d811f944011757526ef6e323fd9",
+"sha256:46c9c6a1d1190c0b75ec7c0f339088309952b82ae8d67a79ff1319eb4e749b96",
+"sha256:591506e088901bdc25620c37aec885e82cc896528f28c57e113751e3471fc314",
+"sha256:5ac71bba1e07eab403b082c4428f868c1c9e26a21041436b4905c4c3d4e49b08",
+"sha256:5f622f19abda4e934938e24f1d67599249abc201844933a6f01aaa8663094489",
+"sha256:65bead1ac8c8930cf92a1ccaedcce19a57298547d5d1db5c9d4d068a0675c38b",
+"sha256:7362a7f829feda10c7265b553455de596b83d1623b3d436b6d3c51c688c57bf6",
+"sha256:7f2675750c50151f806070ec11258edf4c328340916c53bac0adbc465abd6b1e",
+"sha256:960d7f42277391e8b1c0b0ae427a214e1b31a1278de6b73f8807b20c2e913bba",
+"sha256:a50b0888d8a021a3342d36a6086501e30de7d840ab68fca44913e97d14487dc1",
+"sha256:b7dbc5e8c39ea3ad3db22715f1b5401cd698a621218680c6daf42c2f9d36e205",
+"sha256:bb3d29df5d07d5399d58a394d0ef50adf303ab4fbf66dfd25b9ef258effcb692",
+"sha256:c0fff2733f7c2950f58a4fd09b5db257b00c6fec57bf3f68c5bae004d804b407",
+"sha256:c792d3707a86c01c02607ae74364854220fb3e82735f631cd0a345dea6b4cee5",
+"sha256:c90bda74e16bcd03861b09b1d37c0a4158feda5d5a036bb2d6e58de6ff65793e",
+"sha256:cfce79ce41cc1a1dc7fc85bb41eeeb32d34a4cf39a645c717c0550287e30ff06",
+"sha256:eeafb646f374988c22c8e6da5ab9fb81367ecfe81c70c292623373d2a021b1a1",
+"sha256:f425f50a6dd807cb9043d15a4fcfba3b5874a54d9587ccbb748899f70dc18c47",
+"sha256:fcd4459fe35a400b8f416bc57906862693c9f88b66dc925e7f2a933e77f6b18b",
+"sha256:ff3936dd5feaefb4f91c8c1f50a06c588b5dc69fba4f7d9c79a6617ad80bb7df"
 ],
 "index": "pypi",
-"version": "==5.0"
+"version": "==5.0.1"
 },
 "et-xmlfile": {
 "hashes": [
@@ -518,7 +518,7 @@
 "spiffworkflow": {
 "editable": true,
 "git": "https://github.com/sartography/SpiffWorkflow.git",
-"ref": "7f67509e514f934eaab032b2783bae20d2c4040b"
+"ref": "d9f406c57ab05ce6e978318a22b6d327b635e1ae"
 },
 "sqlalchemy": {
 "hashes": [
@@ -544,10 +544,10 @@
 },
 "waitress": {
 "hashes": [
-"sha256:278e09d6849acc1365404bbf7d790d0423b159802e850c726e8cd0a126a2dac7",
-"sha256:f103e557725b17ae3c62f9e6005cdd85103f8b68fa86cf7c764ba7adc38ca5a2"
+"sha256:3776cbb9abebefb51e5b654f8728928aa17b656d9f6943c58ce8f48e87cef4e3",
+"sha256:f4118cbce75985fd60aeb4f0d781aba8dc7ae28c18e50753e913d7a7dee76b62"
 ],
-"version": "==1.3.1"
+"version": "==1.4.1"
 },
 "webob": {
 "hashes": [
@@ -573,11 +573,11 @@
 },
 "xlsxwriter": {
 "hashes": [
-"sha256:027fa3d22ccfb5da5d77c29ed740aece286a9a6cc101b564f2f7ca11eb1d490b",
-"sha256:5d480cee5babf3865227d5c81269d96be8e87914fc96403ca6fa1b1e4f64c080"
+"sha256:18fe8f891a4adf7556c05d56059e136f9fbce5b19f9335f6d7b42c389c4592bc",
+"sha256:5d3630ff9b2a277c939bd5053d0e7466499593abebbab9ce1dc9b1481a8ebbb6"
 ],
 "index": "pypi",
-"version": "==1.2.6"
+"version": "==1.2.7"
 },
 "zipp": {
 "hashes": [
@@ -633,10 +633,10 @@
 },
 "pyparsing": {
 "hashes": [
-"sha256:20f995ecd72f2a1f4bf6b072b63b22e2eb457836601e76d6e5dfcd75436acc1f",
-"sha256:4ca62001be367f01bd3e92ecbb79070272a9d4964dce6a48a82ff0b8bc7e683a"
+"sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f",
+"sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"
 ],
-"version": "==2.4.5"
+"version": "==2.4.6"
 },
 "pytest": {
 "hashes": [
180 crc/api.yml
@@ -10,7 +10,7 @@ paths:
   # /v1.0/study
   /study:
     get:
-      operationId: crc.api.all_studies
+      operationId: crc.api.study.all_studies
       summary: Provides a list of studies related to the current user.
       tags:
         - Studies
@@ -29,7 +29,7 @@ paths:
                 $ref: "#/components/schemas/Error"
   /study/{study_id}:
     get:
-      operationId: crc.api.get_study
+      operationId: crc.api.study.get_study
       summary: Provides a single study
       tags:
         - Studies
@@ -56,7 +56,7 @@ paths:
                 $ref: "#/components/schemas/Error"
   /study-update/{study_id}:
     post:
-      operationId: crc.api.post_update_study_from_protocol_builder
+      operationId: crc.api.study.post_update_study_from_protocol_builder
       summary: If the study is up-to-date with Protocol Builder, returns a 304 Not Modified. If out of date, return a 202 Accepted and study state changes to updating.
       tags:
         - Study Status
@@ -82,7 +82,7 @@ paths:

   /study/{study_id}/workflows:
     get:
-      operationId: crc.api.get_study_workflows
+      operationId: crc.api.study.get_study_workflows
       summary: Provides a list of workflows to be completed for the given study.
       tags:
         - Studies
@@ -110,7 +110,7 @@ paths:
              schema:
                $ref: "#/components/schemas/Error"
    post:
-      operationId: crc.api.add_workflow_to_study
+      operationId: crc.api.study.add_workflow_to_study
       summary: Starts a new workflow for the given study using the provided spec. This is atypical, and should be left to the protocol builder.
       tags:
         - Studies
@@ -142,13 +142,12 @@ paths:
           application/json:
             schema:
               $ref: "#/components/schemas/Error"
-  # /v1.0/workflow/0
   /workflow-specification:
     get:
-      operationId: crc.api.all_specifications
+      operationId: crc.api.workflow.all_specifications
       summary: Provides a list of workflows specifications that can be added to a study manually. Please note that Protocol Builder will handle this most of the time.
       tags:
-        - Workflow Specifications
+        - Workflow
       responses:
         '200':
           description: An array of workflow specifications
@@ -164,10 +163,135 @@ paths:
           application/json:
             schema:
               $ref: "#/components/schemas/Error"
+  /file:
+    parameters:
+      - name: spec_id
+        in: query
+        required: false
+        description: The unique name of a specification
+        schema:
+          type: string
+    get:
+      operationId: crc.api.file.get_files
+      summary: Provides a list of files that match the given parameters (such as a spec id) IMPORTANT, only includes metadata, not the file content.
+      tags:
+        - Files
+      responses:
+        '200':
+          description: An array of file descriptions (not the file content)
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/File"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    post:
+      operationId: crc.api.file.add_file
+      summary: Add a new file
+      tags:
+        - Files
+      requestBody:
+        content:
+          multipart/form-data:
+            schema:
+              type: object
+              properties:
+                workflow_spec_id:
+                  type: string
+                file:
+                  type: string
+                  format: binary
+      responses:
+        '200':
+          description: Metadata about the uploaded file, but not the file content.
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/File"
+
+  /file/{file_id}:
+    parameters:
+      - name: file_id
+        in: path
+        required: true
+        description: The id of the File requested
+        schema:
+          type: integer
+    get:
+      operationId: crc.api.file.get_file_info
+      summary: Returns metadata about a file.
+      tags:
+        - Files
+      responses:
+        '200':
+          description: Returns the file information requested.
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/File"
+    put:
+      operationId: crc.api.file.update_file
+      summary: Update a file
+      tags:
+        - Files
+      requestBody:
+        content:
+          multipart/form-data:
+            schema:
+              type: object
+              properties:
+                workflow_spec_id:
+                  type: string
+                file:
+                  type: string
+                  format: binary
+      responses:
+        '200':
+          description: Metadata about the uploaded file, but not the file content.
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/File"
+    delete:
+      operationId: crc.api.file.delete_file
+      summary: Removes an existing file
+      tags:
+        - Files
+      responses:
+        '204':
+          description: The file has been removed.
+  /file/{file_id}/data:
+    parameters:
+      - name: file_id
+        in: path
+        required: true
+        description: The id of the File requested
+        schema:
+          type: integer
+    get:
+      operationId: crc.api.file.get_file
+      summary: Returns only the file contents
+      tags:
+        - Files
+      responses:
+        '200':
+          description: Returns the actual file
+          content:
+            application/octet-stream:
+              schema:
+                type: string
+                format: binary
+              example: '<?xml version="1.0" encoding="UTF-8"?><bpmn:definitions></bpmn:definitions>'
   # /v1.0/workflow/0
   /workflow/{workflow_id}:
     get:
-      operationId: crc.api.get_workflow
+      operationId: crc.api.workflow.get_workflow
       summary: Detailed information for a specific workflow instance
       tags:
         - Workflows and Tasks
@@ -194,7 +318,7 @@ paths:
               $ref: "#/components/schemas/Error"
   /workflow/{workflow_id}/tasks:
     get:
-      operationId: crc.api.get_tasks
+      operationId: crc.api.workflow.get_tasks
      summary: Return a list of all tasks for this workflow
       tags:
         - Workflows and Tasks
@@ -224,7 +348,7 @@ paths:
   # /v1.0/workflow/0/task/0
   /workflow/{workflow_id}/task/{task_id}:
     get:
-      operationId: crc.api.get_task
+      operationId: crc.api.workflow.get_task
       summary: Get details of specific task in specific workflow instance
       tags:
         - Workflows and Tasks
@@ -257,7 +381,7 @@ paths:
               $ref: "#/components/schemas/Error"

    put:
-      operationId: crc.api.update_task
+      operationId: crc.api.workflow.update_task
       summary: Update, attempt to complete a workflow task
       tags:
         - Workflows and Tasks
@@ -329,6 +453,38 @@ components:
          type: string
        svg_url:
          type: string
+    File:
+      properties:
+        id:
+          type: number
+          readOnly: true
+        name:
+          type: string
+          example: "random_fact.bpmn"
+        version:
+          type: string
+          readOnly: true
+        last_updated:
+          type: string
+          format: date_time
+          example: "2019-12-25T09:12:33.001Z"
+          readOnly: true
+        type:
+          type: enum
+          enum: ['bpmn','svg', 'dmn']
+          readOnly: true
+        primary:
+          type: boolean
+          readOnly: true
+        content_type:
+          type: string
+          example: "application/xml"
+        workflow_spec_id:
+          type: string
+          example: "random_fact"
+        file:
+          type: file
+          format: binary
     Workflow:
       properties:
         id:
@@ -0,0 +1,13 @@
+from crc import ma
+
+
+class ApiError:
+    def __init__(self, code, message):
+        self.code = code
+        self.message = message
+
+
+class ApiErrorSchema(ma.Schema):
+    class Meta:
+        fields = ("code", "message")
@@ -0,0 +1,91 @@
+import io
+import os
+from datetime import datetime
+
+import connexion
+from flask import send_file
+
+from crc import db
+from crc.api.common import ApiErrorSchema, ApiError
+from crc.models import FileSchema, FileModel, FileDataModel, FileType
+
+
+def update_file_from_request(file_model):
+    if 'file' not in connexion.request.files:
+        return ApiErrorSchema().dump(ApiError('invalid_file',
+                                              'Expected a file named "file" in the multipart form request')), 404
+    file = connexion.request.files['file']
+    file_model.name = file.filename
+    file_model.version = file_model.version + 1
+    file_model.last_updated = datetime.now()
+    file_model.content_type = file.content_type
+
+    # Verify the extension
+    basename, file_extension = os.path.splitext(file.filename)
+    file_extension = file_extension.lower().strip()[1:]
+    if file_extension not in FileType._member_names_:
+        return ApiErrorSchema().dump(ApiError('unknown_extension',
+                                              'The file you provided does not have an accepted extension:' +
+                                              file_extension)), 404
+    else:
+        file_model.type = FileType[file_extension]
+
+    file_data_model = db.session.query(FileDataModel).filter_by(id=file_model.id).with_for_update().first()
+    if file_data_model is None:
+        file_data_model = FileDataModel(data=file.stream.read(), file_model=file_model)
+    else:
+        file_data_model.data = file.stream.read()
+
+    db.session.add(file_data_model)
+    db.session.add(file_model)
+    db.session.flush()  # Assure the id is set on the model before returning it.
+
+
+def get_files(spec_id):
+    if spec_id:
+        schema = FileSchema(many=True)
+        return schema.dump(db.session.query(FileModel).filter_by(workflow_spec_id=spec_id).all())
+    else:
+        error = ApiError('no_files_found', 'Please provide some parameters so we can find the files you need.')
+        return ApiErrorSchema().dump(error), 400
+
+
+def add_file():
+    if 'workflow_spec_id' not in connexion.request.form:
+        return ApiErrorSchema().dump(ApiError('missing_spec_id',
+                                              'Please specify a workflow_spec_id for this file in the form')), 404
+    file_model = FileModel(version=0, workflow_spec_id=connexion.request.form['workflow_spec_id'])
+    update_file_from_request(file_model)
+    return FileSchema().dump(file_model)
+
+
+def update_file(file_id):
+    file_model = db.session.query(FileModel).filter_by(id=file_id).with_for_update().first()
+    if file_model is None:
+        return ApiErrorSchema().dump(ApiError('no_such_file', 'The file id you provided does not exist')), 404
+    update_file_from_request(file_model)
+    return FileSchema().dump(file_model)
+
+
+def get_file(file_id):
+    file_data = db.session.query(FileDataModel).filter_by(id=file_id).first()
+    if file_data is None:
+        return ApiErrorSchema().dump(ApiError('no_such_file', 'The file id you provided does not exist')), 404
+    return send_file(
+        io.BytesIO(file_data.data),
+        attachment_filename=file_data.file_model.name,
+        mimetype=file_data.file_model.content_type
+    )
+
+
+def get_file_info(file_id):
+    file_model = db.session.query(FileModel).filter_by(id=file_id).with_for_update().first()
+    if file_model is None:
+        return ApiErrorSchema().dump(ApiError('no_such_file', 'The file id you provided does not exist')), 404
+    return FileSchema().dump(file_model)
+
+
+def delete_file(file_id):
+    db.session.query(FileDataModel).filter_by(id=file_id).delete()
+    db.session.query(FileModel).filter_by(id=file_id).delete()
@@ -1,23 +1,17 @@
+import os
+from datetime import datetime
+
+import connexion
 from connexion import NoContent
 from flask_marshmallow import Schema

-from crc import db, ma
+from crc import db, ma, connexion_app
+from crc.api.common import ApiError, ApiErrorSchema
 from crc.models import WorkflowModel, WorkflowSchema, StudySchema, StudyModel, WorkflowSpecSchema, WorkflowSpecModel, \
-    WorkflowStatus, Task, TaskSchema
+    WorkflowStatus, Task, TaskSchema, FileSchema, FileModel, FileDataModel, FileType
 from crc.workflow_processor import WorkflowProcessor


-class ApiError:
-    def __init__(self, code, message):
-        self.code = code
-        self.message = message
-
-
-class ApiErrorSchema(ma.Schema):
-    class Meta:
-        fields = ("code", "message")
-
-
 def all_studies():
     # todo: Limit returned studies to a user
     schema = StudySchema(many=True)
@@ -32,11 +26,6 @@ def get_study(study_id):
     return schema.dump(study)


-def all_specifications():
-    schema = WorkflowSpecSchema(many=True)
-    return schema.dump(db.session.query(WorkflowSpecModel).all())
-
-
 def post_update_study_from_protocol_builder(study_id):
     # todo: Actually get data from an external service here
     return NoContent, 304
@@ -64,29 +53,3 @@ def add_workflow_to_study(study_id, body):
     return get_study_workflows(study_id)


-def get_workflow(workflow_id):
-    schema = WorkflowSchema()
-    workflow = db.session.query(WorkflowModel).filter_by(id=workflow_id).first()
-    return schema.dump(workflow)
-
-
-def get_tasks(workflow_id):
-    workflow = db.session.query(WorkflowModel).filter_by(id=workflow_id).first()
-    processor = WorkflowProcessor(workflow.workflow_spec_id, workflow.bpmn_workflow_json)
-    spiff_tasks = processor.get_ready_user_tasks()
-    tasks = []
-    for st in spiff_tasks:
-        tasks.append(Task.from_spiff(st))
-    return TaskSchema(many=True).dump(tasks)
-
-
-def get_task(workflow_id, task_id):
-    workflow = db.session.query(WorkflowModel).filter_by(id=workflow_id).first()
-    return workflow.bpmn_workflow().get_task(task_id)
-
-
-def update_task(workflow_id, task_id, body):
-    global bpmn_workflow
-    for field in body["task"]["form"]:
-        print("Setting " + field["id"] + " to " + field["value"])
-    return body
@@ -0,0 +1,44 @@
+import os
+from datetime import datetime
+
+import connexion
+from connexion import NoContent
+from flask_marshmallow import Schema
+
+from crc import db, ma, connexion_app
+from crc.models import WorkflowModel, WorkflowSchema, StudySchema, StudyModel, WorkflowSpecSchema, WorkflowSpecModel, \
+    WorkflowStatus, Task, TaskSchema, FileSchema, FileModel, FileDataModel, FileType
+from crc.workflow_processor import WorkflowProcessor
+
+
+def all_specifications():
+    schema = WorkflowSpecSchema(many=True)
+    return schema.dump(db.session.query(WorkflowSpecModel).all())
+
+
+def get_workflow(workflow_id):
+    schema = WorkflowSchema()
+    workflow = db.session.query(WorkflowModel).filter_by(id=workflow_id).first()
+    return schema.dump(workflow)
+
+
+def get_tasks(workflow_id):
+    workflow = db.session.query(WorkflowModel).filter_by(id=workflow_id).first()
+    processor = WorkflowProcessor(workflow.workflow_spec_id, workflow.bpmn_workflow_json)
+    spiff_tasks = processor.get_ready_user_tasks()
+    tasks = []
+    for st in spiff_tasks:
+        tasks.append(Task.from_spiff(st))
+    return TaskSchema(many=True).dump(tasks)
+
+
+def get_task(workflow_id, task_id):
+    workflow = db.session.query(WorkflowModel).filter_by(id=workflow_id).first()
+    return workflow.bpmn_workflow().get_task(task_id)
+
+
+def update_task(workflow_id, task_id, body):
+    global bpmn_workflow
+    for field in body["task"]["form"]:
+        print("Setting " + field["id"] + " to " + field["value"])
+    return body
@@ -39,10 +39,43 @@ class WorkflowSpecModel(db.Model):
     display_name = db.Column(db.String)
     description = db.Column(db.Text)


 class WorkflowSpecSchema(ModelSchema):
     class Meta:
         model = WorkflowSpecModel
+
+
+class FileType(enum.Enum):
+    bpmn = "bpmm"
+    svg = "svg"
+    dmn = "dmn"
+
+
+class FileDataModel(db.Model):
+    __tablename__ = 'file_data'
+    id = db.Column(db.Integer, db.ForeignKey('file.id'), primary_key=True)
+    data = db.Column(db.LargeBinary)
+    file_model = db.relationship("FileModel")
+
+
+class FileModel(db.Model):
+    __tablename__ = 'file'
+    id = db.Column(db.Integer, primary_key=True)
+    name = db.Column(db.String)
+    version = db.Column(db.Integer, default=0)
+    last_updated = db.Column(db.DateTime(timezone=True), default=func.now())
+    type = db.Column(db.Enum(FileType))
+    primary = db.Column(db.Boolean)
+    content_type = db.Column(db.String)
+    workflow_spec_id = db.Column(db.Integer, db.ForeignKey('workflow_spec.id'))
+
+
+class FileSchema(ModelSchema):
+    class Meta:
+        model = FileModel
+    type = EnumField(FileType)
+
+
 class WorkflowStatus(enum.Enum):
     new = "new"
     user_input_required = "user_input_required"
@@ -1,14 +1,12 @@
 import os

 from SpiffWorkflow.bpmn.BpmnScriptEngine import BpmnScriptEngine
-from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
 from SpiffWorkflow.bpmn.serializer.CompactWorkflowSerializer import CompactWorkflowSerializer
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
 from SpiffWorkflow.camunda.serializer.CamundaSerializer import CamundaSerializer
-from SpiffWorkflow.serializer.json import JSONSerializer

 from crc import app
-from crc.models import WorkflowModel, WorkflowStatus, WorkflowSpecModel
+from crc.models import WorkflowStatus


 class CustomBpmnScriptEngine(BpmnScriptEngine):
@@ -1,7 +1,8 @@
 import datetime
+import os

-from crc import db
-from crc.models import StudyModel, WorkflowSpecModel
+from crc import db, app
+from crc.models import StudyModel, WorkflowSpecModel, FileType, FileModel, FileDataModel


 class ExampleDataLoader:
@@ -19,7 +20,27 @@ class ExampleDataLoader:
         description='Displays a random fact about a topic of your choosing.',
     )]

+    workflow_spec_files = [WorkflowSpecModel(
+        id="random_fact",
+        display_name="Random Fact Generator",
+        description='Displays a random fact about a topic of your choosing.',
+    )]
+
+    workflow_spec_files = [FileModel(name="random_fact.bpmn",
+                                     type=FileType.bpmn,
+                                     version="1",
+                                     last_updated=datetime.datetime.now(),
+                                     primary=True,
+                                     workflow_spec_id=workflow_specs[0].id)]
+
+    filename = os.path.join(app.root_path, 'static', 'bpmn', 'random_fact', 'random_fact.bpmn')
+    file = open(filename, "rb")
+    workflow_data = [FileDataModel(data=file.read(), file_model=workflow_spec_files[0])]
+
     def load_all(self):
         db.session.bulk_save_objects(ExampleDataLoader.studies)
         db.session.bulk_save_objects(ExampleDataLoader.workflow_specs)
+        db.session.bulk_save_objects(ExampleDataLoader.workflow_spec_files)
+        db.session.bulk_save_objects(ExampleDataLoader.workflow_data)
+
         db.session.commit()
@@ -7,7 +7,14 @@ os.environ["TESTING"] = "true"
 from crc import app, db


+# UNCOMMENT THIS FOR DEBUGGING SQL ALCHEMY QUERIES
+# import logging
+# logging.basicConfig()
+# logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
+
+
 def clean_db():
+    db.session.flush()  # Clear out any transactions before deleting it all to avoid spurious errors.
     for table in reversed(db.metadata.sorted_tables):
         db.session.execute(table.delete())
     db.session.flush()
@@ -0,0 +1,104 @@
+import io
+import json
+import unittest
+from datetime import datetime
+
+from crc import db
+from crc.models import WorkflowSpecModel, FileModel, FileType, FileSchema
+from tests.base_test import BaseTest
+
+
+class TestApiFiles(BaseTest, unittest.TestCase):
+
+    def test_list_files_for_workflow_spec(self):
+        self.load_example_data()
+        spec = db.session.query(WorkflowSpecModel).first()
+        rv = self.app.get('/v1.0/file?spec_id=%s' % spec.id,
+                          follow_redirects=True,
+                          content_type="application/json")
+        self.assert_success(rv)
+        json_data = json.loads(rv.get_data(as_text=True))
+        self.assertEqual(1, len(json_data))
+        file = FileSchema(many=True).load(json_data, session=db.session)
+        self.assertEqual("random_fact.bpmn", file[0].name)
+
+    def test_list_multiple_files_for_workflow_spec(self):
+        self.load_example_data()
+        spec = db.session.query(WorkflowSpecModel).first()
+        svgFile = FileModel(name="test.svg", type=FileType.svg, version=1, last_updated=datetime.now(),
+                            primary=False, workflow_spec_id=spec.id)
+        db.session.add(svgFile)
+        db.session.flush()
+        rv = self.app.get('/v1.0/file?spec_id=%s' % spec.id,
+                          follow_redirects=True,
+                          content_type="application/json")
+        self.assert_success(rv)
+        json_data = json.loads(rv.get_data(as_text=True))
+        self.assertEqual(2, len(json_data))
+
+    def test_create_file(self):
+        self.load_example_data()
+        spec = db.session.query(WorkflowSpecModel).first()
+
+        data = {'workflow_spec_id': spec.id}
+        data['file'] = io.BytesIO(b"abcdef"), 'random_fact.svg'
+
+        rv = self.app.post('/v1.0/file', data=data, follow_redirects=True,
+                           content_type='multipart/form-data')
+
+        self.assert_success(rv)
+        self.assertIsNotNone(rv.get_data())
+        json_data = json.loads(rv.get_data(as_text=True))
+        file = FileSchema().load(json_data, session=db.session)
+        self.assertEqual(1, file.version)
+        self.assertEqual(FileType.svg, file.type)
+        self.assertFalse(file.primary)
+        self.assertEqual("image/svg+xml", file.content_type)
+        self.assertEqual(spec.id, file.workflow_spec_id)
+
+        rv = self.app.get('/v1.0/file/%i' % file.id)
+        self.assert_success(rv)
+        json_data = json.loads(rv.get_data(as_text=True))
+        file2 = FileSchema().load(json_data, session=db.session)
+        self.assertEqual(file, file2)
+
+    def test_update_file(self):
+        self.load_example_data()
+        spec = db.session.query(WorkflowSpecModel).first()
+        file = db.session.query(FileModel).filter_by(workflow_spec_id=spec.id).first()
+
+        data = {}
+        data['file'] = io.BytesIO(b"abcdef"), 'random_fact.bpmn'
+
+        rv = self.app.put('/v1.0/file/%i' % file.id, data=data, follow_redirects=True,
+                          content_type='multipart/form-data')
+
+        self.assert_success(rv)
+        self.assertIsNotNone(rv.get_data())
+        json_data = json.loads(rv.get_data(as_text=True))
+        file = FileSchema().load(json_data, session=db.session)
+        self.assertEqual(2, file.version)
+        self.assertEqual(FileType.bpmn, file.type)
+        self.assertTrue(file.primary)
+        self.assertEqual("application/octet-stream", file.content_type)
+        self.assertEqual(spec.id, file.workflow_spec_id)
+
+    def test_get_file(self):
+        self.load_example_data()
+        spec = db.session.query(WorkflowSpecModel).first()
+        file = db.session.query(FileModel).filter_by(workflow_spec_id=spec.id).first()
+        rv = self.app.get('/v1.0/file/%i/data' % file.id)
+        self.assert_success(rv)
+        self.assertEquals("application/octet-stream", rv.content_type)
+        self.assertTrue(rv.content_length > 1)
+
+    def test_delete_file(self):
+        self.load_example_data()
+        spec = db.session.query(WorkflowSpecModel).first()
+        file = db.session.query(FileModel).filter_by(workflow_spec_id=spec.id).first()
+        rv = self.app.get('/v1.0/file/%i' % file.id)
+        self.assert_success(rv)
+        rv = self.app.delete('/v1.0/file/%i' % file.id)
+        rv = self.app.get('/v1.0/file/%i' % file.id)
+        self.assertEqual(404, rv.status_code)