Merge branch 'master' into feature/crc_demo_workflows
commit 0ef1a178ed

Pipfile
@@ -32,6 +32,7 @@ sphinx = "*"
recommonmark = "*"
psycopg2 = "*"
psycopg2-binary = "*"
+docxtpl = "*"

[requires]
python_version = "3.7"

@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
-"sha256": "32eb2d02f21e83658d1c07484179af7f952f8772c41a52275d67e92fc2c5768c"
+"sha256": "10f677a7dbae5545a263aba1e0becb33afbf73d9c4c9f217883b390b8e3927e8"
},
"pipfile-spec": 6,
"requires": {

@@ -112,41 +112,36 @@
},
"cffi": {
"hashes": [
"sha256:0b49274afc941c626b605fb59b59c3485c17dc776dc3cc7cc14aca74cc19cc42",
"sha256:0e3ea92942cb1168e38c05c1d56b0527ce31f1a370f6117f1d490b8dcd6b3a04",
"sha256:135f69aecbf4517d5b3d6429207b2dff49c876be724ac0c8bf8e1ea99df3d7e5",
"sha256:19db0cdd6e516f13329cba4903368bff9bb5a9331d3410b1b448daaadc495e54",
"sha256:2781e9ad0e9d47173c0093321bb5435a9dfae0ed6a762aabafa13108f5f7b2ba",
"sha256:291f7c42e21d72144bb1c1b2e825ec60f46d0a7468f5346841860454c7aa8f57",
"sha256:2c5e309ec482556397cb21ede0350c5e82f0eb2621de04b2633588d118da4396",
"sha256:2e9c80a8c3344a92cb04661115898a9129c074f7ab82011ef4b612f645939f12",
"sha256:32a262e2b90ffcfdd97c7a5e24a6012a43c61f1f5a57789ad80af1d26c6acd97",
"sha256:3c9fff570f13480b201e9ab69453108f6d98244a7f495e91b6c654a47486ba43",
"sha256:415bdc7ca8c1c634a6d7163d43fb0ea885a07e9618a64bda407e04b04333b7db",
"sha256:42194f54c11abc8583417a7cf4eaff544ce0de8187abaf5d29029c91b1725ad3",
"sha256:4424e42199e86b21fc4db83bd76909a6fc2a2aefb352cb5414833c030f6ed71b",
"sha256:4a43c91840bda5f55249413037b7a9b79c90b1184ed504883b72c4df70778579",
"sha256:599a1e8ff057ac530c9ad1778293c665cb81a791421f46922d80a86473c13346",
"sha256:5c4fae4e9cdd18c82ba3a134be256e98dc0596af1e7285a3d2602c97dcfa5159",
"sha256:5ecfa867dea6fabe2a58f03ac9186ea64da1386af2159196da51c4904e11d652",
"sha256:62f2578358d3a92e4ab2d830cd1c2049c9c0d0e6d3c58322993cc341bdeac22e",
"sha256:6471a82d5abea994e38d2c2abc77164b4f7fbaaf80261cb98394d5793f11b12a",
"sha256:6d4f18483d040e18546108eb13b1dfa1000a089bcf8529e30346116ea6240506",
"sha256:71a608532ab3bd26223c8d841dde43f3516aa5d2bf37b50ac410bb5e99053e8f",
"sha256:74a1d8c85fb6ff0b30fbfa8ad0ac23cd601a138f7509dc617ebc65ef305bb98d",
"sha256:7b93a885bb13073afb0aa73ad82059a4c41f4b7d8eb8368980448b52d4c7dc2c",
"sha256:7d4751da932caaec419d514eaa4215eaf14b612cff66398dd51129ac22680b20",
"sha256:7f627141a26b551bdebbc4855c1157feeef18241b4b8366ed22a5c7d672ef858",
"sha256:8169cf44dd8f9071b2b9248c35fc35e8677451c52f795daa2bb4643f32a540bc",
"sha256:aa00d66c0fab27373ae44ae26a66a9e43ff2a678bf63a9c7c1a9a4d61172827a",
"sha256:ccb032fda0873254380aa2bfad2582aedc2959186cce61e3a17abc1a55ff89c3",
"sha256:d754f39e0d1603b5b24a7f8484b22d2904fa551fe865fd0d4c3332f078d20d4e",
"sha256:d75c461e20e29afc0aee7172a0950157c704ff0dd51613506bd7d82b718e7410",
"sha256:dcd65317dd15bc0451f3e01c80da2216a31916bdcffd6221ca1202d96584aa25",
"sha256:e570d3ab32e2c2861c4ebe6ffcad6a8abf9347432a37608fe1fbd157b3f0036b",
"sha256:fd43a88e045cf992ed09fa724b5315b790525f2676883a6ea64e3263bae6549d"
"sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff",
"sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b",
"sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac",
"sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0",
"sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384",
"sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26",
"sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6",
"sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b",
"sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e",
"sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd",
"sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2",
"sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66",
"sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc",
"sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8",
"sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55",
"sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4",
"sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5",
"sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d",
"sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78",
"sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa",
"sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793",
"sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f",
"sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a",
"sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f",
"sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30",
"sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f",
"sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3",
"sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c"
],
-"version": "==1.13.2"
+"version": "==1.14.0"
},
"chardet": {
"hashes": [

@@ -238,6 +233,14 @@
],
"version": "==0.16"
},
+"docxtpl": {
+"hashes": [
+"sha256:a502d1137bc44527f801ccb87026dd85493b4984d75c9fccc8802a4239926900",
+"sha256:c65f5142a03d52cd669e2cdcf34de6eed71c8440887bcaa1b7acf0352936198c"
+],
+"index": "pypi",
+"version": "==0.6.3"
+},
"et-xmlfile": {
"hashes": [
"sha256:614d9722d572f6246302c4491846d2c393c199cfa4edc9af593437691683335b"

@@ -602,6 +605,12 @@
],
"version": "==2.8.1"
},
+"python-docx": {
+"hashes": [
+"sha256:ba9f2a7ca391b78ab385d796b38af3f21bab23c727fc8e0c5e630448d1a11fe3"
+],
+"version": "==0.8.7"
+},
"python-editor": {
"hashes": [
"sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d",

@@ -1,7 +1,7 @@
from crc import ma


-class ApiError:
+class ApiError(Exception):
    def __init__(self, code, message):
        self.code = code
        self.message = message

@@ -8,38 +8,7 @@ from flask import send_file
from crc import session
from crc.api.common import ApiErrorSchema, ApiError
from crc.models.file import FileModelSchema, FileModel, FileDataModel, FileType
-
-
-def update_file_from_request(file_model):
-    if 'file' not in connexion.request.files:
-        return ApiErrorSchema().dump(ApiError('invalid_file',
-                                              'Expected a file named "file" in the multipart form request')), 404
-    file = connexion.request.files['file']
-    file_model.name = file.filename
-    file_model.version = file_model.version + 1
-    file_model.last_updated = datetime.now()
-    file_model.content_type = file.content_type
-
-    # Verify the extension
-    basename, file_extension = os.path.splitext(file.filename)
-    file_extension = file_extension.lower().strip()[1:]
-    if file_extension not in FileType._member_names_:
-        return ApiErrorSchema().dump(ApiError('unknown_extension',
-                                              'The file you provided does not have an accepted extension:' +
-                                              file_extension)), 404
-    else:
-        file_model.type = FileType[file_extension]
-
-    file_data_model = session.query(FileDataModel).filter_by(file_model_id=file_model.id).with_for_update().first()
-    if file_data_model is None:
-        file_data_model = FileDataModel(data=file.stream.read(), file_model=file_model)
-    else:
-        file_data_model.data = file.stream.read()
-
-    session.add_all([file_model, file_data_model])
-    session.commit()
-    session.flush()  # Assure the id is set on the model before returning it.
-    return FileModelSchema().dump(file_model)
+from crc.services.FileService import FileService


def get_files(workflow_spec_id=None, study_id=None, workflow_id=None, task_id=None, form_field_key=None):

@@ -48,15 +17,8 @@ def get_files(workflow_spec_id=None, study_id=None, workflow_id=None, task_id=No
                                              'Please specify at least one of workflow_spec_id, study_id, '
                                              'workflow_id, and task_id for this file in the HTTP parameters')), 400

-    schema = FileModelSchema(many=True)
-    results = session.query(FileModel).filter_by(
-        workflow_spec_id=workflow_spec_id,
-        study_id=study_id,
-        workflow_id=workflow_id,
-        task_id=task_id,
-        form_field_key=form_field_key
-    ).all()
-    return schema.dump(results)
+    results = FileService.get_files(workflow_spec_id, study_id, workflow_id, task_id, form_field_key)
+    return FileModelSchema(many=True).dump(results)


def add_file(workflow_spec_id=None, study_id=None, workflow_id=None, task_id=None, form_field_key=None):

@@ -66,27 +28,30 @@ def add_file(workflow_spec_id=None, study_id=None, workflow_id=None, task_id=Non
        return ApiErrorSchema().dump(ApiError('missing_parameter',
                                              'Please specify either a workflow_spec_id or all 3 of study_id, '
                                              'workflow_id, and task_id for this file in the HTTP parameters')), 404
+    if 'file' not in connexion.request.files:
+        return ApiErrorSchema().dump(ApiError('invalid_file',
+                                              'Expected a file named "file" in the multipart form request')), 404

-    file_model = FileModel(
-        version=0,
-        workflow_spec_id=workflow_spec_id,
-        study_id=study_id,
-        workflow_id=workflow_id,
-        task_id=task_id,
-        form_field_key=form_field_key
-    )
-    return update_file_from_request(file_model)
+    file = connexion.request.files['file']
+    if workflow_spec_id:
+        file_model = FileService.add_workflow_spec_file(workflow_spec_id, file.filename, file.content_type, file.stream.read())
+    else:
+        file_model = FileService.add_task_file(study_id, workflow_id, task_id, file.filename, file.content_type, file.stream.read())
+
+    return FileModelSchema().dump(file_model)


def update_file_data(file_id):
    file_model = session.query(FileModel).filter_by(id=file_id).with_for_update().first()
+    file = connexion.request.files['file']
    if file_model is None:
        return ApiErrorSchema().dump(ApiError('no_such_file', 'The file id you provided does not exist')), 404
-    return update_file_from_request(file_model)
+    file_model = FileService.update_file(file_model, file.stream.read(), file.content_type)
+    return FileModelSchema().dump(file_model)


def get_file_data(file_id):
-    file_data = session.query(FileDataModel).filter_by(id=file_id).first()
+    file_data = FileService.get_file_data(file_id)
    if file_data is None:
        return ApiErrorSchema().dump(ApiError('no_such_file', 'The file id you provided does not exist')), 404
    return send_file(

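For orientation only, a hypothetical client-side sketch of exercising this endpoint. The `/v1.0/file` path, the `workflow_id` query parameter and the multipart field name "file" are taken from this diff and its tests; the host, port, ids and file name below are invented.

import requests

BASE = "http://localhost:5000/v1.0"            # assumed host/port, not part of this commit

# add_file(): the upload must arrive as a multipart field named "file";
# study/workflow/task ids are passed as query parameters.
with open("Letter.docx", "rb") as fh:          # made-up local file
    rv = requests.post(BASE + "/file",
                       params={"study_id": 1, "workflow_id": 1, "task_id": "abc"},
                       files={"file": ("Letter.docx", fh)})
    print(rv.status_code, rv.json())

# get_files(): filter by any of the same parameters.
print(requests.get(BASE + "/file", params={"workflow_id": 1}).json())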
@@ -5,7 +5,7 @@ from crc.api.common import ApiError, ApiErrorSchema
from crc.api.workflow import __get_workflow_api_model
from crc.models.study import StudyModelSchema, StudyModel
from crc.models.workflow import WorkflowModel, WorkflowApiSchema, WorkflowSpecModel
-from crc.workflow_processor import Workflow, WorkflowProcessor
+from crc.services.workflow_processor import WorkflowProcessor


def all_studies():

@@ -20,13 +20,7 @@ def add_study(body):
    session.commit()
    # FIXME: We need to ask the protocol builder what workflows to add to the study, not just add them all.
    for spec in session.query(WorkflowSpecModel).all():
-        processor = WorkflowProcessor.create(spec.id)
-        workflow = WorkflowModel(bpmn_workflow_json=processor.serialize(),
-                                 status=processor.get_status(),
-                                 study_id=study.id,
-                                 workflow_spec_id=spec.id)
-        session.add(workflow)
-        session.commit()
+        WorkflowProcessor.create(study.id, spec.id)
    return StudyModelSchema().dump(study)


@@ -66,7 +60,7 @@ def get_study_workflows(study_id):
    for workflow_model in workflow_models:
        processor = WorkflowProcessor(workflow_model.workflow_spec_id,
                                      workflow_model.bpmn_workflow_json)
-        api_models.append( __get_workflow_api_model(workflow_model, processor))
+        api_models.append( __get_workflow_api_model(processor))
    schema = WorkflowApiSchema(many=True)
    return schema.dump(api_models)


@@ -76,12 +70,6 @@ def add_workflow_to_study(study_id, body):
    if workflow_spec_model is None:
        error = ApiError('unknown_spec', 'The specification "' + body['id'] + '" is not recognized.')
        return ApiErrorSchema.dump(error), 404
-    processor = WorkflowProcessor.create(workflow_spec_model.id)
-    workflow = WorkflowModel(bpmn_workflow_json=processor.serialize(),
-                             status=processor.get_status(),
-                             study_id=study_id,
-                             workflow_spec_id=workflow_spec_model.id)
-    session.add(workflow)
-    session.commit()
-    return WorkflowApiSchema().dump(__get_workflow_api_model(workflow, processor))
+    processor = WorkflowProcessor.create(study_id, workflow_spec_model.id)
+    return WorkflowApiSchema().dump(__get_workflow_api_model(processor))

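Both study endpoints now delegate persistence to WorkflowProcessor.create(study_id, spec_id), which (as the workflow_processor diff further down shows) commits the workflow row twice. A standalone sketch of that pattern, using an in-memory SQLite database and a simplified stand-in model rather than the project's real models (SQLAlchemy 1.4+ style):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class WorkflowModel(Base):            # stand-in for crc.models.workflow.WorkflowModel
    __tablename__ = "workflow"
    id = Column(Integer, primary_key=True)
    bpmn_workflow_json = Column(String)

engine = create_engine("sqlite://")   # in-memory database for the example
Base.metadata.create_all(engine)

with Session(engine) as session:
    workflow = WorkflowModel()
    session.add(workflow)
    session.commit()                  # first commit: the autogenerated id is now known
    # The serialized state can now embed the database id, mirroring
    # bpmn_workflow.data[WORKFLOW_ID_KEY] = workflow_model.id in the diff.
    workflow.bpmn_workflow_json = '{"workflow_id": %d}' % workflow.id
    session.commit()                  # second commit: persist state that references the id
    print(workflow.id, workflow.bpmn_workflow_json)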
@@ -1,13 +1,11 @@
import uuid

from flask import json

-from crc.api.file import delete_file
from crc import session
from crc.api.common import ApiError, ApiErrorSchema
from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, WorkflowSpecModel, \
-    Task, TaskSchema, WorkflowApiSchema, WorkflowApi
-from crc.workflow_processor import WorkflowProcessor
+    Task, WorkflowApiSchema, WorkflowApi
+from crc.services.workflow_processor import WorkflowProcessor
from crc.models.file import FileModel


@@ -67,25 +65,27 @@ def delete_workflow_specification(spec_id):
    session.commit()


-def __get_workflow_api_model(workflow_model: WorkflowModel, processor: WorkflowProcessor):
+def __get_workflow_api_model(processor: WorkflowProcessor):
    spiff_tasks = processor.get_all_user_tasks()
    user_tasks = map(Task.from_spiff, spiff_tasks)
-    return WorkflowApi(
-        id=workflow_model.id,
-        status=workflow_model.status,
+    workflow_api = WorkflowApi(
+        id=processor.get_workflow_id(),
+        status=processor.get_status(),
        last_task=Task.from_spiff(processor.bpmn_workflow.last_task),
-        next_task=Task.from_spiff(processor.next_task()),
+        next_task=None,
        user_tasks=user_tasks,
-        workflow_spec_id=workflow_model.workflow_spec_id
+        workflow_spec_id=processor.workflow_spec_id
    )

+    if(processor.next_task()):
+        workflow_api.next_task = Task.from_spiff(processor.next_task())
+    return workflow_api

def get_workflow(workflow_id):
    schema = WorkflowApiSchema()
    workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    processor = WorkflowProcessor(workflow_model.workflow_spec_id,
                                  workflow_model.bpmn_workflow_json)
-    return schema.dump(__get_workflow_api_model(workflow_model, processor))
+    return schema.dump(__get_workflow_api_model(processor))


def delete(workflow_id):

@@ -109,5 +109,5 @@ def update_task(workflow_id, task_id, body):
    workflow_model.bpmn_workflow_json = processor.serialize()
    session.add(workflow_model)
    session.commit()
-    return WorkflowApiSchema().dump(__get_workflow_api_model(workflow_model, processor)
+    return WorkflowApiSchema().dump(__get_workflow_api_model(processor)
    )

@@ -50,13 +50,17 @@ class Task(object):

    @classmethod
    def from_spiff(cls, spiff_task):
+        try:
+            documentation = spiff_task.task_spec.documentation,
+        except AttributeError:
+            documentation = None
        instance = cls(spiff_task.id,
                       spiff_task.task_spec.name,
                       spiff_task.task_spec.description,
                       spiff_task.task_spec.__class__.__name__,
                       spiff_task.get_state_name(),
                       None,
-                       spiff_task.task_spec.documentation,
+                       documentation,
                       spiff_task.data)
        if hasattr(spiff_task.task_spec, "form"):
            instance.form = spiff_task.task_spec.form

@@ -124,7 +128,7 @@ class WorkflowApiSchema(ma.Schema):
    status = EnumField(WorkflowStatus)
    user_tasks = marshmallow.fields.List(marshmallow.fields.Nested(TaskSchema, dump_only=True))
    last_task = marshmallow.fields.Nested(TaskSchema, dump_only=True)
-    next_task = marshmallow.fields.Nested(TaskSchema, dump_only=True)
+    next_task = marshmallow.fields.Nested(TaskSchema, dump_only=True, required=False)

    @marshmallow.post_load
    def make_workflow(self, data, **kwargs):

@@ -0,0 +1,77 @@
from io import StringIO, BytesIO

from jinja2 import UndefinedError

from crc import session
from crc.api.common import ApiError
from crc.models.file import FileModel, FileDataModel
from crc.models.workflow import WorkflowSpecModel
from docxtpl import DocxTemplate
import jinja2

from crc.services.FileService import FileService
from crc.services.workflow_processor import WorkflowProcessor


class CompleteTemplate(object):
    """Completes a word template, using the data available in the current task. Heavy on the
    error messages, because there is so much that can go wrong here, and we want to provide
    as much feedback as possible. Some of this might move up to a higher level object or be
    passed into all tasks as we complete more work."""


    def do_task(self, task, *args, **kwargs):
        """Entry point, mostly worried about wiring it all up."""
        if len(args) != 1:
            raise ApiError(code="missing_argument",
                           message="The CompleteTask script requires a single argument with "
                                   "the name of the docx template to use.")
        file_name = args[0]
        workflow_spec_model = self.find_spec_model_in_db(task.workflow)

        if workflow_spec_model is None:
            raise ApiError(code="workflow_model_error",
                           message="Something is wrong. I can't find the workflow you are using.")

        file_data_model = session.query(FileDataModel) \
            .join(FileModel) \
            .filter(FileModel.name == file_name) \
            .filter(FileModel.workflow_spec_id == workflow_spec_model.id).first()


        if file_data_model is None:
            raise ApiError(code="file_missing",
                           message="Can not find a file called '%s' "
                                   "within workflow specification '%s'") % (args[0], workflow_spec_model.id)

        final_document_stream = self.make_template(file_data_model, task.data)
        study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
        workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
        FileService.add_task_file(study_id=study_id, workflow_id=workflow_id, task_id=task.id,
                                  name=file_name,
                                  content_type=FileService.DOCX_MIME,
                                  binary_data=final_document_stream.read())

        print("Complete Task was called with %s" % str(args))

    def make_template(self, file_data_model, context):
        doc = DocxTemplate(BytesIO(file_data_model.data))
        jinja_env = jinja2.Environment()
        doc.render(context, jinja_env)
        target_stream = BytesIO()
        doc.save(target_stream)
        target_stream.seek(0)  # move to the beginning of the stream.
        return target_stream

    def find_spec_model_in_db(self, workflow):
        """ Search for the workflow """
        # When the workflow spec model is created, we record the primary process id,
        # then we can look it up. As there is the potential for sub-workflows, we
        # may need to travel up to locate the primary process.
        spec = workflow.spec
        workflow_model = session.query(WorkflowSpecModel). \
            filter(WorkflowSpecModel.primary_process_id == spec.name).first()
        if workflow_model is None and workflow != workflow.outer_workflow:
            return self.find_spec_model_in_db(workflow.outer_workflow)

        return workflow_model

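A self-contained sketch of the render-to-stream pattern that make_template() above relies on, assuming docxtpl and python-docx (both added to the Pipfile in this commit) are installed; the placeholder names and text are invented for the example.

from io import BytesIO

import docx                      # python-docx, used here only to fabricate a template in memory
from docxtpl import DocxTemplate

# Build a one-paragraph .docx containing Jinja2 placeholders.
source = docx.Document()
source.add_paragraph("Hello {{ full_name }}, welcome to {{ company }}.")
template_stream = BytesIO()
source.save(template_stream)
template_stream.seek(0)

# Same pattern as make_template(): load from bytes, render, save to a fresh stream.
doc = DocxTemplate(template_stream)
doc.render({"full_name": "Buck of the Wild", "company": "In the company of wolves"})
target_stream = BytesIO()
doc.save(target_stream)
target_stream.seek(0)            # rewind so callers can read() the finished document
print(len(target_stream.read()), "bytes of rendered docx")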
@@ -16,11 +16,11 @@ class FactService:
        response = requests.get('https://api.chucknorris.io/jokes/random')
        return response.json()['value']

-    def do_task(self, data, **kwargs):
-        if "Fact.type" not in data:
+    def do_task(self, task, **kwargs):
+        if "Fact.type" not in task.data:
            raise Exception("No Fact Provided.")
        else:
-            fact = data["Fact.type"]
+            fact = task.data["Fact.type"]

        if True:
            details = "Assertively Incubate Seamless Niches"

@@ -32,4 +32,4 @@ class FactService:
            details = self.get_buzzword()
        else:
            details = "unknown fact type."
-        data['details'] = details
+        task.data['details'] = details

@@ -0,0 +1,85 @@
import os
from datetime import datetime

from crc import session
from crc.api.common import ApiErrorSchema, ApiError
from crc.models.file import FileType, FileDataModel, FileModelSchema, FileModel


class FileService(object):
    """Provides consistent management and rules for storing, retrieving and processing files."""

    DOCX_MIME = "application/vnd.openxmlformats-officedocument.wordprocessingml.document"


    @staticmethod
    def add_workflow_spec_file(workflow_spec_id, name, content_type, binary_data):
        """Create a new file and associate it with a workflow spec."""
        file_model = FileModel(
            version=0,
            workflow_spec_id=workflow_spec_id,
            name=name,
        )
        return FileService.update_file(file_model, binary_data, content_type)

    @staticmethod
    def add_task_file(study_id, workflow_id, task_id, name, content_type, binary_data):
        """Create a new file and associate it with an executing task within a workflow."""
        file_model = FileModel(
            version=0,
            study_id=study_id,
            workflow_id=workflow_id,
            task_id=task_id,
            name=name,
        )
        return FileService.update_file(file_model, binary_data, content_type)

    @staticmethod
    def update_file(file_model, binary_data, content_type):

        file_model.version = file_model.version + 1
        file_model.last_updated = datetime.now()
        file_model.content_type = content_type

        # Verify the extension
        basename, file_extension = os.path.splitext(file_model.name)
        file_extension = file_extension.lower().strip()[1:]
        if file_extension not in FileType._member_names_:
            return ApiErrorSchema().dump(ApiError('unknown_extension',
                                                  'The file you provided does not have an accepted extension:' +
                                                  file_extension)), 404
        else:
            file_model.type = FileType[file_extension]

        file_data_model = session.query(FileDataModel).filter_by(file_model_id=file_model.id).with_for_update().first()
        if file_data_model is None:
            file_data_model = FileDataModel(data=binary_data, file_model=file_model)
        else:
            file_data_model.data = binary_data

        session.add_all([file_model, file_data_model])
        session.commit()
        session.flush()  # Assure the id is set on the model before returning it.
        return file_model

    @staticmethod
    def get_files(workflow_spec_id=None, study_id=None, workflow_id=None, task_id=None, form_field_key=None):
        query = session.query(FileModel)
        if workflow_spec_id:
            query = query.filter_by(workflow_spec_id=workflow_spec_id)
        if study_id:
            query = query.filter_by(study_id=study_id)
        if workflow_id:
            query = query.filter_by(workflow_id=workflow_id)
        if task_id:
            query = query.filter_by(task_id=str(task_id))
        if form_field_key:
            query = query.filter_by(form_field_key=form_field_key)

        results = query.all()
        return results

    @staticmethod
    def get_file_data(file_id):
        """Returns the file_data that is associated with the file model id"""
        return session.query(FileDataModel).filter(FileDataModel.file_model_id == file_id).first()

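A minimal, dependency-free sketch of the extension check inside update_file() above. The FileType enum here is a reduced stand-in for crc.models.file.FileType, which has more members than shown.

import enum
import os

class FileType(enum.Enum):      # stand-in with a subset of the real members
    bpmn = "bpmn"
    dmn = "dmn"
    svg = "svg"
    docx = "docx"

def detect_type(file_name: str) -> FileType:
    basename, file_extension = os.path.splitext(file_name)
    file_extension = file_extension.lower().strip()[1:]     # ".DOCX" -> "docx"
    if file_extension not in FileType._member_names_:
        raise ValueError("unknown_extension: " + file_extension)
    return FileType[file_extension]

print(detect_type("Letter.docx"))   # FileType.docx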
@@ -11,7 +11,7 @@ from SpiffWorkflow.operators import Operator
from crc import session
from crc.api.rest_exception import RestException
from crc.models.file import FileDataModel, FileModel, FileType
-from crc.models.workflow import WorkflowStatus
+from crc.models.workflow import WorkflowStatus, WorkflowModel


class CustomBpmnScriptEngine(BpmnScriptEngine):

@@ -27,11 +27,12 @@ class CustomBpmnScriptEngine(BpmnScriptEngine):

        This allows us to reference custom code from the BPMN diagram.
        """
-        module_name = "crc." + script
+        commands = script.split(" ")
+        module_name = "crc." + commands[0]
        class_name = module_name.split(".")[-1]
        mod = __import__(module_name, fromlist=[class_name])
        klass = getattr(mod, class_name)
-        klass().do_task(task.data)
+        klass().do_task(task, *commands[1:])

    def evaluate(self, task, expression):
        """

@@ -63,9 +64,12 @@ class MyCustomParser(BpmnDmnParser):
class WorkflowProcessor(object):
    _script_engine = CustomBpmnScriptEngine()
    _serializer = BpmnSerializer()
+    WORKFLOW_ID_KEY = "workflow_id"
+    STUDY_ID_KEY = "session_id"

    def __init__(self, workflow_spec_id, bpmn_json):
        wf_spec = self.get_spec(workflow_spec_id)
+        self.workflow_spec_id = workflow_spec_id
        self.bpmn_workflow = self._serializer.deserialize_workflow(bpmn_json, workflow_spec=wf_spec)
        self.bpmn_workflow.script_engine = self._script_engine

@@ -85,7 +89,7 @@ class WorkflowProcessor(object):
            if file_data.file_model.type == FileType.bpmn:
                bpmn: ElementTree.Element = ElementTree.fromstring(file_data.data)
                if file_data.file_model.primary:
-                    process_id = WorkflowProcessor.__get_process_id(bpmn)
+                    process_id = WorkflowProcessor.get_process_id(bpmn)
                parser.add_bpmn_xml(bpmn, filename=file_data.file_model.name)
            elif file_data.file_model.type == FileType.dmn:
                dmn: ElementTree.Element = ElementTree.fromstring(file_data.data)

@@ -95,14 +99,26 @@
        return parser.get_spec(process_id)


    @classmethod
-    def create(cls, workflow_spec_id):
+    def create(cls, study_id, workflow_spec_id):
        spec = WorkflowProcessor.get_spec(workflow_spec_id)
        bpmn_workflow = BpmnWorkflow(spec, script_engine=cls._script_engine)
        bpmn_workflow.do_engine_steps()
        json = cls._serializer.serialize_workflow(bpmn_workflow)
        processor = cls(workflow_spec_id, json)
+        workflow_model = WorkflowModel(status=processor.get_status(),
+                                       study_id=study_id,
+                                       workflow_spec_id=workflow_spec_id)
+        session.add(workflow_model)
+        session.commit()
+        # Need to commit twice, first to get a unique id for the workflow model, and
+        # a second time to store the serilaization so we can maintain this link within
+        # the spiff-workflow process.
+        processor.bpmn_workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY] = workflow_model.id
+        processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = study_id
+        workflow_model.bpmn_workflow_json = processor.serialize()
+        session.add(workflow_model)
+        session.commit()
        return processor

    def get_status(self):

@@ -135,7 +151,7 @@ class WorkflowProcessor(object):
        else:
            for task in ready_tasks:
                if task.parent == self.bpmn_workflow.last_task:
-                    return task;
+                    return task
        return ready_tasks[0]

    def complete_task(self, task):

@@ -144,6 +160,12 @@ class WorkflowProcessor(object):
    def get_data(self):
        return self.bpmn_workflow.data

+    def get_workflow_id(self):
+        return self.bpmn_workflow.data[self.WORKFLOW_ID_KEY]
+
+    def get_study_id(self):
+        return self.bpmn_workflow.data[self.STUDY_ID_KEY]
+
    def get_ready_user_tasks(self):
        return self.bpmn_workflow.get_ready_user_tasks()

@@ -152,7 +174,7 @@ class WorkflowProcessor(object):
        return [t for t in all_tasks if not self.bpmn_workflow._is_engine_task(t.task_spec)]

    @staticmethod
-    def __get_process_id(et_root: ElementTree.Element):
+    def get_process_id(et_root: ElementTree.Element):
        process_elements = []
        for child in et_root:
            if child.tag.endswith('process') and child.attrib.get('isExecutable', False):

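For context, a self-contained sketch of how the CustomBpmnScriptEngine change above resolves a BPMN script string such as "scripts.CompleteTemplate Letter.docx" into a class and an argument list. The module registered here is a stand-in so the example runs on its own; it is not the project's real crc.scripts package.

import importlib
import sys
import types

# Fake crc.scripts.CompleteTemplate module so the dynamic import below succeeds.
demo_module = types.ModuleType("crc.scripts.CompleteTemplate")

class CompleteTemplate:                       # stand-in script class
    def do_task(self, task, *args):
        print("rendering", args[0], "with data", task)

demo_module.CompleteTemplate = CompleteTemplate
sys.modules["crc.scripts.CompleteTemplate"] = demo_module

def run_script(script: str, task):
    commands = script.split(" ")              # e.g. "scripts.CompleteTemplate Letter.docx"
    module_name = "crc." + commands[0]        # -> "crc.scripts.CompleteTemplate"
    class_name = module_name.split(".")[-1]   # class is assumed to match the last segment
    klass = getattr(importlib.import_module(module_name), class_name)
    klass().do_task(task, *commands[1:])      # remaining words become script arguments

run_script("scripts.CompleteTemplate Letter.docx", {"full_name": "Buck"})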
Binary file not shown.
@@ -0,0 +1,65 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96a17d9" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<bpmn:process id="Process_93a29b3" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_0637d8i</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_0637d8i" sourceRef="StartEvent_1" targetRef="task_gather_information" />
<bpmn:userTask id="task_gather_information" name="Gather Information" camunda:formKey="example_document_form">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="full_name" label="What is your name?" type="string" />
<camunda:formField id="date" label="date" type="string" />
<camunda:formField id="title" label="Title" type="string" />
<camunda:formField id="company" label="Company" type="string" />
<camunda:formField id="last_name" label="Last Name" type="string" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0637d8i</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1i7hk1a</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="SequenceFlow_1i7hk1a" sourceRef="task_gather_information" targetRef="task_generate_document" />
<bpmn:scriptTask id="task_generate_document" name="Generate Document">
<bpmn:extensionElements>
<camunda:properties>
<camunda:property name="template" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_1i7hk1a</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_11c35oq</bpmn:outgoing>
<bpmn:script>scripts.CompleteTemplate Letter.docx</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="EndEvent_0evb22x">
<bpmn:incoming>SequenceFlow_11c35oq</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_11c35oq" sourceRef="task_generate_document" targetRef="EndEvent_0evb22x" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_93a29b3">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0637d8i_di" bpmnElement="SequenceFlow_0637d8i">
<di:waypoint x="215" y="117" />
<di:waypoint x="265" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="UserTask_02o51o8_di" bpmnElement="task_gather_information">
<dc:Bounds x="265" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1i7hk1a_di" bpmnElement="SequenceFlow_1i7hk1a">
<di:waypoint x="365" y="117" />
<di:waypoint x="465" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ScriptTask_0xjh8x4_di" bpmnElement="task_generate_document">
<dc:Bounds x="465" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_0evb22x_di" bpmnElement="EndEvent_0evb22x">
<dc:Bounds x="665" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_11c35oq_di" bpmnElement="SequenceFlow_11c35oq">
<di:waypoint x="565" y="117" />
<di:waypoint x="665" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

@@ -32,9 +32,9 @@
<bpmn:sequenceFlow id="SequenceFlow_0n7ums4" sourceRef="Task_19r6r37" targetRef="ExclusiveGateway_0jsb2sc" />
<bpmn:sequenceFlow id="SequenceFlow_0hftq9w" sourceRef="Task_1o1we8w" targetRef="ExclusiveGateway_0jsb2sc" />
<bpmn:endEvent id="EndEvent_0alhi16">
-<bpmn:incoming>SequenceFlow_06rqrvw</bpmn:incoming>
+<bpmn:incoming>SequenceFlow_1y89ho1</bpmn:incoming>
</bpmn:endEvent>
-<bpmn:sequenceFlow id="SequenceFlow_06rqrvw" sourceRef="ExclusiveGateway_0jsb2sc" targetRef="EndEvent_0alhi16" />
+<bpmn:sequenceFlow id="SequenceFlow_06rqrvw" sourceRef="ExclusiveGateway_0jsb2sc" targetRef="Task_0c7seuo" />
<bpmn:userTask id="Task_1sxp42p" name="Do the dishes" camunda:formKey="form_a_1">
<bpmn:extensionElements>
<camunda:formData>

@@ -120,6 +120,16 @@
<bpmn:incoming>SequenceFlow_18z9ub0</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0ogto9b</bpmn:outgoing>
</bpmn:userTask>
+<bpmn:sequenceFlow id="SequenceFlow_1y89ho1" sourceRef="Task_0c7seuo" targetRef="EndEvent_0alhi16" />
+<bpmn:userTask id="Task_0c7seuo" name="Time warp" camunda:formKey="last_form">
+<bpmn:extensionElements>
+<camunda:formData>
+<camunda:formField id="FormField_159qrqu" label="Do the time warp again?" type="boolean" />
+</camunda:formData>
+</bpmn:extensionElements>
+<bpmn:incoming>SequenceFlow_06rqrvw</bpmn:incoming>
+<bpmn:outgoing>SequenceFlow_1y89ho1</bpmn:outgoing>
+</bpmn:userTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_68a66aa">

@@ -193,11 +203,11 @@
<di:waypoint x="783" y="310" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="EndEvent_0alhi16_di" bpmnElement="EndEvent_0alhi16">
-<dc:Bounds x="953" y="317" width="36" height="36" />
+<dc:Bounds x="1091" y="317" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_06rqrvw_di" bpmnElement="SequenceFlow_06rqrvw">
<di:waypoint x="808" y="335" />
-<di:waypoint x="953" y="335" />
+<di:waypoint x="898" y="335" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="UserTask_0hppz22_di" bpmnElement="Task_1sxp42p">
<dc:Bounds x="365" y="137" width="100" height="80" />

@@ -223,6 +233,13 @@
<bpmndi:BPMNShape id="UserTask_1rvpblb_di" bpmnElement="Task_03u4wg2">
<dc:Bounds x="515" y="467" width="100" height="80" />
</bpmndi:BPMNShape>
+<bpmndi:BPMNEdge id="SequenceFlow_1y89ho1_di" bpmnElement="SequenceFlow_1y89ho1">
+<di:waypoint x="998" y="335" />
+<di:waypoint x="1091" y="335" />
+</bpmndi:BPMNEdge>
+<bpmndi:BPMNShape id="UserTask_0v7afjc_di" bpmnElement="Task_0c7seuo">
+<dc:Bounds x="898" y="295" width="100" height="80" />
+</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

@@ -6,7 +6,8 @@ from crc import app, db, session
from crc.models.file import FileType, FileModel, FileDataModel
from crc.models.study import StudyModel
from crc.models.workflow import WorkflowSpecModel

+import xml.etree.ElementTree as ElementTree
+from crc.services.workflow_processor import WorkflowProcessor

class ExampleDataLoader:
    def make_data(self):

@@ -66,12 +67,11 @@ class ExampleDataLoader:
                         name="exclusive_gateway",
                         display_name="Exclusive Gateway Example",
                         description='How to take different paths based on input.')

-        # workflow_specifications += \
-        #     self.create_spec(id="docx",
-        #                      name="docx",
-        #                      display_name="Form with document generation",
-        #                      description='the name says it all')
+        workflow_specifications += \
+            self.create_spec(id="docx",
+                             name="docx",
+                             display_name="Form with document generation",
+                             description='the name says it all')

        all_data = studies + workflow_specifications
        return all_data

@@ -98,8 +98,8 @@ class ExampleDataLoader:
                type = FileType.dmn
            elif file_extension.lower() == '.svg':
                type = FileType.svg
-            # elif file_extension.lower() == '.docx':
-            #     type = FileType.docx
+            elif file_extension.lower() == '.docx':
+                type = FileType.docx
            else:
                raise Exception("Unsupported file type:" + file_path)
                continue

@@ -111,7 +111,12 @@ class ExampleDataLoader:
            models.append(file_model)
            try:
                file = open(file_path, "rb")
-                models.append(FileDataModel(data=file.read(), file_model=file_model))
+                data = file.read()
+                if(is_primary):
+                    bpmn: ElementTree.Element = ElementTree.fromstring(data)
+                    spec.primary_process_id = WorkflowProcessor.get_process_id(bpmn)
+                    print("Locating Process Id for " + filename + " " + spec.primary_process_id)
+                models.append(FileDataModel(data=data, file_model=file_model))
            finally:
                file.close()
        return models

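A self-contained illustration of the get_process_id() lookup that example_data.py now performs to record each spec's primary process id. The embedded XML is a trimmed copy of the docx.bpmn file added in this commit.

import xml.etree.ElementTree as ElementTree

BPMN = """<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL">
  <bpmn:process id="Process_93a29b3" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1"/>
  </bpmn:process>
</bpmn:definitions>"""

def get_process_id(et_root: ElementTree.Element) -> str:
    # Tags are namespace-qualified, so match on the suffix plus the isExecutable flag,
    # the same idea as WorkflowProcessor.get_process_id() in the diff above.
    process_elements = [child for child in et_root
                        if child.tag.endswith("process") and child.attrib.get("isExecutable", False)]
    if len(process_elements) != 1:
        raise ValueError("expected exactly one executable process")
    return process_elements[0].attrib["id"]

print(get_process_id(ElementTree.fromstring(BPMN)))   # Process_93a29b3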
@@ -68,6 +68,8 @@ class BaseTest(unittest.TestCase):

    def load_test_spec(self, dir_name):
        """Loads a spec into the database based on a directory in /tests/data"""
+        if session.query(WorkflowSpecModel).filter_by(id=dir_name).count() > 0:
+            return
        filepath = os.path.join(app.root_path, '..', 'tests', 'data', dir_name, "*")
        models = ExampleDataLoader().create_spec(id=dir_name, name=dir_name, filepath=filepath)
        spec = None

Binary file not shown.
@@ -0,0 +1,65 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96a17d9" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.4.1">
<bpmn:process id="Process_93a29b3" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_0637d8i</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_0637d8i" sourceRef="StartEvent_1" targetRef="task_gather_information" />
<bpmn:userTask id="task_gather_information" name="Gather Information" camunda:formKey="example_document_form">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="full_name" label="What is your name?" type="string" />
<camunda:formField id="date" label="date" type="string" />
<camunda:formField id="title" label="Title" type="string" />
<camunda:formField id="company" label="Company" type="string" />
<camunda:formField id="last_name" label="Last Name" type="string" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0637d8i</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1i7hk1a</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="SequenceFlow_1i7hk1a" sourceRef="task_gather_information" targetRef="task_generate_document" />
<bpmn:scriptTask id="task_generate_document" name="Generate Document">
<bpmn:extensionElements>
<camunda:properties>
<camunda:property name="template" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_1i7hk1a</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_11c35oq</bpmn:outgoing>
<bpmn:script>scripts.CompleteTemplate Letter.docx</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="EndEvent_0evb22x">
<bpmn:incoming>SequenceFlow_11c35oq</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_11c35oq" sourceRef="task_generate_document" targetRef="EndEvent_0evb22x" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_93a29b3">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0637d8i_di" bpmnElement="SequenceFlow_0637d8i">
<di:waypoint x="215" y="117" />
<di:waypoint x="265" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="UserTask_02o51o8_di" bpmnElement="task_gather_information">
<dc:Bounds x="265" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1i7hk1a_di" bpmnElement="SequenceFlow_1i7hk1a">
<di:waypoint x="365" y="117" />
<di:waypoint x="465" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="ScriptTask_0xjh8x4_di" bpmnElement="task_generate_document">
<dc:Bounds x="465" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_0evb22x_di" bpmnElement="EndEvent_0evb22x">
<dc:Bounds x="665" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_11c35oq_di" bpmnElement="SequenceFlow_11c35oq">
<di:waypoint x="565" y="117" />
<di:waypoint x="665" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

@@ -1,11 +1,10 @@
import json
-from datetime import datetime

from crc import session
-from crc.models.file import FileModel
-from crc.models.study import StudyModel, StudyModelSchema, ProtocolBuilderStatus
+from crc.models.file import FileModelSchema
+from crc.models.study import StudyModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, \
-    WorkflowStatus, TaskSchema, WorkflowApiSchema
+    WorkflowApiSchema, WorkflowStatus
from tests.base_test import BaseTest


@@ -110,3 +109,25 @@ class TestTasksApi(BaseTest):
        workflow_api = self.get_workflow_api(workflow)
        self.assertIsNotNone(workflow_api.last_task)
        self.assertIsNotNone(workflow_api.next_task)
+
+    def test_document_added_to_workflow_shows_up_in_file_list(self):
+        self.load_example_data()
+        workflow = self.create_workflow('docx')
+        # get the first form in the two form workflow.
+        tasks = self.get_workflow_api(workflow).user_tasks
+        data = {
+            "full_name": "Buck of the Wild",
+            "date": "5/1/2020",
+            "title": "Leader of the Pack",
+            "company": "In the company of wolves",
+            "last_name": "Mr. Wolf"
+        }
+        workflow_api = self.complete_form(workflow, tasks[0], data)
+        # workflow_api = self.get_workflow_api(workflow)
+        self.assertIsNone(workflow_api.next_task)
+        self.assertTrue(workflow_api.status == WorkflowStatus.complete)
+        rv = self.app.get('/v1.0/file?workflow_id=%i' % workflow.id)
+        self.assert_success(rv)
+        json_data = json.loads(rv.get_data(as_text=True))
+        files = FileModelSchema(many=True).load(json_data, session=session)
+        self.assertTrue(len(files) == 1)

@@ -3,15 +3,16 @@ import random

from crc import session
from crc.api.rest_exception import RestException
-from crc.models.file import FileModel
+from crc.models.file import FileModel, FileDataModel
from crc.models.study import StudyModel
from crc.models.workflow import WorkflowSpecModel, WorkflowStatus
+from crc.services.FileService import FileService
from tests.base_test import BaseTest
-from crc.workflow_processor import Workflow, WorkflowProcessor
+from crc.services.workflow_processor import WorkflowProcessor


class TestWorkflowProcessor(BaseTest):

    def _randomString(self, stringLength=10):
        """Generate a random string of fixed length """
        letters = string.ascii_lowercase

@@ -28,9 +29,9 @@ class TestWorkflowProcessor(BaseTest):
    def test_create_and_complete_workflow(self):
        self.load_example_data()
        workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id="random_fact").first()

-        processor = WorkflowProcessor.create(workflow_spec_model.id)
-
+        study = session.query(StudyModel).first()
+        processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
+        self.assertEqual(study.id, processor.bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY])
        self.assertIsNotNone(processor)
        self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
        next_user_tasks = processor.next_user_tasks()

@@ -51,10 +52,11 @@ class TestWorkflowProcessor(BaseTest):

    def test_workflow_with_dmn(self):
        self.load_example_data()
+        study = session.query(StudyModel).first()
        files = session.query(FileModel).filter_by(workflow_spec_id='decision_table').all()
        self.assertEqual(2, len(files))
        workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id="decision_table").first()
-        processor = WorkflowProcessor.create(workflow_spec_model.id)
+        processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
        self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
        next_user_tasks = processor.next_user_tasks()
        self.assertEqual(1, len(next_user_tasks))

@@ -71,11 +73,11 @@ class TestWorkflowProcessor(BaseTest):
        self.assertIn("message", data)
        self.assertEqual("Oh, Ginger.", data.get('message'))


    def test_workflow_with_parallel_forms(self):
        self.load_example_data()
        workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id="parallel_tasks").first()
-        processor = WorkflowProcessor.create(workflow_spec_model.id)
+        study = session.query(StudyModel).first()
+        processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
        self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
        next_user_tasks = processor.next_user_tasks()
        self.assertEqual(4, len(next_user_tasks))

@@ -103,8 +105,9 @@ class TestWorkflowProcessor(BaseTest):

    def test_workflow_processor_knows_the_text_task_even_when_parallel(self):
        self.load_example_data()
+        study = session.query(StudyModel).first()
        workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id="parallel_tasks").first()
-        processor = WorkflowProcessor.create(workflow_spec_model.id)
+        processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
        self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
        next_user_tasks = processor.next_user_tasks()
        self.assertEqual(4, len(next_user_tasks))

@@ -121,10 +124,12 @@ class TestWorkflowProcessor(BaseTest):
        self.assertEqual(4, len(next_user_tasks))
        self.assertEqual(task.children[0], processor.next_task())


    def test_workflow_with_bad_expression_raises_sensible_error(self):
        self.load_example_data()

        workflow_spec_model = self.load_test_spec("invalid_expression")
-        processor = WorkflowProcessor.create(workflow_spec_model.id)
+        study = session.query(StudyModel).first()
+        processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
        processor.do_engine_steps()
        next_user_tasks = processor.next_user_tasks()
        self.assertEqual(1, len(next_user_tasks))

@@ -134,21 +139,28 @@ class TestWorkflowProcessor(BaseTest):
            processor.do_engine_steps()
        self.assertEqual("invalid_expression", context.exception.payload['code'])

-    # def test_workflow_with_docx_template(self):
-    #     self.load_example_data()
-    #     files = session.query(FileModel).filter_by(workflow_spec_id='docx').all()
-    #     self.assertEquals(2, len(files))
-    #     workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id="docx").first()
-    #     processor = WorkflowProcessor.create(workflow_spec_model.id)
-    #     self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
-    #     next_user_tasks = processor.next_user_tasks()
-    #     self.assertEqual(1, len(next_user_tasks))
-    #     task = next_user_tasks[0]
-    #     self.assertEqual("task_gather_information", task.get_name())
-    #     self._complete_form_with_random_data(task)
-    #     processor.complete_task(task)
-    #     processor.do_engine_steps()
-
-    #     workflow_files = session.query(FileModel).filter_by(workflow_id=).all()
-
+    def test_workflow_with_docx_template(self):
+        self.load_example_data()
+        study = session.query(StudyModel).first()
+        workflow_spec_model = self.load_test_spec("docx")
+        files = session.query(FileModel).filter_by(workflow_spec_id='docx').all()
+        self.assertEqual(2, len(files))
+        workflow_spec_model = session.query(WorkflowSpecModel).filter_by(id="docx").first()
+        processor = WorkflowProcessor.create(study.id, workflow_spec_model.id)
+        self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
+        next_user_tasks = processor.next_user_tasks()
+        self.assertEqual(1, len(next_user_tasks))
+        task = next_user_tasks[0]
+        self.assertEqual("task_gather_information", task.get_name())
+        self._populate_form_with_random_data(task)
+        processor.complete_task(task)
+
+        files = session.query(FileModel).filter_by(study_id=study.id, workflow_id=processor.get_workflow_id()).all()
+        self.assertEqual(0, len(files))
+        processor.do_engine_steps()
+        files = session.query(FileModel).filter_by(study_id=study.id, workflow_id=processor.get_workflow_id()).all()
+        self.assertEqual(1, len(files), "The task should create a new file.")
+        file_data = session.query(FileDataModel).filter(FileDataModel.file_model_id == files[0].id).first()
+        self.assertIsNotNone(file_data.data)
+        self.assertTrue(len(file_data.data) > 0)
+        # Not going any farther here, assuming this is tested in libraries correctly.