import uuid

from crc import session
from crc.api.common import ApiError, ApiErrorSchema
from crc.models.api_models import WorkflowApi, WorkflowApiSchema
from crc.models.file import FileModel, LookupDataSchema
from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, WorkflowSpecModel, WorkflowSpecCategoryModel, \
    WorkflowSpecCategoryModelSchema
from crc.services.file_service import FileService
from crc.services.study_service import StudyService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.workflow_service import WorkflowService


def all_specifications():
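    """Return every workflow specification in the system, serialized as a list."""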
    schema = WorkflowSpecModelSchema(many=True)
    return schema.dump(session.query(WorkflowSpecModel).all())


def add_workflow_specification(body):
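    """Create a new workflow specification from the request body and return it."""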
    new_spec: WorkflowSpecModel = WorkflowSpecModelSchema().load(body, session=session)
    session.add(new_spec)
    session.commit()
    return WorkflowSpecModelSchema().dump(new_spec)


def get_workflow_specification(spec_id):
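    """Look up a single workflow specification by its ID."""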
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')

    spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()

    if spec is None:
        raise ApiError('unknown_spec', 'The Workflow Specification "' + spec_id + '" is not recognized.')

    return WorkflowSpecModelSchema().dump(spec)


def validate_workflow_specification(spec_id):
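    """Exercise the specification via WorkflowService.test_spec and return any ApiErrors it raises as a list."""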
    errors = []
    try:
        WorkflowService.test_spec(spec_id)
    except ApiError as ae:
        errors.append(ae)
    return ApiErrorSchema(many=True).dump(errors)


def update_workflow_specification(spec_id, body):
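    """Update an existing workflow specification with the fields provided in the request body."""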
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')
    spec = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()

    if spec is None:
        raise ApiError('unknown_spec', 'The Workflow Specification "' + spec_id + '" is not recognized.')

    schema = WorkflowSpecModelSchema()
    spec = schema.load(body, session=session, instance=spec, partial=True)
    session.add(spec)
    session.commit()
    return schema.dump(spec)


def delete_workflow_specification(spec_id):
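    """Remove a workflow specification along with its files and any workflows created from it."""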
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')

    spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()

    if spec is None:
        raise ApiError('unknown_spec', 'The Workflow Specification "' + spec_id + '" is not recognized.')

    # Delete all items in the database related to the deleted workflow spec.
    files = session.query(FileModel).filter_by(workflow_spec_id=spec_id).all()
    for file in files:
        FileService.delete_file(file.id)

    # Delete all stats and workflow models related to this specification
    for workflow in session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id):
        StudyService.delete_workflow(workflow)

    session.query(WorkflowSpecModel).filter_by(id=spec_id).delete()
    session.commit()


def __get_workflow_api_model(processor: WorkflowProcessor):
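    """Assemble a WorkflowApi object from the processor's current state, including user tasks and spec version info."""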
    spiff_tasks = processor.get_all_user_tasks()
    user_tasks = list(map(WorkflowService.spiff_task_to_api_task, spiff_tasks))

    workflow_api = WorkflowApi(
        id=processor.get_workflow_id(),
        status=processor.get_status(),
        last_task=WorkflowService.spiff_task_to_api_task(processor.bpmn_workflow.last_task),
        next_task=None,
        previous_task=processor.previous_task(),
        user_tasks=user_tasks,
        workflow_spec_id=processor.workflow_spec_id,
        spec_version=processor.get_spec_version(),
        is_latest_spec=processor.get_spec_version() == processor.get_latest_version_string(processor.workflow_spec_id),
        total_tasks=processor.workflow_model.total_tasks,
        completed_tasks=processor.workflow_model.completed_tasks,
        last_updated=processor.workflow_model.last_updated
    )
    next_task = processor.next_task()
    if next_task:
        workflow_api.next_task = WorkflowService.spiff_task_to_api_task(next_task)

    return workflow_api


def get_workflow(workflow_id, soft_reset=False, hard_reset=False):
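    """Return the current state of a workflow, optionally asking the processor to perform a soft or hard reset."""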
    workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset)
    workflow_api_model = __get_workflow_api_model(processor)
    return WorkflowApiSchema().dump(workflow_api_model)


def delete_workflow(workflow_id):
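    """Delete the given workflow and its related records via StudyService."""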
    StudyService.delete_workflow(workflow_id)


def set_current_task(workflow_id, task_id):
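    """Move the workflow token back to a previously completed task so it can be revisited."""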
    workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    processor = WorkflowProcessor(workflow_model)
    task_id = uuid.UUID(task_id)
    task = processor.bpmn_workflow.get_task(task_id)
    if task.state != task.COMPLETED:
        raise ApiError("invalid_state", "You may not move the token to a task whose state is not "
                                        "currently set to COMPLETE.")

    task.reset_token(reset_data=False)  # we could optionally clear the previous data.
    processor.save()
    WorkflowService.log_task_action(processor, task, WorkflowService.TASK_ACTION_TOKEN_RESET)
    workflow_api_model = __get_workflow_api_model(processor)
    return WorkflowApiSchema().dump(workflow_api_model)


def update_task(workflow_id, task_id, body):
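    """Submit data for a READY task, complete it, and advance the workflow engine."""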
    workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    processor = WorkflowProcessor(workflow_model)
    task_id = uuid.UUID(task_id)
    task = processor.bpmn_workflow.get_task(task_id)
    if task.state != task.READY:
        raise ApiError("invalid_state", "You may not update a task unless it is in the READY state. "
                                        "Consider calling a token reset to make this task Ready.")
    task.update_data(body)
    processor.complete_task(task)
    processor.do_engine_steps()
    processor.save()
    WorkflowService.log_task_action(processor, task, WorkflowService.TASK_ACTION_COMPLETE)

    workflow_api_model = __get_workflow_api_model(processor)
    return WorkflowApiSchema().dump(workflow_api_model)


def list_workflow_spec_categories():
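    """Return all workflow specification categories."""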
    schema = WorkflowSpecCategoryModelSchema(many=True)
    return schema.dump(session.query(WorkflowSpecCategoryModel).all())


def get_workflow_spec_category(cat_id):
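    """Return a single workflow specification category by its ID."""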
    schema = WorkflowSpecCategoryModelSchema()
    return schema.dump(session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).first())


def add_workflow_spec_category(body):
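    """Create a new workflow specification category from the request body."""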
    schema = WorkflowSpecCategoryModelSchema()
    new_cat: WorkflowSpecCategoryModel = schema.load(body, session=session)
    session.add(new_cat)
    session.commit()
    return schema.dump(new_cat)


def update_workflow_spec_category(cat_id, body):
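    """Update an existing workflow specification category with the fields provided in the request body."""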
    if cat_id is None:
        raise ApiError('unknown_category', 'Please provide a valid Workflow Spec Category ID.')

    category = session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).first()

    if category is None:
        raise ApiError('unknown_category', 'The category "' + str(cat_id) + '" is not recognized.')

    schema = WorkflowSpecCategoryModelSchema()
    category = schema.load(body, session=session, instance=category, partial=True)
    session.add(category)
    session.commit()
    return schema.dump(category)


def delete_workflow_spec_category(cat_id):
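    """Delete a workflow specification category by its ID."""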
    session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).delete()
    session.commit()


def lookup(workflow_id, task_id, field_id, query, limit):
    """
    Given a field in a task, attempts to find the lookup table associated with that field
    and runs a full-text query against it to locate the values and labels that would be
    returned to a type-ahead box.
    """
    workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    if not workflow_model:
        raise ApiError("unknown_workflow", "No workflow found with id: %i" % workflow_id)
    processor = WorkflowProcessor(workflow_model)
    task_id = uuid.UUID(task_id)
    spiff_task = processor.bpmn_workflow.get_task(task_id)
    if not spiff_task:
        raise ApiError("unknown_task", "No task with id %s found in workflow: %i" % (task_id, workflow_id))
    field = None
    for f in spiff_task.task_spec.form.fields:
        if f.id == field_id:
            field = f
    if not field:
        raise ApiError("unknown_field", "No field named %s in task %s" % (field_id, spiff_task.task_spec.name))

    lookup_table = WorkflowService.get_lookup_table(spiff_task, field)
    lookup_data = WorkflowService.run_lookup_query(lookup_table, query, limit)
    return LookupDataSchema(many=True).dump(lookup_data)