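"""API endpoints for managing workflow specifications and categories, and for
working with running workflows and their tasks."""
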
import uuid

from SpiffWorkflow.util.deep_merge import DeepMerge
from flask import g

from crc import session, app
from crc.api.common import ApiError, ApiErrorSchema
from crc.models.api_models import WorkflowApi, WorkflowApiSchema, NavigationItem, NavigationItemSchema
from crc.models.file import FileModel, LookupDataSchema
from crc.models.study import StudyModel, WorkflowMetadata
from crc.models.task_event import TaskEventModel, TaskEventModelSchema, TaskEvent, TaskEventSchema
from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, WorkflowSpecModel, WorkflowSpecCategoryModel, \
    WorkflowSpecCategoryModelSchema
from crc.services.file_service import FileService
from crc.services.lookup_service import LookupService
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.workflow_service import WorkflowService


def all_specifications():
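    """Returns a list of all workflow specifications."""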
    schema = WorkflowSpecModelSchema(many=True)
    return schema.dump(session.query(WorkflowSpecModel).all())


def add_workflow_specification(body):
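    """Creates a new workflow specification from the given request body."""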
    new_spec: WorkflowSpecModel = WorkflowSpecModelSchema().load(body, session=session)
    session.add(new_spec)
    session.commit()
    return WorkflowSpecModelSchema().dump(new_spec)


def get_workflow_specification(spec_id):
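    """Returns the workflow specification with the given id."""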
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')

    spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()

    if spec is None:
        raise ApiError('unknown_spec', 'The Workflow Specification "' + spec_id + '" is not recognized.')

    return WorkflowSpecModelSchema().dump(spec)


def validate_workflow_specification(spec_id):
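    """Tests the given specification twice, once populating all form fields and once
    populating only the required ones, and returns a list of any errors found."""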
    errors = []
    try:
        WorkflowService.test_spec(spec_id)
    except ApiError as ae:
        ae.message = "When populating all fields ... " + ae.message
        errors.append(ae)
    try:
        # Run the validation a second time, this time populating only the required fields.
        WorkflowService.test_spec(spec_id, required_only=True)
    except ApiError as ae:
        ae.message = "When populating only required fields ... " + ae.message
        errors.append(ae)
    return ApiErrorSchema(many=True).dump(errors)


def update_workflow_specification(spec_id, body):
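    """Updates an existing workflow specification with the given changes."""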
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Spec ID.')

    spec = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()

    if spec is None:
        raise ApiError('unknown_spec', 'The spec "' + spec_id + '" is not recognized.')

    schema = WorkflowSpecModelSchema()
    spec = schema.load(body, session=session, instance=spec, partial=True)
    session.add(spec)
    session.commit()
    return schema.dump(spec)


def delete_workflow_specification(spec_id):
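    """Removes a workflow specification along with its files, task events, and workflows."""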
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')

    spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()

    if spec is None:
        raise ApiError('unknown_spec', 'The Workflow Specification "' + spec_id + '" is not recognized.')

    # Delete all items in the database related to the deleted workflow spec.
    files = session.query(FileModel).filter_by(workflow_spec_id=spec_id).all()
    for file in files:
        FileService.delete_file(file.id)

    # Delete all task events related to this specification.
    session.query(TaskEventModel).filter(TaskEventModel.workflow_spec_id == spec_id).delete()

    # Delete all workflow models related to this specification, then the spec itself.
    for workflow in session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id):
        StudyService.delete_workflow(workflow.id)
    session.query(WorkflowSpecModel).filter_by(id=spec_id).delete()
    session.commit()


def get_workflow(workflow_id, soft_reset=False, hard_reset=False, do_engine_steps=True):
    """Retrieves the given workflow and returns it as an API model.

    A soft reset will attempt to update the workflow to the latest spec without starting
    over; a hard reset will update to the latest spec and start from the beginning.
    When do_engine_steps is False, the workflow is returned in a read-only state,
    without running any engine tasks or logging any events."""
    workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset)
    if do_engine_steps:
        processor.do_engine_steps()
        processor.save()
        WorkflowService.update_task_assignments(processor)
    workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
    return WorkflowApiSchema().dump(workflow_api_model)


def get_task_events(action=None, workflow=None, study=None):
    """Provides a way to see a history of what has happened, or get a list of tasks that need your attention."""
    query = session.query(TaskEventModel).filter(TaskEventModel.user_uid == g.user.uid)
    if action:
        query = query.filter(TaskEventModel.action == action)
    if workflow:
        query = query.filter(TaskEventModel.workflow_id == workflow)
    if study:
        query = query.filter(TaskEventModel.study_id == study)
    events = query.all()

    # Turn the database records into something a little richer for the UI to use.
    task_events = []
    for event in events:
        study = session.query(StudyModel).filter(StudyModel.id == event.study_id).first()
        workflow = session.query(WorkflowModel).filter(WorkflowModel.id == event.workflow_id).first()
        workflow_meta = WorkflowMetadata.from_workflow(workflow)
        task_events.append(TaskEvent(event, study, workflow_meta))
    return TaskEventSchema(many=True).dump(task_events)


def delete_workflow(workflow_id):
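    """Deletes the given workflow, delegating cleanup to the StudyService."""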
    StudyService.delete_workflow(workflow_id)


def set_current_task(workflow_id, task_id):
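    """Moves the workflow token back to the given task, so a previously completed
    task can be displayed and re-done."""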
    workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    processor = WorkflowProcessor(workflow_model)
    task_id = uuid.UUID(task_id)
    spiff_task = processor.bpmn_workflow.get_task(task_id)
    _verify_user_and_role(processor, spiff_task)
    user_uid = UserService.current_user(allow_admin_impersonate=True).uid
    if spiff_task.state != spiff_task.COMPLETED and spiff_task.state != spiff_task.READY:
        raise ApiError("invalid_state", "You may not move the token to a task whose state is not "
                                        "currently set to COMPLETED or READY.")

    # If we have an interrupt task, run it.
    processor.bpmn_workflow.cancel_notify()

    # Only reset the token if the task doesn't already have it.
    if spiff_task.state == spiff_task.COMPLETED:
        spiff_task.reset_token(reset_data=True)  # Don't try to copy the existing data back into this task.

    processor.save()
    WorkflowService.log_task_action(user_uid, processor, spiff_task, WorkflowService.TASK_ACTION_TOKEN_RESET)
    WorkflowService.update_task_assignments(processor)

    workflow_api_model = WorkflowService.processor_to_workflow_api(processor, spiff_task)
    return WorkflowApiSchema().dump(workflow_api_model)


def update_task(workflow_id, task_id, body, terminate_loop=None):
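    """Saves the given form data to the given task, completes it, and advances the
    workflow through any subsequent engine steps."""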
    workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    if workflow_model is None:
        raise ApiError("invalid_workflow_id", "The given workflow id is not valid.", status_code=404)
    elif workflow_model.study is None:
        raise ApiError("invalid_study", "There is no study associated with the given workflow.", status_code=404)

    processor = WorkflowProcessor(workflow_model)
    task_id = uuid.UUID(task_id)
    spiff_task = processor.bpmn_workflow.get_task(task_id)
    if not spiff_task:
        raise ApiError("empty_task", "Processor failed to obtain task.", status_code=404)
    _verify_user_and_role(processor, spiff_task)
    if spiff_task.state != spiff_task.READY:
        raise ApiError("invalid_state", "You may not update a task unless it is in the READY state. "
                                        "Consider calling a token reset to make this task READY.")

    if terminate_loop:
        spiff_task.terminate_loop()

    spiff_task.update_data(body)
    processor.complete_task(spiff_task)
    processor.do_engine_steps()
    processor.save()

    # Log the action, and any pending task assignments in the event of lanes in the workflow.
    user = UserService.current_user(allow_admin_impersonate=False)  # Always log as the real user.
    WorkflowService.log_task_action(user.uid, processor, spiff_task, WorkflowService.TASK_ACTION_COMPLETE)
    WorkflowService.update_task_assignments(processor)

    workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
    return WorkflowApiSchema().dump(workflow_api_model)


def list_workflow_spec_categories():
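    """Returns a list of all workflow spec categories."""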
    schema = WorkflowSpecCategoryModelSchema(many=True)
    return schema.dump(session.query(WorkflowSpecCategoryModel).all())


def get_workflow_spec_category(cat_id):
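    """Returns the workflow spec category with the given id."""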
    schema = WorkflowSpecCategoryModelSchema()
    return schema.dump(session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).first())


def add_workflow_spec_category(body):
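    """Creates a new workflow spec category from the given request body."""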
    schema = WorkflowSpecCategoryModelSchema()
    new_cat: WorkflowSpecCategoryModel = schema.load(body, session=session)
    session.add(new_cat)
    session.commit()
    return schema.dump(new_cat)


def update_workflow_spec_category(cat_id, body):
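    """Updates an existing workflow spec category with the given changes."""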
    if cat_id is None:
        raise ApiError('unknown_category', 'Please provide a valid Workflow Spec Category ID.')

    category = session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).first()

    if category is None:
        raise ApiError('unknown_category', 'The category "' + str(cat_id) + '" is not recognized.')

    schema = WorkflowSpecCategoryModelSchema()
    category = schema.load(body, session=session, instance=category, partial=True)
    session.add(category)
    session.commit()
    return schema.dump(category)


def delete_workflow_spec_category(cat_id):
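    """Deletes the workflow spec category with the given id."""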
    session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).delete()
    session.commit()


def lookup(workflow_id, field_id, query=None, value=None, limit=10):
    """
    Given a field in a task, attempts to find the lookup table or function associated
    with that field and runs a full-text query against it to locate the values and
    labels that would be returned to a type-ahead box.
    Tries to be fast, but first runs will be very slow.
    """
    workflow = session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first()
    lookup_data = LookupService.lookup(workflow, field_id, query, value, limit)
    return LookupDataSchema(many=True).dump(lookup_data)


def _verify_user_and_role(processor, spiff_task):
    """Ensures the currently logged in user can access the given workflow and task,
    or raises an error."""

    user = UserService.current_user(allow_admin_impersonate=True)
    allowed_users = WorkflowService.get_users_assigned_to_task(processor, spiff_task)
    if user.uid not in allowed_users:
        raise ApiError.from_task("permission_denied",
                                 f"This task must be completed by '{allowed_users}', "
                                 f"but you are {user.uid}", spiff_task)