2020-01-14 16:02:44 +00:00
|
|
|
import uuid
|
|
|
|
|
2020-06-12 17:46:10 +00:00
|
|
|
from flask import g
|
2020-12-04 22:56:12 +00:00
|
|
|
|
|
|
|
from crc import session
|
2020-01-21 20:24:17 +00:00
|
|
|
from crc.api.common import ApiError, ApiErrorSchema
|
2020-12-04 22:56:12 +00:00
|
|
|
from crc.models.api_models import WorkflowApiSchema
|
2021-11-22 22:17:19 +00:00
|
|
|
from crc.models.file import FileModel
|
2021-04-27 16:13:49 +00:00
|
|
|
from crc.models.study import StudyModel, WorkflowMetadata, StudyStatus
|
2020-12-04 22:56:12 +00:00
|
|
|
from crc.models.task_event import TaskEventModel, TaskEvent, TaskEventSchema
|
2020-03-16 20:12:39 +00:00
|
|
|
from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, WorkflowSpecModel, WorkflowSpecCategoryModel, \
|
2021-07-22 14:20:34 +00:00
|
|
|
WorkflowSpecCategoryModelSchema, WorkflowLibraryModel, WorkflowLibraryModelSchema
|
2021-02-04 16:23:05 +00:00
|
|
|
from crc.services.error_service import ValidationErrorService
|
2020-04-24 10:58:24 +00:00
|
|
|
from crc.services.file_service import FileService
|
2020-05-19 20:11:43 +00:00
|
|
|
from crc.services.lookup_service import LookupService
|
2020-04-29 20:07:39 +00:00
|
|
|
from crc.services.study_service import StudyService
|
2020-07-27 18:38:57 +00:00
|
|
|
from crc.services.user_service import UserService
|
2020-02-10 21:19:23 +00:00
|
|
|
from crc.services.workflow_processor import WorkflowProcessor
|
2020-04-15 15:13:32 +00:00
|
|
|
from crc.services.workflow_service import WorkflowService
|
2019-12-27 18:50:03 +00:00
|
|
|
|
|
|
|
|
2021-07-26 17:00:11 +00:00
|
|
|
def all_specifications(libraries=False, standalone=False):
    """Return serialized workflow specifications.

    With libraries=True only library specs are returned; with standalone=True
    only standalone specs.  Requesting both at once is an error.  By default,
    returns every spec that is neither a library nor a standalone workflow.
    """
    if libraries and standalone:
        raise ApiError('inconceivable!', 'You should specify libraries or standalone, but not both')

    schema = WorkflowSpecModelSchema(many=True)
    base_query = session.query(WorkflowSpecModel)

    if libraries:
        return schema.dump(base_query.filter(WorkflowSpecModel.library == True).all())

    if standalone:
        return schema.dump(base_query.filter(WorkflowSpecModel.standalone == True).all())

    # Standard workflows: both flags are either False or NULL in the database.
    standard_specs = (base_query
                      .filter((WorkflowSpecModel.library == False) |
                              (WorkflowSpecModel.library == None))
                      .filter((WorkflowSpecModel.standalone == False) |
                              (WorkflowSpecModel.standalone == None))
                      .all())
    return schema.dump(standard_specs)
|
2019-12-27 18:50:03 +00:00
|
|
|
|
|
|
|
|
2020-01-14 02:43:09 +00:00
|
|
|
def add_workflow_specification(body):
    """Create a new workflow specification from the request body.

    The new spec is appended to the end of its category's display order.
    Library and standalone specs do not belong to a category, so their
    category_id is cleared.  Returns the serialized new spec.
    """
    category_id = body['category_id']
    # Normalize existing display_order values so `count` is a valid next slot.
    WorkflowService.cleanup_workflow_spec_display_order(category_id)
    count = session.query(WorkflowSpecModel).filter_by(category_id=category_id).count()
    body['display_order'] = count
    # Libraries and standalone workflows don't get a category_id.
    # Use .get() so a payload that omits these flags doesn't raise KeyError.
    if body.get('library') is True or body.get('standalone') is True:
        body['category_id'] = None
    new_spec: WorkflowSpecModel = WorkflowSpecModelSchema().load(body, session=session)
    session.add(new_spec)
    session.commit()
    return WorkflowSpecModelSchema().dump(new_spec)
|
|
|
|
|
|
|
|
|
2020-01-28 15:14:06 +00:00
|
|
|
def get_workflow_specification(spec_id):
    """Fetch one workflow specification by id and return it serialized.

    Raises ApiError when spec_id is missing or unknown.
    """
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')

    found: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
    if found is None:
        raise ApiError('unknown_spec', 'The Workflow Specification "' + spec_id + '" is not recognized.')

    return WorkflowSpecModelSchema().dump(found)
|
|
|
|
|
2021-07-22 14:20:34 +00:00
|
|
|
def validate_spec_and_library(spec_id, library_id):
    """Validate a (workflow spec, library spec) pair before linking them.

    Ensures both ids are provided, both specs exist, and the library spec
    really is flagged as a library.  Raises ApiError on any failure;
    returns nothing on success.
    """
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')
    if library_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Library Specification ID.')

    specs = session.query(WorkflowSpecModel)
    spec: WorkflowSpecModel = specs.filter_by(id=spec_id).first()
    library: WorkflowSpecModel = specs.filter_by(id=library_id).first()

    if spec is None:
        raise ApiError('unknown_spec', 'The Workflow Specification "' + spec_id + '" is not recognized.')
    if library is None:
        raise ApiError('unknown_spec', 'The Library Specification "' + library_id + '" is not recognized.')
    if not library.library:
        raise ApiError('unknown_spec', 'Linked workflow spec is not a library.')
|
|
|
|
|
|
|
|
|
|
|
|
def add_workflow_spec_library(spec_id, library_id):
    """Attach a library spec to a workflow spec.

    Validates both ids, refuses a duplicate link, creates the
    WorkflowLibraryModel row, and returns the full updated list of
    libraries attached to the workflow spec.
    """
    validate_spec_and_library(spec_id, library_id)
    libraries: WorkflowLibraryModel = session.query(WorkflowLibraryModel).filter_by(workflow_spec_id=spec_id).all()
    libraryids = [x.library_spec_id for x in libraries]
    if library_id in libraryids:
        # Bug fix: the message previously reported spec_id, but the duplicate
        # being complained about is the library.
        raise ApiError('unknown_spec', 'The Library Specification "' + library_id + '" is already attached.')
    newlib = WorkflowLibraryModel()
    newlib.workflow_spec_id = spec_id
    newlib.library_spec_id = library_id
    session.add(newlib)
    session.commit()
    # Re-query so the response reflects the newly committed link.
    libraries: WorkflowLibraryModel = session.query(WorkflowLibraryModel).filter_by(workflow_spec_id=spec_id).all()
    return WorkflowLibraryModelSchema(many=True).dump(libraries)
|
|
|
|
|
|
|
|
def drop_workflow_spec_library(spec_id, library_id):
    """Detach a library spec from a workflow spec and return the remaining links."""
    validate_spec_and_library(spec_id, library_id)
    (session.query(WorkflowLibraryModel)
     .filter_by(workflow_spec_id=spec_id, library_spec_id=library_id)
     .delete())
    session.commit()
    remaining = session.query(WorkflowLibraryModel).filter_by(workflow_spec_id=spec_id).all()
    return WorkflowLibraryModelSchema(many=True).dump(remaining)
|
2020-01-28 15:14:06 +00:00
|
|
|
|
2021-11-22 22:17:19 +00:00
|
|
|
|
2021-07-07 04:53:30 +00:00
|
|
|
def validate_workflow_specification(spec_id, study_id=None, test_until=None):
    """Validate a workflow spec by test-running it.

    Runs the spec twice — once with all fields filled, once with only the
    required fields — after checking it isn't disabled for the study.
    Returns a list of serialized validation errors; empty list on success.
    """
    try:
        WorkflowService.raise_if_disabled(spec_id, study_id)
        WorkflowService.test_spec(spec_id, study_id, test_until)
        WorkflowService.test_spec(spec_id, study_id, test_until, required_only=True)
    except ApiError as ae:
        # Translate the raw failure into a user-facing validation message.
        interpreted = ValidationErrorService.interpret_validation_error(ae)
        return ApiErrorSchema(many=True).dump([interpreted])
    return []
|
2020-03-27 12:29:31 +00:00
|
|
|
|
2021-08-31 14:36:22 +00:00
|
|
|
|
2020-01-28 15:14:06 +00:00
|
|
|
def update_workflow_specification(spec_id, body):
    """Update an existing workflow specification from the request body.

    display_order is never taken from the body — a separate reorder endpoint
    owns it — and library/standalone specs are stripped of any category.
    Returns the serialized, updated spec; raises ApiError for a missing or
    unknown spec_id.
    """
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Spec ID.')
    spec = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
    if spec is None:
        raise ApiError('unknown_study', 'The spec "' + spec_id + '" is not recognized.')

    # Make sure they don't try to change the display_order;
    # there is a separate endpoint for that.
    body['display_order'] = spec.display_order

    # Libraries and standalone workflows don't get a category_id.
    # Use .get() so a partial payload without these flags doesn't raise KeyError.
    if body.get('library') is True or body.get('standalone') is True:
        body['category_id'] = None

    schema = WorkflowSpecModelSchema()
    spec = schema.load(body, session=session, instance=spec, partial=True)
    session.add(spec)
    session.commit()
    return schema.dump(spec)
|
2020-01-14 02:43:09 +00:00
|
|
|
|
|
|
|
|
2020-01-23 21:05:09 +00:00
|
|
|
def delete_workflow_specification(spec_id):
    """Delete a workflow specification and everything that hangs off it:
    its files, its task events, and any workflows created from it.
    Afterwards the remaining specs in the category are renumbered.
    Raises ApiError for a missing or unknown spec_id.
    """
    if spec_id is None:
        raise ApiError('unknown_spec', 'Please provide a valid Workflow Specification ID.')

    spec: WorkflowSpecModel = session.query(WorkflowSpecModel).filter_by(id=spec_id).first()
    if spec is None:
        raise ApiError('unknown_spec', 'The Workflow Specification "' + spec_id + '" is not recognized.')

    # Bug fix: read category_id only after the None check, so an unknown id
    # raises the intended ApiError instead of an AttributeError.
    category_id = spec.category_id

    # Delete all items in the database related to the deleted workflow spec.
    files = session.query(FileModel).filter_by(workflow_spec_id=spec_id).all()
    for file in files:
        FileService.delete_file(file.id)

    session.query(TaskEventModel).filter(TaskEventModel.workflow_spec_id == spec_id).delete()

    # Delete all events and workflow models related to this specification
    for workflow in session.query(WorkflowModel).filter_by(workflow_spec_id=spec_id):
        StudyService.delete_workflow(workflow.id)

    # .delete() doesn't work when we need a cascade. Must grab the record, and
    # explicitly delete it.  `spec` was already fetched above; the previous
    # second query for the same row was redundant.
    session.delete(spec)
    session.commit()

    # Reorder the remaining specs
    WorkflowService.cleanup_workflow_spec_display_order(category_id)
|
2021-08-30 18:56:26 +00:00
|
|
|
|
2020-01-23 21:05:09 +00:00
|
|
|
|
2021-08-27 15:40:38 +00:00
|
|
|
def reorder_workflow_specification(spec_id, direction):
    """Move a workflow spec one slot up or down within its category and
    return the category's specs in their new order."""
    if direction not in ('up', 'down'):
        raise ApiError(code='bad_direction',
                       message='The direction must be `up` or `down`.')

    spec = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.id == spec_id).first()
    if not spec:
        raise ApiError(code='bad_spec_id',
                       message=f'The spec_id {spec_id} did not return a specification. Please check that it is valid.')

    # Normalize display_order values before moving, then perform the swap.
    WorkflowService.cleanup_workflow_spec_display_order(spec.category_id)
    ordered_specs = WorkflowService.reorder_workflow_spec(spec, direction)
    return WorkflowSpecModelSchema(many=True).dump(ordered_specs)
|
2021-08-27 15:40:38 +00:00
|
|
|
|
|
|
|
|
2021-04-26 12:31:12 +00:00
|
|
|
def get_workflow_from_spec(spec_id):
    """Create a workflow for the current user from the given spec, run its
    engine steps, and return the serialized workflow API model."""
    model = WorkflowService.get_workflow_from_spec(spec_id, g.user)
    processor = WorkflowProcessor(model)

    processor.do_engine_steps()
    processor.save()
    WorkflowService.update_task_assignments(processor)

    api_model = WorkflowService.processor_to_workflow_api(processor)
    return WorkflowApiSchema().dump(api_model)
|
|
|
|
|
|
|
|
|
2021-01-19 20:09:20 +00:00
|
|
|
def get_workflow(workflow_id, do_engine_steps=True):
    """Retrieve workflow based on workflow_id, and return it in the last saved State.
    If do_engine_steps is False, return the workflow without running any engine tasks or logging any events. """
    model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    processor = WorkflowProcessor(model)

    if do_engine_steps:
        # Advance the workflow, persist it, and refresh who's assigned to what.
        processor.do_engine_steps()
        processor.save()
        WorkflowService.update_task_assignments(processor)

    api_model = WorkflowService.processor_to_workflow_api(processor)
    return WorkflowApiSchema().dump(api_model)
|
2019-12-27 18:50:03 +00:00
|
|
|
|
2019-12-31 16:31:30 +00:00
|
|
|
|
2021-04-15 15:23:20 +00:00
|
|
|
def restart_workflow(workflow_id, clear_data=False, delete_files=False):
    """Restart a workflow with the latest spec.
    Clear data allows user to restart the workflow without previous data."""
    model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    WorkflowProcessor.reset(model, clear_data=clear_data, delete_files=delete_files)
    # Hand back through get_workflow so engine steps run on the fresh instance.
    return get_workflow(model.id)
|
|
|
|
|
|
|
|
|
2020-07-28 14:16:48 +00:00
|
|
|
def get_task_events(action = None, workflow = None, study = None):
    """Provides a way to see a history of what has happened, or get a list of tasks that need your attention."""
    user = UserService.current_user(allow_admin_impersonate=True)
    study_ids = [s.id for s in
                 session.query(StudyModel).filter(StudyModel.user_uid == user.uid)]

    # Events in any of the user's studies, or explicitly assigned to the user.
    query = session.query(TaskEventModel).filter(
        (TaskEventModel.study_id.in_(study_ids)) | (TaskEventModel.user_uid == user.uid))
    if action:
        query = query.filter(TaskEventModel.action == action)
    if workflow:
        query = query.filter(TaskEventModel.workflow_id == workflow)
    if study:
        query = query.filter(TaskEventModel.study_id == study)

    # Turn the database records into something a little richer for the UI to use.
    task_events = []
    for event in query.all():
        event_study = session.query(StudyModel).filter(StudyModel.id == event.study_id).first()
        event_workflow = session.query(WorkflowModel).filter(WorkflowModel.id == event.workflow_id).first()
        workflow_meta = WorkflowMetadata.from_workflow(event_workflow)
        # Only surface events whose study is still active.
        if event_study and event_study.status in [StudyStatus.open_for_enrollment, StudyStatus.in_progress]:
            task_events.append(TaskEvent(event, event_study, workflow_meta))
    return TaskEventSchema(many=True).dump(task_events)
|
|
|
|
|
|
|
|
|
2020-04-29 20:07:39 +00:00
|
|
|
def delete_workflow(workflow_id):
    """Remove a single workflow (and its related records) via the study service."""
    StudyService.delete_workflow(workflow_id)
|
2019-12-27 18:50:03 +00:00
|
|
|
|
2020-05-15 19:54:53 +00:00
|
|
|
|
2020-04-28 21:25:53 +00:00
|
|
|
def set_current_task(workflow_id, task_id):
    """Move the workflow token to the given task.

    The target task must currently be READY or COMPLETED; a COMPLETED task
    gets its token reset (dropping its data) so it can be executed again.
    Returns the serialized workflow API model centered on that task.
    Raises ApiError for a disallowed user or an invalid task state.
    """
    workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    processor = WorkflowProcessor(workflow_model)
    # The incoming id is a string; the bpmn workflow looks tasks up by UUID.
    task_id = uuid.UUID(task_id)
    spiff_task = processor.bpmn_workflow.get_task(task_id)
    _verify_user_and_role(processor, spiff_task)
    user_uid = UserService.current_user(allow_admin_impersonate=True).uid
    # Only READY or COMPLETED tasks may receive the token.
    if spiff_task.state != spiff_task.COMPLETED and spiff_task.state != spiff_task.READY:
        raise ApiError("invalid_state", "You may not move the token to a task who's state is not "
                                        "currently set to COMPLETE or READY.")

    # If we have an interrupt task, run it.
    processor.cancel_notify()

    # Only reset the token if the task doesn't already have it.
    if spiff_task.state == spiff_task.COMPLETED:
        spiff_task.reset_token(reset_data=True)  # Don't try to copy the existing data back into this task.

    processor.save()
    # Record the token reset as an auditable task action before re-deriving
    # task assignments from the new state.
    WorkflowService.log_task_action(user_uid, processor, spiff_task, WorkflowService.TASK_ACTION_TOKEN_RESET)
    WorkflowService.update_task_assignments(processor)

    workflow_api_model = WorkflowService.processor_to_workflow_api(processor, spiff_task)
    return WorkflowApiSchema().dump(workflow_api_model)
|
2020-03-10 19:46:14 +00:00
|
|
|
|
2020-05-04 14:57:09 +00:00
|
|
|
|
2021-03-13 01:41:07 +00:00
|
|
|
def update_task(workflow_id, task_id, body, terminate_loop=None, update_all=False):
    """Complete a READY task with the submitted form data.

    body: the raw form submission; only the fields relevant to the task's
    form are extracted and applied.  terminate_loop ends a looping task's
    loop first.  With update_all, the same form data is applied to each
    following task while its mi_index keeps increasing (i.e. the remaining
    siblings of a multi-instance group).  Returns the serialized workflow
    API model; raises ApiError (404) for a bad workflow or task id, or if
    the task is not READY.
    """
    workflow_model = session.query(WorkflowModel).filter_by(id=workflow_id).first()
    if workflow_model is None:
        raise ApiError("invalid_workflow_id", "The given workflow id is not valid.", status_code=404)

    processor = WorkflowProcessor(workflow_model)
    # The incoming id is a string; the bpmn workflow looks tasks up by UUID.
    task_id = uuid.UUID(task_id)
    spiff_task = processor.bpmn_workflow.get_task(task_id)
    # NOTE(review): _verify_user_and_role runs before the `not spiff_task`
    # check below — confirm it tolerates a None task for an unknown task_id.
    _verify_user_and_role(processor, spiff_task)
    user = UserService.current_user(allow_admin_impersonate=False)  # Always log as the real user.

    if not spiff_task:
        raise ApiError("empty_task", "Processor failed to obtain task.", status_code=404)
    if spiff_task.state != spiff_task.READY:
        raise ApiError("invalid_state", "You may not update a task unless it is in the READY state. "
                                        "Consider calling a token reset to make this task Ready.")

    if terminate_loop and spiff_task.is_looping():
        spiff_task.terminate_loop()

    # Extract the details specific to the form submitted
    form_data = WorkflowService().extract_form_data(body, spiff_task)

    # Update the task
    __update_task(processor, spiff_task, form_data, user)

    # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same
    # task spec, complete that form as well.
    if update_all:
        last_index = spiff_task.task_info()["mi_index"]
        next_task = processor.next_task()
        # A strictly increasing mi_index means we're still inside the same
        # multi-instance group; stop as soon as it drops (or nothing is next).
        while next_task and next_task.task_info()["mi_index"] > last_index:
            __update_task(processor, next_task, form_data, user)
            last_index = next_task.task_info()["mi_index"]
            next_task = processor.next_task()

    WorkflowService.update_task_assignments(processor)
    workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
    return WorkflowApiSchema().dump(workflow_api_model)
|
2020-03-16 20:12:39 +00:00
|
|
|
|
|
|
|
|
2021-03-13 01:41:07 +00:00
|
|
|
def __update_task(processor, task, data, user):
    """All the things that need to happen when we complete a form, abstracted
    here because we need to do it multiple times when completing all tasks in
    a multi-instance task"""
    # Merge the submitted form data into the task before any post-processing.
    task.update_data(data)
    WorkflowService.post_process_form(task)  # some properties may update the data store.
    processor.complete_task(task)
    # Let the engine advance past any now-ready automatic steps, then persist.
    processor.do_engine_steps()
    processor.save()
    # Log the completion as the real (non-impersonated) user passed in.
    WorkflowService.log_task_action(user.uid, processor, task, WorkflowService.TASK_ACTION_COMPLETE)
|
|
|
|
|
|
|
|
|
2020-03-16 20:12:39 +00:00
|
|
|
def list_workflow_spec_categories():
    """Return every workflow spec category, sorted by display order."""
    categories = (session.query(WorkflowSpecCategoryModel)
                  .order_by(WorkflowSpecCategoryModel.display_order)
                  .all())
    return WorkflowSpecCategoryModelSchema(many=True).dump(categories)
|
2020-03-16 20:12:39 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_workflow_spec_category(cat_id):
    """Return a single workflow spec category by id, serialized."""
    category = session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).first()
    return WorkflowSpecCategoryModelSchema().dump(category)
|
|
|
|
|
|
|
|
|
|
|
|
def add_workflow_spec_category(body):
    """Create a new category from the request body, placed at the end of the
    display order, and return it serialized."""
    # Renumber existing categories so the count is a valid next slot.
    WorkflowService.cleanup_workflow_spec_category_display_order()
    body['display_order'] = session.query(WorkflowSpecCategoryModel).count()

    schema = WorkflowSpecCategoryModelSchema()
    new_cat: WorkflowSpecCategoryModel = schema.load(body, session=session)
    session.add(new_cat)
    session.commit()
    return schema.dump(new_cat)
|
|
|
|
|
|
|
|
|
|
|
|
def update_workflow_spec_category(cat_id, body):
    """Update a workflow spec category from the request body.

    display_order is never taken from the body — a separate reorder endpoint
    owns it.  Returns the serialized, updated category; raises ApiError for a
    missing or unknown cat_id.
    """
    if cat_id is None:
        raise ApiError('unknown_category', 'Please provide a valid Workflow Spec Category ID.')

    category = session.query(WorkflowSpecCategoryModel).filter_by(id=cat_id).first()
    if category is None:
        # f-string instead of '+' concatenation: cat_id may be an int (the
        # sibling reorder endpoint formats it with an f-string), and str + int
        # raised TypeError instead of the intended ApiError.
        raise ApiError('unknown_category', f'The category "{cat_id}" is not recognized.')

    # Make sure they don't try to change the display_order;
    # there is a separate endpoint for that.
    body['display_order'] = category.display_order

    schema = WorkflowSpecCategoryModelSchema()
    category = schema.load(body, session=session, instance=category, partial=True)
    session.add(category)
    session.commit()
    return schema.dump(category)
|
2020-03-16 20:30:56 +00:00
|
|
|
|
|
|
|
|
|
|
|
def delete_workflow_spec_category(cat_id):
    """Delete a category by id, then close any gap left in the display order."""
    (session.query(WorkflowSpecCategoryModel)
     .filter_by(id=cat_id)
     .delete())
    session.commit()
    # Reorder the remaining categories
    WorkflowService.cleanup_workflow_spec_category_display_order()
|
2020-04-22 23:40:40 +00:00
|
|
|
|
|
|
|
|
2021-08-27 15:40:38 +00:00
|
|
|
def reorder_workflow_spec_category(cat_id, direction):
    """Move a category one slot up or down and return all categories in their
    new order."""
    if direction not in ('up', 'down'):
        raise ApiError(code='bad_direction',
                       message='The direction must be `up` or `down`.')

    # Normalize the order before moving so the swap is well-defined.
    WorkflowService.cleanup_workflow_spec_category_display_order()

    category = session.query(WorkflowSpecCategoryModel).\
        filter(WorkflowSpecCategoryModel.id == cat_id).first()
    if not category:
        raise ApiError(code='bad_category_id',
                       message=f'The category id {cat_id} did not return a Workflow Spec Category. Make sure it is a valid ID.')

    ordered_categories = WorkflowService.reorder_workflow_spec_category(category, direction)
    return WorkflowSpecCategoryModelSchema(many=True).dump(ordered_categories)
|
2021-08-27 15:40:38 +00:00
|
|
|
|
|
|
|
|
2021-03-01 19:54:04 +00:00
|
|
|
def lookup(workflow_id, task_spec_name, field_id, query=None, value=None, limit=10):
    """
    given a field in a task, attempts to find the lookup table or function associated
    with that field and runs a full-text query against it to locate the values and
    labels that would be returned to a type-ahead box.
    Tries to be fast, but first runs will be very slow.
    """
    workflow = session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first()
    # The service result is already serializable; hand it straight back.
    return LookupService.lookup(workflow, task_spec_name, field_id, query, value, limit)
|
2020-06-12 17:46:10 +00:00
|
|
|
|
2021-10-28 20:19:39 +00:00
|
|
|
|
|
|
|
def lookup_ldap(query=None, limit=10):
    """
    perform a lookup against the LDAP server without needing a provided workflow.
    """
    # NOTE(review): this reaches into a private LookupService method —
    # consider exposing a public ldap-search entry point on the service.
    return LookupService._run_ldap_query(query, None, limit)
|
|
|
|
|
|
|
|
|
2020-07-15 02:16:44 +00:00
|
|
|
def _verify_user_and_role(processor, spiff_task):
    """Assures the currently logged in user can access the given workflow and task, or
    raises an error. """
    user = UserService.current_user(allow_admin_impersonate=True)
    allowed_users = WorkflowService.get_users_assigned_to_task(processor, spiff_task)
    if user.uid in allowed_users:
        return
    raise ApiError.from_task("permission_denied",
                             f"This task must be completed by '{allowed_users}', "
                             f"but you are {user.uid}", spiff_task)
|