import copy
import random
import string
from datetime import datetime
from typing import List

import jinja2

from SpiffWorkflow import Task as SpiffTask, WorkflowException, NavItem
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask
from SpiffWorkflow.bpmn.specs.MultiInstanceTask import MultiInstanceTask
from SpiffWorkflow.bpmn.specs.ScriptTask import ScriptTask
from SpiffWorkflow.bpmn.specs.StartEvent import StartEvent
from SpiffWorkflow.bpmn.specs.UserTask import UserTask
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
from SpiffWorkflow.specs import CancelTask, StartTask, MultiChoice
from SpiffWorkflow.util.deep_merge import DeepMerge
from SpiffWorkflow.util.metrics import timeit
from jinja2 import Template

from crc import db, app
from crc.api.common import ApiError
from crc.models.api_models import Task, MultiInstanceType, WorkflowApi
from crc.models.data_store import DataStoreModel
from crc.models.file import LookupDataModel, FileModel, File, FileSchema
from crc.models.study import StudyModel
from crc.models.task_event import TaskEventModel
from crc.models.user import UserModel, UserModelSchema
from crc.models.workflow import WorkflowModel, WorkflowStatus, WorkflowSpecModel
from crc.services.data_store_service import DataStoreBase
from crc.services.document_service import DocumentService
from crc.services.file_service import FileService
from crc.services.lookup_service import LookupService
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
from crc.services.workflow_processor import WorkflowProcessor

class WorkflowService(object):
    """Provides tools for processing workflows and tasks. This should, at some
    point, be the only way to work with workflows, and the WorkflowProcessor
    should be hidden behind this service. That will help maintain a structure
    that avoids circular dependencies. For now, this contains tools for
    converting spiff-workflow models into our own API models, with additional
    information and capabilities, and it handles the testing of a workflow
    specification by completing it with random selections, attempting to mimic
    a front end as much as possible."""

    TASK_ACTION_COMPLETE = "COMPLETE"
    TASK_ACTION_TOKEN_RESET = "TOKEN_RESET"
    TASK_ACTION_HARD_RESET = "HARD_RESET"
    TASK_ACTION_SOFT_RESET = "SOFT_RESET"
    TASK_ACTION_ASSIGNMENT = "ASSIGNMENT"  # Whenever the lane changes between tasks, we assign the task to a specific user.

    TASK_STATE_LOCKED = "LOCKED"  # When the task belongs to a different user.

    @staticmethod
    def make_test_workflow(spec_id, validate_study_id=None):
        """Creates a transient workflow model (with a test user and study, if
        needed) that can be used to validate a workflow specification."""
        try:
            user = UserService.current_user()
        except ApiError:
            user = None
        if not user:
            user = db.session.query(UserModel).filter_by(uid="test").first()
        if not user:
            db.session.add(UserModel(uid="test"))
            db.session.commit()
            user = db.session.query(UserModel).filter_by(uid="test").first()
        if validate_study_id:
            study = db.session.query(StudyModel).filter_by(id=validate_study_id).first()
        else:
            study = db.session.query(StudyModel).filter_by(user_uid=user.uid).first()
        if not study:
            db.session.add(StudyModel(user_uid=user.uid, title="test"))
            db.session.commit()
            study = db.session.query(StudyModel).filter_by(user_uid=user.uid).first()
        workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
                                       workflow_spec_id=spec_id,
                                       last_updated=datetime.utcnow(),
                                       study=study)
        return workflow_model

    @staticmethod
    def delete_test_data(workflow: WorkflowModel):
        db.session.delete(workflow)
        # Also, delete any test study or user models that may have been created.
        for study in db.session.query(StudyModel).filter(StudyModel.user_uid == "test"):
            StudyService.delete_study(study.id)
        user = db.session.query(UserModel).filter_by(uid="test").first()
        if user:
            db.session.delete(user)
        db.session.commit()

    @staticmethod
    def do_waiting():
        """Finds all waiting workflows, refreshes their waiting tasks, and runs
        any engine steps that have become ready."""
        records = db.session.query(WorkflowModel).filter(WorkflowModel.status == WorkflowStatus.waiting).all()
        for workflow_model in records:
            try:
                app.logger.info('Processing workflow %s' % workflow_model.id)
                processor = WorkflowProcessor(workflow_model)
                processor.bpmn_workflow.refresh_waiting_tasks()
                processor.bpmn_workflow.do_engine_steps()
                processor.save()
            except Exception as e:
                app.logger.error("Error running waiting task for workflow #%i (%s) for study #%i. %s" %
                                 (workflow_model.id,
                                  workflow_model.workflow_spec.name,
                                  workflow_model.study_id,
                                  str(e)))

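    # A minimal usage sketch for do_waiting (hypothetical; the project may wire
    # this up differently). It only assumes a Flask app context, since
    # do_waiting touches the database through `db.session`:
    #
    #     with app.app_context():
    #         WorkflowService.do_waiting()  # advance every waiting workflow once
    #
    # A background scheduler would typically call this on an interval.
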
    @staticmethod
    @timeit
    def test_spec(spec_id, validate_study_id=None, test_until=None, required_only=False):
        """Runs a spec through its paces to see if it results in any errors.
        Not fool-proof, but a good sanity check. Returns the final data
        output from the last task if successful.

        test_until can be set to the name of a task, in which case the
        validation will stop once that task has been completed.

        required_only can be set to true, in which case this will run the
        spec, only completing the required fields, rather than everything.
        """

        workflow_model = WorkflowService.make_test_workflow(spec_id, validate_study_id)
        try:
            processor = WorkflowProcessor(workflow_model, validate_only=True)
            count = 0

            while not processor.bpmn_workflow.is_completed():
                processor.bpmn_workflow.get_deep_nav_list()  # Assure no errors with navigation.
                exit_task = processor.bpmn_workflow.do_engine_steps(exit_at=test_until)
                if exit_task is not None:
                    raise ApiError.from_task("validation_break",
                                             f"The validation was exited early on task "
                                             f"'{exit_task.task_spec.name}', parented by",
                                             exit_task.parent)
                tasks = processor.bpmn_workflow.get_tasks(SpiffTask.READY)
                for task in tasks:
                    if task.task_spec.lane is not None and task.task_spec.lane not in task.data:
                        raise ApiError.from_task("invalid_role",
                                                 f"This task is in a lane called '{task.task_spec.lane}'. The "
                                                 f"current task data must have information mapping this role to "
                                                 f"a unique user id.", task)
                    task_api = WorkflowService.spiff_task_to_api_task(
                        task,
                        add_docs_and_forms=True)  # Assure we try to process the documentation, and raise those errors.
                    # Make sure forms have a form key.
                    if hasattr(task_api, 'form') and task_api.form is not None and task_api.form.key == '':
                        raise ApiError(code='missing_form_key',
                                       message='Forms must include a Form Key.',
                                       task_id=task.id,
                                       task_name=task.get_name())
                    WorkflowService.populate_form_with_random_data(task, task_api, required_only)
                    processor.complete_task(task)
                    if test_until == task.task_spec.name:
                        raise ApiError.from_task("validation_break",
                                                 f"The validation was exited early on task "
                                                 f"'{task.task_spec.name}', parented by",
                                                 task.parent)
                count += 1
                if count >= 100:
                    raise ApiError.from_task(code='validation_loop',
                                             message=f'There appears to be an infinite loop in the validation. '
                                                     f'Task is {task.task_spec.description}',
                                             task=task)
            WorkflowService._process_documentation(processor.bpmn_workflow.last_task.parent.parent)

        except WorkflowException as we:
            raise ApiError.from_workflow_exception("workflow_validation_exception", str(we), we)
        finally:
            WorkflowService.delete_test_data(workflow_model)
        return processor.bpmn_workflow.last_task.data

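    # A minimal validation sketch (hypothetical spec id). test_spec creates and
    # cleans up its own transient workflow via make_test_workflow/delete_test_data:
    #
    #     final_data = WorkflowService.test_spec('my_spec_id')
    #
    # With test_until set, a "validation_break" ApiError is raised once the
    # named task completes, which callers can treat as a successful early exit:
    #
    #     try:
    #         WorkflowService.test_spec('my_spec_id', test_until='Task_AskQuestion')
    #     except ApiError as ae:
    #         assert ae.code == 'validation_break'
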
    @staticmethod
    def populate_form_with_random_data(task, task_api, required_only):
        """Populates a task with random data - useful for testing a spec."""

        if not hasattr(task.task_spec, 'form'): return

        form_data = task.data  # Just like with the front end, we start with what was already there, and modify it.
        hide_groups = []
        for field in task_api.form.fields:
            # Assure we have a field type.
            if field.type is None:
                raise ApiError(code='invalid_form_data',
                               message=f'Type is missing for field "{field.id}". A field type must be provided.',
                               task_id=task.id,
                               task_name=task.get_name())
            # Assure we have valid ids.
            if not WorkflowService.check_field_id(field.id):
                raise ApiError(code='invalid_form_id',
                               message=f'Invalid Field name: "{field.id}". A field ID must begin with a letter, '
                                       f'and can only contain letters, numbers, "_", and "."',
                               task_id=task.id,
                               task_name=task.get_name())
            # Assure the field has valid properties.
            WorkflowService.check_field_properties(field, task)

            # Process the label of the field if it is dynamic.
            if field.has_property(Task.FIELD_PROP_LABEL_EXPRESSION):
                result = WorkflowService.evaluate_property(Task.FIELD_PROP_LABEL_EXPRESSION, field, task)
                field.label = result

            # If a field is hidden and required, it must have a default value or value_expression.
            if field.has_property(Task.FIELD_PROP_HIDE_EXPRESSION) and field.has_validation(Task.FIELD_CONSTRAINT_REQUIRED):
                if not field.has_property(Task.FIELD_PROP_VALUE_EXPRESSION) and \
                        (not (hasattr(field, 'default_value')) or field.default_value is None):
                    raise ApiError(code='hidden and required field missing default',
                                   message=f'Field "{field.id}" is required but can be hidden. It must have either a default value or a value_expression.',
                                   task_id=task.id,
                                   task_name=task.get_name())

            # If the field is hidden and not required, it should not produce a value.
            if field.has_property(Task.FIELD_PROP_HIDE_EXPRESSION) and not field.has_validation(Task.FIELD_CONSTRAINT_REQUIRED):
                if WorkflowService.evaluate_property(Task.FIELD_PROP_HIDE_EXPRESSION, field, task):
                    continue

            # A field should only have a default_value or a value_expression, not both.
            if field.has_property(Task.FIELD_PROP_VALUE_EXPRESSION) and (hasattr(field, 'default_value') and field.default_value):
                raise ApiError(code='default value and value_expression',
                               message='This field has both a default_value and a value_expression. Please fix this to only have one or the other.')

            # If we have a default_value or value_expression, try to set the default.
            if field.has_property(Task.FIELD_PROP_VALUE_EXPRESSION) or (hasattr(field, 'default_value') and field.default_value):
                form_data[field.id] = WorkflowService.get_default_value(field, task)
                if not field.has_property(Task.FIELD_PROP_REPEAT):
                    continue

            # If we are only populating required fields, and this isn't required, stop here.
            if required_only:
                if (not field.has_validation(Task.FIELD_CONSTRAINT_REQUIRED) or
                        field.get_validation(Task.FIELD_CONSTRAINT_REQUIRED).lower().strip() != "true"):
                    continue  # Don't include any fields that aren't specifically marked as required.
                if field.has_property(Task.FIELD_PROP_REQUIRED_EXPRESSION):
                    result = WorkflowService.evaluate_property(Task.FIELD_PROP_REQUIRED_EXPRESSION, field, task)
                    if not result and required_only:
                        continue  # Don't complete fields that are not required.

            # If it is read only, stop here.
            if field.has_property(Task.FIELD_PROP_READ_ONLY) and field.get_property(Task.FIELD_PROP_READ_ONLY).lower().strip() == "true":
                continue  # Don't mess about with read only fields.

            if field.has_property(Task.FIELD_PROP_REPEAT):
                group = field.get_property(Task.FIELD_PROP_REPEAT)
                if field.has_property(Task.FIELD_PROP_REPEAT_HIDE_EXPRESSION):
                    result = WorkflowService.evaluate_property(Task.FIELD_PROP_REPEAT_HIDE_EXPRESSION, field, task)
                    if not result:
                        hide_groups.append(group)
                if group not in form_data and group not in hide_groups:
                    form_data[group] = [{}, {}, {}]
                if group in form_data and group not in hide_groups:
                    for i in range(3):
                        form_data[group][i][field.id] = WorkflowService.get_random_data_for_field(field, task)
            else:
                form_data[field.id] = WorkflowService.get_random_data_for_field(field, task)
        if task.data is None:
            task.data = {}
        task.data.update(form_data)

    @staticmethod
    def check_field_id(id):
        """Assures that field names are valid Python and Javascript names,
        optionally using dot notation to reference nested values."""
        if not id[0].isalpha():
            return False
        for char in id[1:]:
            if char.isalnum() or char == '_' or char == '.':
                pass
            else:
                return False
        return True

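    # Examples of what check_field_id accepts and rejects (illustrative only):
    #
    #     WorkflowService.check_field_id('my_field_2')   # True
    #     WorkflowService.check_field_id('fruit.type')   # True - dot notation
    #     WorkflowService.check_field_id('2fast')        # False - starts with a digit
    #     WorkflowService.check_field_id('my-field')     # False - "-" is not allowed
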
    @staticmethod
    def check_field_properties(field, task):
        """Assures that all properties are valid on a given workflow."""
        field_prop_names = list(map(lambda fp: fp.id, field.properties))
        valid_names = Task.valid_property_names()
        for name in field_prop_names:
            if name not in valid_names:
                raise ApiError.from_task("invalid_field_property",
                                         f'The field {field.id} contains an unsupported '
                                         f'property: {name}', task=task)

    @staticmethod
    def post_process_form(task):
        """Looks through the fields in a submitted form, acting on any properties."""
        if not hasattr(task.task_spec, 'form'): return
        for field in task.task_spec.form.fields:
            data = task.data
            if field.has_property(Task.FIELD_PROP_REPEAT):
                repeat_array = task.data[field.get_property(Task.FIELD_PROP_REPEAT)]
                for repeat_data in repeat_array:
                    WorkflowService.__post_process_field(task, field, repeat_data)
            else:
                WorkflowService.__post_process_field(task, field, data)

    @staticmethod
    def __post_process_field(task, field, data):
        if field.has_property(Task.FIELD_PROP_DOC_CODE) and field.id in data:
            # This is generally handled by the front end, but it is possible that the file was uploaded BEFORE
            # the doc_code was correctly set, so this is a stop gap measure to assure we still hit it correctly.
            file_id = data[field.id]["id"]
            doc_code = task.workflow.script_engine.eval(field.get_property(Task.FIELD_PROP_DOC_CODE), data)
            file = db.session.query(FileModel).filter(FileModel.id == file_id).first()
            if file:
                file.irb_doc_code = doc_code
                db.session.commit()
            else:
                # We have a problem: the file doesn't exist and was removed, but it is still referenced
                # in the data. At least attempt to clear out the data.
                data = {}
        if field.has_property(Task.FIELD_PROP_FILE_DATA) and \
                field.get_property(Task.FIELD_PROP_FILE_DATA) in data and \
                field.id in data and data[field.id]:
            file_id = data[field.get_property(Task.FIELD_PROP_FILE_DATA)]["id"]
            if field.type == 'enum':
                data_args = (field.id, data[field.id]['label'])
            else:
                data_args = (field.id, data[field.id])
            DataStoreBase().set_data_common(task.id, None, None, None, None, None, file_id, *data_args)

    @staticmethod
    def evaluate_property(property_name, field, task):
        expression = field.get_property(property_name)
        data = task.data
        if field.has_property(Task.FIELD_PROP_REPEAT):
            # Then you must evaluate the expression based on the data within the group only.
            group = field.get_property(Task.FIELD_PROP_REPEAT)
            if group in task.data:
                # Here we must make the current group data top level (as it would be in a repeat section) but
                # make all other top level task data available as well.
                new_data = copy.deepcopy(task.data)
                del new_data[group]
                data = task.data[group][0]
                data.update(new_data)

        try:
            return task.workflow.script_engine.eval(expression, data)
        except Exception as e:
            message = f"The field {field.id} contains an invalid expression. {e}"
            raise ApiError.from_task(f'invalid_{property_name}', message, task=task)

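    # A sketch of how a field property expression is evaluated (hypothetical
    # field definition; the expression runs against the task data):
    #
    #     # A BPMN form field with property hide_expression = "not is_admin",
    #     # evaluated while task.data == {'is_admin': False}, would yield:
    #     hidden = WorkflowService.evaluate_property(
    #         Task.FIELD_PROP_HIDE_EXPRESSION, field, task)  # -> True
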
    @staticmethod
    def has_lookup(field):
        """Returns true if this is a lookup field. Note, this does not include
        enums based on task data; that is populated when the form is created,
        not as a lookup from a data table."""
        has_ldap_lookup = field.has_property(Task.FIELD_PROP_LDAP_LOOKUP)
        has_file_lookup = field.has_property(Task.FIELD_PROP_SPREADSHEET_NAME)
        return has_ldap_lookup or has_file_lookup

    @staticmethod
    def get_default_value(field, task):
        has_lookup = WorkflowService.has_lookup(field)

        default = field.default_value
        # If there is a value expression, use that rather than the default value.
        if field.has_property(Task.FIELD_PROP_VALUE_EXPRESSION):
            result = WorkflowService.evaluate_property(Task.FIELD_PROP_VALUE_EXPRESSION, field, task)
            default = result

        # If no default exists, return None.
        # Note: if default is False, we don't want to execute this code.
        if default is None or (isinstance(default, str) and default.strip() == ''):
            if field.type == "enum" or field.type == "autocomplete":
                # Return empty arrays for multi-select enums, otherwise a value of None.
                if field.has_property(Task.FIELD_PROP_ENUM_TYPE) and field.get_property(Task.FIELD_PROP_ENUM_TYPE) == "checkbox":
                    return []
                else:
                    return {'value': None, 'label': None}
            else:
                return None

        if field.type == "enum" and not has_lookup:
            default_option = next((obj for obj in field.options if obj.id == default), None)
            if not default_option:
                raise ApiError.from_task("invalid_default", "You specified a default value that does not exist in "
                                                            "the enum options ", task)
            return {'value': default_option.id, 'label': default_option.name}
        elif field.type == "autocomplete" or field.type == "enum":
            lookup_model = LookupService.get_lookup_model(task, field)
            if field.has_property(Task.FIELD_PROP_LDAP_LOOKUP):  # All ldap records get the same person.
                return None  # There is no default value for ldap.
            elif lookup_model:
                data = db.session.query(LookupDataModel). \
                    filter(LookupDataModel.lookup_file_model == lookup_model). \
                    filter(LookupDataModel.value == default). \
                    first()
                if not data:
                    raise ApiError.from_task("invalid_default", "You specified a default value that does not exist in "
                                                                "the enum options ", task)
                return {"value": data.value, "label": data.label, "data": data.data}
            else:
                raise ApiError.from_task("unknown_lookup_option", "The settings for this auto complete field "
                                                                  "are incorrect: %s " % field.id, task)
        elif field.type == "long":
            return int(default)
        elif field.type == 'boolean':
            default = str(default).lower()
            if default == 'true' or default == 't':
                return True
            return False
        else:
            return default

    @staticmethod
    def get_random_data_for_field(field, task):
        """Randomly populates the field, mainly concerned with getting enums correct, as
        the rest are pretty easy."""
        has_lookup = WorkflowService.has_lookup(field)

        if field.type == "enum" and not has_lookup:
            # If it's a normal enum field with no lookup,
            # return a random option.
            if len(field.options) > 0:
                random_choice = random.choice(field.options)
                if isinstance(random_choice, dict):
                    return {'value': random_choice['id'], 'label': random_choice['name'], 'data': random_choice['data']}
                else:
                    # fixme: why is it sometimes an EnumFormFieldOption, and other times not?
                    return {'value': random_choice.id, 'label': random_choice.name}
            else:
                raise ApiError.from_task("invalid_enum", "You specified an enumeration field (%s),"
                                                         " with no options" % field.id, task)
        elif field.type == "autocomplete" or field.type == "enum":
            # If it has a lookup, get the lookup model from the spreadsheet or task data, then return a
            # random option from the lookup model.
            lookup_model = LookupService.get_lookup_model(task, field)
            if field.has_property(Task.FIELD_PROP_LDAP_LOOKUP):  # All ldap records get the same person.
                return WorkflowService._random_ldap_record()
            elif lookup_model:
                data = db.session.query(LookupDataModel).filter(
                    LookupDataModel.lookup_file_model == lookup_model).limit(10).all()
                options = [{"value": d.value, "label": d.label, "data": d.data} for d in data]
                if len(options) > 0:
                    return random.choice(options)
                else:
                    raise ApiError.from_task("invalid_enum", "You specified an enumeration field (%s),"
                                                             " with no options" % field.id, task)
            else:
                raise ApiError.from_task("unknown_lookup_option", "The settings for this auto complete field "
                                                                  "are incorrect: %s " % field.id, task)
        elif field.type == "long":
            return random.randint(1, 1000)
        elif field.type == 'boolean':
            return random.choice([True, False])
        elif field.type == 'file':
            doc_code = field.id
            if field.has_property('doc_code'):
                doc_code = WorkflowService.evaluate_property('doc_code', field, task)
            file_model = FileModel(name="test.png",
                                   irb_doc_code=doc_code)
            doc_dict = DocumentService.get_dictionary()
            file = File.from_models(file_model, None, doc_dict)
            return FileSchema().dump(file)
        elif field.type == 'files':
            return random.randrange(1, 100)
        else:
            return WorkflowService._random_string()

    @staticmethod
    def _random_ldap_record():
        # The 'value' carries the uid (it is what lane assignments read), and
        # the 'label' is the display name.
        return {
            "label": "Dan Funk",
            "value": "dhf8r",
            "data": {
                "uid": "dhf8r",
                "display_name": "Dan Funk",
                "given_name": "Dan",
                "email_address": "dhf8r@virginia.edu",
                "department": "Department of Psychocosmographictology",
                "affiliation": "Roustabout",
                "sponsor_type": "Staff"}
        }

    @staticmethod
    def _random_string(string_length=10):
        """Generate a random string of fixed length."""
        letters = string.ascii_lowercase
        return ''.join(random.choice(letters) for _ in range(string_length))

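    # Example: WorkflowService._random_string(5) might return 'qzjfi'
    # (lowercase ascii only; illustrative output).
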
    @staticmethod
    def processor_to_workflow_api(processor: WorkflowProcessor, next_task=None):
        """Returns an API model representing the state of the current workflow.
        If requested, and possible, next_task is set to the current task."""

        navigation = processor.bpmn_workflow.get_deep_nav_list()
        WorkflowService.update_navigation(navigation, processor)

        spec = db.session.query(WorkflowSpecModel).filter_by(id=processor.workflow_spec_id).first()
        is_review = FileService.is_workflow_review(processor.workflow_spec_id)
        workflow_api = WorkflowApi(
            id=processor.get_workflow_id(),
            status=processor.get_status(),
            next_task=None,
            navigation=navigation,
            workflow_spec_id=processor.workflow_spec_id,
            spec_version=processor.get_version_string(),
            is_latest_spec=processor.is_latest_spec,
            total_tasks=len(navigation),
            completed_tasks=processor.workflow_model.completed_tasks,
            last_updated=processor.workflow_model.last_updated,
            is_review=is_review,
            title=spec.display_name,
            study_id=processor.workflow_model.study_id or None
        )
        if not next_task:  # The next task can be requested to be a certain task, useful for parallel tasks.
            # This may or may not work; sometimes there is no next task to complete.
            next_task = processor.next_task()
        if next_task:
            previous_form_data = WorkflowService.get_previously_submitted_data(processor.workflow_model.id, next_task)
            DeepMerge.merge(next_task.data, previous_form_data)
            workflow_api.next_task = WorkflowService.spiff_task_to_api_task(next_task, add_docs_and_forms=True)
            # Update the state of the task to locked if the current user does not own the task.
            user_uids = WorkflowService.get_users_assigned_to_task(processor, next_task)
            if not UserService.in_list(user_uids, allow_admin_impersonate=True):
                workflow_api.next_task.state = WorkflowService.TASK_STATE_LOCKED
        return workflow_api

    @staticmethod
    def update_navigation(navigation: List[NavItem], processor: WorkflowProcessor):
        """Recursively walks down through children, cleaning up descriptions and statuses."""
        for nav_item in navigation:
            spiff_task = processor.bpmn_workflow.get_task(nav_item.task_id)
            if spiff_task:
                # Use existing logic to set the description, and alter the state based on permissions.
                api_task = WorkflowService.spiff_task_to_api_task(spiff_task, add_docs_and_forms=False)
                nav_item.description = api_task.title
                user_uids = WorkflowService.get_users_assigned_to_task(processor, spiff_task)
                if (isinstance(spiff_task.task_spec, UserTask) or isinstance(spiff_task.task_spec, ManualTask)) \
                        and not UserService.in_list(user_uids, allow_admin_impersonate=True):
                    nav_item.state = WorkflowService.TASK_STATE_LOCKED
            else:
                # Strip off the first word in the description, to meet guidelines for BPMN.
                if nav_item.description and ' ' in nav_item.description:
                    nav_item.description = nav_item.description.partition(' ')[2]

            # Recurse here.
            WorkflowService.update_navigation(nav_item.children, processor)

    @staticmethod
    def get_previously_submitted_data(workflow_id, spiff_task):
        """If the user has completed this task previously, find the form data for the last submission."""
        query = db.session.query(TaskEventModel) \
            .filter_by(workflow_id=workflow_id) \
            .filter_by(task_name=spiff_task.task_spec.name) \
            .filter_by(action=WorkflowService.TASK_ACTION_COMPLETE)

        if hasattr(spiff_task, 'internal_data') and 'runtimes' in spiff_task.internal_data:
            query = query.filter_by(mi_index=spiff_task.internal_data['runtimes'])

        latest_event = query.order_by(TaskEventModel.date.desc()).first()
        if latest_event:
            if latest_event.form_data is not None:
                return latest_event.form_data
            else:
                missing_form_error = (
                    f'We have lost data for workflow {workflow_id}, '
                    f'task {spiff_task.task_spec.name}; it is not in the task event model, '
                    f'and it should be.'
                )
                app.logger.error(missing_form_error, exc_info=True)
                return {}
        else:
            return {}

    @staticmethod
    def spiff_task_to_api_task(spiff_task, add_docs_and_forms=False):
        task_types = [UserTask, ManualTask, BusinessRuleTask, CancelTask, ScriptTask, StartTask, EndEvent, StartEvent]

        for t in task_types:
            if isinstance(spiff_task.task_spec, t):
                task_type = t.__name__
                break
        else:
            task_type = "NoneTask"

        info = spiff_task.task_info()
        if info["is_looping"]:
            mi_type = MultiInstanceType.looping
        elif info["is_sequential_mi"]:
            mi_type = MultiInstanceType.sequential
        elif info["is_parallel_mi"]:
            mi_type = MultiInstanceType.parallel
        else:
            mi_type = MultiInstanceType.none

        props = {}
        if hasattr(spiff_task.task_spec, 'extensions'):
            for key, val in spiff_task.task_spec.extensions.items():
                props[key] = val

        if hasattr(spiff_task.task_spec, 'lane'):
            lane = spiff_task.task_spec.lane
        else:
            lane = None

        task = Task(spiff_task.id,
                    spiff_task.task_spec.name,
                    spiff_task.task_spec.description,
                    task_type,
                    spiff_task.get_state_name(),
                    lane,
                    None,
                    "",
                    {},
                    mi_type,
                    info["mi_count"],
                    info["mi_index"],
                    process_name=spiff_task.task_spec._wf_spec.description,
                    properties=props
                    )

        # Only process the form and documentation if requested.
        # The task should be in a completed or a ready state, and should
        # not be a previously completed MI Task.
        if add_docs_and_forms:
            task.data = spiff_task.data
            if hasattr(spiff_task.task_spec, "form"):
                task.form = spiff_task.task_spec.form
                for i, field in enumerate(task.form.fields):
                    task.form.fields[i] = WorkflowService.process_options(spiff_task, field)
                    # If there is a default value, set it.
                    if field.id not in task.data and WorkflowService.get_default_value(field, spiff_task) is not None:
                        task.data[field.id] = WorkflowService.get_default_value(field, spiff_task)
            task.documentation = WorkflowService._process_documentation(spiff_task)

        # All ready tasks should have a valid name, and this can be computed for
        # some tasks, particularly multi-instance tasks that all have the same spec
        # but need different labels.
        if spiff_task.state == SpiffTask.READY:
            task.properties = WorkflowService._process_properties(spiff_task, props)

        # Replace the title with the display name if it is set in the task properties,
        # otherwise strip off the first word of the task, as that should be following
        # a BPMN standard, and should not be included in the display.
        if task.properties and "display_name" in task.properties:
            try:
                task.title = spiff_task.workflow.script_engine.evaluate_expression(spiff_task, task.properties[Task.PROP_EXTENSIONS_TITLE])
            except Exception as e:
                # If the task is ready, we should raise an error, but if it is in the future or the past,
                # we may not have the information we need to properly set the title, so don't error out;
                # just use what is provided.
                if spiff_task.state == spiff_task.READY:
                    raise ApiError.from_task(code="task_title_error",
                                             message="Could not set task title on task %s with '%s' property because %s" %
                                                     (spiff_task.task_spec.name, Task.PROP_EXTENSIONS_TITLE, str(e)),
                                             task=spiff_task)
                # Otherwise, just use the current title.
        elif task.title and ' ' in task.title:
            task.title = task.title.partition(' ')[2]
        return task

    @staticmethod
    def _process_properties(spiff_task, props):
        """Runs all the property values through the Jinja2 processor to inject data."""
        for k, v in props.items():
            try:
                template = Template(v)
                props[k] = template.render(**spiff_task.data)
            except jinja2.exceptions.TemplateError as ue:
                app.logger.error(f'Failed to process task property {str(ue)}', exc_info=True)
        return props

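    # A sketch of the property templating above (illustrative data only):
    #
    #     props = {'display_name': 'Review {{ document_name }}'}
    #     # With spiff_task.data == {'document_name': 'Consent Form'}, the
    #     # rendered property becomes 'Review Consent Form'.
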
    @staticmethod
    def _process_documentation(spiff_task):
        """Runs the given documentation string through the Jinja2 processor to inject data,
        create loops, etc... If a markdown file exists with the same name as the task id,
        it will use that file instead of the documentation."""

        documentation = spiff_task.task_spec.documentation if hasattr(spiff_task.task_spec, "documentation") else ""

        try:
            doc_file_name = spiff_task.task_spec.name + ".md"
            data_model = FileService.get_workflow_file_data(spiff_task.workflow, doc_file_name)
            raw_doc = data_model.data.decode("utf-8")
        except ApiError:
            raw_doc = documentation

        if not raw_doc:
            return ""

        try:
            template = Template(raw_doc)
            return template.render(**spiff_task.data)
        except jinja2.exceptions.TemplateError as ue:
            raise ApiError.from_task(code="template_error", message="Error processing template for task %s: %s" %
                                     (spiff_task.task_spec.name, str(ue)), task=spiff_task)
        except TypeError as te:
            raise ApiError.from_task(code="template_error", message="Error processing template for task %s: %s" %
                                     (spiff_task.task_spec.name, str(te)), task=spiff_task)
        except Exception as e:
            # Any other failure is logged; the method then falls through and returns None.
            app.logger.error(str(e), exc_info=True)

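    # A sketch of the documentation templating (hypothetical task documentation):
    #
    #     # Documentation: "Hello {{ user_name }}, you have {{ count }} items."
    #     # With spiff_task.data == {'user_name': 'Alice', 'count': 3}, the
    #     # rendered markdown is "Hello Alice, you have 3 items."
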
    @staticmethod
    def process_options(spiff_task, field):

        # If this is an auto-complete field, do not populate options; a lookup will happen later.
        if field.type == Task.FIELD_TYPE_AUTO_COMPLETE:
            pass
        elif field.has_property(Task.FIELD_PROP_SPREADSHEET_NAME):
            lookup_model = LookupService.get_lookup_model(spiff_task, field)
            data = db.session.query(LookupDataModel).filter(LookupDataModel.lookup_file_model == lookup_model).all()
            if not hasattr(field, 'options'):
                field.options = []
            for d in data:
                field.options.append({"id": d.value, "name": d.label, "data": d.data})
        elif field.has_property(Task.FIELD_PROP_DATA_NAME):
            field.options = WorkflowService.get_options_from_task_data(spiff_task, field)
        return field

    @staticmethod
    def get_options_from_task_data(spiff_task, field):
        # Both the value and label columns are required.
        if not (field.has_property(Task.FIELD_PROP_VALUE_COLUMN) and
                field.has_property(Task.FIELD_PROP_LABEL_COLUMN)):
            raise ApiError.from_task("invalid_enum",
                                     f"For enumerations based on task data, you must include 3 properties: "
                                     f"{Task.FIELD_PROP_DATA_NAME}, {Task.FIELD_PROP_VALUE_COLUMN}, "
                                     f"{Task.FIELD_PROP_LABEL_COLUMN}", task=spiff_task)
        prop = field.get_property(Task.FIELD_PROP_DATA_NAME)
        if prop not in spiff_task.data:
            raise ApiError.from_task("invalid_enum", f"For enumerations based on task data, task data must have "
                                                     f"a property called {prop}", task=spiff_task)
        # Get the enum options from the task data.
        data_model = spiff_task.data[prop]
        value_column = field.get_property(Task.FIELD_PROP_VALUE_COLUMN)
        label_column = field.get_property(Task.FIELD_PROP_LABEL_COLUMN)
        items = data_model.items() if isinstance(data_model, dict) else data_model
        options = []
        for item in items:
            if value_column not in item:
                raise ApiError.from_task("invalid_enum", f"The value column '{value_column}' does not exist for item {item}",
                                         task=spiff_task)
            if label_column not in item:
                raise ApiError.from_task("invalid_enum", f"The label column '{label_column}' does not exist for item {item}",
                                         task=spiff_task)

            options.append(Box({"id": item[value_column], "name": item[label_column], "data": item}))
        return options

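    # A sketch of a task-data-backed enum (hypothetical property names; the real
    # keys come from the Task.FIELD_PROP_* constants above):
    #
    #     # Field properties point the value column at 'id' and the label column
    #     # at 'label', with the data name set to 'fruits'.
    #     # spiff_task.data == {'fruits': [{'id': 'a', 'label': 'Apple'},
    #     #                                {'id': 'b', 'label': 'Banana'}]}
    #     # get_options_from_task_data would return two Box options with
    #     # ids 'a' and 'b' and names 'Apple' and 'Banana'.
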
    @staticmethod
    def update_task_assignments(processor):
        """For every upcoming user task, log a task action
        that connects the assigned user(s) to that task. All
        existing assignment actions for this workflow are removed from the database,
        so that only the current valid actions are available. update_task_assignments
        should be called whenever progress is made on a workflow."""
        db.session.query(TaskEventModel). \
            filter(TaskEventModel.workflow_id == processor.workflow_model.id). \
            filter(TaskEventModel.action == WorkflowService.TASK_ACTION_ASSIGNMENT).delete()
        db.session.commit()

        for task in processor.get_current_user_tasks():
            user_ids = WorkflowService.get_users_assigned_to_task(processor, task)
            for user_id in user_ids:
                WorkflowService.log_task_action(user_id, processor, task, WorkflowService.TASK_ACTION_ASSIGNMENT)

    @staticmethod
    def get_users_assigned_to_task(processor, spiff_task) -> List[str]:
        if processor.workflow_model.study_id is None and processor.workflow_model.user_id is None:
            raise ApiError.from_task(code='invalid_workflow',
                                     message='A workflow must have either a study_id or a user_id.',
                                     task=spiff_task)
        # Standalone workflow - we only care about the current user.
        elif processor.workflow_model.study_id is None and processor.workflow_model.user_id is not None:
            return [processor.workflow_model.user_id]
        # Workflow associated with a study - get all the users.
        else:
            if not hasattr(spiff_task.task_spec, 'lane') or spiff_task.task_spec.lane is None:
                associated = StudyService.get_study_associates(processor.workflow_model.study.id)
                return [user.uid for user in associated if user.access]
            if spiff_task.task_spec.lane not in spiff_task.data:
                return []  # No users are assignable to the task at this moment.
            lane_users = spiff_task.data[spiff_task.task_spec.lane]
            if not isinstance(lane_users, list):
                lane_users = [lane_users]

            lane_uids = []
            for user in lane_users:
                if isinstance(user, dict):
                    if 'value' in user and user['value'] is not None:
                        lane_uids.append(user['value'])
                    else:
                        raise ApiError.from_task(code="task_lane_user_error",
                                                 message="Spiff Task %s lane user dict must have a key called "
                                                         "'value' with the user's uid in it." %
                                                         spiff_task.task_spec.name, task=spiff_task)
                elif isinstance(user, str):
                    lane_uids.append(user)
                else:
                    raise ApiError.from_task(code="task_lane_user_error",
                                             message="Spiff Task %s lane user is not a string or dict" %
                                                     spiff_task.task_spec.name, task=spiff_task)

            return lane_uids

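    # Lane assignments come straight from task data; both shapes below resolve
    # to uids (illustrative data):
    #
    #     spiff_task.data['supervisor'] = 'dhf8r'               # -> ['dhf8r']
    #     spiff_task.data['supervisor'] = [{'value': 'dhf8r'},  # -> ['dhf8r', 'ajl2j']
    #                                      {'value': 'ajl2j'}]
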
    @staticmethod
    def log_task_action(user_uid, processor, spiff_task, action):
        task = WorkflowService.spiff_task_to_api_task(spiff_task)
        form_data = WorkflowService.extract_form_data(spiff_task.data, spiff_task)
        task_event = TaskEventModel(
            study_id=processor.workflow_model.study_id,
            user_uid=user_uid,
            workflow_id=processor.workflow_model.id,
            workflow_spec_id=processor.workflow_model.workflow_spec_id,
            spec_version=processor.get_version_string(),
            action=action,
            task_id=task.id,
            task_name=task.name,
            task_title=task.title,
            task_type=str(task.type),
            task_state=task.state,
            task_lane=task.lane,
            form_data=form_data,
            mi_type=task.multi_instance_type.value,  # Some tasks have a repeat behavior.
            mi_count=task.multi_instance_count,  # This is the number of times the task could repeat.
            mi_index=task.multi_instance_index,  # And the index of the currently repeating task.
            process_name=task.process_name,
            # date=datetime.utcnow(), <=== For future reference, NEVER do this. Let the database set the time.
        )
        db.session.add(task_event)
        db.session.commit()

    @staticmethod
    def extract_form_data(latest_data, task):
        """Extracts data from the latest_data that is directly related to the form that is being
        submitted."""
        data = {}

        if hasattr(task.task_spec, 'form'):
            for field in task.task_spec.form.fields:
                if field.has_property(Task.FIELD_PROP_REPEAT):
                    group = field.get_property(Task.FIELD_PROP_REPEAT)
                    if group in latest_data:
                        data[group] = latest_data[group]
                else:
                    value = WorkflowService.get_dot_value(field.id, latest_data)
                    if value is not None:
                        WorkflowService.set_dot_value(field.id, value, data)
        return data

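    # A sketch of extract_form_data (illustrative form with fields 'name' and
    # 'fruit.type'):
    #
    #     latest_data = {'name': 'Alice', 'fruit': {'type': 'apple'}, 'junk': 1}
    #     # extract_form_data returns {'name': 'Alice', 'fruit': {'type': 'apple'}}
    #     # - only values backing the form's fields are kept.
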
    @staticmethod
    def get_dot_value(path, source):
        """Given a path in dot notation, such as 'fruit.type', tries to find that value in
        the source, looking deep in the dictionary."""
        paths = path.split(".")  # [a,b,c]
        s = source
        index = 0
        for p in paths:
            index += 1
            if isinstance(s, dict) and p in s:
                if index == len(paths):
                    return s[p]
                else:
                    s = s[p]
        if path in source:
            return source[path]
        return None

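    # Example (illustrative): dotted lookups fall back to a literal key match.
    #
    #     WorkflowService.get_dot_value('fruit.type', {'fruit': {'type': 'apple'}})  # 'apple'
    #     WorkflowService.get_dot_value('fruit.type', {'fruit.type': 'pear'})        # 'pear'
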
    @staticmethod
    def set_dot_value(path, value, target):
        """Given a path in dot notation, such as "fruit.type", and a value "apple", will
        set the value in the target dictionary, as target["fruit"]["type"] = "apple"."""
        destination = target
        paths = path.split(".")  # [a,b,c]
        index = 0
        for p in paths:
            index += 1
            if index == len(paths):
                destination[p] = value  # Always set (and overwrite) the final key.
            else:
                if p not in destination:
                    destination[p] = {}
                destination = destination[p]
        return target

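    # Example (illustrative): nested keys are created as needed.
    #
    #     target = {}
    #     WorkflowService.set_dot_value('fruit.type', 'apple', target)
    #     # target is now {'fruit': {'type': 'apple'}}
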
    @staticmethod
    def process_workflows_for_cancels(study_id):
        workflows = db.session.query(WorkflowModel).filter_by(study_id=study_id).all()
        for workflow in workflows:
            if workflow.status == WorkflowStatus.user_input_required or workflow.status == WorkflowStatus.waiting:
                WorkflowProcessor.reset(workflow, clear_data=False)

    @staticmethod
    def get_workflow_from_spec(workflow_spec_id, user):
        workflow_model = WorkflowModel(status=WorkflowStatus.not_started,
                                       study=None,
                                       user_id=user.uid,
                                       workflow_spec_id=workflow_spec_id,
                                       last_updated=datetime.utcnow())  # utcnow, to match how other workflows are stamped
        db.session.add(workflow_model)
        db.session.commit()
        return workflow_model

    @staticmethod
    def get_standalone_workflow_specs():
        specs = db.session.query(WorkflowSpecModel).filter_by(standalone=True).all()
        return specs

    @staticmethod
    def get_library_workflow_specs():
        specs = db.session.query(WorkflowSpecModel).filter_by(library=True).all()
        return specs

    @staticmethod
    def get_primary_workflow(workflow_spec_id):
        # Returns the FileModel of the primary workflow for a workflow_spec, or None.
        return db.session.query(FileModel).filter(FileModel.workflow_spec_id == workflow_spec_id,
                                                  FileModel.primary == True).first()
|