Updated to use the latest script/evaluation engine, which creates a single location where all values used in BPMN/DMN are processed. Right now this is a Python-based interpreter, but we will eventually base it on FEEL expressions.
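In practice this means the custom evaluate/_eval overrides previously defined in CustomBpmnScriptEngine go away (see the workflow_processor.py diff below), and the processor only needs to translate engine failures into API errors. A rough sketch of that pattern, abbreviated from the diff rather than the full class:

from SpiffWorkflow.exceptions import WorkflowTaskExecException
from crc.api.common import ApiError

class WorkflowProcessor(object):
    # Abbreviated sketch: expression evaluation now happens inside
    # SpiffWorkflow's own engine; the processor just surfaces task errors.
    def do_engine_steps(self):
        try:
            self.bpmn_workflow.do_engine_steps()
        except WorkflowTaskExecException as we:
            # The engine reports which task failed, so API callers get a task-level error.
            raise ApiError.from_task("task_error", str(we), we.task)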

The validation process now takes the API model into account, so we catch errors caused by bad file names.
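Concretely (see the workflow_service.py diff below), validation now converts each ready task to its API representation before filling it with random data, so the same documentation and form processing the front end relies on also runs during validation. A condensed sketch of that loop, taken from the diff:

# Sketch of the validation loop, condensed from the workflow_service.py diff below.
tasks = bpmn_workflow.get_tasks(SpiffTask.READY)
for task in tasks:
    # Building the API task processes documentation and form options,
    # raising ApiErrors for problems such as bad file names.
    task_api = WorkflowService.spiff_task_to_api_task(task, add_docs_and_forms=True)
    WorkflowProcessor.populate_form_with_random_data(task, task_api)
    task.complete()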
Dan Funk 2020-05-11 17:04:05 -04:00
parent 27d8e0b51b
commit 02f8764056
9 changed files with 43 additions and 42 deletions

Pipfile.lock (generated)
View File

@@ -478,11 +478,11 @@
},
"marshmallow": {
"hashes": [
- "sha256:56663fa1d5385c14c6a1236badd166d6dee987a5f64d2b6cc099dadf96eb4f09",
- "sha256:f12203bf8d94c410ab4b8d66edfde4f8a364892bde1f6747179765559f93d62a"
+ "sha256:c2673233aa21dde264b84349dc2fd1dce5f30ed724a0a00e75426734de5b84ab",
+ "sha256:f88fe96434b1f0f476d54224d59333eba8ca1a203a2695683c1855675c4049a7"
],
"index": "pypi",
- "version": "==3.5.2"
+ "version": "==3.6.0"
},
"marshmallow-enum": {
"hashes": [
@@ -783,7 +783,7 @@
"spiffworkflow": {
"editable": true,
"git": "https://github.com/sartography/SpiffWorkflow.git",
- "ref": "6608bb1d9cc77b906bf668804470e850ec798414"
+ "ref": "f626ac6d4f035f3a65a058320efd8d33d1ec652a"
},
"sqlalchemy": {
"hashes": [
@@ -938,11 +938,11 @@
},
"pytest": {
"hashes": [
- "sha256:0e5b30f5cb04e887b91b1ee519fa3d89049595f428c1db76e73bd7f17b09b172",
- "sha256:84dde37075b8805f3d1f392cc47e38a0e59518fb46a431cfdaf7cf1ce805f970"
+ "sha256:95c710d0a72d91c13fae35dce195633c929c3792f54125919847fdcdf7caa0d3",
+ "sha256:eb2b5e935f6a019317e455b6da83dd8650ac9ffd2ee73a7b657a30873d67a698"
],
"index": "pypi",
- "version": "==5.4.1"
+ "version": "==5.4.2"
},
"six": {
"hashes": [

View File

@@ -52,7 +52,12 @@ class StudyInfo(Script):
"NETBADGEID": "dhf8r"
},
"details":
- {},
+ {
+ "IS_IND": 0,
+ "IS_IDE": 0,
+ "IS_MULTI_SITE": 0,
+ "IS_UVA_PI_MULTI": 0
+ },
"approvals": {
"study_id": 12,
"workflow_id": 321,

View File

@@ -2,9 +2,10 @@ from datetime import datetime
import json
from typing import List
import requests
from SpiffWorkflow import WorkflowException
- from crc import db, session
+ from crc import db, session, app
from crc.api.common import ApiError
from crc.models.file import FileModel, FileModelSchema
from crc.models.protocol_builder import ProtocolBuilderStudy, ProtocolBuilderStatus
@@ -108,7 +109,11 @@ class StudyService(object):
that is available.."""
# Get PB required docs
- pb_docs = ProtocolBuilderService.get_required_docs(study_id=study_id)
+ try:
+ pb_docs = ProtocolBuilderService.get_required_docs(study_id=study_id)
+ except requests.exceptions.ConnectionError as ce:
+ app.logger.error("Failed to connect to the Protocol Builder - %s" % str(ce))
+ pb_docs = []
# Loop through all known document types, get the counts for those files, and use pb_docs to mark those required.
doc_dictionary = FileService.get_file_reference_dictionary()

View File

@@ -4,7 +4,7 @@ import string
import xml.etree.ElementTree as ElementTree
from datetime import datetime
- from SpiffWorkflow import Task as SpiffTask
+ from SpiffWorkflow import Task as SpiffTask, WorkflowException
from SpiffWorkflow.bpmn.BpmnScriptEngine import BpmnScriptEngine
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
@@ -12,6 +12,7 @@ from SpiffWorkflow.bpmn.specs.EndEvent import EndEvent
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
+ from SpiffWorkflow.exceptions import WorkflowTaskExecException
from SpiffWorkflow.operators import Operator
from SpiffWorkflow.specs import WorkflowSpec
@@ -69,24 +70,6 @@ class CustomBpmnScriptEngine(BpmnScriptEngine):
camel = camel.strip()
return re.sub(r'(?<!^)(?=[A-Z])', '_', camel).lower()
- def evaluate(self, task, expression):
- """
- Evaluate the given expression, within the context of the given task and
- return the result.
- """
- if isinstance(expression, Operator):
- return expression._matches(task)
- else:
- return self._eval(task, expression, **task.data)
- def _eval(self, task, expression, **kwargs):
- locals().update(kwargs)
- try:
- return eval(expression)
- except NameError as ne:
- raise ApiError.from_task('invalid_expression',
- "The expression '%s' you provided has a missing value. % s" % (expression, str(ne)),
- task=task)
class MyCustomParser(BpmnDmnParser):
"""
@@ -282,13 +265,13 @@
@staticmethod
- def populate_form_with_random_data(task):
+ def populate_form_with_random_data(task, task_api):
"""populates a task with random data - useful for testing a spec."""
if not hasattr(task.task_spec, 'form'): return
form_data = {}
- for field in task.task_spec.form.fields:
+ for field in task_api.form.fields:
if field.type == "enum":
if len(field.options) > 0:
form_data[field.id] = random.choice(field.options)
@@ -346,7 +329,10 @@ class WorkflowProcessor(object):
return self.workflow_model.spec_version
def do_engine_steps(self):
- self.bpmn_workflow.do_engine_steps()
+ try:
+ self.bpmn_workflow.do_engine_steps()
+ except WorkflowTaskExecException as we:
+ raise ApiError.from_task("task_error", str(we), we.task)
def serialize(self):
return self._serializer.serialize_workflow(self.bpmn_workflow)

View File

@@ -54,8 +54,8 @@ class WorkflowService(object):
tasks = bpmn_workflow.get_tasks(SpiffTask.READY)
for task in tasks:
task_api = WorkflowService.spiff_task_to_api_task(
- task) # Assure we try to process the documenation, and raise those errors.
- WorkflowProcessor.populate_form_with_random_data(task)
+ task, add_docs_and_forms=True) # Assure we try to process the documenation, and raise those errors.
+ WorkflowProcessor.populate_form_with_random_data(task, task_api)
task.complete()
except WorkflowException as we:
raise ApiError.from_task_spec("workflow_execution_exception", str(we),
@@ -151,13 +151,15 @@ class WorkflowService(object):
@staticmethod
def process_options(spiff_task, field):
- lookup_model = WorkflowService.get_lookup_table(spiff_task, field);
+ lookup_model = WorkflowService.get_lookup_table(spiff_task, field)
# If lookup is set to true, do not populate options, a lookup will happen later.
if field.has_property(Task.EMUM_OPTIONS_AS_LOOKUP) and field.get_property(Task.EMUM_OPTIONS_AS_LOOKUP):
pass
else:
data = db.session.query(LookupDataModel).filter(LookupDataModel.lookup_file_model == lookup_model).all()
+ if not hasattr(field, 'options'):
+ field.options = []
for d in data:
field.options.append({"id": d.value, "name": d.label})

View File

@@ -7,7 +7,7 @@
<decisionTable id="decisionTable_1">
<input id="input_1" label="Pharmacy Manual Upload Count">
<inputExpression id="inputExpression_1" typeRef="integer">
- <text>StudyInfo.documents["DrugDevDoc_PharmManual"]["count"]</text>
+ <text>StudyInfo.documents.DrugDevDoc_PharmManual.count</text>
</inputExpression>
</input>
<output id="output_1" label="Pharmacy Manual(s) Uploaded?" name="isPharmacyManual" typeRef="boolean" />
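Note the expression change above: the DMN input now reads StudyInfo.documents.DrugDevDoc_PharmManual.count with attribute access instead of dictionary subscripting, which suggests the new evaluation engine exposes nested task data as attribute-accessible objects. A hypothetical illustration of that access style (python-box is an assumption here, not necessarily what the engine actually uses):

# Hypothetical: nested dicts wrapped so dot access works, as the DMN expression expects.
from box import Box

study_info = Box({"documents": {"DrugDevDoc_PharmManual": {"count": 1}}})
assert study_info.documents.DrugDevDoc_PharmManual.count == 1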

View File

@@ -315,6 +315,7 @@ class TestTasksApi(BaseTest):
results = json.loads(rv.get_data(as_text=True))
self.assertEqual(5, len(results))
def test_sub_process(self):
+ self.load_example_data()
workflow = self.create_workflow('subprocess')

View File

@@ -19,6 +19,7 @@ from crc.services.file_service import FileService
from crc.services.study_service import StudyService
from crc.models.protocol_builder import ProtocolBuilderStudySchema, ProtocolBuilderInvestigatorSchema, \
ProtocolBuilderRequiredDocumentSchema
+ from crc.services.workflow_service import WorkflowService
from tests.base_test import BaseTest
from crc.services.workflow_processor import WorkflowProcessor
@@ -26,7 +27,8 @@ from crc.services.workflow_processor import WorkflowProcessor
class TestWorkflowProcessor(BaseTest):
def _populate_form_with_random_data(self, task):
- WorkflowProcessor.populate_form_with_random_data(task)
+ api_task = WorkflowService.spiff_task_to_api_task(task, add_docs_and_forms=True)
+ WorkflowProcessor.populate_form_with_random_data(task, api_task)
def get_processor(self, study_model, spec_model):
workflow_model = StudyService._create_workflow_model(study_model, spec_model)
@@ -206,7 +208,7 @@ class TestWorkflowProcessor(BaseTest):
processor.complete_task(next_user_tasks[0])
with self.assertRaises(ApiError) as context:
processor.do_engine_steps()
- self.assertEqual("invalid_expression", context.exception.code)
+ self.assertEqual("task_error", context.exception.code)
def test_workflow_with_docx_template(self):
self.load_example_data()
@@ -417,4 +419,4 @@ class TestWorkflowProcessor(BaseTest):
task.task_spec.form.fields.append(field)
with self.assertRaises(ApiError):
- processor.populate_form_with_random_data(task)
+ self._populate_form_with_random_data(task)

View File

@@ -43,12 +43,12 @@ class TestWorkflowSpecValidation(BaseTest):
def test_invalid_expression(self):
errors = self.validate_workflow("invalid_expression")
self.assertEqual(1, len(errors))
- self.assertEqual("invalid_expression", errors[0]['code'])
+ self.assertEqual("workflow_execution_exception", errors[0]['code'])
self.assertEqual("ExclusiveGateway_003amsm", errors[0]['task_id'])
self.assertEqual("Has Bananas Gateway", errors[0]['task_name'])
self.assertEqual("invalid_expression.bpmn", errors[0]['file_name'])
- self.assertEqual("The expression 'this_value_does_not_exist==true' you provided has a missing value."
- " name 'this_value_does_not_exist' is not defined", errors[0]["message"])
+ self.assertEqual('ExclusiveGateway_003amsm: Error evaluating expression \'this_value_does_not_exist==true\', '
+ 'name \'this_value_does_not_exist\' is not defined', errors[0]["message"])
def test_validation_error(self):
errors = self.validate_workflow("invalid_spec")