Merge branch 'dev' into delete-uploaded-file-script-283

This commit is contained in:
mike cullerton 2021-04-05 10:40:33 -04:00
commit 9317e49e65
17 changed files with 504 additions and 363 deletions

View File

@ -45,7 +45,6 @@ werkzeug = "*"
xlrd = "*" xlrd = "*"
xlsxwriter = "*" xlsxwriter = "*"
pygithub = "*" pygithub = "*"
python-box = "*"
python-levenshtein = "*" python-levenshtein = "*"

719
Pipfile.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -65,7 +65,8 @@ paths:
application/json: application/json:
schema: schema:
type: array type: array
$ref: "#/components/schemas/User" items:
$ref: "#/components/schemas/User"
# /v1.0/study # /v1.0/study
/study: /study:
get: get:
@ -80,7 +81,8 @@ paths:
application/json: application/json:
schema: schema:
type: array type: array
$ref: "#/components/schemas/Study" items:
$ref: "#/components/schemas/Study"
post: post:
operationId: crc.api.study.add_study operationId: crc.api.study.add_study
summary: Creates a new study with the given parameters. summary: Creates a new study with the given parameters.
@ -159,6 +161,7 @@ paths:
/workflow_sync/{workflow_spec_id}/spec: /workflow_sync/{workflow_spec_id}/spec:
parameters: parameters:
- name: workflow_spec_id - name: workflow_spec_id
required: true
in: path in: path
description: The unique id of an existing workflow specification to modify. description: The unique id of an existing workflow specification to modify.
schema: schema:
@ -392,7 +395,7 @@ paths:
parameters: parameters:
- name: spec_id - name: spec_id
in: path in: path
required: false required: true
description: The unique id of an existing workflow specification to modify. description: The unique id of an existing workflow specification to modify.
schema: schema:
type: string type: string
@ -441,7 +444,7 @@ paths:
parameters: parameters:
- name: spec_id - name: spec_id
in: path in: path
required: false required: true
description: The unique id of an existing workflow specification to validate. description: The unique id of an existing workflow specification to validate.
schema: schema:
type: string type: string
@ -497,7 +500,7 @@ paths:
parameters: parameters:
- name: cat_id - name: cat_id
in: path in: path
required: false required: true
description: The unique id of an existing workflow spec category to modify. description: The unique id of an existing workflow spec category to modify.
schema: schema:
type: string type: string
@ -1513,8 +1516,7 @@ components:
example: "random_fact" example: "random_fact"
x-nullable: true x-nullable: true
file: file:
type: file type: string
format: binary
Workflow: Workflow:
properties: properties:
id: id:
@ -1522,7 +1524,7 @@ components:
type: integer type: integer
format: int64 format: int64
status: status:
type: enum type: string
enum: ['new','user_input_required','waiting','complete'] enum: ['new','user_input_required','waiting','complete']
navigation: navigation:
type: array type: array
@ -1576,7 +1578,7 @@ components:
data: data:
type: object type: object
multi_instance_type: multi_instance_type:
type: enum type: string
enum: ['none', 'looping', 'parallel', 'sequential'] enum: ['none', 'looping', 'parallel', 'sequential']
multi_instance_count: multi_instance_count:
type: number type: number
@ -1709,7 +1711,7 @@ components:
type: string type: string
readOnly: true readOnly: true
type: type:
type: enum type: string
enum: ['string', 'long', 'boolean', 'date', 'enum'] enum: ['string', 'long', 'boolean', 'date', 'enum']
readOnly: true readOnly: true
label: label:
@ -1785,7 +1787,7 @@ components:
type: string type: string
example: "Chuck Norris" example: "Chuck Norris"
data: data:
type: any type: string
NavigationItem: NavigationItem:
properties: properties:
id: id:
@ -1815,7 +1817,7 @@ components:
type: integer type: integer
example: 4 example: 4
state: state:
type: enum type: string
enum: ['FUTURE', 'WAITING', 'READY', 'CANCELLED', 'COMPLETED','LIKELY','MAYBE'] enum: ['FUTURE', 'WAITING', 'READY', 'CANCELLED', 'COMPLETED','LIKELY','MAYBE']
readOnly: true readOnly: true
is_decision: is_decision:

View File

@ -6,10 +6,10 @@ from crc.models.protocol_builder import ProtocolBuilderInvestigatorType
from crc.models.study import StudyModel, StudySchema from crc.models.study import StudyModel, StudySchema
from crc.models.workflow import WorkflowStatus from crc.models.workflow import WorkflowStatus
from crc.scripts.script import Script from crc.scripts.script import Script
from crc.services.cache_service import timeit
from crc.services.file_service import FileService from crc.services.file_service import FileService
from crc.services.protocol_builder import ProtocolBuilderService from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.study_service import StudyService from crc.services.study_service import StudyService
from box import Box
class StudyInfo(Script): class StudyInfo(Script):
"""Please see the detailed description that is provided below. """ """Please see the detailed description that is provided below. """
@ -186,7 +186,7 @@ Returns information specific to the protocol.
# Assure the reference file exists (a bit hacky, but we want to raise this error early, and cleanly.) # Assure the reference file exists (a bit hacky, but we want to raise this error early, and cleanly.)
FileService.get_reference_file_data(FileService.DOCUMENT_LIST) FileService.get_reference_file_data(FileService.DOCUMENT_LIST)
FileService.get_reference_file_data(FileService.INVESTIGATOR_LIST) FileService.get_reference_file_data(FileService.INVESTIGATOR_LIST)
data = Box({ data = {
"study":{ "study":{
"info": { "info": {
"id": 12, "id": 12,
@ -378,13 +378,14 @@ Returns information specific to the protocol.
'id': 0, 'id': 0,
} }
} }
}) }
if args[0]=='documents': if args[0]=='documents':
return StudyService().get_documents_status(study_id) return StudyService().get_documents_status(study_id)
return data['study'][args[0]] return data['study'][args[0]]
#self.add_data_to_task(task=task, data=data["study"]) #self.add_data_to_task(task=task, data=data["study"])
#self.add_data_to_task(task, {"documents": StudyService().get_documents_status(study_id)}) #self.add_data_to_task(task, {"documents": StudyService().get_documents_status(study_id)})
@timeit
def do_task(self, task, study_id, workflow_id, *args, **kwargs): def do_task(self, task, study_id, workflow_id, *args, **kwargs):
self.check_args(args,2) self.check_args(args,2)
prefix = None prefix = None
@ -413,12 +414,12 @@ Returns information specific to the protocol.
retval = StudyService().get_documents_status(study_id) retval = StudyService().get_documents_status(study_id)
if cmd == 'protocol': if cmd == 'protocol':
retval = StudyService().get_protocol(study_id) retval = StudyService().get_protocol(study_id)
if isinstance(retval, list): # if isinstance(retval, list):
retval = [Box(item) for item in retval] # retval = [Box(item) for item in retval]
if isinstance(retval,dict) and prefix is not None: # if isinstance(retval,dict) and prefix is not None:
return Box({x:retval[x] for x in retval.keys() if x[:len(prefix)] == prefix}) # return Box({x:retval[x] for x in retval.keys() if x[:len(prefix)] == prefix})
elif isinstance(retval,dict) : # elif isinstance(retval,dict) :
return Box(retval) # return Box(retval)
else: else:
return retval return retval

View File

@ -0,0 +1,56 @@
import time

# Module-level in-memory cache used by the cache()/purge_cache() helpers below.
# Maps key (function name + stringified args/kwargs) ->
#   {'value': cached result, 'timeout': expiry as epoch seconds}.
# Fix: the original declared `import time` twice; the duplicate is removed.
cache_store = {}
def firsttime():
    """Return the current epoch time in seconds, as a timing start point."""
    now = time.time()
    return now
def sincetime(txt, lasttime):
    """Print the elapsed seconds since *lasttime*, labeled *txt*; return now.

    Designed for chained timing: ``last = sincetime('step', last)``.
    """
    now = time.time()
    elapsed = now - lasttime
    print('%s runtime was %2f' % (txt, elapsed))
    return now
def timeit(f):
    """Decorator that prints the wrapped function's runtime on every call.

    The printed line includes the function name, the positional and keyword
    arguments, and the elapsed wall-clock seconds.  The wrapped function's
    return value is passed through unchanged.

    Fix: uses functools.wraps so the decorated function keeps its original
    __name__/__doc__ (the original wrapper reported as 'timed' to
    introspection and stacked decorators).
    """
    from functools import wraps  # local import: keeps this block self-contained

    @wraps(f)
    def timed(*args, **kw):
        ts = time.time()
        result = f(*args, **kw)
        te = time.time()
        print('func:%r args:[%r, %r] took: %2.4f sec' % (f.__name__, args, kw, te - ts))
        return result
    return timed
# first pass - meant to be down and dirty
def purge_cache(now):
    """Remove every entry in the module-level cache_store whose 'timeout'
    (an epoch timestamp) is strictly earlier than *now*.

    Expired keys are collected first, then deleted, so the dict is never
    mutated while being iterated.
    """
    expired = [key for key, entry in cache_store.items() if entry['timeout'] < now]
    for key in expired:
        del cache_store[key]
def cache(f, timeout=60):
    """Memoizing decorator: cache *f*'s results for *timeout* minutes.

    The cache key combines the function name with the stringified positional
    and keyword arguments, and entries live in the module-level cache_store.
    Every call first purges any expired entries across the whole store.

    NOTE(review, from the original docstring): there is still work to do to
    accept an optional kw argument controlling the cache duration per call.
    """
    def cached(*args, **kw):
        now = time.time()
        purge_cache(now)
        key = f.__name__ + str(args) + str(kw)
        # Idiomatic membership test (was `key in cache_store.keys()`).
        if key in cache_store:
            return cache_store[key]['value']
        result = f(*args, **kw)
        # timeout is expressed in minutes; store the expiry as epoch seconds.
        cache_store[key] = {'value': result, 'timeout': now + timeout * 60}
        return result
    return cached

View File

@ -17,9 +17,10 @@ generic_message = """Workflow validation failed. For more information about the
known_errors = {'Error is Non-default exclusive outgoing sequence flow without condition': known_errors = {'Error is Non-default exclusive outgoing sequence flow without condition':
{'hint': 'Add a Condition Type to your gateway path.'}, {'hint': 'Add a Condition Type to your gateway path.'},
'Could not set task title on task (\w+) with \'(.*)\' property because \\1: Error evaluating expression \'(.*)\', "\'Box\' object has no attribute \'\\2\'"$': 'Could not set task title on task .*':
{'hint': 'You are overriding the title for task `{task_id}`, using the `{property}` extension, and it is causing an error. Look under the extensions tab for the task, and check the value you are setting for the property.', {'hint': 'You are overriding the title using an extension and it is causing this error. '
'groups': {'task_id': 0, 'property': 1}}} 'Look under the extensions tab for the task, and check the value you are setting '
'for the property.'}}
class ValidationErrorService(object): class ValidationErrorService(object):

View File

@ -16,7 +16,7 @@ from crc import session, app
from crc.api.common import ApiError from crc.api.common import ApiError
from crc.models.file import FileType, FileDataModel, FileModel, LookupFileModel, LookupDataModel from crc.models.file import FileType, FileDataModel, FileModel, LookupFileModel, LookupDataModel
from crc.models.workflow import WorkflowSpecModel, WorkflowModel, WorkflowSpecDependencyFile from crc.models.workflow import WorkflowSpecModel, WorkflowModel, WorkflowSpecDependencyFile
from crc.services.cache_service import cache
class FileService(object): class FileService(object):
"""Provides consistent management and rules for storing, retrieving and processing files.""" """Provides consistent management and rules for storing, retrieving and processing files."""
@ -58,6 +58,7 @@ class FileService(object):
return code in doc_dict return code in doc_dict
@staticmethod @staticmethod
@cache
def is_workflow_review(workflow_spec_id): def is_workflow_review(workflow_spec_id):
files = session.query(FileModel).filter(FileModel.workflow_spec_id==workflow_spec_id).all() files = session.query(FileModel).filter(FileModel.workflow_spec_id==workflow_spec_id).all()
review = any([f.is_review for f in files]) review = any([f.is_review for f in files])

View File

@ -23,6 +23,7 @@ from crc.services.ldap_service import LdapService
from crc.services.protocol_builder import ProtocolBuilderService from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.workflow_processor import WorkflowProcessor from crc.services.workflow_processor import WorkflowProcessor
from SpiffWorkflow import Task as SpiffTask from SpiffWorkflow import Task as SpiffTask
from crc.services.cache_service import timeit
class StudyService(object): class StudyService(object):
"""Provides common tools for working with a Study""" """Provides common tools for working with a Study"""

View File

@ -28,7 +28,7 @@ from crc.scripts.script import Script
from crc.services.file_service import FileService from crc.services.file_service import FileService
from crc import app from crc import app
from crc.services.user_service import UserService from crc.services.user_service import UserService
from crc.services.cache_service import timeit, firsttime, sincetime
class CustomBpmnScriptEngine(BpmnScriptEngine): class CustomBpmnScriptEngine(BpmnScriptEngine):
"""This is a custom script processor that can be easily injected into Spiff Workflow. """This is a custom script processor that can be easily injected into Spiff Workflow.
@ -277,16 +277,22 @@ class WorkflowProcessor(object):
self.workflow_model.dependencies.append(WorkflowSpecDependencyFile(file_data_id=file_data.id)) self.workflow_model.dependencies.append(WorkflowSpecDependencyFile(file_data_id=file_data.id))
@staticmethod @staticmethod
@timeit
def run_master_spec(spec_model, study): def run_master_spec(spec_model, study):
"""Executes a BPMN specification for the given study, without recording any information to the database """Executes a BPMN specification for the given study, without recording any information to the database
Useful for running the master specification, which should not persist. """ Useful for running the master specification, which should not persist. """
lasttime = firsttime()
spec_data_files = FileService.get_spec_data_files(spec_model.id) spec_data_files = FileService.get_spec_data_files(spec_model.id)
lasttime = sincetime('load Files', lasttime)
spec = WorkflowProcessor.get_spec(spec_data_files, spec_model.id) spec = WorkflowProcessor.get_spec(spec_data_files, spec_model.id)
lasttime = sincetime('get spec', lasttime)
try: try:
bpmn_workflow = BpmnWorkflow(spec, script_engine=WorkflowProcessor._script_engine) bpmn_workflow = BpmnWorkflow(spec, script_engine=WorkflowProcessor._script_engine)
bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = study.id bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = study.id
bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = False bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = False
lasttime = sincetime('get_workflow', lasttime)
bpmn_workflow.do_engine_steps() bpmn_workflow.do_engine_steps()
lasttime = sincetime('run steps', lasttime)
except WorkflowException as we: except WorkflowException as we:
raise ApiError.from_task_spec("error_running_master_spec", str(we), we.sender) raise ApiError.from_task_spec("error_running_master_spec", str(we), we.sender)

View File

@ -16,7 +16,7 @@ from SpiffWorkflow.bpmn.specs.UserTask import UserTask
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
from SpiffWorkflow.specs import CancelTask, StartTask, MultiChoice from SpiffWorkflow.specs import CancelTask, StartTask, MultiChoice
from SpiffWorkflow.util.deep_merge import DeepMerge from SpiffWorkflow.util.deep_merge import DeepMerge
from box import Box
from jinja2 import Template from jinja2 import Template
from crc import db, app from crc import db, app

View File

@ -53,7 +53,7 @@ psycopg2-binary==2.8.6
pyasn1==0.4.8 pyasn1==0.4.8
pycparser==2.20 pycparser==2.20
pygithub==1.53 pygithub==1.53
pygments==2.7.2 pygments==2.7.4
pyjwt==1.7.1 pyjwt==1.7.1
pyparsing==2.4.7 pyparsing==2.4.7
pyrsistent==0.17.3 pyrsistent==0.17.3
@ -80,7 +80,7 @@ sphinxcontrib-serializinghtml==1.1.4
spiffworkflow spiffworkflow
sqlalchemy==1.3.20 sqlalchemy==1.3.20
swagger-ui-bundle==0.0.8 swagger-ui-bundle==0.0.8
urllib3==1.26.2 urllib3==1.26.3
waitress==1.4.4 waitress==1.4.4
webob==1.8.6 webob==1.8.6
webtest==2.0.35 webtest==2.0.35

3
postgres/package-lock.json generated Normal file
View File

@ -0,0 +1,3 @@
{
"lockfileVersion": 1
}

View File

@ -166,8 +166,10 @@ class BaseTest(unittest.TestCase):
study_model = StudyModel(**study_json) study_model = StudyModel(**study_json)
session.add(study_model) session.add(study_model)
StudyService._add_all_workflow_specs_to_study(study_model) StudyService._add_all_workflow_specs_to_study(study_model)
session.execute(Sequence(StudyModel.__tablename__ + '_id_seq')) session.commit()
session.commit() update_seq = f"ALTER SEQUENCE %s RESTART WITH %s" % (StudyModel.__tablename__ + '_id_seq', study_model.id + 1)
print("Update Sequence." + update_seq)
session.execute(update_seq)
session.flush() session.flush()
specs = session.query(WorkflowSpecModel).all() specs = session.query(WorkflowSpecModel).all()

View File

@ -2,7 +2,8 @@ from tests.base_test import BaseTest
from crc.scripts.update_study import UpdateStudy from crc.scripts.update_study import UpdateStudy
from crc.services.workflow_processor import WorkflowProcessor from crc.services.workflow_processor import WorkflowProcessor
from box import Box from SpiffWorkflow.bpmn.PythonScriptEngine import Box
class TestUpdateStudyScript(BaseTest): class TestUpdateStudyScript(BaseTest):

View File

@ -10,7 +10,7 @@ from crc.models.api_models import WorkflowApiSchema, MultiInstanceType, TaskSche
from crc.models.file import FileModelSchema from crc.models.file import FileModelSchema
from crc.models.workflow import WorkflowStatus from crc.models.workflow import WorkflowStatus
from crc.models.task_event import TaskEventModel from crc.models.task_event import TaskEventModel
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
class TestTasksApi(BaseTest): class TestTasksApi(BaseTest):

View File

@ -22,5 +22,6 @@ class TestCustomerError(BaseTest):
rv = self.app.get('/v1.0/workflow-specification/%s/validate' % spec_model.id, headers=self.logged_in_headers()) rv = self.app.get('/v1.0/workflow-specification/%s/validate' % spec_model.id, headers=self.logged_in_headers())
self.assertIn('hint', rv.json[0]) self.assertIn('hint', rv.json[0])
json_data = json.loads(rv.get_data(as_text=True)) json_data = json.loads(rv.get_data(as_text=True))
self.assertEqual('You are overriding the title for task `Task_0xbnd5d`, using the `display_name` extension, and it is causing an error. Look under the extensions tab for the task, and check the value you are setting for the property.', self.assertEqual("You are overriding the title using an extension and it is causing this error. Look under the "
"extensions tab for the task, and check the value you are setting for the property.",
json_data[0]['hint']) json_data[0]['hint'])

View File

@ -253,10 +253,8 @@ class TestWorkflowProcessor(BaseTest):
processor = self.get_processor(study, workflow_spec_model) processor = self.get_processor(study, workflow_spec_model)
self.assertTrue(processor.get_version_string().startswith('v2.1.1')) self.assertTrue(processor.get_version_string().startswith('v2.1.1'))
def test_hard_reset(self): def test_hard_reset(self):
self.load_example_data() self.load_example_data()
# Start the two_forms workflow, and enter some data in the first form. # Start the two_forms workflow, and enter some data in the first form.
study = session.query(StudyModel).first() study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("two_forms") workflow_spec_model = self.load_test_spec("two_forms")
@ -275,6 +273,8 @@ class TestWorkflowProcessor(BaseTest):
# Assure that creating a new processor doesn't cause any issues, and maintains the spec version. # Assure that creating a new processor doesn't cause any issues, and maintains the spec version.
processor.workflow_model.bpmn_workflow_json = processor.serialize() processor.workflow_model.bpmn_workflow_json = processor.serialize()
db.session.add(processor.workflow_model) ## Assure this isn't transient, which was causing some errors.
self.assertIsNotNone(processor.workflow_model.bpmn_workflow_json)
processor2 = WorkflowProcessor(processor.workflow_model) processor2 = WorkflowProcessor(processor.workflow_model)
self.assertFalse(processor2.is_latest_spec) # Still at version 1. self.assertFalse(processor2.is_latest_spec) # Still at version 1.