Merge branch 'dev' into delete-uploaded-file-script-283

This commit is contained in:
mike cullerton 2021-04-05 10:40:33 -04:00
commit 9317e49e65
17 changed files with 504 additions and 363 deletions

View File

@@ -45,7 +45,6 @@ werkzeug = "*"
xlrd = "*"
xlsxwriter = "*"
pygithub = "*"
python-box = "*"
python-levenshtein = "*"

719 Pipfile.lock generated

File diff suppressed because it is too large.

View File

@@ -65,7 +65,8 @@ paths:
application/json:
schema:
type: array
$ref: "#/components/schemas/User"
items:
$ref: "#/components/schemas/User"
# /v1.0/study
/study:
get:
@@ -80,7 +81,8 @@ paths:
application/json:
schema:
type: array
$ref: "#/components/schemas/Study"
items:
$ref: "#/components/schemas/Study"
post:
operationId: crc.api.study.add_study
summary: Creates a new study with the given parameters.
@@ -159,6 +161,7 @@ paths:
/workflow_sync/{workflow_spec_id}/spec:
parameters:
- name: workflow_spec_id
required: true
in: path
description: The unique id of an existing workflow specification to modify.
schema:
@@ -392,7 +395,7 @@ paths:
parameters:
- name: spec_id
in: path
required: false
required: true
description: The unique id of an existing workflow specification to modify.
schema:
type: string
@@ -441,7 +444,7 @@ paths:
parameters:
- name: spec_id
in: path
required: false
required: true
description: The unique id of an existing workflow specification to validate.
schema:
type: string
@@ -497,7 +500,7 @@ paths:
parameters:
- name: cat_id
in: path
required: false
required: true
description: The unique id of an existing workflow spec category to modify.
schema:
type: string
@@ -1513,8 +1516,7 @@ components:
example: "random_fact"
x-nullable: true
file:
type: file
format: binary
type: string
Workflow:
properties:
id:
@@ -1522,7 +1524,7 @@ components:
type: integer
format: int64
status:
type: enum
type: string
enum: ['new','user_input_required','waiting','complete']
navigation:
type: array
@@ -1576,7 +1578,7 @@ components:
data:
type: object
multi_instance_type:
type: enum
type: string
enum: ['none', 'looping', 'parallel', 'sequential']
multi_instance_count:
type: number
@@ -1709,7 +1711,7 @@ components:
type: string
readOnly: true
type:
type: enum
type: string
enum: ['string', 'long', 'boolean', 'date', 'enum']
readOnly: true
label:
@@ -1785,7 +1787,7 @@ components:
type: string
example: "Chuck Norris"
data:
type: any
type: string
NavigationItem:
properties:
id:
@@ -1815,7 +1817,7 @@ components:
type: integer
example: 4
state:
type: enum
type: string
enum: ['FUTURE', 'WAITING', 'READY', 'CANCELLED', 'COMPLETED','LIKELY','MAYBE']
readOnly: true
is_decision:

View File

@@ -6,10 +6,10 @@ from crc.models.protocol_builder import ProtocolBuilderInvestigatorType
from crc.models.study import StudyModel, StudySchema
from crc.models.workflow import WorkflowStatus
from crc.scripts.script import Script
from crc.services.cache_service import timeit
from crc.services.file_service import FileService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.study_service import StudyService
from box import Box
class StudyInfo(Script):
"""Please see the detailed description that is provided below. """
@@ -186,7 +186,7 @@ Returns information specific to the protocol.
# Assure the reference file exists (a bit hacky, but we want to raise this error early, and cleanly.)
FileService.get_reference_file_data(FileService.DOCUMENT_LIST)
FileService.get_reference_file_data(FileService.INVESTIGATOR_LIST)
data = Box({
data = {
"study":{
"info": {
"id": 12,
@@ -378,13 +378,14 @@ Returns information specific to the protocol.
'id': 0,
}
}
})
}
if args[0]=='documents':
return StudyService().get_documents_status(study_id)
return data['study'][args[0]]
#self.add_data_to_task(task=task, data=data["study"])
#self.add_data_to_task(task, {"documents": StudyService().get_documents_status(study_id)})
@timeit
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
self.check_args(args,2)
prefix = None
@@ -413,12 +414,12 @@ Returns information specific to the protocol.
retval = StudyService().get_documents_status(study_id)
if cmd == 'protocol':
retval = StudyService().get_protocol(study_id)
if isinstance(retval, list):
retval = [Box(item) for item in retval]
if isinstance(retval,dict) and prefix is not None:
return Box({x:retval[x] for x in retval.keys() if x[:len(prefix)] == prefix})
elif isinstance(retval,dict) :
return Box(retval)
# if isinstance(retval, list):
# retval = [Box(item) for item in retval]
# if isinstance(retval,dict) and prefix is not None:
# return Box({x:retval[x] for x in retval.keys() if x[:len(prefix)] == prefix})
# elif isinstance(retval,dict) :
# return Box(retval)
else:
return retval

View File

@@ -0,0 +1,56 @@
import time
cache_store = {}
import time
def firsttime():
return time.time()
def sincetime(txt,lasttime):
thistime=firsttime()
print('%s runtime was %2f'%(txt,thistime-lasttime))
return thistime
def timeit(f):
def timed(*args, **kw):
ts = time.time()
result = f(*args, **kw)
te = time.time()
print('func:%r args:[%r, %r] took: %2.4f sec' % (f.__name__, args, kw, te-ts))
return result
return timed
# first pass - meant to be down and dirty
def purge_cache(now):
dellist = []
for key in cache_store.keys():
if cache_store[key]['timeout'] < now:
dellist.append(key)
for key in dellist:
del cache_store[key]
def cache(f,timeout=60):
"""Cache the values for function for x minutes
we still have work to do to make a optional kw argument
to set the length of time to cache
"""
def cached(*args, **kw):
now = time.time()
purge_cache(now)
key =f.__name__+str(args)+str(kw)
if key in cache_store.keys():
return cache_store[key]['value']
else:
newtime = now+timeout*60
result = f(*args, **kw)
cache_store[key] ={}
cache_store[key]['value'] = result
cache_store[key]['timeout'] = newtime
return result
return cached
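For context, the new cache_service module above provides two decorators: timeit, which prints a function's wall-clock runtime, and cache, which memoizes results keyed on the function name and arguments for roughly an hour. A minimal usage sketch follows; the function expensive_lookup and its body are hypothetical and not part of this commit.

    import time
    from crc.services.cache_service import cache, timeit

    @timeit   # prints how long each call took
    @cache    # repeat calls within the timeout window return the stored value
    def expensive_lookup(spec_id):
        time.sleep(1)                # hypothetical stand-in for a slow query
        return {'spec_id': spec_id}

    expensive_lookup('top_level_workflow')  # runs the function and stores the result
    expensive_lookup('top_level_workflow')  # answered from cache_store, so the timed call is near-instant

This mirrors how the diff applies @cache to FileService.is_workflow_review and @timeit to StudyInfo.do_task and WorkflowProcessor.run_master_spec.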

View File

@@ -17,9 +17,10 @@ generic_message = """Workflow validation failed. For more information about the
known_errors = {'Error is Non-default exclusive outgoing sequence flow without condition':
{'hint': 'Add a Condition Type to your gateway path.'},
'Could not set task title on task (\w+) with \'(.*)\' property because \\1: Error evaluating expression \'(.*)\', "\'Box\' object has no attribute \'\\2\'"$':
{'hint': 'You are overriding the title for task `{task_id}`, using the `{property}` extension, and it is causing an error. Look under the extensions tab for the task, and check the value you are setting for the property.',
'groups': {'task_id': 0, 'property': 1}}}
'Could not set task title on task .*':
{'hint': 'You are overriding the title using an extension and it is causing this error. '
'Look under the extensions tab for the task, and check the value you are setting '
'for the property.'}}
class ValidationErrorService(object):
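The keys of known_errors appear to be treated as regular expressions matched against the raw validation message to select a hint: the entry removed above used capture groups and a groups map, while its replacement uses a simpler pattern. Below is a minimal sketch of that kind of lookup; the helper name lookup_hint is hypothetical and is not the project's actual ValidationErrorService API.

    import re

    def lookup_hint(message, known_errors):
        # Try each known_errors key as a regex against the raw error text.
        for pattern, details in known_errors.items():
            if re.search(pattern, message):
                return details['hint']
        return None

    hint = lookup_hint('Could not set task title on task Task_0xbnd5d ...',
                       {'Could not set task title on task .*':
                            {'hint': 'You are overriding the title using an extension ...'}})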

View File

@@ -16,7 +16,7 @@ from crc import session, app
from crc.api.common import ApiError
from crc.models.file import FileType, FileDataModel, FileModel, LookupFileModel, LookupDataModel
from crc.models.workflow import WorkflowSpecModel, WorkflowModel, WorkflowSpecDependencyFile
from crc.services.cache_service import cache
class FileService(object):
"""Provides consistent management and rules for storing, retrieving and processing files."""
@@ -58,6 +58,7 @@ class FileService(object):
return code in doc_dict
@staticmethod
@cache
def is_workflow_review(workflow_spec_id):
files = session.query(FileModel).filter(FileModel.workflow_spec_id==workflow_spec_id).all()
review = any([f.is_review for f in files])

View File

@@ -23,6 +23,7 @@ from crc.services.ldap_service import LdapService
from crc.services.protocol_builder import ProtocolBuilderService
from crc.services.workflow_processor import WorkflowProcessor
from SpiffWorkflow import Task as SpiffTask
from crc.services.cache_service import timeit
class StudyService(object):
"""Provides common tools for working with a Study"""

View File

@@ -28,7 +28,7 @@ from crc.scripts.script import Script
from crc.services.file_service import FileService
from crc import app
from crc.services.user_service import UserService
from crc.services.cache_service import timeit, firsttime, sincetime
class CustomBpmnScriptEngine(BpmnScriptEngine):
"""This is a custom script processor that can be easily injected into Spiff Workflow.
@@ -277,16 +277,22 @@ class WorkflowProcessor(object):
self.workflow_model.dependencies.append(WorkflowSpecDependencyFile(file_data_id=file_data.id))
@staticmethod
@timeit
def run_master_spec(spec_model, study):
"""Executes a BPMN specification for the given study, without recording any information to the database
Useful for running the master specification, which should not persist. """
lasttime = firsttime()
spec_data_files = FileService.get_spec_data_files(spec_model.id)
lasttime = sincetime('load Files', lasttime)
spec = WorkflowProcessor.get_spec(spec_data_files, spec_model.id)
lasttime = sincetime('get spec', lasttime)
try:
bpmn_workflow = BpmnWorkflow(spec, script_engine=WorkflowProcessor._script_engine)
bpmn_workflow.data[WorkflowProcessor.STUDY_ID_KEY] = study.id
bpmn_workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY] = False
lasttime = sincetime('get_workflow', lasttime)
bpmn_workflow.do_engine_steps()
lasttime = sincetime('run steps', lasttime)
except WorkflowException as we:
raise ApiError.from_task_spec("error_running_master_spec", str(we), we.sender)

View File

@@ -16,7 +16,7 @@ from SpiffWorkflow.bpmn.specs.UserTask import UserTask
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
from SpiffWorkflow.specs import CancelTask, StartTask, MultiChoice
from SpiffWorkflow.util.deep_merge import DeepMerge
from box import Box
from jinja2 import Template
from crc import db, app

View File

@@ -53,7 +53,7 @@ psycopg2-binary==2.8.6
pyasn1==0.4.8
pycparser==2.20
pygithub==1.53
pygments==2.7.2
pygments==2.7.4
pyjwt==1.7.1
pyparsing==2.4.7
pyrsistent==0.17.3
@@ -80,7 +80,7 @@ sphinxcontrib-serializinghtml==1.1.4
spiffworkflow
sqlalchemy==1.3.20
swagger-ui-bundle==0.0.8
urllib3==1.26.2
urllib3==1.26.3
waitress==1.4.4
webob==1.8.6
webtest==2.0.35

3 postgres/package-lock.json generated Normal file
View File

@@ -0,0 +1,3 @@
{
"lockfileVersion": 1
}

View File

@@ -166,8 +166,10 @@ class BaseTest(unittest.TestCase):
study_model = StudyModel(**study_json)
session.add(study_model)
StudyService._add_all_workflow_specs_to_study(study_model)
session.execute(Sequence(StudyModel.__tablename__ + '_id_seq'))
session.commit()
session.commit()
update_seq = f"ALTER SEQUENCE %s RESTART WITH %s" % (StudyModel.__tablename__ + '_id_seq', study_model.id + 1)
print("Update Sequence." + update_seq)
session.execute(update_seq)
session.flush()
specs = session.query(WorkflowSpecModel).all()
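The replacement lines restart the Postgres id sequence past the explicitly seeded study id, so rows inserted later with database-assigned ids do not collide with it (the f prefix on the update_seq string is redundant, since the statement is built with %-formatting). A standalone sketch of the same pattern, using a hypothetical helper and SQLAlchemy's text():

    from sqlalchemy import text

    def restart_id_sequence(session, table_name, next_id):
        # After inserting rows with explicit primary keys, bump the sequence so
        # the next DEFAULT-generated id starts beyond them.
        stmt = 'ALTER SEQUENCE %s RESTART WITH %d' % (table_name + '_id_seq', next_id)
        session.execute(text(stmt))

    # e.g. restart_id_sequence(session, StudyModel.__tablename__, study_model.id + 1)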

View File

@@ -2,7 +2,8 @@ from tests.base_test import BaseTest
from crc.scripts.update_study import UpdateStudy
from crc.services.workflow_processor import WorkflowProcessor
from box import Box
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
class TestUpdateStudyScript(BaseTest):

View File

@@ -10,7 +10,7 @@ from crc.models.api_models import WorkflowApiSchema, MultiInstanceType, TaskSche
from crc.models.file import FileModelSchema
from crc.models.workflow import WorkflowStatus
from crc.models.task_event import TaskEventModel
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
class TestTasksApi(BaseTest):

View File

@@ -22,5 +22,6 @@ class TestCustomerError(BaseTest):
rv = self.app.get('/v1.0/workflow-specification/%s/validate' % spec_model.id, headers=self.logged_in_headers())
self.assertIn('hint', rv.json[0])
json_data = json.loads(rv.get_data(as_text=True))
self.assertEqual('You are overriding the title for task `Task_0xbnd5d`, using the `display_name` extension, and it is causing an error. Look under the extensions tab for the task, and check the value you are setting for the property.',
self.assertEqual("You are overriding the title using an extension and it is causing this error. Look under the "
"extensions tab for the task, and check the value you are setting for the property.",
json_data[0]['hint'])

View File

@@ -253,10 +253,8 @@ class TestWorkflowProcessor(BaseTest):
processor = self.get_processor(study, workflow_spec_model)
self.assertTrue(processor.get_version_string().startswith('v2.1.1'))
def test_hard_reset(self):
self.load_example_data()
# Start the two_forms workflow, and enter some data in the first form.
study = session.query(StudyModel).first()
workflow_spec_model = self.load_test_spec("two_forms")
@@ -275,6 +273,8 @@
# Assure that creating a new processor doesn't cause any issues, and maintains the spec version.
processor.workflow_model.bpmn_workflow_json = processor.serialize()
db.session.add(processor.workflow_model) ## Assure this isn't transient, which was causing some errors.
self.assertIsNotNone(processor.workflow_model.bpmn_workflow_json)
processor2 = WorkflowProcessor(processor.workflow_model)
self.assertFalse(processor2.is_latest_spec) # Still at version 1.