Add file_data_get and file_data_set functions that can be accessed in a bpmn script function

Fixes #299
Kelly McDonald 2021-04-26 09:41:14 -04:00
parent 277beb345f
commit d3d7eeb309
8 changed files with 264 additions and 76 deletions
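
The new scripts are meant to be called directly from a BPMN script task. A minimal sketch of the intended usage (my_file_id is illustrative; in practice the id comes from study_info('documents'), as in the test workflow further down):

# Illustrative sketch of a BPMN script task body
file_data_set(file_id=my_file_id, key='test', value='me')   # store a value against a file
stored = file_data_get(file_id=my_file_id, key='test')      # read it back -> 'me'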

View File

@@ -7,30 +7,30 @@ from crc.models.data_store import DataStoreModel, DataStoreSchema
from crc.scripts.data_store_base import DataStoreBase
def study_data_set(study_id, key, value):
"""Set a study data value in the data_store, mimic the script endpoint"""
if study_id is None:
raise ApiError('unknown_study', 'Please provide a valid Study ID.')
if key is None:
raise ApiError('invalid_key', 'Please provide a valid key')
dsb = DataStoreBase()
retval = dsb.set_data_common('api', study_id, None, None, None, 'api_study_data_set', key, value)
json_value = json.dumps(retval, ensure_ascii=False, indent=2)
return json_value
# def study_data_set(study_id, key, value):
# """Set a study data value in the data_store, mimic the script endpoint"""
# if study_id is None:
# raise ApiError('unknown_study', 'Please provide a valid Study ID.')
#
# if key is None:
# raise ApiError('invalid_key', 'Please provide a valid key')
# dsb = DataStoreBase()
# retval = dsb.set_data_common('api', study_id, None, None, None, 'api_study_data_set', key, value)
# json_value = json.dumps(retval, ensure_ascii=False, indent=2)
# return json_value
def study_data_get(study_id, key, default=None):
"""Get a study data value in the data_store, mimic the script endpoint"""
if study_id is None:
raise ApiError('unknown_study', 'Please provide a valid Study ID.')
if key is None:
raise ApiError('invalid_key', 'Please provide a valid key')
dsb = DataStoreBase()
retval = dsb.get_data_common(study_id, None, 'api_study_data_get', key, default)
# json_value = json.dumps(retval, ensure_ascii=False, indent=2) # just return raw text
return retval
# def study_data_get(study_id, key, default=None):
# """Get a study data value in the data_store, mimic the script endpoint"""
# if study_id is None:
# raise ApiError('unknown_study', 'Please provide a valid Study ID.')
#
# if key is None:
# raise ApiError('invalid_key', 'Please provide a valid key')
# dsb = DataStoreBase()
# retval = dsb.get_data_common(study_id, None, 'api_study_data_get', key, default)
# # json_value = json.dumps(retval, ensure_ascii=False, indent=2) # just return raw text
# return retval
def study_multi_get(study_id):
@@ -44,56 +44,56 @@ def study_multi_get(study_id):
return results
def study_data_del(study_id, key):
"""Delete a study data value in the data store"""
if study_id is None:
raise ApiError('unknown_study', 'Please provide a valid Study ID.')
if key is None:
raise ApiError('invalid_key', 'Please provide a valid key')
dsb = DataStoreBase()
dsb.del_data_common(study_id, None, 'api_study_data_get', key)
json_value = json.dumps('deleted', ensure_ascii=False, indent=2)
return json_value
# def study_data_del(study_id, key):
# """Delete a study data value in the data store"""
# if study_id is None:
# raise ApiError('unknown_study', 'Please provide a valid Study ID.')
#
# if key is None:
# raise ApiError('invalid_key', 'Please provide a valid key')
# dsb = DataStoreBase()
# dsb.del_data_common(study_id, None, 'api_study_data_get', key)
# json_value = json.dumps('deleted', ensure_ascii=False, indent=2)
# return json_value
def user_data_set(user_id, key, value):
"""Set a user data value in the data_store, mimic the script endpoint"""
if user_id is None:
raise ApiError('unknown_study', 'Please provide a valid UserID.')
if key is None:
raise ApiError('invalid_key', 'Please provide a valid key')
dsb = DataStoreBase()
retval = dsb.set_data_common('api',
None,
user_id,
None,
None,
'api_user_data_set',
key, value)
json_value = json.dumps(retval, ensure_ascii=False, indent=2)
return json_value
# def user_data_set(user_id, key, value):
# """Set a user data value in the data_store, mimic the script endpoint"""
# if user_id is None:
# raise ApiError('unknown_study', 'Please provide a valid UserID.')
#
# if key is None:
# raise ApiError('invalid_key', 'Please provide a valid key')
# dsb = DataStoreBase()
#
# retval = dsb.set_data_common('api',
# None,
# user_id,
# None,
# None,
# 'api_user_data_set',
# key, value)
#
# json_value = json.dumps(retval, ensure_ascii=False, indent=2)
# return json_value
def user_data_get(user_id, key, default=None):
"""Get a user data value from the data_store, mimic the script endpoint"""
if user_id is None:
raise ApiError('unknown_study', 'Please provide a valid UserID.')
if key is None:
raise ApiError('invalid_key', 'Please provide a valid key')
dsb = DataStoreBase()
retval = dsb.get_data_common(None,
user_id,
'api_user_data_get',
key, default)
# json_value = json.dumps(retval, ensure_ascii=False, indent=2) # just return raw text
return retval
# def user_data_get(user_id, key, default=None):
# """Get a user data value from the data_store, mimic the script endpoint"""
# if user_id is None:
# raise ApiError('unknown_study', 'Please provide a valid UserID.')
#
# if key is None:
# raise ApiError('invalid_key', 'Please provide a valid key')
# dsb = DataStoreBase()
# retval = dsb.get_data_common(None,
# user_id,
# 'api_user_data_get',
# key, default)
#
# # json_value = json.dumps(retval, ensure_ascii=False, indent=2) # just return raw text
# return retval
#
def user_multi_get(user_id):
"""Get all data values in the data_store for a userid"""
@@ -130,7 +130,7 @@ def update_datastore(id, body):
item = session.query(DataStoreModel).filter_by(id=id).first()
if item is None:
raise ApiError('unknown_item', 'The item "' + id + '" is not recognized.')
print(body)
#print(body)
# I'm not sure if there is a generic way to use the
# schema to both parse the body and update the SQLAlchemy record
for key in body:
@@ -155,12 +155,16 @@ def add_datastore(body):
if 'value' not in body:
raise ApiError('no_value', 'You need to specify a value to add a datastore item')
if (not 'user_id' in body) and (not 'study_id' in body):
raise ApiError('conflicting_values', 'A datastore item should have either a study_id or a user_id')
if ('user_id' not in body) and ('study_id' not in body) and ('file_id' not in body):
raise ApiError('conflicting_values', 'A datastore item should have either a study_id, user_id or file_id ')
if 'user_id' in body and 'study_id' in body:
raise ApiError('conflicting_values', 'A datastore item should have either a study_id or a user_id, '
'but not both')
present = 0
for field in ['user_id','study_id','file_id']:
if field in body:
present = present+1
if present > 1:
raise ApiError('conflicting_values', 'A datastore item should have one of a study_id, user_id or a file_id '
'but not more than one of these')
item = DataStoreModel(key=body['key'], value=body['value'])
# I'm not sure if there is a generic way to use the
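
For context, a sketch of request bodies under the updated add_datastore check — exactly one of study_id, user_id or file_id may appear alongside key and value (the ids below are made up):

# Illustrative request bodies for the datastore endpoint
ok_body = {'file_id': 123, 'key': 'test', 'value': 'me'}                  # accepted: one owner id
bad_body = {'study_id': 1, 'file_id': 123, 'key': 'test', 'value': 'me'}  # rejected: 'conflicting_values'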

View File

@@ -20,10 +20,11 @@ class DataStoreBase(object):
overwritten = True
return overwritten
def set_validate_common(self, study_id, workflow_id, user_id, script_name, *args):
def set_validate_common(self, study_id, workflow_id, user_id, script_name, file_id, *args):
self.check_args_2(args, script_name)
workflow = session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first()
self.get_prev_value(study_id=study_id, user_id=user_id, key=args[0])
self.get_prev_value(study_id=study_id, user_id=user_id, file_id=file_id, key=args[0])
def check_args(self, args, maxlen=1, script_name='study_data_get'):
if len(args) < 1 or len(args) > maxlen:

View File

@@ -0,0 +1,34 @@
from flask import g
from crc.api.common import ApiError
from crc.scripts.data_store_base import DataStoreBase
from crc.scripts.script import Script
class FileDataGet(Script, DataStoreBase):
def get_description(self):
return """Gets user data from the data store - takes only two keyword arguments arguments: 'file_id' and 'key' """
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
self.do_task(task, study_id, workflow_id, *args, **kwargs)
def validate_kw_args(self,**kwargs):
if kwargs.get('key',None) is None:
raise ApiError(code="missing_argument",
message=f"The 'file_data_get' script requires a keyword argument of 'key'")
if kwargs.get('file_id',None) is None:
raise ApiError(code="missing_argument",
message=f"The 'file_data_get' script requires a keyword argument of 'file_id'")
return True
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
if self.validate_kw_args(**kwargs):
myargs = [kwargs['key']]
return self.get_data_common(None,
None,
'file_data_get',
kwargs['file_id'],
*myargs)
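
A short sketch of how file_data_get behaves when invoked from a workflow script (fileid is illustrative; the error code comes from validate_kw_args above):

# Illustrative calls from a BPMN script task
value = file_data_get(file_id=fileid, key='test')   # returns the stored value
file_data_get(key='test')                           # raises ApiError 'missing_argument' ('file_id' is required)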

View File

@@ -0,0 +1,44 @@
from flask import g
from crc.api.common import ApiError
from crc.scripts.data_store_base import DataStoreBase
from crc.scripts.script import Script
class FileDataSet(Script, DataStoreBase):
def get_description(self):
return """Sets data the data store - takes three keyword arguments arguments: 'file_id' and 'key' and 'value'"""
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
self.do_task(task, study_id, workflow_id, *args, **kwargs)
def validate_kw_args(self,**kwargs):
if kwargs.get('key',None) is None:
raise ApiError(code="missing_argument",
message=f"The 'file_data_get' script requires a keyword argument of 'key'")
if kwargs.get('file_id',None) is None:
raise ApiError(code="missing_argument",
message=f"The 'file_data_get' script requires a keyword argument of 'file_id'")
if kwargs.get('value',None) is None:
raise ApiError(code="missing_argument",
message=f"The 'file_data_get' script requires a keyword argument of 'value'")
return True
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
if self.validate_kw_args(**kwargs):
myargs = [kwargs['key'],kwargs['value']]
fileid = kwargs['file_id']
del(kwargs['file_id'])
return self.set_data_common(task.id,
None,
None,
workflow_id,
None,
'file_data_set',
fileid,
*myargs,
**kwargs)
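
Similarly for file_data_set, a sketch of the expected call shape (fileid is illustrative):

# Illustrative calls from a BPMN script task
file_data_set(file_id=fileid, key='test', value='me')   # stores 'me' under 'test' for that file
file_data_set(file_id=fileid, key='test')               # raises ApiError 'missing_argument' ('value' is required)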

View File

@@ -11,6 +11,7 @@ class StudyDataSet(Script,DataStoreBase):
workflow_id,
None,
'study_data_set',
None,
*args)
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
@@ -20,6 +21,7 @@ class StudyDataSet(Script,DataStoreBase):
workflow_id,
None,
'study_data_set',
None,
*args,
**kwargs)

View File

@@ -15,6 +15,7 @@ class UserDataSet(Script,DataStoreBase):
workflow_id,
g.user.uid,
'user_data_set',
None,
*args)
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
@@ -24,6 +25,7 @@ class UserDataSet(Script,DataStoreBase):
workflow_id,
None,
'user_data_set',
None,
*args,
**kwargs)

View File

@@ -0,0 +1,70 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1j7idla" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:process id="Process_18biih5" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1pnq3kg</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1pnq3kg" sourceRef="StartEvent_1" targetRef="Task_Has_Bananas" />
<bpmn:scriptTask id="Task_Has_Bananas" name="get Documents">
<bpmn:incoming>SequenceFlow_1pnq3kg</bpmn:incoming>
<bpmn:outgoing>Flow_1xqewuk</bpmn:outgoing>
<bpmn:script>documents = study_info('documents')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1xqewuk" sourceRef="Task_Has_Bananas" targetRef="Activity_0yikdu7" />
<bpmn:scriptTask id="Activity_0yikdu7" name="save arbitrary value">
<bpmn:incoming>Flow_1xqewuk</bpmn:incoming>
<bpmn:outgoing>Flow_0z7kamo</bpmn:outgoing>
<bpmn:script>filelist = list(documents.keys())
fileid = documents['UVACompl_PRCAppr'].files[0]['file_id']
file_data_set(file_id=fileid,key='test',value='me')</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="Event_1pdyoyv">
<bpmn:incoming>Flow_15mmymi</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0z7kamo" sourceRef="Activity_0yikdu7" targetRef="Activity_19x6e2e" />
<bpmn:sequenceFlow id="Flow_15mmymi" sourceRef="Activity_19x6e2e" targetRef="Event_1pdyoyv" />
<bpmn:scriptTask id="Activity_19x6e2e" name="get output">
<bpmn:incoming>Flow_0z7kamo</bpmn:incoming>
<bpmn:outgoing>Flow_15mmymi</bpmn:outgoing>
<bpmn:script>output=file_data_get(file_id=fileid,key='test')
</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_18biih5">
<bpmndi:BPMNEdge id="SequenceFlow_1pnq3kg_di" bpmnElement="SequenceFlow_1pnq3kg">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_01ekdl8_di" bpmnElement="Task_Has_Bananas">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1xqewuk_di" bpmnElement="Flow_1xqewuk">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0g5namy_di" bpmnElement="Activity_0yikdu7">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1pdyoyv_di" bpmnElement="Event_1pdyoyv">
<dc:Bounds x="782" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0z7kamo_di" bpmnElement="Flow_0z7kamo">
<di:waypoint x="530" y="117" />
<di:waypoint x="590" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_15mmymi_di" bpmnElement="Flow_15mmymi">
<di:waypoint x="690" y="117" />
<di:waypoint x="782" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0ma7ela_di" bpmnElement="Activity_19x6e2e">
<dc:Bounds x="590" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@@ -0,0 +1,31 @@
import json
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor
from tests.base_test import BaseTest
from crc.models.workflow import WorkflowStatus
from crc import db
from crc.api.common import ApiError
from crc.models.task_event import TaskEventModel, TaskEventSchema
from crc.services.workflow_service import WorkflowService
class TestFileDatastore(BaseTest):
def test_file_datastore_workflow(self):
self.load_example_data()
self.create_reference_document()
# we need to create a file with an IRB code
# for this study
workflow = self.create_workflow('file_data_store')
irb_code = "UVACompl_PRCAppr" # The first file referenced in pb required docs.
FileService.add_workflow_file(workflow_id=workflow.id,
name="anything.png", content_type="text",
binary_data=b'1234', irb_doc_code=irb_code)
processor = WorkflowProcessor(workflow)
processor.do_engine_steps()
task_data = processor.bpmn_workflow.last_task.data
self.assertEqual(task_data['output'],'me')