diff --git a/crc/api/data_store.py b/crc/api/data_store.py
index 03c088e9..d48eb8a0 100644
--- a/crc/api/data_store.py
+++ b/crc/api/data_store.py
@@ -7,30 +7,30 @@ from crc.models.data_store import DataStoreModel, DataStoreSchema
 from crc.scripts.data_store_base import DataStoreBase
 
 
-def study_data_set(study_id, key, value):
-    """Set a study data value in the data_store, mimic the script endpoint"""
-    if study_id is None:
-        raise ApiError('unknown_study', 'Please provide a valid Study ID.')
-
-    if key is None:
-        raise ApiError('invalid_key', 'Please provide a valid key')
-    dsb = DataStoreBase()
-    retval = dsb.set_data_common('api', study_id, None, None, None, 'api_study_data_set', key, value)
-    json_value = json.dumps(retval, ensure_ascii=False, indent=2)
-    return json_value
+# def study_data_set(study_id, key, value):
+#     """Set a study data value in the data_store, mimic the script endpoint"""
+#     if study_id is None:
+#         raise ApiError('unknown_study', 'Please provide a valid Study ID.')
+#
+#     if key is None:
+#         raise ApiError('invalid_key', 'Please provide a valid key')
+#     dsb = DataStoreBase()
+#     retval = dsb.set_data_common('api', study_id, None, None, None, 'api_study_data_set', key, value)
+#     json_value = json.dumps(retval, ensure_ascii=False, indent=2)
+#     return json_value
 
 
-def study_data_get(study_id, key, default=None):
-    """Get a study data value in the data_store, mimic the script endpoint"""
-    if study_id is None:
-        raise ApiError('unknown_study', 'Please provide a valid Study ID.')
-
-    if key is None:
-        raise ApiError('invalid_key', 'Please provide a valid key')
-    dsb = DataStoreBase()
-    retval = dsb.get_data_common(study_id, None, 'api_study_data_get', key, default)
-    # json_value = json.dumps(retval, ensure_ascii=False, indent=2)  # just return raw text
-    return retval
+# def study_data_get(study_id, key, default=None):
+#     """Get a study data value in the data_store, mimic the script endpoint"""
+#     if study_id is None:
+#         raise ApiError('unknown_study', 'Please provide a valid Study ID.')
+#
+#     if key is None:
+#         raise ApiError('invalid_key', 'Please provide a valid key')
+#     dsb = DataStoreBase()
+#     retval = dsb.get_data_common(study_id, None, 'api_study_data_get', key, default)
+#     # json_value = json.dumps(retval, ensure_ascii=False, indent=2)  # just return raw text
+#     return retval
 
 
 def study_multi_get(study_id):
@@ -44,56 +44,56 @@ def study_multi_get(study_id):
     return results
 
 
-def study_data_del(study_id, key):
-    """Delete a study data value in the data store"""
-    if study_id is None:
-        raise ApiError('unknown_study', 'Please provide a valid Study ID.')
-
-    if key is None:
-        raise ApiError('invalid_key', 'Please provide a valid key')
-    dsb = DataStoreBase()
-    dsb.del_data_common(study_id, None, 'api_study_data_get', key)
-    json_value = json.dumps('deleted', ensure_ascii=False, indent=2)
-    return json_value
+# def study_data_del(study_id, key):
+#     """Delete a study data value in the data store"""
+#     if study_id is None:
+#         raise ApiError('unknown_study', 'Please provide a valid Study ID.')
+#
+#     if key is None:
+#         raise ApiError('invalid_key', 'Please provide a valid key')
+#     dsb = DataStoreBase()
+#     dsb.del_data_common(study_id, None, 'api_study_data_get', key)
+#     json_value = json.dumps('deleted', ensure_ascii=False, indent=2)
+#     return json_value
 
 
-def user_data_set(user_id, key, value):
-    """Set a user data value in the data_store, mimic the script endpoint"""
-    if user_id is None:
-        raise ApiError('unknown_study', 'Please provide a valid UserID.')
-
-    if key is None:
-        raise ApiError('invalid_key', 'Please provide a valid key')
-    dsb = DataStoreBase()
-
-    retval = dsb.set_data_common('api',
-                                 None,
-                                 user_id,
-                                 None,
-                                 None,
-                                 'api_user_data_set',
-                                 key, value)
-
-    json_value = json.dumps(retval, ensure_ascii=False, indent=2)
-    return json_value
+# def user_data_set(user_id, key, value):
+#     """Set a user data value in the data_store, mimic the script endpoint"""
+#     if user_id is None:
+#         raise ApiError('unknown_study', 'Please provide a valid UserID.')
+#
+#     if key is None:
+#         raise ApiError('invalid_key', 'Please provide a valid key')
+#     dsb = DataStoreBase()
+#
+#     retval = dsb.set_data_common('api',
+#                                  None,
+#                                  user_id,
+#                                  None,
+#                                  None,
+#                                  'api_user_data_set',
+#                                  key, value)
+#
+#     json_value = json.dumps(retval, ensure_ascii=False, indent=2)
+#     return json_value
 
 
-def user_data_get(user_id, key, default=None):
-    """Get a user data value from the data_store, mimic the script endpoint"""
-    if user_id is None:
-        raise ApiError('unknown_study', 'Please provide a valid UserID.')
-
-    if key is None:
-        raise ApiError('invalid_key', 'Please provide a valid key')
-    dsb = DataStoreBase()
-    retval = dsb.get_data_common(None,
-                                 user_id,
-                                 'api_user_data_get',
-                                 key, default)
-
-    # json_value = json.dumps(retval, ensure_ascii=False, indent=2)  # just return raw text
-    return retval
-
+# def user_data_get(user_id, key, default=None):
+#     """Get a user data value from the data_store, mimic the script endpoint"""
+#     if user_id is None:
+#         raise ApiError('unknown_study', 'Please provide a valid UserID.')
+#
+#     if key is None:
+#         raise ApiError('invalid_key', 'Please provide a valid key')
+#     dsb = DataStoreBase()
+#     retval = dsb.get_data_common(None,
+#                                  user_id,
+#                                  'api_user_data_get',
+#                                  key, default)
+#
+#     # json_value = json.dumps(retval, ensure_ascii=False, indent=2)  # just return raw text
+#     return retval
+#
 
 def user_multi_get(user_id):
     """Get all data values in the data_store for a userid"""
@@ -130,7 +130,7 @@ def update_datastore(id, body):
     item = session.query(DataStoreModel).filter_by(id=id).first()
     if item is None:
         raise ApiError('unknown_item', 'The item "' + id + '" is not recognized.')
-    print(body)
+    # print(body)
     # I'm not sure if there is a generic way to use the
     # schema to both parse the body and update the SQLAlchemy record
     for key in body:
@@ -155,12 +155,16 @@ def add_datastore(body):
     if 'value' not in body:
         raise ApiError('no_value', 'You need to specify a value to add a datastore item')
 
-    if (not 'user_id' in body) and (not 'study_id' in body):
-        raise ApiError('conflicting_values', 'A datastore item should have either a study_id or a user_id')
+    if ('user_id' not in body) and ('study_id' not in body) and ('file_id' not in body):
+        raise ApiError('conflicting_values', 'A datastore item should have a study_id, a user_id, or a file_id')
 
-    if 'user_id' in body and 'study_id' in body:
-        raise ApiError('conflicting_values', 'A datastore item should have either a study_id or a user_id, '
-                                             'but not both')
+    present = 0
+    for field in ['user_id', 'study_id', 'file_id']:
+        if field in body:
+            present = present + 1
+    if present > 1:
+        raise ApiError('conflicting_values', 'A datastore item may have only one of '
+                                             'a study_id, a user_id, or a file_id')
 
     item = DataStoreModel(key=body['key'], value=body['value'])
     # I'm not sure if there is a generic way to use the
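
Side note on the add_datastore change above: a datastore item must now carry exactly one owner field. Below is a minimal sketch of that rule; the payloads and the count_owner_fields helper are illustrative only and are not part of this diff.

# Illustrative payloads (field values are made up); mirrors the
# exactly-one-of study_id/user_id/file_id check add_datastore now performs.
def count_owner_fields(body):
    """Count how many owner fields a datastore payload carries."""
    return sum(field in body for field in ('user_id', 'study_id', 'file_id'))

ok = {'key': 'test', 'value': 'me', 'file_id': 42}        # accepted: exactly one owner
too_few = {'key': 'test', 'value': 'me'}                   # rejected: no owner field at all
too_many = {'key': 'test', 'value': 'me',
            'study_id': 1, 'file_id': 42}                  # rejected: more than one owner

assert count_owner_fields(ok) == 1
assert count_owner_fields(too_few) == 0
assert count_owner_fields(too_many) > 1
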
diff --git a/crc/scripts/data_store_base.py b/crc/scripts/data_store_base.py
index 05934052..98fd030a 100644
--- a/crc/scripts/data_store_base.py
+++ b/crc/scripts/data_store_base.py
@@ -20,10 +20,11 @@ class DataStoreBase(object):
                 overwritten = True
         return overwritten
 
-    def set_validate_common(self, study_id, workflow_id, user_id, script_name, *args):
+
+    def set_validate_common(self, study_id, workflow_id, user_id, script_name, file_id, *args):
         self.check_args_2(args, script_name)
         workflow = session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first()
-        self.get_prev_value(study_id=study_id, user_id=user_id, key=args[0])
+        self.get_prev_value(study_id=study_id, user_id=user_id, file_id=file_id, key=args[0])
 
     def check_args(self, args, maxlen=1, script_name='study_data_get'):
         if len(args) < 1 or len(args) > maxlen:
diff --git a/crc/scripts/file_data_get.py b/crc/scripts/file_data_get.py
new file mode 100644
index 00000000..4fa2ca5b
--- /dev/null
+++ b/crc/scripts/file_data_get.py
@@ -0,0 +1,34 @@
+from flask import g
+
+from crc.api.common import ApiError
+from crc.scripts.data_store_base import DataStoreBase
+from crc.scripts.script import Script
+
+
+class FileDataGet(Script, DataStoreBase):
+    def get_description(self):
+        return """Gets file data from the data store - takes two keyword arguments: 'file_id' and 'key'"""
+
+    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
+        self.do_task(task, study_id, workflow_id, *args, **kwargs)
+
+    def validate_kw_args(self, **kwargs):
+        if kwargs.get('key', None) is None:
+            raise ApiError(code="missing_argument",
+                           message=f"The 'file_data_get' script requires a keyword argument of 'key'")
+
+        if kwargs.get('file_id', None) is None:
+            raise ApiError(code="missing_argument",
+                           message=f"The 'file_data_get' script requires a keyword argument of 'file_id'")
+        return True
+
+
+    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
+        if self.validate_kw_args(**kwargs):
+            myargs = [kwargs['key']]
+
+            return self.get_data_common(None,
+                                        None,
+                                        'file_data_get',
+                                        kwargs['file_id'],
+                                        *myargs)
diff --git a/crc/scripts/file_data_set.py b/crc/scripts/file_data_set.py
new file mode 100644
index 00000000..8c5d0a49
--- /dev/null
+++ b/crc/scripts/file_data_set.py
@@ -0,0 +1,44 @@
+from flask import g
+
+from crc.api.common import ApiError
+from crc.scripts.data_store_base import DataStoreBase
+from crc.scripts.script import Script
+
+
+class FileDataSet(Script, DataStoreBase):
+    def get_description(self):
+        return """Sets file data in the data store - takes three keyword arguments: 'file_id', 'key' and 'value'"""
+
+    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
+        self.do_task(task, study_id, workflow_id, *args, **kwargs)
+
+    def validate_kw_args(self, **kwargs):
+        if kwargs.get('key', None) is None:
+            raise ApiError(code="missing_argument",
+                           message=f"The 'file_data_set' script requires a keyword argument of 'key'")
+
+        if kwargs.get('file_id', None) is None:
+            raise ApiError(code="missing_argument",
+                           message=f"The 'file_data_set' script requires a keyword argument of 'file_id'")
+        if kwargs.get('value', None) is None:
+            raise ApiError(code="missing_argument",
+                           message=f"The 'file_data_set' script requires a keyword argument of 'value'")
+
+        return True
+
+
+    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
+        if self.validate_kw_args(**kwargs):
+            myargs = [kwargs['key'], kwargs['value']]
+            fileid = kwargs['file_id']
+            del kwargs['file_id']
+            return self.set_data_common(task.id,
+                                        None,
+                                        None,
+                                        workflow_id,
+                                        None,
+                                        'file_data_set',
+                                        fileid,
+                                        *myargs,
+                                        **kwargs)
+
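
Taken together, the two new scripts are meant to be invoked from a BPMN script task. The sketch below paraphrases the test workflow added later in this diff; it runs inside the workflow script engine (not as standalone Python) and assumes the study has at least one file filed under the 'UVACompl_PRCAppr' document code.

# Inside a BPMN script task (workflow script engine context).
documents = study_info('documents')                        # existing study_info script
fileid = documents['UVACompl_PRCAppr'].files[0]['file_id']

file_data_set(file_id=fileid, key='test', value='me')      # new script: attach a value to the file
output = file_data_get(file_id=fileid, key='test')         # new script: read it back -> 'me'
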
diff --git a/crc/scripts/study_data_set.py b/crc/scripts/study_data_set.py
index 75b0dd8e..9c4135ab 100644
--- a/crc/scripts/study_data_set.py
+++ b/crc/scripts/study_data_set.py
@@ -11,6 +11,7 @@ class StudyDataSet(Script,DataStoreBase):
                                         workflow_id,
                                         None,
                                         'study_data_set',
+                                        None,
                                         *args)
 
     def do_task(self, task, study_id, workflow_id, *args, **kwargs):
@@ -20,6 +21,7 @@ class StudyDataSet(Script,DataStoreBase):
                                     workflow_id,
                                     None,
                                     'study_data_set',
+                                    None,
                                     *args,
                                     **kwargs)
 
diff --git a/crc/scripts/user_data_set.py b/crc/scripts/user_data_set.py
index 7a569cb0..906afab2 100644
--- a/crc/scripts/user_data_set.py
+++ b/crc/scripts/user_data_set.py
@@ -15,6 +15,7 @@ class UserDataSet(Script,DataStoreBase):
                                         workflow_id,
                                         g.user.uid,
                                         'user_data_set',
+                                        None,
                                         *args)
 
     def do_task(self, task, study_id, workflow_id, *args, **kwargs):
@@ -24,6 +25,7 @@ class UserDataSet(Script,DataStoreBase):
                                     workflow_id,
                                     None,
                                     'user_data_set',
+                                    None,
                                     *args,
                                     **kwargs)
 
diff --git a/tests/data/file_data_store/file_data_store.bpmn b/tests/data/file_data_store/file_data_store.bpmn
new file mode 100644
index 00000000..e7cf4374
--- /dev/null
+++ b/tests/data/file_data_store/file_data_store.bpmn
@@ -0,0 +1,70 @@
[The BPMN 2.0 XML markup of this new 70-line test workflow was lost in extraction; only the
 script-task bodies are recoverable. The workflow runs three script tasks in sequence:
   documents = study_info('documents')
   filelist = list(documents.keys())
   fileid = documents['UVACompl_PRCAppr'].files[0]['file_id']
   file_data_set(file_id=fileid, key='test', value='me')
   output = file_data_get(file_id=fileid, key='test')
 followed by an end event; the rest of the file is sequence-flow wiring and diagram layout.]
diff --git a/tests/test_file_datastore.py b/tests/test_file_datastore.py
new file mode 100644
index 00000000..78f83259
--- /dev/null
+++ b/tests/test_file_datastore.py
@@ -0,0 +1,31 @@
+import json
+
+from crc.services.file_service import FileService
+from crc.services.workflow_processor import WorkflowProcessor
+from tests.base_test import BaseTest
+from crc.models.workflow import WorkflowStatus
+from crc import db
+from crc.api.common import ApiError
+from crc.models.task_event import TaskEventModel, TaskEventSchema
+from crc.services.workflow_service import WorkflowService
+
+
+class TestFileDatastore(BaseTest):
+
+
+    def test_file_datastore_workflow(self):
+        self.load_example_data()
+        self.create_reference_document()
+        # we need to create a file with an IRB code
+        # for this study
+        workflow = self.create_workflow('file_data_store')
+        irb_code = "UVACompl_PRCAppr"  # The first file referenced in pb required docs.
+        FileService.add_workflow_file(workflow_id=workflow.id,
+                                      name="anything.png", content_type="text",
+                                      binary_data=b'1234', irb_doc_code=irb_code)
+
+        processor = WorkflowProcessor(workflow)
+        processor.do_engine_steps()
+        task_data = processor.bpmn_workflow.last_task.data
+        self.assertEqual(task_data['output'], 'me')
+
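
One possible follow-up assertion for test_file_datastore_workflow would query the datastore table directly. This is a sketch only, not part of this diff; it assumes DataStoreModel exposes the key and value columns that the API code above reads and writes, and it would sit at the end of the test method.

# Hypothetical extra check inside test_file_datastore_workflow (illustrative only).
from crc import db
from crc.models.data_store import DataStoreModel

stored = db.session.query(DataStoreModel).filter(DataStoreModel.key == 'test').first()
self.assertIsNotNone(stored)           # a row was written for the file
self.assertEqual('me', stored.value)   # and it holds the value set by file_data_set
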