diff --git a/crc/api.yml b/crc/api.yml index 4ebe3cc2..ad22677e 100644 --- a/crc/api.yml +++ b/crc/api.yml @@ -1026,6 +1026,122 @@ paths: type: array items: $ref: "#/components/schemas/Approval" + /datastore: + post: + operationId: crc.api.data_store.add_datastore + summary: Add datastore item with the given parameters. + tags: + - DataStore + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/DataStore' + responses: + '200': + description: Datastore updated successfully. + content: + application/json: + schema: + $ref: "#/components/schemas/DataStore" + + /datastore/{id}: + parameters: + - name: id + in: path + required: true + description: The key to lookup. + schema: + type: string + format: string + + get: + operationId: crc.api.data_store.datastore_get + summary: Get a datastore item by id + tags: + - DataStore + responses: + '200': + description: A value from the data store, or a default if provided, or None if not found. + content: + application/json: + schema: + $ref: "#/components/schemas/DataStore" + put: + operationId: crc.api.data_store.update_datastore + summary: Updates an existing datastore item with the given parameters. + tags: + - DataStore + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/DataStore' + responses: + '200': + description: Datastore updated successfully. + content: + application/json: + schema: + $ref: "#/components/schemas/DataStore" + + + delete: + operationId: crc.api.data_store.datastore_del + summary: Deletes a value from the data store by id + tags: + - DataStore + responses: + '200': + description: Deletes a value from a data store. + content: + application/json: + schema: + $ref: "#/components/schemas/DataStore" + + + /datastore/study/{study_id}: + parameters: + - name: study_id + in: path + required: true + description: The study id we are concerned with . 
+ schema: + type: integer + format: int32 + get: + operationId: crc.api.data_store.study_multi_get + summary: Gets all datastore items for a study_id + tags: + - DataStore + responses: + '200': + description: Get all values from the data store for a study_id + content: + application/json: + schema: + $ref: "#/components/schemas/DataStore" + /datastore/user/{user_id}: + parameters: + - name: user_id + in: path + required: true + description: The user id we are concerned with . + schema: + type: string + format: string + get: + operationId: crc.api.data_store.user_multi_get + summary: Gets all datastore items by user_id + tags: + - DataStore + responses: + '200': + description: Get all values from the data store for a user_id. + content: + application/json: + schema: + $ref: "#/components/schemas/DataStore" components: securitySchemes: jwt: @@ -1096,6 +1212,39 @@ components: type: string x-nullable: true example: "27b-6-1212" + DataStore: + properties: + id: + type: integer + example: 1234 + key: + type: string + example: MyKey + workflow_id: + type: integer + x-nullable: true + example: 12 + study_id: + type: integer + x-nullable: true + example: 42 + user_id: + type: string + x-nullable: true + example: dhf8r + task_id: + type: string + x-nullable: true + example: MyTask + spec_id: + type: string + x-nullable: true + example: My Spec Name + value: + type: string + x-nullable: true + example: Some Value + WorkflowSpec: properties: id: @@ -1495,4 +1644,3 @@ components: type: number format: integer example: 5 - diff --git a/crc/api/data_store.py b/crc/api/data_store.py new file mode 100644 index 00000000..03c088e9 --- /dev/null +++ b/crc/api/data_store.py @@ -0,0 +1,174 @@ +import json +from datetime import datetime + +from crc import session +from crc.api.common import ApiError +from crc.models.data_store import DataStoreModel, DataStoreSchema +from crc.scripts.data_store_base import DataStoreBase + + +def study_data_set(study_id, key, value): + """Set a study data 
value in the data_store, mimic the script endpoint""" + if study_id is None: + raise ApiError('unknown_study', 'Please provide a valid Study ID.') + + if key is None: + raise ApiError('invalid_key', 'Please provide a valid key') + dsb = DataStoreBase() + retval = dsb.set_data_common('api', study_id, None, None, None, 'api_study_data_set', key, value) + json_value = json.dumps(retval, ensure_ascii=False, indent=2) + return json_value + + +def study_data_get(study_id, key, default=None): + """Get a study data value in the data_store, mimic the script endpoint""" + if study_id is None: + raise ApiError('unknown_study', 'Please provide a valid Study ID.') + + if key is None: + raise ApiError('invalid_key', 'Please provide a valid key') + dsb = DataStoreBase() + retval = dsb.get_data_common(study_id, None, 'api_study_data_get', key, default) + # json_value = json.dumps(retval, ensure_ascii=False, indent=2) # just return raw text + return retval + + +def study_multi_get(study_id): + """Get all data_store values for a given study_id study""" + if study_id is None: + raise ApiError('unknown_study', 'Please provide a valid Study ID.') + + dsb = DataStoreBase() + retval = dsb.get_multi_common(study_id, None) + results = DataStoreSchema(many=True).dump(retval) + return results + + +def study_data_del(study_id, key): + """Delete a study data value in the data store""" + if study_id is None: + raise ApiError('unknown_study', 'Please provide a valid Study ID.') + + if key is None: + raise ApiError('invalid_key', 'Please provide a valid key') + dsb = DataStoreBase() + dsb.del_data_common(study_id, None, 'api_study_data_get', key) + json_value = json.dumps('deleted', ensure_ascii=False, indent=2) + return json_value + + +def user_data_set(user_id, key, value): + """Set a user data value in the data_store, mimic the script endpoint""" + if user_id is None: + raise ApiError('unknown_study', 'Please provide a valid UserID.') + + if key is None: + raise ApiError('invalid_key', 'Please 
provide a valid key')
+    dsb = DataStoreBase()
+
+    retval = dsb.set_data_common('api',
+                                 None,
+                                 user_id,
+                                 None,
+                                 None,
+                                 'api_user_data_set',
+                                 key, value)
+
+    json_value = json.dumps(retval, ensure_ascii=False, indent=2)
+    return json_value
+
+
+def user_data_get(user_id, key, default=None):
+    """Get a user data value from the data_store, mimic the script endpoint"""
+    if user_id is None:
+        raise ApiError('unknown_study', 'Please provide a valid UserID.')
+
+    if key is None:
+        raise ApiError('invalid_key', 'Please provide a valid key')
+    dsb = DataStoreBase()
+    retval = dsb.get_data_common(None,
+                                 user_id,
+                                 'api_user_data_get',
+                                 key, default)
+
+    # json_value = json.dumps(retval, ensure_ascii=False, indent=2) # just return raw text
+    return retval
+
+
+def user_multi_get(user_id):
+    """Get all data values in the data_store for a userid"""
+    if user_id is None:
+        raise ApiError('unknown_study', 'Please provide a valid UserID.')
+
+    dsb = DataStoreBase()
+    retval = dsb.get_multi_common(None,
+                                  user_id)
+    results = DataStoreSchema(many=True).dump(retval)
+    return results
+
+
+def datastore_del(id):
+    """Delete a data store item by its id"""
+    session.query(DataStoreModel).filter_by(id=id).delete()
+    session.commit()
+    json_value = json.dumps('deleted', ensure_ascii=False, indent=2)
+    return json_value
+
+
+def datastore_get(id):
+    """Get a data store item by its id"""
+    item = session.query(DataStoreModel).filter_by(id=id).first()
+    results = DataStoreSchema(many=False).dump(item)
+    return results
+
+
+def update_datastore(id, body):
+    """allow a modification to a datastore item """
+    if id is None:
+        raise ApiError('unknown_id', 'Please provide a valid ID.')
+
+    item = session.query(DataStoreModel).filter_by(id=id).first()
+    if item is None:
+        raise ApiError('unknown_item', 'The item "' + id + '" is not recognized.')
+    print(body)
+    # I'm not sure if there is a generic way to use the
+    # schema to both parse the body and update the 
SQLAlchemy record
+    for key in body:
+        if hasattr(item, key):
+            setattr(item, key, body[key])
+    item.last_updated = datetime.now()
+    session.add(item)
+    session.commit()
+    return DataStoreSchema().dump(item)
+
+
+def add_datastore(body):
+    """ add a new datastore item """
+
+    print(body)
+    if body.get('id', None):
+        raise ApiError('id_specified', 'You may not specify an id for a new datastore item')
+
+    if 'key' not in body:
+        raise ApiError('no_key', 'You need to specify a key to add a datastore item')
+
+    if 'value' not in body:
+        raise ApiError('no_value', 'You need to specify a value to add a datastore item')
+
+    if (not 'user_id' in body) and (not 'study_id' in body):
+        raise ApiError('conflicting_values', 'A datastore item should have either a study_id or a user_id')
+
+    if 'user_id' in body and 'study_id' in body:
+        raise ApiError('conflicting_values', 'A datastore item should have either a study_id or a user_id, '
+                                             'but not both')
+
+    item = DataStoreModel(key=body['key'], value=body['value'])
+    # I'm not sure if there is a generic way to use the
+    # schema to both parse the body and update the SQLAlchemy record
+    for key in body:
+        if hasattr(item, key):
+            setattr(item, key, body[key])
+    item.last_updated = datetime.now()
+    session.add(item)
+    session.commit()
+    return DataStoreSchema().dump(item)
diff --git a/crc/models/data_store.py b/crc/models/data_store.py
new file mode 100644
index 00000000..f219e888
--- /dev/null
+++ b/crc/models/data_store.py
@@ -0,0 +1,31 @@
+from flask_marshmallow.sqla import SQLAlchemyAutoSchema
+from marshmallow import EXCLUDE
+from sqlalchemy import func
+import marshmallow
+from marshmallow import INCLUDE, fields
+
+from crc import db, ma
+
+class DataStoreModel(db.Model):
+    __tablename__ = 'data_store'
+    id = db.Column(db.Integer, primary_key=True)
+    last_updated = db.Column(db.DateTime(timezone=True), default=func.now())
+    key = db.Column(db.String, nullable=False)
+    workflow_id = db.Column(db.Integer)
+    study_id = 
db.Column(db.Integer, nullable=True) + task_id = db.Column(db.String) + spec_id = db.Column(db.String) + user_id = db.Column(db.String, nullable=True) + value = db.Column(db.String) + + +class DataStoreSchema(ma.Schema): + id = fields.Integer(required=False) + key = fields.String(required=True) + last_updated = fields.DateTime(server_default=func.now(), onupdate=func.now()) + workflow_id = fields.Integer() + study_id = fields.Integer(allow_none=True) + task_id = fields.String() + spec_id = fields.String() + user_id = fields.String(allow_none=True) + value = fields.String() diff --git a/crc/scripts/data_store_base.py b/crc/scripts/data_store_base.py new file mode 100644 index 00000000..05934052 --- /dev/null +++ b/crc/scripts/data_store_base.py @@ -0,0 +1,80 @@ +import importlib +import os +import pkgutil +from crc import session +from crc.api.common import ApiError +from crc.models.data_store import DataStoreModel +from crc.models.workflow import WorkflowModel +from datetime import datetime + + +class DataStoreBase(object): + + def overwritten(self, value, prev_value): + if prev_value is None: + overwritten = False + else: + if prev_value == value: + overwritten = False + else: + overwritten = True + return overwritten + + def set_validate_common(self, study_id, workflow_id, user_id, script_name, *args): + self.check_args_2(args, script_name) + workflow = session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first() + self.get_prev_value(study_id=study_id, user_id=user_id, key=args[0]) + + def check_args(self, args, maxlen=1, script_name='study_data_get'): + if len(args) < 1 or len(args) > maxlen: + raise ApiError(code="missing_argument", + message=f"The {script_name} script takes either one or two arguments, " + f"starting with the key and an optional default") + + def check_args_2(self, args, script_name='study_data_set'): + if len(args) != 2: + raise ApiError(code="missing_argument", + message=f"The {script_name} script takes two arguments, 
starting with the key and a " + "value for the key") + + def get_prev_value(self, study_id, user_id, key): + study = session.query(DataStoreModel).filter_by(study_id=study_id, user_id=user_id, key=key).first() + return study + + def set_data_common(self, task_id, study_id, user_id, workflow_id, workflow_spec_id, script_name, *args, **kwargs): + + self.check_args_2(args, script_name=script_name) + study = self.get_prev_value(study_id=study_id, user_id=user_id, key=args[0]) + if workflow_spec_id is None and workflow_id is not None: + workflow = session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first() + workflow_spec_id = workflow.workflow_spec_id + if study is not None: + prev_value = study.value + else: + prev_value = None + study = DataStoreModel(key=args[0], value=args[1], + study_id=study_id, + task_id=task_id, + user_id=user_id, # Make this available to any User + workflow_id=workflow_id, + spec_id=workflow_spec_id) + study.value = args[1] + study.last_updated = datetime.now() + overwritten = self.overwritten(study.value, prev_value) + session.add(study) + session.commit() + return {'new_value': study.value, + 'old_value': prev_value, + 'overwritten': overwritten} + + def get_data_common(self, study_id, user_id, script_name, *args): + self.check_args(args, 2, script_name) + study = session.query(DataStoreModel).filter_by(study_id=study_id, user_id=user_id, key=args[0]).first() + if study: + return study.value + else: + return args[1] + + def get_multi_common(self, study_id, user_id): + study = session.query(DataStoreModel).filter_by(study_id=study_id, user_id=user_id) + return study diff --git a/crc/scripts/script.py b/crc/scripts/script.py index 84c2ee05..45c4c221 100644 --- a/crc/scripts/script.py +++ b/crc/scripts/script.py @@ -68,9 +68,6 @@ class Script(object): workflow_id) return execlist - - - @staticmethod def get_all_subclasses(): return Script._get_all_subclasses(Script) @@ -109,3 +106,6 @@ class ScriptValidationError: @classmethod 
def from_api_error(cls, api_error: ApiError): return cls(api_error.code, api_error.message) + + + diff --git a/crc/scripts/study_data_get.py b/crc/scripts/study_data_get.py new file mode 100644 index 00000000..4b109dff --- /dev/null +++ b/crc/scripts/study_data_get.py @@ -0,0 +1,17 @@ +from crc.scripts.data_store_base import DataStoreBase +from crc.scripts.script import Script + + +class StudyDataGet(Script,DataStoreBase): + def get_description(self): + return """Gets study data from the data store.""" + + def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs): + self.do_task(task, study_id, workflow_id, *args, **kwargs) + + def do_task(self, task, study_id, workflow_id, *args, **kwargs): + return self.get_data_common(study_id, + None, + 'study_data_get', + *args) + diff --git a/crc/scripts/study_data_set.py b/crc/scripts/study_data_set.py new file mode 100644 index 00000000..dffc1bd0 --- /dev/null +++ b/crc/scripts/study_data_set.py @@ -0,0 +1,30 @@ +from crc.scripts.data_store_base import DataStoreBase +from crc.scripts.script import Script + + +class StudyDataSet(Script,DataStoreBase): + def get_description(self): + return """Sets study data from the data store.""" + + def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs): + self.set_validate_common(study_id, + workflow_id, + None, + 'study_data_set', + *args) + + def do_task(self, task, study_id, workflow_id, *args, **kwargs): + return self.set_data_common(task.id, + study_id, + None, + workflow_id, + None, + 'study_data_set', + *args, + **kwargs) + + + + + + diff --git a/crc/scripts/user_data_get.py b/crc/scripts/user_data_get.py new file mode 100644 index 00000000..037ae300 --- /dev/null +++ b/crc/scripts/user_data_get.py @@ -0,0 +1,18 @@ +from flask import g + +from crc.scripts.data_store_base import DataStoreBase +from crc.scripts.script import Script + + +class UserDataGet(Script, DataStoreBase): + def get_description(self): + return """Gets user data from the 
data store.""" + + def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs): + self.do_task(task, study_id, workflow_id, *args, **kwargs) + + def do_task(self, task, study_id, workflow_id, *args, **kwargs): + return self.get_data_common(None, + g.user.uid, + 'user_data_get', + *args) diff --git a/crc/scripts/user_data_set.py b/crc/scripts/user_data_set.py new file mode 100644 index 00000000..3e7b4cae --- /dev/null +++ b/crc/scripts/user_data_set.py @@ -0,0 +1,29 @@ +from flask import g + +from crc.scripts.data_store_base import DataStoreBase +from crc.scripts.script import Script + + +class UserDataSet(Script,DataStoreBase): + def get_description(self): + return """Sets user data to the data store.""" + + def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs): + self.set_validate_common(None, + workflow_id, + g.user.uid, + 'user_data_set', + *args) + + def do_task(self, task, study_id, workflow_id, *args, **kwargs): + return self.set_data_common(task.id, + None, + g.user.uid, + workflow_id, + None, + 'user_data_set', + *args, + **kwargs) + + + diff --git a/migrations/versions/0718ad13e5f3_.py b/migrations/versions/0718ad13e5f3_.py new file mode 100644 index 00000000..6c8a34e8 --- /dev/null +++ b/migrations/versions/0718ad13e5f3_.py @@ -0,0 +1,33 @@ +"""empty message + +Revision ID: 0718ad13e5f3 +Revises: 69081f1ff387 +Create Date: 2020-11-06 11:08:33.657440 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '0718ad13e5f3' +down_revision = '69081f1ff387' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('data_store', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('key', sa.String(), nullable=False), + sa.Column('value', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('data_store') + # ### end Alembic commands ### diff --git a/migrations/versions/e0dfdbfd6f69_add_columns.py b/migrations/versions/e0dfdbfd6f69_add_columns.py new file mode 100644 index 00000000..585b5202 --- /dev/null +++ b/migrations/versions/e0dfdbfd6f69_add_columns.py @@ -0,0 +1,36 @@ +"""add columns + +Revision ID: e0dfdbfd6f69 +Revises: 0718ad13e5f3 +Create Date: 2020-11-09 08:33:04.585139 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'e0dfdbfd6f69' +down_revision = '0718ad13e5f3' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('data_store', sa.Column('spec_id', sa.String(), nullable=True)) + op.add_column('data_store', sa.Column('study_id', sa.Integer(), nullable=True)) + op.add_column('data_store', sa.Column('task_id', sa.String(), nullable=True)) + op.add_column('data_store', sa.Column('user_id', sa.String(), nullable=True)) + op.add_column('data_store', sa.Column('workflow_id', sa.Integer(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('data_store', 'workflow_id') + op.drop_column('data_store', 'user_id') + op.drop_column('data_store', 'task_id') + op.drop_column('data_store', 'study_id') + op.drop_column('data_store', 'spec_id') + # ### end Alembic commands ### diff --git a/migrations/versions/f186725c1ad3_.py b/migrations/versions/f186725c1ad3_.py new file mode 100644 index 00000000..3f378e0a --- /dev/null +++ b/migrations/versions/f186725c1ad3_.py @@ -0,0 +1,28 @@ +"""empty message + +Revision ID: f186725c1ad3 +Revises: e0dfdbfd6f69 +Create Date: 2020-11-13 11:01:31.882424 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'f186725c1ad3' +down_revision = 'e0dfdbfd6f69' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('data_store', sa.Column('last_updated', sa.DateTime(timezone=True), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('data_store', 'last_updated') + # ### end Alembic commands ### diff --git a/tests/base_test.py b/tests/base_test.py index 0a8033aa..af899917 100644 --- a/tests/base_test.py +++ b/tests/base_test.py @@ -17,6 +17,7 @@ from crc.models.approval import ApprovalModel, ApprovalStatus from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES from crc.models.task_event import TaskEventModel from crc.models.study import StudyModel, StudyStatus +from crc.models.data_store import DataStoreModel from crc.models.user import UserModel from crc.models.workflow import WorkflowSpecModel, WorkflowSpecCategoryModel from crc.services.file_service import FileService diff --git a/tests/data/study_sponsors_data_store/study_sponsors_data_store.bpmn b/tests/data/study_sponsors_data_store/study_sponsors_data_store.bpmn new file mode 100644 index 00000000..9ecae660 --- /dev/null +++ b/tests/data/study_sponsors_data_store/study_sponsors_data_store.bpmn @@ -0,0 +1,91 @@ + + + + + SequenceFlow_1nfe5m9 + + + + SequenceFlow_1nfe5m9 + SequenceFlow_1bqiin0 + sponsors = study_info('sponsors') + + + + Flow_05136ua + + + + SequenceFlow_1bqiin0 + Flow_09cika8 + study_data_set('testme','newval') + + + + Flow_09cika8 + Flow_1oeqjuy + out = study_data_get('testme','bogus') + + + + Flow_1oeqjuy + Flow_0g9waf3 + study_data_set('testme','badval') + + + Flow_0g9waf3 + Flow_05136ua + empty = user_data_get('testme','empty') + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/emails/test_email_script.py b/tests/emails/test_email_script.py index 12a00fac..1385609b 100644 --- a/tests/emails/test_email_script.py +++ b/tests/emails/test_email_script.py @@ -1,12 +1,6 @@ -from tests.base_test import BaseTest - +from crc import mail from crc.models.email import EmailModel -from crc.services.file_service import FileService -from crc.scripts.email import Email -from crc.services.workflow_processor import 
WorkflowProcessor
-from crc.api.common import ApiError
-
-from crc import db, mail
+from tests.base_test import BaseTest
 
 
 class TestEmailScript(BaseTest):
diff --git a/tests/study/test_study_data_store_script.py b/tests/study/test_study_data_store_script.py
new file mode 100644
index 00000000..ab24f9a5
--- /dev/null
+++ b/tests/study/test_study_data_store_script.py
@@ -0,0 +1,51 @@
+from unittest.mock import patch
+import flask
+
+
+from tests.base_test import BaseTest
+
+from crc import session, app
+from crc.models.study import StudyModel
+from crc.models.user import UserModel
+from crc.services.study_service import StudyService
+from crc.services.workflow_processor import WorkflowProcessor
+from crc.services.workflow_service import WorkflowService
+
+class TestStudySponsorsScript(BaseTest):
+    test_uid = "dhf8r"
+    test_study_id = 1
+
+
+    def test_study_sponsors_script_validation(self):
+        flask.g.user = UserModel(uid='dhf8r')
+        self.load_example_data() # study_info script complains if irb_documents.xls is not loaded
+        # during the validate phase I'm going to assume that we will never
+        # have a case where irb_documents.xls is not loaded ?? 
+        self.load_test_spec("study_sponsors_data_store")
+        WorkflowService.test_spec("study_sponsors_data_store")  # This would raise errors if it didn't validate
+
+
+    @patch('crc.services.protocol_builder.requests.get')
+    def test_study_sponsors_script(self, mock_get):
+
+        mock_get.return_value.ok = True
+        mock_get.return_value.text = self.protocol_builder_response('sponsors.json')
+        flask.g.user = UserModel(uid='dhf8r')
+        app.config['PB_ENABLED'] = True
+
+        self.load_example_data()
+        self.create_reference_document()
+        study = session.query(StudyModel).first()
+        workflow_spec_model = self.load_test_spec("study_sponsors_data_store")
+        workflow_model = StudyService._create_workflow_model(study, workflow_spec_model)
+        WorkflowService.test_spec("study_sponsors_data_store")
+        processor = WorkflowProcessor(workflow_model)
+        processor.do_engine_steps()
+        self.assertTrue(processor.bpmn_workflow.is_completed())
+        data = processor.next_task().data
+        self.assertIn('sponsors', data)
+        self.assertIn('out', data)
+        self.assertEqual('empty', data['empty'])
+        self.assertEqual('newval', data['out'])
+        self.assertEqual(3, len(data['sponsors']))
diff --git a/tests/test_datastore_api.py b/tests/test_datastore_api.py
new file mode 100644
index 00000000..c9b3154a
--- /dev/null
+++ b/tests/test_datastore_api.py
@@ -0,0 +1,120 @@
+import json
+from profile import Profile
+
+from tests.base_test import BaseTest
+
+from datetime import datetime, timezone
+from unittest.mock import patch
+from crc.models.data_store import DataStoreModel, DataStoreSchema
+from crc import session, app
+
+
+
+
+class DataStoreTest(BaseTest):
+    TEST_STUDY_ITEM = {
+        "key": "MyKey",
+        "workflow_id": 12,
+        "study_id": 42,
+        "task_id": "MyTask",
+        "spec_id": "My Spec Name",
+        "value": "Some Value"
+
+    }
+    def add_test_study_data(self):
+        study_data = DataStoreSchema().dump(self.TEST_STUDY_ITEM)
+        rv = self.app.post('/v1.0/datastore',
+                           content_type="application/json",
+                           headers=self.logged_in_headers(),
+                           
data=json.dumps(study_data)) + self.assert_success(rv) + return json.loads(rv.get_data(as_text=True)) + + def add_test_user_data(self): + study_data = DataStoreSchema().dump(self.TEST_STUDY_ITEM) + study_data['user_id'] = 'dhf8r' + del(study_data['study_id']) + study_data['value'] = 'User Value' + rv = self.app.post('/v1.0/datastore', + content_type="application/json", + headers=self.logged_in_headers(), + data=json.dumps(study_data)) + self.assert_success(rv) + return json.loads(rv.get_data(as_text=True)) + + + + def test_get_study_data(self): + """Generic test, but pretty detailed, in that the study should return a categorized list of workflows + This starts with out loading the example data, to show that all the bases are covered from ground 0.""" + + """NOTE: The protocol builder is not enabled or mocked out. As the master workflow (which is empty), + and the test workflow do not need it, and it is disabled in the configuration.""" + self.load_example_data() + new_study = self.add_test_study_data() + new_study = session.query(DataStoreModel).filter_by(id=new_study["id"]).first() + + api_response = self.app.get('/v1.0/datastore/%i' % new_study.id, + headers=self.logged_in_headers(), content_type="application/json") + self.assert_success(api_response) + d = api_response.get_data(as_text=True) + study_data = DataStoreSchema().loads(d) + + self.assertEqual(study_data['key'], self.TEST_STUDY_ITEM['key']) + self.assertEqual(study_data['value'], self.TEST_STUDY_ITEM['value']) + self.assertEqual(study_data['user_id'], None) + + + + def test_update_study(self): + self.load_example_data() + new_study = self.add_test_study_data() + new_study = session.query(DataStoreModel).filter_by(id=new_study["id"]).first() + new_study.value = 'MyNewValue' + api_response = self.app.put('/v1.0/datastore/%i' % new_study.id, + data=DataStoreSchema().dump(new_study), + headers=self.logged_in_headers(), content_type="application/json") + + + api_response = self.app.get('/v1.0/datastore/%i' 
% new_study.id,
+                                    headers=self.logged_in_headers(), content_type="application/json")
+        self.assert_success(api_response)
+        study_data = DataStoreSchema().loads(api_response.get_data(as_text=True))
+
+        self.assertEqual(study_data['key'], self.TEST_STUDY_ITEM['key'])
+        self.assertEqual(study_data['value'], 'MyNewValue')
+        self.assertEqual(study_data['user_id'], None)
+
+
+
+    def test_delete_study(self):
+        self.load_example_data()
+        new_study = self.add_test_study_data()
+        oldid = new_study['id']
+        new_study = session.query(DataStoreModel).filter_by(id=new_study["id"]).first()
+        rv = self.app.delete('/v1.0/datastore/%i' % new_study.id, headers=self.logged_in_headers())
+        self.assert_success(rv)
+        study_response = session.query(DataStoreModel).filter_by(id=oldid).first()
+        self.assertIsNone(study_response)
+
+
+
+    def test_data_crosstalk(self):
+        """Test to make sure that data saved for user or study is not accessible from the other method"""
+
+        self.load_example_data()
+        new_study = self.add_test_study_data()
+        new_user = self.add_test_user_data()
+
+        api_response = self.app.get(f'/v1.0/datastore/user/{new_user["user_id"]}',
+                                    headers=self.logged_in_headers(), content_type="application/json")
+        self.assert_success(api_response)
+        d = json.loads(api_response.get_data(as_text=True))
+        self.assertEqual(d[0]['value'],'User Value')
+
+        api_response = self.app.get(f'/v1.0/datastore/study/{new_study["study_id"]}',
+                                    headers=self.logged_in_headers(), content_type="application/json")
+
+        self.assert_success(api_response)
+        d = json.loads(api_response.get_data(as_text=True))
+        self.assertEqual(d[0]['value'],'Some Value')