Merge pull request #196 from sartography/128-data-store

128 data store
This commit is contained in:
Dan Funk 2020-11-18 18:55:33 -05:00 committed by GitHub
commit b8a91a513b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 893 additions and 12 deletions

View File

@ -1026,6 +1026,122 @@ paths:
type: array
items:
$ref: "#/components/schemas/Approval"
/datastore:
post:
operationId: crc.api.data_store.add_datastore
summary: Add datastore item with the given parameters.
tags:
- DataStore
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/DataStore'
responses:
'200':
description: Datastore item created successfully.
content:
application/json:
schema:
$ref: "#/components/schemas/DataStore"
/datastore/{id}:
parameters:
- name: id
in: path
required: true
description: The key to lookup.
schema:
type: string
format: string
get:
operationId: crc.api.data_store.datastore_get
summary: Get a datastore item by id
tags:
- DataStore
responses:
'200':
description: A value from the data store, or a default if provided, or None if not found.
content:
application/json:
schema:
$ref: "#/components/schemas/DataStore"
put:
operationId: crc.api.data_store.update_datastore
summary: Updates an existing datastore item with the given parameters.
tags:
- DataStore
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/DataStore'
responses:
'200':
description: Datastore updated successfully.
content:
application/json:
schema:
$ref: "#/components/schemas/DataStore"
delete:
operationId: crc.api.data_store.datastore_del
summary: Deletes a value from the data store by id
tags:
- DataStore
responses:
'200':
description: Deletes a value from a data store.
content:
application/json:
schema:
$ref: "#/components/schemas/DataStore"
/datastore/study/{study_id}:
parameters:
- name: study_id
in: path
required: true
description: The study id we are concerned with.
schema:
type: integer
format: int32
get:
operationId: crc.api.data_store.study_multi_get
summary: Gets all datastore items for a study_id
tags:
- DataStore
responses:
'200':
description: Get all values from the data store for a study_id
content:
application/json:
schema:
$ref: "#/components/schemas/DataStore"
/datastore/user/{user_id}:
parameters:
- name: user_id
in: path
required: true
description: The user id we are concerned with.
schema:
type: string
format: string
get:
operationId: crc.api.data_store.user_multi_get
summary: Gets all datastore items by user_id
tags:
- DataStore
responses:
'200':
description: Get all values from the data store for a user_id.
content:
application/json:
schema:
$ref: "#/components/schemas/DataStore"
components:
securitySchemes:
jwt:
@ -1096,6 +1212,39 @@ components:
type: string
x-nullable: true
example: "27b-6-1212"
DataStore:
properties:
id:
type: integer
example: 1234
key:
type: string
example: MyKey
workflow_id:
type: integer
x-nullable: true
example: 12
study_id:
type: integer
x-nullable: true
example: 42
user_id:
type: string
x-nullable: true
example: dhf8r
task_id:
type: string
x-nullable: true
example: MyTask
spec_id:
type: string
x-nullable: true
example: My Spec Name
value:
type: string
x-nullable: true
example: Some Value
WorkflowSpec:
properties:
id:
@ -1495,4 +1644,3 @@ components:
type: number
format: integer
example: 5

174
crc/api/data_store.py Normal file
View File

@ -0,0 +1,174 @@
import json
from datetime import datetime
from crc import session
from crc.api.common import ApiError
from crc.models.data_store import DataStoreModel, DataStoreSchema
from crc.scripts.data_store_base import DataStoreBase
def study_data_set(study_id, key, value):
    """Store *value* under *key* for a study; API twin of the study_data_set script.

    Returns a JSON string describing the old/new value, as produced by
    DataStoreBase.set_data_common.
    """
    if study_id is None:
        raise ApiError('unknown_study', 'Please provide a valid Study ID.')
    if key is None:
        raise ApiError('invalid_key', 'Please provide a valid key')
    result = DataStoreBase().set_data_common(
        'api', study_id, None, None, None, 'api_study_data_set', key, value)
    return json.dumps(result, ensure_ascii=False, indent=2)
def study_data_get(study_id, key, default=None):
    """Look up *key* for a study; API twin of the study_data_get script.

    Returns the raw stored value (deliberately not JSON-encoded), or
    *default* when the key is absent.
    """
    if study_id is None:
        raise ApiError('unknown_study', 'Please provide a valid Study ID.')
    if key is None:
        raise ApiError('invalid_key', 'Please provide a valid key')
    return DataStoreBase().get_data_common(study_id, None, 'api_study_data_get', key, default)
def study_multi_get(study_id):
    """Return every data_store entry for *study_id*, serialized via DataStoreSchema."""
    if study_id is None:
        raise ApiError('unknown_study', 'Please provide a valid Study ID.')
    records = DataStoreBase().get_multi_common(study_id, None)
    return DataStoreSchema(many=True).dump(records)
def study_data_del(study_id, key):
    """Delete the data_store entry for *study_id* / *key*.

    :raises ApiError: when either argument is missing
    :returns: the JSON string '"deleted"' on success
    """
    if study_id is None:
        raise ApiError('unknown_study', 'Please provide a valid Study ID.')
    if key is None:
        raise ApiError('invalid_key', 'Please provide a valid key')
    dsb = DataStoreBase()
    # Fix: the script-name label was the copy-pasted 'api_study_data_get';
    # it is only a reporting label, but should name the delete operation.
    dsb.del_data_common(study_id, None, 'api_study_data_del', key)
    return json.dumps('deleted', ensure_ascii=False, indent=2)
def user_data_set(user_id, key, value):
    """Store *value* under *key* for a user; API twin of the user_data_set script.

    Returns a JSON string describing the old/new value.
    """
    # NOTE(review): the error code 'unknown_study' is kept for backward
    # compatibility, though 'unknown_user' would describe this case better.
    if user_id is None:
        raise ApiError('unknown_study', 'Please provide a valid UserID.')
    if key is None:
        raise ApiError('invalid_key', 'Please provide a valid key')
    result = DataStoreBase().set_data_common(
        'api', None, user_id, None, None, 'api_user_data_set', key, value)
    return json.dumps(result, ensure_ascii=False, indent=2)
def user_data_get(user_id, key, default=None):
    """Look up *key* for a user; API twin of the user_data_get script.

    Returns the raw stored value (deliberately not JSON-encoded), or
    *default* when the key is absent.
    """
    if user_id is None:
        raise ApiError('unknown_study', 'Please provide a valid UserID.')
    if key is None:
        raise ApiError('invalid_key', 'Please provide a valid key')
    return DataStoreBase().get_data_common(None, user_id, 'api_user_data_get', key, default)
def user_multi_get(user_id):
    """Return every data_store entry for *user_id*, serialized via DataStoreSchema."""
    if user_id is None:
        raise ApiError('unknown_study', 'Please provide a valid UserID.')
    records = DataStoreBase().get_multi_common(None, user_id)
    return DataStoreSchema(many=True).dump(records)
def datastore_del(id):
    """Delete the data_store record with primary key *id*.

    Returns the JSON string '"deleted"' whether or not a record existed.
    """
    session.query(DataStoreModel).filter_by(id=id).delete()
    session.commit()
    return json.dumps('deleted', ensure_ascii=False, indent=2)
def datastore_get(id):
    """Fetch the data_store record with primary key *id*, serialized via DataStoreSchema."""
    record = session.query(DataStoreModel).filter_by(id=id).first()
    return DataStoreSchema(many=False).dump(record)
def update_datastore(id, body):
    """Apply the fields in *body* to an existing datastore item.

    :param id: primary key of the item to modify
    :param body: dict of column-name -> new value; unknown keys are ignored
    :raises ApiError: when id is missing or no item with that id exists
    :returns: the updated item serialized via DataStoreSchema
    """
    if id is None:
        raise ApiError('unknown_id', 'Please provide a valid ID.')
    item = session.query(DataStoreModel).filter_by(id=id).first()
    if item is None:
        # Fix: id arrives as an int, so the original '"..." + id + "..."'
        # concatenation raised TypeError instead of the intended ApiError.
        raise ApiError('unknown_item', f'The item "{id}" is not recognized.')
    # Copy every recognized attribute from the body onto the SQLAlchemy
    # record; there is no generic schema-driven way to do this here.
    for key in body:
        if hasattr(item, key):
            setattr(item, key, body[key])
    item.last_updated = datetime.now()
    session.add(item)
    session.commit()
    return DataStoreSchema().dump(item)
def add_datastore(body):
    """Create a new datastore item from *body*.

    :param body: dict with at least 'key' and 'value', plus exactly one of
        'study_id' or 'user_id'; must not carry an 'id'
    :raises ApiError: when an id is supplied, key/value is missing, or the
        body does not name exactly one of study_id / user_id
    :returns: the new item serialized via DataStoreSchema
    """
    # Fix: the original called body.get(id) -- the builtin id() function,
    # not the string 'id' -- so a client-supplied id was never rejected.
    if body.get('id', None):
        raise ApiError('id_specified', 'You may not specify an id for a new datastore item')
    if 'key' not in body:
        raise ApiError('no_key', 'You need to specify a key to add a datastore item')
    if 'value' not in body:
        raise ApiError('no_value', 'You need to specify a value to add a datastore item')
    if 'user_id' not in body and 'study_id' not in body:
        raise ApiError('conflicting_values', 'A datastore item should have either a study_id or a user_id')
    if 'user_id' in body and 'study_id' in body:
        raise ApiError('conflicting_values', 'A datastore item should have either a study_id or a user_id, '
                                             'but not both')
    item = DataStoreModel(key=body['key'], value=body['value'])
    # Copy every recognized attribute from the body onto the SQLAlchemy
    # record; there is no generic schema-driven way to do this here.
    for key in body:
        if hasattr(item, key):
            setattr(item, key, body[key])
    item.last_updated = datetime.now()
    session.add(item)
    session.commit()
    return DataStoreSchema().dump(item)

31
crc/models/data_store.py Normal file
View File

@ -0,0 +1,31 @@
from flask_marshmallow.sqla import SQLAlchemyAutoSchema
from marshmallow import EXCLUDE
from sqlalchemy import func
import marshmallow
from marshmallow import INCLUDE, fields
from crc import db, ma
class DataStoreModel(db.Model):
    """SQLAlchemy model for one key/value entry in the data_store table.

    An entry is scoped to either a study (study_id) or a user (user_id);
    the workflow/task/spec columns record where a script-set value came from.
    """
    __tablename__ = 'data_store'
    id = db.Column(db.Integer, primary_key=True)
    # Defaults to NOW() at insert time; the API layer also stamps it on updates.
    last_updated = db.Column(db.DateTime(timezone=True), default=func.now())
    key = db.Column(db.String, nullable=False)
    workflow_id = db.Column(db.Integer)
    study_id = db.Column(db.Integer, nullable=True)
    task_id = db.Column(db.String)
    spec_id = db.Column(db.String)
    user_id = db.Column(db.String, nullable=True)
    value = db.Column(db.String)
class DataStoreSchema(ma.Schema):
    """Marshmallow schema used to (de)serialize DataStoreModel records."""
    id = fields.Integer(required=False)
    key = fields.String(required=True)
    # NOTE(review): server_default/onupdate are SQLAlchemy column arguments,
    # not marshmallow field options -- marshmallow only stashes them as
    # metadata, so they have no (de)serialization effect here. Confirm intent.
    last_updated = fields.DateTime(server_default=func.now(), onupdate=func.now())
    workflow_id = fields.Integer()
    study_id = fields.Integer(allow_none=True)
    task_id = fields.String()
    spec_id = fields.String()
    user_id = fields.String(allow_none=True)
    value = fields.String()

View File

@ -0,0 +1,80 @@
import importlib
import os
import pkgutil
from crc import session
from crc.api.common import ApiError
from crc.models.data_store import DataStoreModel
from crc.models.workflow import WorkflowModel
from datetime import datetime
class DataStoreBase(object):
    """Shared persistence logic for the data_store scripts and API endpoints.

    A record is addressed by (study_id, user_id, key); exactly one of
    study_id / user_id is normally set, scoping the value to a study or
    to a user.
    """

    def overwritten(self, value, prev_value):
        """Return True when a previously stored, different value is replaced."""
        return prev_value is not None and prev_value != value

    def set_validate_common(self, study_id, workflow_id, user_id, script_name, *args):
        """Validation-phase stand-in for set_data_common: checks arguments and
        runs the same lookups without writing anything."""
        self.check_args_2(args, script_name)
        # Exercise the same queries the real set would run; results are
        # intentionally discarded (the original bound an unused local).
        session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first()
        self.get_prev_value(study_id=study_id, user_id=user_id, key=args[0])

    def check_args(self, args, maxlen=1, script_name='study_data_get'):
        """Raise ApiError unless 1..maxlen positional args were supplied."""
        if len(args) < 1 or len(args) > maxlen:
            raise ApiError(code="missing_argument",
                           message=f"The {script_name} script takes either one or two arguments, "
                                   f"starting with the key and an optional default")

    def check_args_2(self, args, script_name='study_data_set'):
        """Raise ApiError unless exactly two positional args (key, value) were supplied."""
        if len(args) != 2:
            raise ApiError(code="missing_argument",
                           message=f"The {script_name} script takes two arguments, starting with the key and a "
                                   "value for the key")

    def get_prev_value(self, study_id, user_id, key):
        """Return the existing DataStoreModel for (study_id, user_id, key), or None."""
        return session.query(DataStoreModel).filter_by(study_id=study_id, user_id=user_id, key=key).first()

    def set_data_common(self, task_id, study_id, user_id, workflow_id, workflow_spec_id, script_name, *args, **kwargs):
        """Create or update the record for key args[0] with value args[1].

        :returns: dict with 'new_value', 'old_value' (None when the key was
            new) and 'overwritten' (True when a different value was replaced)
        """
        self.check_args_2(args, script_name=script_name)
        record = self.get_prev_value(study_id=study_id, user_id=user_id, key=args[0])
        # Derive the spec id from the workflow when the caller did not pass one.
        if workflow_spec_id is None and workflow_id is not None:
            workflow = session.query(WorkflowModel).filter(WorkflowModel.id == workflow_id).first()
            workflow_spec_id = workflow.workflow_spec_id
        if record is not None:
            prev_value = record.value
        else:
            prev_value = None
            record = DataStoreModel(key=args[0], value=args[1],
                                    study_id=study_id,
                                    task_id=task_id,
                                    user_id=user_id,  # Make this available to any User
                                    workflow_id=workflow_id,
                                    spec_id=workflow_spec_id)
        record.value = args[1]
        record.last_updated = datetime.now()
        overwritten = self.overwritten(record.value, prev_value)
        session.add(record)
        session.commit()
        return {'new_value': record.value,
                'old_value': prev_value,
                'overwritten': overwritten}

    def get_data_common(self, study_id, user_id, script_name, *args):
        """Return the stored value for key args[0], or the optional default
        args[1] (None when no default was given) when the key is absent."""
        self.check_args(args, 2, script_name)
        record = session.query(DataStoreModel).filter_by(study_id=study_id, user_id=user_id, key=args[0]).first()
        if record:
            return record.value
        # Fix: the original indexed args[1] unconditionally, raising
        # IndexError when the (allowed) optional default was omitted.
        return args[1] if len(args) > 1 else None

    def del_data_common(self, study_id, user_id, script_name, key):
        """Delete the record for (study_id, user_id, key); no-op when absent.

        NOTE(review): this method is called from crc/api/data_store.py but was
        missing from the class as committed -- confirm it is not defined
        elsewhere before relying on this implementation.
        """
        record = self.get_prev_value(study_id=study_id, user_id=user_id, key=key)
        if record is not None:
            session.delete(record)
            session.commit()

    def get_multi_common(self, study_id, user_id):
        """Return a query of all records scoped to the given study_id/user_id pair."""
        return session.query(DataStoreModel).filter_by(study_id=study_id, user_id=user_id)

View File

@ -68,9 +68,6 @@ class Script(object):
workflow_id)
return execlist
@staticmethod
def get_all_subclasses():
return Script._get_all_subclasses(Script)
@ -109,3 +106,6 @@ class ScriptValidationError:
@classmethod
def from_api_error(cls, api_error: ApiError):
return cls(api_error.code, api_error.message)

View File

@ -0,0 +1,17 @@
from crc.scripts.data_store_base import DataStoreBase
from crc.scripts.script import Script
class StudyDataGet(Script, DataStoreBase):
    """Workflow script: read a study-scoped value from the data store."""

    def get_description(self):
        return """Gets study data from the data store."""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # Validation simply exercises the real lookup; the result is discarded.
        self.do_task(task, study_id, workflow_id, *args, **kwargs)

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        return self.get_data_common(study_id, None, 'study_data_get', *args)

View File

@ -0,0 +1,30 @@
from crc.scripts.data_store_base import DataStoreBase
from crc.scripts.script import Script
class StudyDataSet(Script, DataStoreBase):
    """Workflow script: write a study-scoped value into the data store."""

    def get_description(self):
        return """Sets study data from the data store."""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # Validation checks argument shape and runs the lookups without writing.
        self.set_validate_common(study_id, workflow_id, None, 'study_data_set', *args)

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        return self.set_data_common(task.id, study_id, None, workflow_id, None,
                                    'study_data_set', *args, **kwargs)

View File

@ -0,0 +1,18 @@
from flask import g
from crc.scripts.data_store_base import DataStoreBase
from crc.scripts.script import Script
class UserDataGet(Script, DataStoreBase):
    """Workflow script: read a value scoped to the logged-in user (flask.g.user)."""

    def get_description(self):
        return """Gets user data from the data store."""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # Validation simply exercises the real lookup; the result is discarded.
        self.do_task(task, study_id, workflow_id, *args, **kwargs)

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        return self.get_data_common(None, g.user.uid, 'user_data_get', *args)

View File

@ -0,0 +1,29 @@
from flask import g
from crc.scripts.data_store_base import DataStoreBase
from crc.scripts.script import Script
class UserDataSet(Script, DataStoreBase):
    """Workflow script: write a value scoped to the logged-in user (flask.g.user)."""

    def get_description(self):
        return """Sets user data to the data store."""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # Validation checks argument shape and runs the lookups without writing.
        self.set_validate_common(None, workflow_id, g.user.uid, 'user_data_set', *args)

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        return self.set_data_common(task.id, None, g.user.uid, workflow_id, None,
                                    'user_data_set', *args, **kwargs)

View File

@ -0,0 +1,33 @@
"""empty message
Revision ID: 0718ad13e5f3
Revises: 69081f1ff387
Create Date: 2020-11-06 11:08:33.657440
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0718ad13e5f3'
down_revision = '69081f1ff387'
branch_labels = None
depends_on = None
def upgrade():
    """Create the data_store table with its initial columns (id, key, value)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('data_store',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('key', sa.String(), nullable=False),
                    sa.Column('value', sa.String(), nullable=True),
                    sa.PrimaryKeyConstraint('id')
                    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the data_store table created by this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('data_store')
    # ### end Alembic commands ###

View File

@ -0,0 +1,36 @@
"""add columns
Revision ID: e0dfdbfd6f69
Revises: 0718ad13e5f3
Create Date: 2020-11-09 08:33:04.585139
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e0dfdbfd6f69'
down_revision = '0718ad13e5f3'
branch_labels = None
depends_on = None
def upgrade():
    """Add the provenance/scope columns (spec, study, task, user, workflow) to data_store."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('data_store', sa.Column('spec_id', sa.String(), nullable=True))
    op.add_column('data_store', sa.Column('study_id', sa.Integer(), nullable=True))
    op.add_column('data_store', sa.Column('task_id', sa.String(), nullable=True))
    op.add_column('data_store', sa.Column('user_id', sa.String(), nullable=True))
    op.add_column('data_store', sa.Column('workflow_id', sa.Integer(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Remove the columns added by this revision, in reverse order."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('data_store', 'workflow_id')
    op.drop_column('data_store', 'user_id')
    op.drop_column('data_store', 'task_id')
    op.drop_column('data_store', 'study_id')
    op.drop_column('data_store', 'spec_id')
    # ### end Alembic commands ###

View File

@ -0,0 +1,28 @@
"""empty message
Revision ID: f186725c1ad3
Revises: e0dfdbfd6f69
Create Date: 2020-11-13 11:01:31.882424
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f186725c1ad3'
down_revision = 'e0dfdbfd6f69'
branch_labels = None
depends_on = None
def upgrade():
    """Add the last_updated timestamp column to data_store."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('data_store', sa.Column('last_updated', sa.DateTime(timezone=True), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Remove the last_updated column added by this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('data_store', 'last_updated')
    # ### end Alembic commands ###

View File

@ -17,6 +17,7 @@ from crc.models.approval import ApprovalModel, ApprovalStatus
from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
from crc.models.task_event import TaskEventModel
from crc.models.study import StudyModel, StudyStatus
from crc.models.data_store import DataStoreModel
from crc.models.user import UserModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecCategoryModel
from crc.services.file_service import FileService

View File

@ -0,0 +1,91 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_0kmksnn" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:process id="Process_0exnnpv" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_1nfe5m9</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="SequenceFlow_1nfe5m9" sourceRef="StartEvent_1" targetRef="Task_Script_Load_Study_Sponsors" />
<bpmn:scriptTask id="Task_Script_Load_Study_Sponsors" name="Load Study Sponsors">
<bpmn:incoming>SequenceFlow_1nfe5m9</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1bqiin0</bpmn:outgoing>
<bpmn:script>sponsors = study_info('sponsors')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_1bqiin0" sourceRef="Task_Script_Load_Study_Sponsors" targetRef="Activity_0cm6tn2" />
<bpmn:endEvent id="EndEvent_171dj09">
<bpmn:incoming>Flow_05136ua</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_09cika8" sourceRef="Activity_0cm6tn2" targetRef="Activity_0d8iftx" />
<bpmn:scriptTask id="Activity_0cm6tn2" name="setval">
<bpmn:incoming>SequenceFlow_1bqiin0</bpmn:incoming>
<bpmn:outgoing>Flow_09cika8</bpmn:outgoing>
<bpmn:script>study_data_set('testme','newval')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1oeqjuy" sourceRef="Activity_0d8iftx" targetRef="Activity_1yup9u7" />
<bpmn:scriptTask id="Activity_0d8iftx" name="getval">
<bpmn:incoming>Flow_09cika8</bpmn:incoming>
<bpmn:outgoing>Flow_1oeqjuy</bpmn:outgoing>
<bpmn:script>out = study_data_get('testme','bogus')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0g9waf3" sourceRef="Activity_1yup9u7" targetRef="Activity_0xw717o" />
<bpmn:scriptTask id="Activity_1yup9u7" name="reset value">
<bpmn:incoming>Flow_1oeqjuy</bpmn:incoming>
<bpmn:outgoing>Flow_0g9waf3</bpmn:outgoing>
<bpmn:script>study_data_set('testme','badval')</bpmn:script>
</bpmn:scriptTask>
<bpmn:scriptTask id="Activity_0xw717o" name="Make sure user_data_get doesn&#39;t get the study_data_set variable">
<bpmn:incoming>Flow_0g9waf3</bpmn:incoming>
<bpmn:outgoing>Flow_05136ua</bpmn:outgoing>
<bpmn:script>empty = user_data_get('testme','empty')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_05136ua" sourceRef="Activity_0xw717o" targetRef="EndEvent_171dj09" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0exnnpv">
<bpmndi:BPMNEdge id="SequenceFlow_1bqiin0_di" bpmnElement="SequenceFlow_1bqiin0">
<di:waypoint x="370" y="117" />
<di:waypoint x="440" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1nfe5m9_di" bpmnElement="SequenceFlow_1nfe5m9">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ScriptTask_1mp6xid_di" bpmnElement="Task_Script_Load_Study_Sponsors">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_171dj09_di" bpmnElement="EndEvent_171dj09">
<dc:Bounds x="792" y="672" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_09cika8_di" bpmnElement="Flow_09cika8">
<di:waypoint x="540" y="117" />
<di:waypoint x="600" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0wnwluq_di" bpmnElement="Activity_0cm6tn2">
<dc:Bounds x="440" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1oeqjuy_di" bpmnElement="Flow_1oeqjuy">
<di:waypoint x="700" y="117" />
<di:waypoint x="760" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0cq37mm_di" bpmnElement="Activity_0d8iftx">
<dc:Bounds x="600" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0g9waf3_di" bpmnElement="Flow_0g9waf3">
<di:waypoint x="810" y="157" />
<di:waypoint x="810" y="250" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0cj83fx_di" bpmnElement="Activity_1yup9u7">
<dc:Bounds x="760" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0pqth07_di" bpmnElement="Activity_0xw717o">
<dc:Bounds x="760" y="250" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_05136ua_di" bpmnElement="Flow_05136ua">
<di:waypoint x="810" y="330" />
<di:waypoint x="810" y="672" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,12 +1,6 @@
from tests.base_test import BaseTest
from crc import mail
from crc.models.email import EmailModel
from crc.services.file_service import FileService
from crc.scripts.email import Email
from crc.services.workflow_processor import WorkflowProcessor
from crc.api.common import ApiError
from crc import db, mail
from tests.base_test import BaseTest
class TestEmailScript(BaseTest):

View File

@ -0,0 +1,51 @@
from unittest.mock import patch
import flask
from tests.base_test import BaseTest
from crc import session, app
from crc.models.study import StudyModel
from crc.models.user import UserModel
from crc.services.study_service import StudyService
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.workflow_service import WorkflowService
class TestSudySponsorsScript(BaseTest):
    """Exercises the study_sponsors_data_store BPMN spec, which combines the
    study_info script with study_data_set/get and user_data_get.

    NOTE(review): 'Sudy' in the class name is a typo for 'Study'; kept as-is
    so any external references to this test class keep working.
    """
    test_uid = "dhf8r"
    test_study_id = 1

    def test_study_sponsors_script_validation(self):
        flask.g.user = UserModel(uid='dhf8r')
        self.load_example_data()  # study_info script complains if irb_documents.xls is not loaded
        # during the validate phase I'm going to assume that we will never
        # have a case where irb_documents.xls is not loaded ??
        self.load_test_spec("study_sponsors_data_store")
        WorkflowService.test_spec("study_sponsors_data_store")  # This would raise errors if it didn't validate

    @patch('crc.services.protocol_builder.requests.get')
    def test_study_sponsors_script(self, mock_get):
        # Run the spec end-to-end with the protocol builder mocked out.
        mock_get.return_value.ok = True
        mock_get.return_value.text = self.protocol_builder_response('sponsors.json')
        flask.g.user = UserModel(uid='dhf8r')
        app.config['PB_ENABLED'] = True
        self.load_example_data()
        self.create_reference_document()
        study = session.query(StudyModel).first()
        workflow_spec_model = self.load_test_spec("study_sponsors_data_store")
        workflow_model = StudyService._create_workflow_model(study, workflow_spec_model)
        WorkflowService.test_spec("study_sponsors_data_store")
        processor = WorkflowProcessor(workflow_model)
        processor.do_engine_steps()
        self.assertTrue(processor.bpmn_workflow.is_completed())
        data = processor.next_task().data
        self.assertIn('sponsors', data)
        self.assertIn('out', data)
        # Fix: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual('empty', data['empty'])
        self.assertEqual('newval', data['out'])
        self.assertEqual(3, len(data['sponsors']))

120
tests/test_datastore_api.py Normal file
View File

@ -0,0 +1,120 @@
import json
from profile import Profile
from tests.base_test import BaseTest
from datetime import datetime, timezone
from unittest.mock import patch
from crc.models.data_store import DataStoreModel, DataStoreSchema
from crc import session, app
class DataStoreTest(BaseTest):
    """API-level tests for the /datastore endpoints: add, get-by-id, update,
    delete, plus the study- and user-scoped multi-get endpoints."""

    # Template body for a study-scoped datastore item; copied and tweaked
    # by the helpers below.
    TEST_STUDY_ITEM = {
        "key": "MyKey",
        "workflow_id": 12,
        "study_id": 42,
        "task_id": "MyTask",
        "spec_id": "My Spec Name",
        "value": "Some Value"
    }

    def add_test_study_data(self):
        # POST a study-scoped item and return the parsed response body.
        study_data = DataStoreSchema().dump(self.TEST_STUDY_ITEM)
        rv = self.app.post('/v1.0/datastore',
                           content_type="application/json",
                           headers=self.logged_in_headers(),
                           data=json.dumps(study_data))
        self.assert_success(rv)
        return json.loads(rv.get_data(as_text=True))

    def add_test_user_data(self):
        # POST a user-scoped item (study_id removed, user_id added) and
        # return the parsed response body.
        study_data = DataStoreSchema().dump(self.TEST_STUDY_ITEM)
        study_data['user_id'] = 'dhf8r'
        del(study_data['study_id'])
        study_data['value'] = 'User Value'
        rv = self.app.post('/v1.0/datastore',
                           content_type="application/json",
                           headers=self.logged_in_headers(),
                           data=json.dumps(study_data))
        self.assert_success(rv)
        return json.loads(rv.get_data(as_text=True))

    def test_get_study_data(self):
        """Generic test, but pretty detailed, in that the study should return a categorized list of workflows
        This starts with out loading the example data, to show that all the bases are covered from ground 0."""
        """NOTE: The protocol builder is not enabled or mocked out. As the master workflow (which is empty),
        and the test workflow do not need it, and it is disabled in the configuration."""
        self.load_example_data()
        new_study = self.add_test_study_data()
        new_study = session.query(DataStoreModel).filter_by(id=new_study["id"]).first()
        api_response = self.app.get('/v1.0/datastore/%i' % new_study.id,
                                    headers=self.logged_in_headers(), content_type="application/json")
        self.assert_success(api_response)
        d = api_response.get_data(as_text=True)
        study_data = DataStoreSchema().loads(d)
        self.assertEqual(study_data['key'], self.TEST_STUDY_ITEM['key'])
        self.assertEqual(study_data['value'], self.TEST_STUDY_ITEM['value'])
        # A study-scoped item must not carry a user id.
        self.assertEqual(study_data['user_id'], None)

    def test_update_study(self):
        self.load_example_data()
        new_study = self.add_test_study_data()
        new_study = session.query(DataStoreModel).filter_by(id=new_study["id"]).first()
        new_study.value = 'MyNewValue'
        # NOTE(review): the PUT response is never checked with assert_success,
        # and `data=` is given a dict rather than a JSON string. Because
        # new_study is a session-attached record mutated above, the GET below
        # could observe 'MyNewValue' even if the PUT silently failed -- confirm.
        api_response = self.app.put('/v1.0/datastore/%i' % new_study.id,
                                    data=DataStoreSchema().dump(new_study),
                                    headers=self.logged_in_headers(), content_type="application/json")
        api_response = self.app.get('/v1.0/datastore/%i' % new_study.id,
                                    headers=self.logged_in_headers(), content_type="application/json")
        self.assert_success(api_response)
        study_data = DataStoreSchema().loads(api_response.get_data(as_text=True))
        self.assertEqual(study_data['key'], self.TEST_STUDY_ITEM['key'])
        self.assertEqual(study_data['value'], 'MyNewValue')
        self.assertEqual(study_data['user_id'], None)

    def test_delete_study(self):
        self.load_example_data()
        new_study = self.add_test_study_data()
        oldid = new_study['id']
        new_study = session.query(DataStoreModel).filter_by(id=new_study["id"]).first()
        rv = self.app.delete('/v1.0/datastore/%i' % new_study.id, headers=self.logged_in_headers())
        self.assert_success(rv)
        # The record must be gone from the database after the DELETE.
        studyreponse = session.query(DataStoreModel).filter_by(id=oldid).first()
        self.assertEqual(studyreponse,None)

    def test_data_crosstalk(self):
        """Test to make sure that data saved for user or study is not acessible from the other method"""
        self.load_example_data()
        new_study = self.add_test_study_data()
        new_user = self.add_test_user_data()
        # The user-scoped endpoint must only see the user item...
        api_response = self.app.get(f'/v1.0/datastore/user/{new_user["user_id"]}',
                                    headers=self.logged_in_headers(), content_type="application/json")
        self.assert_success(api_response)
        d = json.loads(api_response.get_data(as_text=True))
        self.assertEqual(d[0]['value'],'User Value')
        # ...and the study-scoped endpoint must only see the study item.
        api_response = self.app.get(f'/v1.0/datastore/study/{new_study["study_id"]}',
                                    headers=self.logged_in_headers(), content_type="application/json")
        self.assert_success(api_response)
        d = json.loads(api_response.get_data(as_text=True))
        self.assertEqual(d[0]['value'],'Some Value')