Merge pull request #222 from sartography/dev

dev to testing
This commit is contained in:
Dan Funk 2021-01-11 14:18:46 -05:00 committed by GitHub
commit 3056fe28e1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 293 additions and 36 deletions

View File

@ -99,6 +99,13 @@ def load_example_rrt_data():
ExampleDataLoader.clean_db()
ExampleDataLoader().load_rrt()
@app.cli.command()
def load_reference_files():
    """Load the reference documents into the database.

    NOTE(review): the previous docstring said "Load example data into the
    database." — that was copy-pasted from the neighboring commands; this
    command only loads reference documents.
    """
    # Local import — presumably to avoid importing example_data at CLI
    # startup; confirm against the module's other commands.
    from example_data import ExampleDataLoader
    ExampleDataLoader().load_reference_documents()
@app.cli.command()
def clear_db():
"""Load example data into the database."""

View File

@ -37,42 +37,55 @@ def get_changed_workflows(remote,as_df=False):
# get the local thumbprints & make sure that 'workflow_spec_id' is a column, not an index
local = get_all_spec_state_dataframe().reset_index()
if local.empty:
# return the list as a dict, let swagger convert it to json
remote_workflows['new'] = True
if as_df:
return remote_workflows
else:
return remote_workflows.reset_index().to_dict(orient='records')
# merge these on workflow spec id and hash - this will
# make two different date columns date_x and date_y
different = remote_workflows.merge(local,
different = remote_workflows.merge(local,
right_on=['workflow_spec_id','md5_hash'],
left_on=['workflow_spec_id','md5_hash'],
how = 'outer' ,
indicator=True).loc[lambda x : x['_merge']!='both']
if len(different)==0:
return []
# each line has a tag on it - if it was in the left or the right,
# label it so we know whether that entry was on the remote or local machine
different.loc[different['_merge']=='left_only','location'] = 'remote'
different.loc[different['_merge']=='right_only','location'] = 'local'
# this takes the separate date_created_x and date_created_y columns and
# combines them back into one date_created column
index = different['date_created_x'].isnull()
different.loc[index,'date_created_x'] = different[index]['date_created_y']
different = different[['workflow_spec_id','date_created_x','location']].copy()
different.columns=['workflow_spec_id','date_created','location']
# If there are no differences, then we can just return.
if not different.empty:
# our different list will have multiple entries for a workflow if there is a version on either side
# we want to grab the most recent one, so we sort and grab the most recent one for each workflow
changedfiles = different.sort_values('date_created',ascending=False).groupby('workflow_spec_id').first()
# each line has a tag on it - if it was in the left or the right,
# label it so we know whether that entry was on the remote or local machine
different.loc[different['_merge']=='left_only','location'] = 'remote'
different.loc[different['_merge']=='right_only','location'] = 'local'
# get an exclusive or list of workflow ids - that is we want lists of files that are
# on one machine or the other, but not both
remote_spec_ids = remote_workflows[['workflow_spec_id']]
local_spec_ids = local[['workflow_spec_id']]
left = remote_spec_ids[~remote_spec_ids['workflow_spec_id'].isin(local_spec_ids['workflow_spec_id'])]
right = local_spec_ids[~local_spec_ids['workflow_spec_id'].isin(remote_spec_ids['workflow_spec_id'])]
# this takes the different date_created_x and date-created_y columns and
# combines them back into one date_created column
index = different['date_created_x'].isnull()
different.loc[index,'date_created_x'] = different[index]['date_created_y']
different = different[['workflow_spec_id','date_created_x','location']].copy()
different.columns=['workflow_spec_id','date_created','location']
# flag files as new that are only on the remote box and remove the files that are only on the local box
changedfiles['new'] = False
changedfiles.loc[changedfiles.index.isin(left['workflow_spec_id']), 'new'] = True
output = changedfiles[~changedfiles.index.isin(right['workflow_spec_id'])]
# our different list will have multiple entries for a workflow if there is a version on either side
# we want to grab the most recent one, so we sort and grab the most recent one for each workflow
changedfiles = different.sort_values('date_created',ascending=False).groupby('workflow_spec_id').first()
# get an exclusive or list of workflow ids - that is we want lists of files that are
# on one machine or the other, but not both
remote_spec_ids = remote_workflows[['workflow_spec_id']]
local_spec_ids = local[['workflow_spec_id']]
left = remote_spec_ids[~remote_spec_ids['workflow_spec_id'].isin(local_spec_ids['workflow_spec_id'])]
right = local_spec_ids[~local_spec_ids['workflow_spec_id'].isin(remote_spec_ids['workflow_spec_id'])]
# flag files as new that are only on the remote box and remove the files that are only on the local box
changedfiles['new'] = False
changedfiles.loc[changedfiles.index.isin(left['workflow_spec_id']), 'new'] = True
output = changedfiles[~changedfiles.index.isin(right['workflow_spec_id'])]
else:
output = different
# return the list as a dict, let swagger convert it to json
if as_df:
@ -295,6 +308,10 @@ def get_all_spec_state_dataframe():
'date_created':file.date_created})
df = pd.DataFrame(filelist)
# If the file list is empty, return an empty data frame
if df.empty:
return df
# get a distinct list of file_model_id's with the most recent file_data retained
df = df.sort_values('date_created').drop_duplicates(['file_model_id'],keep='last').copy()

View File

@ -28,6 +28,7 @@ class Task(object):
# Autocomplete field
FIELD_TYPE_AUTO_COMPLETE = "autocomplete"
FIELD_TYPE_AUTO_COMPLETE_MAX = "autocomplete_num" # Not used directly, passed in from the front end.
# Required field
FIELD_CONSTRAINT_REQUIRED = "required"

View File

@ -0,0 +1,23 @@
from crc.scripts.script import Script
from crc.services.file_service import FileService
class IsFileUploaded(Script):
    """Workflow script: report whether a file with a given IRB doc code has
    been uploaded for the current study."""

    def get_description(self):
        return """Test whether a file is uploaded for a study.
Pass in the IRB Doc Code for the file."""

    def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
        # Validation pass: reads the doc code argument and the study's files
        # but implicitly returns None. NOTE(review): unlike do_task this never
        # returns a boolean — confirm that validation callers treat None as
        # "not uploaded" intentionally.
        doc_code = args[0]
        files = FileService.get_files_for_study(study_id)

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        """Return True if any file on the study carries the IRB doc code
        passed as the first positional argument, else False."""
        files = FileService.get_files_for_study(study_id)
        if len(files) > 0:
            doc_code = args[0]  # IRB doc code to look for
            for file in files:
                if doc_code == file.irb_doc_code:
                    return True
        return False

View File

@ -35,14 +35,22 @@ class FileService(object):
def add_workflow_spec_file(workflow_spec: WorkflowSpecModel,
name, content_type, binary_data, primary=False, is_status=False):
"""Create a new file and associate it with a workflow spec."""
file_model = FileModel(
workflow_spec_id=workflow_spec.id,
name=name,
primary=primary,
is_status=is_status,
)
# Raise ApiError if the file already exists
if session.query(FileModel)\
.filter(FileModel.workflow_spec_id == workflow_spec.id)\
.filter(FileModel.name == name).first():
return FileService.update_file(file_model, binary_data, content_type)
raise ApiError(code="Duplicate File",
message='If you want to replace the file, use the update mechanism.')
else:
file_model = FileModel(
workflow_spec_id=workflow_spec.id,
name=name,
primary=primary,
is_status=is_status,
)
return FileService.update_file(file_model, binary_data, content_type)
@staticmethod
def is_allowed_document(code):

View File

@ -130,7 +130,16 @@ class WorkflowService(object):
# Assure we have a field type
if field.type is None:
raise ApiError(code='invalid_form_data',
message='Field type is None. A field type must be provided.')
message = f'Type is missing for field "{field.id}". A field type must be provided.',
task_id = task.id,
task_name = task.get_name())
# Assure we have valid ids
if not WorkflowService.check_field_id(field.id):
raise ApiError(code='invalid_form_id',
message=f'Invalid Field name: "{field.id}". A field ID must begin with a letter, '
f'and can only contain letters, numbers, and "_"',
task_id = task.id,
task_name = task.get_name())
# Assure field has valid properties
WorkflowService.check_field_properties(field, task)
@ -179,6 +188,18 @@ class WorkflowService(object):
task.data = {}
task.data.update(form_data)
@staticmethod
def check_field_id(id):
"""Assures that field names are valid Python and Javascript names."""
if not id[0].isalpha():
return False
for char in id[1:len(id)]:
if char.isalnum() or char == '_':
pass
else:
return False
return True
@staticmethod
def check_field_properties(field, task):
"""Assures that all properties are valid on a given workflow."""
@ -565,6 +586,13 @@ class WorkflowService(object):
items = data_model.items() if isinstance(data_model, dict) else data_model
options = []
for item in items:
if value_column not in item:
raise ApiError.from_task("invalid_enum", f"The value column '{value_column}' does not exist for item {item}",
task=spiff_task)
if label_column not in item:
raise ApiError.from_task("invalid_enum", f"The label column '{label_column}' does not exist for item {item}",
task=spiff_task)
options.append({"id": item[value_column], "name": item[label_column], "data": item})
return options

View File

@ -0,0 +1,86 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_06dpn07" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.2.0">
<bpmn:process id="Process_1iqn8uk" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0dbfi6t</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0dbfi6t" sourceRef="StartEvent_1" targetRef="Activity_09rr8u7" />
<bpmn:manualTask id="Activity_09rr8u7" name="Hello">
<bpmn:documentation>&lt;H1&gt;Hello&lt;/H1&gt;</bpmn:documentation>
<bpmn:incoming>Flow_0dbfi6t</bpmn:incoming>
<bpmn:outgoing>Flow_02rje6r</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:sequenceFlow id="Flow_02rje6r" sourceRef="Activity_09rr8u7" targetRef="Activity_GetName" />
<bpmn:userTask id="Activity_GetName" name="Get Name" camunda:formKey="GetName">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="name" label="Name" type="string" defaultValue="World" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_02rje6r</bpmn:incoming>
<bpmn:outgoing>Flow_1iphrck</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_1iphrck" sourceRef="Activity_GetName" targetRef="Activity_GetTitle" />
<bpmn:userTask id="Activity_GetTitle" name="Get Title" camunda:formKey="GetTitle">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="user-title" label="Title" type="string" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1iphrck</bpmn:incoming>
<bpmn:outgoing>Flow_0cxh51h</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_0cxh51h" sourceRef="Activity_GetTitle" targetRef="Activity_SayHello" />
<bpmn:scriptTask id="Activity_SayHello" name="Say Hello">
<bpmn:incoming>Flow_0cxh51h</bpmn:incoming>
<bpmn:outgoing>Flow_0hbiuz4</bpmn:outgoing>
<bpmn:script>print('Hello', name)</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="Event_13veu8t">
<bpmn:incoming>Flow_0hbiuz4</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0hbiuz4" sourceRef="Activity_SayHello" targetRef="Event_13veu8t" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1iqn8uk">
<bpmndi:BPMNEdge id="Flow_0dbfi6t_di" bpmnElement="Flow_0dbfi6t">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_02rje6r_di" bpmnElement="Flow_02rje6r">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1iphrck_di" bpmnElement="Flow_1iphrck">
<di:waypoint x="530" y="117" />
<di:waypoint x="590" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0cxh51h_di" bpmnElement="Flow_0cxh51h">
<di:waypoint x="690" y="117" />
<di:waypoint x="750" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0hbiuz4_di" bpmnElement="Flow_0hbiuz4">
<di:waypoint x="850" y="117" />
<di:waypoint x="912" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_132twgr_di" bpmnElement="Activity_09rr8u7">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0it9qzi_di" bpmnElement="Activity_GetName">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_19s9l3h_di" bpmnElement="Activity_GetTitle">
<dc:Bounds x="590" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_05qpklh_di" bpmnElement="Activity_SayHello">
<dc:Bounds x="750" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_13veu8t_di" bpmnElement="Event_13veu8t">
<dc:Bounds x="912" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,41 @@
from tests.base_test import BaseTest
from crc.services.file_service import FileService
from crc.scripts.is_file_uploaded import IsFileUploaded
class TestIsFileUploaded(BaseTest):
    """Exercises the IsFileUploaded workflow script against a real study:
    no files, one matching file, then two files with distinct doc codes."""

    def test_file_uploaded_pass(self):
        self.load_example_data()
        irb_code_1 = 'UVACompl_PRCAppr'
        irb_code_2 = 'Study_App_Doc'

        workflow = self.create_workflow('empty_workflow')
        first_task = self.get_workflow_api(workflow).next_task
        study_id = workflow.study_id

        # We shouldn't have any files yet.
        # NOTE(review): do_task is called unbound with the class itself as
        # `self` — works because the method reads no instance state, but an
        # IsFileUploaded() instance would be the idiomatic call.
        files = FileService.get_files_for_study(study_id)
        self.assertEqual(0, len(files))
        self.assertEqual(False, IsFileUploaded.do_task(IsFileUploaded, first_task, study_id, workflow.id, irb_code_1))

        # Add a file
        FileService.add_workflow_file(workflow_id=workflow.id,
                                      name="something.png", content_type="text",
                                      binary_data=b'1234', irb_doc_code=irb_code_1)

        # Make sure we find the file
        files = FileService.get_files_for_study(study_id)
        self.assertEqual(1, len(files))
        self.assertEqual(True, IsFileUploaded.do_task(IsFileUploaded, first_task, study_id, workflow.id, irb_code_1))

        # Add second file
        FileService.add_workflow_file(workflow_id=workflow.id,
                                      name="anything.png", content_type="text",
                                      binary_data=b'5678', irb_doc_code=irb_code_2)

        # Make sure we find both files.
        files = FileService.get_files_for_study(study_id)
        self.assertEqual(2, len(files))
        self.assertEqual(True, IsFileUploaded.do_task(IsFileUploaded, first_task, study_id, workflow.id, irb_code_1))
        self.assertEqual(True, IsFileUploaded.do_task(IsFileUploaded, first_task, study_id, workflow.id, irb_code_2))

View File

@ -1,7 +1,7 @@
from unittest.mock import patch
from tests.base_test import BaseTest
from crc import db
from tests.base_test import BaseTest
from crc.api.workflow_sync import get_all_spec_state, \
get_changed_workflows, \
get_workflow_spec_files, \

View File

@ -0,0 +1,32 @@
from tests.base_test import BaseTest
from crc import session
from crc.api.common import ApiError
from crc.models.workflow import WorkflowSpecModel
from crc.services.file_service import FileService
class TestDuplicateWorkflowSpecFile(BaseTest):
    """Uploading a spec file whose name already exists must raise ApiError."""

    def test_duplicate_workflow_spec_file(self):
        # We want this to fail.
        # Users should not be able to upload a file that already exists.
        self.load_example_data()
        spec = session.query(WorkflowSpecModel).first()

        # Add a file
        file_model = FileService.add_workflow_spec_file(spec,
                                                        name="something.png",
                                                        content_type="text",
                                                        binary_data=b'1234')
        self.assertEqual(file_model.name, 'something.png')
        self.assertEqual(file_model.content_type, 'text')

        # Try to add it again.
        # BUG FIX: the original used `try/except ApiError`, which passed
        # silently when NO exception was raised — i.e. the test could never
        # catch a regression. assertRaises makes a missing error fail loudly.
        with self.assertRaises(ApiError) as ctx:
            FileService.add_workflow_spec_file(spec,
                                               name="something.png",
                                               content_type="text",
                                               binary_data=b'5678')
        self.assertEqual(ctx.exception.message,
                         'If you want to replace the file, use the update mechanism.')

View File

@ -0,0 +1,14 @@
import json
from tests.base_test import BaseTest
class TestFormFieldName(BaseTest):
    """Workflow validation must reject form field IDs that are not valid
    Python/Javascript identifiers (here, the hyphenated "user-title")."""

    def test_form_field_name(self):
        # The 'workflow_form_field_name' spec contains a field whose id has
        # a hyphen, which WorkflowService.check_field_id rejects.
        spec_model = self.load_test_spec('workflow_form_field_name')
        rv = self.app.get('/v1.0/workflow-specification/%s/validate' % spec_model.id, headers=self.logged_in_headers())
        json_data = json.loads(rv.get_data(as_text=True))
        # First validation error carries the invalid-field-name message.
        self.assertEqual(json_data[0]['message'],
                         'When populating all fields ... Invalid Field name: "user-title". A field ID must begin '
                         'with a letter, and can only contain letters, numbers, and "_"')

View File

@ -10,5 +10,5 @@ class TestFormFieldType(BaseTest):
json_data = json.loads(rv.get_data(as_text=True))
self.assertEqual(json_data[0]['message'],
'When populating all fields ... Field type is None. A field type must be provided.')
'When populating all fields ... Type is missing for field "name". A field type must be provided.')
# print('TestFormFieldType: Good Form')