Merge remote-tracking branch 'origin/rrt/dev' into feature/emails-enhancement
commit fe5a7ddce3

crc/api.yml
@@ -917,6 +917,21 @@ paths:
            application/json:
              schema:
                type: object
  /health_attesting:
    get:
      operationId: crc.api.approval.get_health_attesting_csv
      summary: Returns a CSV file with health attesting records
      tags:
        - Approvals
      responses:
        '200':
          description: A CSV file
          content:
            application/json:
              schema:
                type: array
                items:
                  $ref: "#/components/schemas/Approval"
components:
  securitySchemes:
    jwt:
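For orientation, a rough sketch of exercising the new endpoint through Flask's test client. The /v1.0 base path, the bare GET without authentication, and the use of crc's app object as a Flask application are assumptions for illustration, not taken from this commit:

# Rough sketch only: base path and auth handling are assumptions, not taken from
# this commit; the route is served by crc.api.approval.get_health_attesting_csv
# per the spec above.
import csv
import io

from crc import app  # application object exposed by the crc package

with app.test_client() as client:
    response = client.get('/v1.0/health_attesting')  # base path is an assumption
    if response.status_code == 200:
        rows = list(csv.reader(io.StringIO(response.get_data(as_text=True))))
        print(rows[0])  # header row built by ApprovalService.get_health_attesting_records()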
@@ -1,9 +1,11 @@
import csv
import io
import json
import pickle
from base64 import b64decode
from datetime import datetime

from flask import g
from flask import g, make_response

from crc import db, session
from crc.api.common import ApiError
@@ -88,71 +90,25 @@ def get_approvals_for_study(study_id=None):
    return results


def get_health_attesting_csv():
    records = ApprovalService.get_health_attesting_records()
    si = io.StringIO()
    cw = csv.writer(si)
    cw.writerows(records)
    output = make_response(si.getvalue())
    output.headers["Content-Disposition"] = "attachment; filename=health_attesting.csv"
    output.headers["Content-type"] = "text/csv"
    return output


# ----- Begin descent into madness ---- #
def get_csv():
    """A damn lie, it's a json file. A huge bit of a one-off for RRT, but 3 weeks of midnight work can convince a
    man to do just about anything"""
    approvals = ApprovalService.get_all_approvals(include_cancelled=False)
    output = []
    errors = []
    for approval in approvals:
        try:
            if approval.status != ApprovalStatus.APPROVED.value:
                continue
            for related_approval in approval.related_approvals:
                if related_approval.status != ApprovalStatus.APPROVED.value:
                    continue
            workflow = db.session.query(WorkflowModel).filter(WorkflowModel.id == approval.workflow_id).first()
            data = json.loads(workflow.bpmn_workflow_json)
            last_task = find_task(data['last_task']['__uuid__'], data['task_tree'])
            personnel = extract_value(last_task, 'personnel')
            training_val = extract_value(last_task, 'RequiredTraining')
            pi_supervisor = extract_value(last_task, 'PISupervisor')['value']
            review_complete = 'AllRequiredTraining' in training_val
            pi_uid = workflow.study.primary_investigator_id
            pi_details = LdapService.user_info(pi_uid)
            details = []
            details.append(pi_details)
            for person in personnel:
                uid = person['PersonnelComputingID']['value']
                details.append(LdapService.user_info(uid))
    content = ApprovalService.get_not_really_csv_content()

            for person in details:
                record = {
                    "study_id": approval.study_id,
                    "pi_uid": pi_details.uid,
                    "pi": pi_details.display_name,
                    "name": person.display_name,
                    "uid": person.uid,
                    "email": person.email_address,
                    "supervisor": "",
                    "review_complete": review_complete,
                }
                # We only know the PI's supervisor.
                if person.uid == pi_details.uid:
                    record["supervisor"] = pi_supervisor
    return content

                output.append(record)

        except Exception as e:
            errors.append("Error pulling data for workflow #%i: %s" % (approval.workflow_id, str(e)))
    return {"results": output, "errors": errors }


def extract_value(task, key):
    if key in task['data']:
        return pickle.loads(b64decode(task['data'][key]['__bytes__']))
    else:
        return ""


def find_task(uuid, task):
    if task['id']['__uuid__'] == uuid:
        return task
    for child in task['children']:
        task = find_task(uuid, child)
        if task:
            return task
# ----- come back to the world of the living ---- #
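The get_health_attesting_csv handler above uses the standard StringIO + csv.writer + make_response recipe. A minimal standalone sketch of that pattern with a made-up route and placeholder rows (nothing below is project code):

# Standalone illustration of the CSV-response pattern; the route and rows are placeholders.
import csv
import io

from flask import Flask, make_response

app = Flask(__name__)

@app.route('/report.csv')
def report_csv():
    rows = [['uid', 'name'], ['abc1d', 'Ada Lovelace']]  # placeholder data
    si = io.StringIO()
    csv.writer(si).writerows(rows)
    output = make_response(si.getvalue())
    output.headers["Content-Disposition"] = "attachment; filename=report.csv"
    output.headers["Content-type"] = "text/csv"
    return output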
@@ -1,6 +1,9 @@
from datetime import datetime
import json
import pickle
from base64 import b64decode
from datetime import datetime, timedelta

from sqlalchemy import desc
from sqlalchemy import desc, func

from crc import app, db, session
from crc.api.common import ApiError
@@ -109,16 +112,129 @@ class ApprovalService(object):
        db_approvals = query.all()
        return [Approval.from_model(approval_model) for approval_model in db_approvals]

    @staticmethod
    def get_approval_details(approval):
        """Returns a list of packed approval details, obtained from
        the task data sent during the workflow """
        def extract_value(task, key):
            if key in task['data']:
                return pickle.loads(b64decode(task['data'][key]['__bytes__']))
            else:
                return ""

        def find_task(uuid, task):
            if task['id']['__uuid__'] == uuid:
                return task
            for child in task['children']:
                task = find_task(uuid, child)
                if task:
                    return task

        if approval.status != ApprovalStatus.APPROVED.value:
            return {}
        for related_approval in approval.related_approvals:
            if related_approval.status != ApprovalStatus.APPROVED.value:
                continue
        workflow = db.session.query(WorkflowModel).filter(WorkflowModel.id == approval.workflow_id).first()
        data = json.loads(workflow.bpmn_workflow_json)
        last_task = find_task(data['last_task']['__uuid__'], data['task_tree'])
        personnel = extract_value(last_task, 'personnel')
        training_val = extract_value(last_task, 'RequiredTraining')
        pi_supervisor = extract_value(last_task, 'PISupervisor')['value']
        review_complete = 'AllRequiredTraining' in training_val
        pi_uid = workflow.study.primary_investigator_id
        pi_details = LdapService.user_info(pi_uid)
        details = {
            'Supervisor': pi_supervisor,
            'PI_Details': pi_details,
            'Review': review_complete
        }
        details['person_details'] = []
        details['person_details'].append(pi_details)
        for person in personnel:
            uid = person['PersonnelComputingID']['value']
            details['person_details'].append(LdapService.user_info(uid))

        return details

    @staticmethod
    def get_health_attesting_records():
        """Return a list with prepared information related to all approvals """

        approvals = ApprovalService.get_all_approvals(include_cancelled=False)

        health_attesting_rows = [
            ['university_computing_id',
             'last_name',
             'first_name',
             'department',
             'job_title',
             'supervisor_university_computing_id']
        ]

        for approval in approvals:
            try:
                details = ApprovalService.get_approval_details(approval)
                if not details:
                    continue

                for person in details['person_details']:
                    first_name = person.given_name
                    last_name = person.display_name.replace(first_name, '').strip()
                    record = [
                        person.uid,
                        last_name,
                        first_name,
                        '',
                        'Academic Researcher',
                        details['Supervisor'] if person.uid == details['person_details'][0].uid else 'askresearch'
                    ]

                    if record not in health_attesting_rows:
                        health_attesting_rows.append(record)

            except Exception as e:
                app.logger.error("Error pulling data for workflow #%i: %s" % (approval.workflow_id, str(e)))

        return health_attesting_rows

    @staticmethod
    def get_not_really_csv_content():
        approvals = ApprovalService.get_all_approvals(include_cancelled=False)
        output = []
        errors = []
        for approval in approvals:
            try:
                details = ApprovalService.get_approval_details(approval)

                for person in details['person_details']:
                    record = {
                        "study_id": approval.study_id,
                        "pi_uid": details['PI_Details'].uid,
                        "pi": details['PI_Details'].display_name,
                        "name": person.display_name,
                        "uid": person.uid,
                        "email": person.email_address,
                        "supervisor": details['Supervisor'] if person.uid == details['person_details'][0].uid else "",
                        "review_complete": details['Review'],
                    }

                    output.append(record)

            except Exception as e:
                errors.append("Error pulling data for workflow #%i: %s" % (approval.workflow_id, str(e)))
        return {"results": output, "errors": errors }

    @staticmethod
    def update_approval(approval_id, approver_uid):
        """Update a specific approval"""
        """Update a specific approval
        NOTE: Actual update happens in the API layer, this
        funtion is currently in charge of only sending
        corresponding emails
        """
        db_approval = session.query(ApprovalModel).get(approval_id)
        status = db_approval.status
        if db_approval:
            # db_approval.status = status
            # session.add(db_approval)
            # session.commit()
            if status == ApprovalStatus.APPROVED.value:
                # second_approval = ApprovalModel().query.filter_by(
                #     study_id=db_approval.study_id, workflow_id=db_approval.workflow_id,
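get_approval_details unpacks values that the workflow engine stored in the serialized task tree as base64-encoded pickles keyed by UUID. A self-contained sketch of that layout and the two helper functions, using invented ids and data (none of the literals below come from the project):

# Self-contained sketch of the task-tree format assumed by extract_value / find_task;
# the uuid, key, and value below are invented for illustration.
import pickle
from base64 import b64decode, b64encode


def extract_value(task, key):
    # Values in task['data'] are pickled and base64-encoded under '__bytes__'.
    if key in task['data']:
        return pickle.loads(b64decode(task['data'][key]['__bytes__']))
    return ""


def find_task(uuid, task):
    # Depth-first search for the task whose id matches the given uuid.
    if task['id']['__uuid__'] == uuid:
        return task
    for child in task['children']:
        found = find_task(uuid, child)
        if found:
            return found


task_tree = {
    'id': {'__uuid__': 'root'},
    'children': [{
        'id': {'__uuid__': 'leaf-1'},
        'children': [],
        'data': {'PISupervisor': {'__bytes__': b64encode(pickle.dumps({'value': 'lb3dp'}))}},
    }],
}

last_task = find_task('leaf-1', task_tree)
print(extract_value(last_task, 'PISupervisor')['value'])  # -> lb3dp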
@@ -57,6 +57,32 @@ class TestApprovalsService(BaseTest):
        self.assertEqual(1, models[0].version)
        self.assertEqual(2, models[1].version)

    def test_get_health_attesting_records(self):
        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('empty_workflow')
        FileService.add_workflow_file(workflow_id=workflow.id,
                                      name="anything.png", content_type="text",
                                      binary_data=b'5678', irb_doc_code="AD_CoCAppr")

        ApprovalService.add_approval(study_id=workflow.study_id, workflow_id=workflow.id, approver_uid="dhf8r")
        records = ApprovalService.get_health_attesting_records()

        self.assertEqual(len(records), 1)

    def test_get_not_really_csv_content(self):
        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('empty_workflow')
        FileService.add_workflow_file(workflow_id=workflow.id,
                                      name="anything.png", content_type="text",
                                      binary_data=b'5678', irb_doc_code="AD_CoCAppr")

        ApprovalService.add_approval(study_id=workflow.study_id, workflow_id=workflow.id, approver_uid="dhf8r")
        records = ApprovalService.get_not_really_csv_content()

        self.assertEqual(len(records), 2)

    def test_new_approval_sends_proper_emails(self):
        self.assertEqual(1, 1)
@@ -47,7 +47,7 @@ class TestTasksApi(BaseTest):
        # The total number of tasks may change over time, as users move through gateways
        # branches may be pruned. As we hit parallel Multi-Instance new tasks may be created...
        self.assertIsNotNone(workflow.total_tasks)
        self.assertEquals(prev_completed_task_count + 1, workflow.completed_tasks)
        self.assertEqual(prev_completed_task_count + 1, workflow.completed_tasks)
        # Assure a record exists in the Task Events
        task_events = session.query(TaskEventModel) \
            .filter_by(workflow_id=workflow.id) \
@@ -56,25 +56,25 @@ class TestTasksApi(BaseTest):
        self.assertGreater(len(task_events), 0)
        event = task_events[0]
        self.assertIsNotNone(event.study_id)
        self.assertEquals("dhf8r", event.user_uid)
        self.assertEquals(workflow.id, event.workflow_id)
        self.assertEquals(workflow.workflow_spec_id, event.workflow_spec_id)
        self.assertEquals(workflow.spec_version, event.spec_version)
        self.assertEquals(WorkflowService.TASK_ACTION_COMPLETE, event.action)
        self.assertEquals(task_in.id, task_id)
        self.assertEquals(task_in.name, event.task_name)
        self.assertEquals(task_in.title, event.task_title)
        self.assertEquals(task_in.type, event.task_type)
        self.assertEquals("COMPLETED", event.task_state)
        self.assertEqual("dhf8r", event.user_uid)
        self.assertEqual(workflow.id, event.workflow_id)
        self.assertEqual(workflow.workflow_spec_id, event.workflow_spec_id)
        self.assertEqual(workflow.spec_version, event.spec_version)
        self.assertEqual(WorkflowService.TASK_ACTION_COMPLETE, event.action)
        self.assertEqual(task_in.id, task_id)
        self.assertEqual(task_in.name, event.task_name)
        self.assertEqual(task_in.title, event.task_title)
        self.assertEqual(task_in.type, event.task_type)
        self.assertEqual("COMPLETED", event.task_state)
        # Not sure what vodoo is happening inside of marshmallow to get me in this state.
        if isinstance(task_in.multi_instance_type, MultiInstanceType):
            self.assertEquals(task_in.multi_instance_type.value, event.mi_type)
            self.assertEqual(task_in.multi_instance_type.value, event.mi_type)
        else:
            self.assertEquals(task_in.multi_instance_type, event.mi_type)
            self.assertEqual(task_in.multi_instance_type, event.mi_type)

        self.assertEquals(task_in.multi_instance_count, event.mi_count)
        self.assertEquals(task_in.multi_instance_index, event.mi_index)
        self.assertEquals(task_in.process_name, event.process_name)
        self.assertEqual(task_in.multi_instance_count, event.mi_count)
        self.assertEqual(task_in.multi_instance_index, event.mi_index)
        self.assertEqual(task_in.process_name, event.process_name)
        self.assertIsNotNone(event.date)

        # Assure that there is data in the form_data
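The remaining test changes swap the deprecated assertEquals alias for assertEqual. A quick, repository-independent check of the warning the alias raises, assuming a Python version before 3.12 (where the alias still exists):

# Plain-unittest illustration of why the alias was replaced; not project code.
# assertEquals is removed entirely in Python 3.12+, so this only runs on older versions.
import unittest
import warnings


class AliasDemo(unittest.TestCase):
    def test_alias_warns(self):
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            self.assertEquals(1, 1)  # deprecated alias, emits DeprecationWarning
        self.assertTrue(any(issubclass(w.category, DeprecationWarning) for w in caught))
        self.assertEqual(1, 1)  # preferred spelling, no warning


if __name__ == '__main__':
    unittest.main()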