2020-11-16 10:01:56 -05:00
|
|
|
import smtplib
|
2021-01-29 09:57:58 -05:00
|
|
|
from datetime import datetime, timedelta
|
|
|
|
from sqlalchemy import and_, or_
|
2020-11-16 10:01:56 -05:00
|
|
|
|
2020-10-15 15:29:40 -04:00
|
|
|
from communicator import db, app, executor
|
2020-09-17 11:16:41 -04:00
|
|
|
from communicator.models import Sample
|
2020-09-25 11:56:08 -04:00
|
|
|
from communicator.models.invitation import Invitation
|
2020-09-25 11:33:20 -04:00
|
|
|
from communicator.models.notification import Notification, EMAIL_TYPE, TEXT_TYPE
|
2021-01-29 09:40:59 -05:00
|
|
|
from communicator.models import Sample, SampleSchema
|
|
|
|
from communicator.models import IvyFile, IvyFileSchema
|
2021-01-29 16:40:10 -05:00
|
|
|
from communicator.models import Deposit, DepositSchema
|
2020-09-17 11:16:41 -04:00
|
|
|
from communicator.services.ivy_service import IvyService
|
|
|
|
from communicator.services.notification_service import NotificationService
|
|
|
|
from communicator.services.sample_service import SampleService
|
2020-09-25 11:51:17 -04:00
|
|
|
from time import sleep
|
2020-09-17 11:16:41 -04:00
|
|
|
|
2021-01-29 09:40:59 -05:00
|
|
|
def add_sample_search_filters(query, filters, ignore_dates=False):
    """Apply the given search filters to a Sample query.

    Args:
        query: a SQLAlchemy query over Sample.
        filters: dict that may contain "student_id" (list of ids),
            "location" (list of ints), "compute_id" (list of ids),
            "start_date"/"end_date" (date objects), and "include_tests"
            (when absent, test records with student_id == 0 are excluded).
        ignore_dates: when True, skip the start/end date filters.

    Returns:
        The query with all applicable filters AND-ed together.
    """
    q_filters = dict()

    if "student_id" in filters:
        # NOTE(review): a non-list student_id is silently ignored here —
        # confirm callers always pass a list.
        if isinstance(filters["student_id"], list):
            q_filters["student_id"] = or_(*[Sample.student_id == ID for ID in filters["student_id"]])

    if "location" in filters:
        if filters["location"] is not None:
            q_filters["location"] = or_(*[Sample.location == ID for ID in filters["location"]])

    if "compute_id" in filters:
        if filters["compute_id"] is not None:
            # Search Email and Compute ID column to account for typos
            q_filters["compute_id"] = or_(*([Sample.computing_id.ilike(ID) for ID in filters["compute_id"]] +
                                            [Sample.email.contains(ID.lower()) for ID in filters["compute_id"]]))

    if not ignore_dates:
        if "start_date" in filters:
            q_filters["start_date"] = Sample.date >= filters["start_date"]

        if "end_date" in filters:
            # end_date is inclusive, so extend the bound by one day.
            q_filters["end_date"] = Sample.date <= (filters["end_date"] + timedelta(1))

    if "include_tests" not in filters:
        # Exclude test records (student_id == 0) unless explicitly requested.
        q_filters["include_tests"] = Sample.student_id != 0

    query = query.filter(and_(*q_filters.values()))

    return query
|
2020-09-10 11:28:58 -04:00
|
|
|
|
2020-12-16 11:51:36 -05:00
|
|
|
def verify_token(token, required_scopes):
    """Validate an API token against the configured value.

    Returns a scope dict granting any scope on success; raises when the
    token does not match the configured API_TOKEN.
    """
    if token != app.config['API_TOKEN']:
        raise Exception("permission_denied", "API Token information is not correct")
    return {'scope': ['any']}
|
|
|
|
|
|
|
|
|
2020-09-10 11:28:58 -04:00
|
|
|
def status():
    """Health-check endpoint; always reports a good status."""
    return {"status": "good"}
|
|
|
|
|
2020-09-17 11:16:41 -04:00
|
|
|
|
2020-09-23 12:43:58 -04:00
|
|
|
def add_sample(body):
    """Create a Sample from a request body and persist it.

    The 4-digit 'location' code in the body is split into a 2-digit
    location and a 2-digit station before saving.
    """
    new_sample = Sample(barcode=body['barcode'],
                        student_id=body['student_id'],
                        computing_id=body['computing_id'],
                        date=body['date'])

    # First two digits are the location, the last two the station.
    code = body['location']
    new_sample.location = int(code[:2])
    new_sample.station = int(code[2:])

    SampleService().add_or_update_records([new_sample])
|
2020-09-17 11:16:41 -04:00
|
|
|
|
2020-10-15 15:29:40 -04:00
|
|
|
|
2021-01-29 09:40:59 -05:00
|
|
|
def get_samples(last_modified = None, start_date = None, end_date = None, student_id = "", compute_id = "", location = "", page = 0):
    """Return up to 200 samples (oldest modification first) matching the filters.

    Args:
        last_modified: ISO-format timestamp; only samples modified after it.
        start_date / end_date: "%m/%d/%Y" date strings bounding Sample.date.
        student_id / compute_id: whitespace-separated lists of ids.
        location: whitespace-separated list of integer location codes.
        page: currently unused — results are capped at 200 rows instead.

    Returns:
        A list of serialized samples (SampleSchema).
    """
    query = db.session.query(Sample)

    filters = dict()
    if start_date is not None:
        filters["start_date"] = datetime.strptime(start_date, "%m/%d/%Y").date()
    if end_date is not None:
        filters["end_date"] = datetime.strptime(end_date, "%m/%d/%Y").date()
    if student_id.strip():
        filters["student_id"] = student_id.split()
    if compute_id.strip():
        filters["compute_id"] = compute_id.split()
    if location.strip():
        filters["location"] = [int(i) for i in location.split()]

    query = add_sample_search_filters(query, filters)

    if last_modified:
        lm_date = datetime.fromisoformat(last_modified)
        query = query.filter(Sample.last_modified > lm_date)

    samples = query.order_by(Sample.last_modified).limit(200).all()
    response = SampleSchema(many=True).dump(samples)
    return response
|
|
|
|
|
|
|
|
|
2020-09-23 15:24:09 -04:00
|
|
|
def clear_samples():
    """Delete all notifications, samples, and invitations from the database."""
    for model in (Notification, Sample, Invitation):
        db.session.query(model).delete()
    db.session.commit()
|
|
|
|
|
2021-01-29 09:40:59 -05:00
|
|
|
def get_deposits(page = "0"):
    """Return one page (10 rows) of deposits, newest first, serialized.

    Args:
        page: page number as a string; each page holds 10 deposits.
    """
    offset = int(page) * 10
    ordered = db.session.query(Deposit).order_by(Deposit.date_added.desc())
    deposits = ordered[offset:offset + 10]
    return DepositSchema(many=True).dump(deposits)
|
|
|
|
|
|
|
|
def clear_deposits():
    """Remove every deposit record from the database."""
    db.session.query(Deposit).delete()
    db.session.commit()
|
|
|
|
|
|
|
|
def add_deposit(body):
    """Create and persist a Deposit from a request body; return it serialized."""
    # NOTE(review): this local import shadows the module-level Deposit import —
    # presumably the same class; confirm before removing.
    from communicator.models.deposit import Deposit, DepositSchema

    deposit = Deposit(date_added=datetime.strptime(body['date_added'], "%m/%d/%Y").date(),
                      amount=int(body['amount']),
                      notes=body['notes'])

    db.session.add(deposit)
    db.session.commit()
    return DepositSchema().dumps(deposit)
|
|
|
|
|
|
|
|
def get_imported_files(page = "0"):
    """Return one page (10 rows) of imported IVY files, newest first, with
    per-file counts of successful and failed email/text notifications.

    Args:
        page: page number as a string; each page holds 10 files.

    Returns:
        A list of result rows: (date_added, file_name, sample_count,
        successful_emails, failed_emails, successful_texts, failed_texts).
    """
    from sqlalchemy import func, case
    # Conditional aggregates counting notification outcomes per file.
    # email_notified / text_notified are compared against the strings
    # "t"/"f" — presumably how the backing database renders booleans;
    # confirm against the schema.
    cases = [func.count(case([(Sample.email_notified == "t" , 1)])).label("successful_emails"),
             func.count(case([(Sample.email_notified == "f" , 1)])).label("failed_emails"),
             func.count(case([(Sample.text_notified == "t" , 1)])).label("successful_texts"),
             func.count(case([(Sample.text_notified == "f" , 1)])).label("failed_texts")]

    # Join samples back to their originating file via the stored full path
    # ('/ivy_data/outgoing/' + file name), group per file, and slice out the
    # requested page of 10 rows.
    query = db.session.query(IvyFile.date_added,IvyFile.file_name,IvyFile.sample_count,
                             *cases).order_by(IvyFile.date_added.desc()).join(Sample, Sample.ivy_file == '/ivy_data/outgoing/' + IvyFile.file_name)\
        .group_by(IvyFile.file_name)[int(page) * 10:(int(page) * 10) + 10]
    return query
|
|
|
|
|
2020-09-25 11:33:20 -04:00
|
|
|
|
2020-09-24 12:25:18 -04:00
|
|
|
def update_and_notify():
    """Schedule the data update plus email and text notification tasks.

    These can take a very long time to execute, so they run on the
    background executor instead of blocking the request.
    """
    executor.submit(_update_data)
    executor.submit(_notify_by_email)
    executor.submit(_notify_by_text)
    # Fixed wording: was "running the background".
    return "Task scheduled and running in the background"
|
|
|
|
|
2020-09-24 12:25:18 -04:00
|
|
|
|
2020-09-17 11:16:41 -04:00
|
|
|
def update_data():
    """Schedule the IVY data update task on the background executor."""
    executor.submit(_update_data)
    # Fixed wording: was "running the background".
    return "Task scheduled and running in the background"
|
|
|
|
|
|
|
|
|
|
|
|
def _update_data():
    """Update the database from local files placed by IVY.

    No longer attempts to pull files from the Firebase service.
    """
    app.logger.info("Executing Update Data Task")

    ivy = IvyService()
    ivy.request_transfer()
    ivy_files, ivy_samples = ivy.load_directory()

    SampleService().add_or_update_records(ivy_samples)

    for processed in ivy_files:
        # Commit each file record before (optionally) deleting it from disk.
        db.session.add(processed)
        db.session.commit()
        if app.config['DELETE_IVY_FILES']:
            ivy.delete_file(processed.file_name)
        else:
            app.logger.info("Not Deleting Files, per DELETE_IVY_FILES flag")
    db.session.commit()
|
2020-09-17 11:16:41 -04:00
|
|
|
|
2020-12-30 09:06:46 -05:00
|
|
|
def split_location_column():
    """Split combined location codes into separate columns for all samples."""
    SampleService().split_all_location_columns()
|
|
|
|
|
|
|
|
def correct_computing_id():
    """Run the computing-id correction pass over existing samples."""
    SampleService().correct_computing_id()
|
2020-12-30 09:06:46 -05:00
|
|
|
|
2020-10-07 12:58:44 -04:00
|
|
|
def merge_similar_records():
    """Merge sample records that appear to be duplicates of each other."""
    SampleService().merge_similar_records()
|
|
|
|
|
2020-09-25 11:33:20 -04:00
|
|
|
|
2020-10-17 15:56:47 -04:00
|
|
|
def notify_by_email(file_name=None, retry=False):
    """Schedule the email notification task on the background executor.

    Args:
        file_name: optionally restrict to samples from this IVY file.
        retry: when True, re-attempt samples that previously failed.
    """
    executor.submit(_notify_by_email, file_name, retry)
    # Fixed wording: was "running the background".
    return "Task scheduled and running in the background"
|
|
|
|
|
|
|
|
|
2020-10-17 15:56:47 -04:00
|
|
|
def _notify_by_email(file_name=None, retry=False):
    """Send result emails for all notifiable samples.

    A sample is notifiable when it has a result code and has not yet been
    emailed. Can be restricted to a single IVY file; previously-failed
    samples are skipped unless retry is True. Stops early on an SMTP
    disconnect, a "too many messages" (451) response, or after ~190 sends.
    """
    sample_query = db.session.query(Sample) \
        .filter(Sample.result_code != None) \
        .filter(Sample.email_notified == False)
    if file_name:
        sample_query = sample_query.filter(Sample.ivy_file == file_name)
    samples = sample_query.all()
    count = 0
    with NotificationService(app) as notifier:
        for sample in samples:
            last_failure = sample.last_failure_by_type(EMAIL_TYPE)
            if last_failure and not retry:
                continue
            # A sample without an email address can never be emailed; skip
            # it without recording a failure. (Was an `assert`, which would
            # be stripped under `python -O`.)
            if sample.email is None:
                app.logger.error(f'Email not provided for Sample: {sample.barcode} ')
                continue
            try:
                notifier.send_result_email(sample)
                count += 1
                sample.email_notified = True
                db.session.add(Notification(type=EMAIL_TYPE, sample=sample, successful=True))
            except smtplib.SMTPServerDisconnected:
                # Fixed message: was mislabeled "Database connection terminated".
                app.logger.error("SMTP connection terminated, stopping for now.", exc_info=True)
                break
            except smtplib.SMTPResponseException as smtp_err:
                if smtp_err.smtp_code == 451:
                    app.logger.error("Too many messages error from SMTP Service, stopping for now.", exc_info=True)
                    break
                else:
                    # BUG FIX: this branch previously referenced an undefined
                    # name `e`, raising a NameError instead of recording the
                    # failed notification.
                    app.logger.error(f'An exception happened in EmailService sending to {sample.email} ', exc_info=True)
                    app.logger.error(str(smtp_err))
                    db.session.add(Notification(type=EMAIL_TYPE, sample=sample, successful=False,
                                                error_message=str(smtp_err)))
            except Exception as e:
                app.logger.error(f'An exception happened in EmailService sending to {sample.email} ', exc_info=True)
                app.logger.error(str(e))
                db.session.add(Notification(type=EMAIL_TYPE, sample=sample, successful=False,
                                            error_message=str(e)))
            db.session.commit()
            # Throttle sends; at 2/second a full batch of 190 takes roughly
            # a minute and a half.
            sleep(0.5)
            if count > 190:
                app.logger.info("Reached the max 190 messages, stopping for now.")
                break
|
|
|
|
|
|
|
|
|
2020-09-30 15:39:21 -04:00
|
|
|
|
2020-10-17 15:56:47 -04:00
|
|
|
def notify_by_text(file_name=None, retry=False):
    """Schedule the SMS notification task on the background executor.

    Args:
        file_name: optionally restrict to samples from this IVY file.
        retry: when True, re-attempt samples that previously failed.
    """
    executor.submit(_notify_by_text, file_name, retry)
    # Fixed wording: was "running the background".
    return "Task scheduled and running in the background"
|
|
|
|
|
|
|
|
|
2020-10-22 13:32:27 -04:00
|
|
|
def _notify_by_text(file_name=None, retry=False):
    """Send result SMS messages, but only at reasonable times of day.

    Can be restricted to a specific file name, and will attempt to retry on
    previous failures if requested to do so.
    """
    with NotificationService(app) as notifier:
        if not notifier.is_reasonable_hour_for_text_messages:
            # Consistency fix: was a bare print(); use the app logger like
            # the email sender does.
            app.logger.info("Skipping text messages, it's not a good time to get one.")
            return
        sample_query = db.session.query(Sample) \
            .filter(Sample.result_code != None) \
            .filter(Sample.text_notified == False)
        if file_name:
            sample_query = sample_query.filter(Sample.ivy_file == file_name)

        # Do not limit texts, as errors pile up we end up sending less and less, till none go out.
        # sample_query = sample_query.limit(150)  # Only send out 150 texts at a time.
        samples = sample_query.all()
        count = 0
        for sample in samples:
            last_failure = sample.last_failure_by_type(TEXT_TYPE)
            if last_failure and not retry:
                continue
            try:
                notifier.send_result_sms(sample)
                count += 1
                sample.text_notified = True
                db.session.add(Notification(type=TEXT_TYPE, sample=sample, successful=True))
            except Exception as e:
                db.session.add(Notification(type=TEXT_TYPE, sample=sample, successful=False,
                                            error_message=str(e)))
            db.session.commit()
            # Throttle sends; same ~190-message batch cap as the email sender.
            sleep(0.5)
            if count > 190:
                break
|