539 lines
18 KiB
Python
Raw Normal View History

2020-12-18 10:12:15 -05:00
2020-12-28 09:15:42 -05:00
import random
import csv
import io
import json
import logging
import os
2020-11-11 10:47:34 -05:00
from datetime import datetime
2020-12-18 10:12:15 -05:00
from datetime import date
from functools import wraps
import connexion
import sentry_sdk
from connexion import ProblemException
2020-11-11 10:47:34 -05:00
from flask import render_template, request, redirect, url_for, flash, abort, Response, send_file, session
from flask_assets import Environment
from flask_cors import CORS
from flask_mail import Mail
from flask_marshmallow import Marshmallow
from flask_migrate import Migrate
from flask_paginate import Pagination, get_page_parameter
from flask_sqlalchemy import SQLAlchemy
2021-01-01 09:00:23 -05:00
from sqlalchemy import func, and_, case
from sentry_sdk.integrations.flask import FlaskIntegration
from webassets import Bundle
from flask_executor import Executor
2020-12-16 11:00:01 -05:00
import numpy as np
logging.basicConfig(level=logging.INFO)

# API, fully defined in api.yml
connexion_app = connexion.FlaskApp(__name__)
app = connexion_app.app

# Executor for long running tasks
executor = Executor(app)

# Configuration: defaults first, then either the testing overlay (when the
# TESTING env var is "true") or an optional instance-local config.py.
app.config.from_object('config.default')
if "TESTING" in os.environ and os.environ["TESTING"] == "true":
    app.config.from_object('config.testing')
    app.config.from_pyfile('../config/testing.py')
else:
    # silent=True: a missing instance config.py is not an error.
    app.config.root_path = app.instance_path
    app.config.from_pyfile('config.py', silent=True)
2020-12-10 08:16:01 -05:00
# Connexion Error handling
def render_errors(exception):
    """Serialize any unhandled exception as a JSON 500 response."""
    body = json.dumps({"error": str(exception)})
    return Response(body, status=500, mimetype="application/json")

connexion_app.add_error_handler(Exception, render_errors)
# Mail settings
mail = Mail(app)

# Database
db = SQLAlchemy(app)
migrate = Migrate(app, db)
ma = Marshmallow(app)

# Asset management
url_map = app.url_map
try:
    # Remove any pre-registered 'static' rule so it can be re-added below
    # with the application's static path.
    for rule in url_map.iter_rules('static'):
        url_map._rules.remove(rule)
except ValueError:
    # no static view was created yet
    pass
app.add_url_rule(
    app.static_url_path + '/<path:filename>',
    endpoint='static', view_func=app.send_static_file)
assets = Environment(app)
assets.init_app(app)
assets.url = app.static_url_path
# Compile the SCSS bundle to argon.css via pyscss.
scss = Bundle(
    'assets/scss/argon.scss',
    filters='pyscss',
    output='argon.css'
)
assets.register('app_scss', scss)

connexion_app.add_api('api.yml', base_path='/v1.0')
2020-12-19 13:41:15 -05:00
from datetime import date, timedelta
2020-12-30 09:06:46 -05:00
from communicator import models
from communicator import api
from communicator import forms
from communicator.models import Sample
2021-01-04 12:15:56 -05:00
from flask_table import Table, Col, DatetimeCol, BoolCol, NestedTableCol
from communicator.tables import SampleTable
# Convert list of allowed origins to list of regexes.
# NOTE: r'\.' (raw string) replaces the previous '\.' literal, which is an
# invalid escape sequence in a non-raw string (DeprecationWarning, and a
# SyntaxError in future Python versions). The resulting regex is unchanged.
origins_re = [r"^https?:\/\/%s(.*)" % o.replace('.', r'\.')
              for o in app.config['CORS_ALLOW_ORIGINS']]
cors = CORS(connexion_app.app, origins=origins_re)
# Sentry error handling (only when enabled in config)
if app.config['ENABLE_SENTRY']:
    sentry_sdk.init(
        dsn="https://048a9b3ac72f476a8c77b910ad4d7f84@o401361.ingest.sentry.io/5454621",
        integrations=[FlaskIntegration()],
        # Capture 100% of transactions for performance tracing.
        traces_sample_rate=1.0
    )

# HTML Pages
# Application root with surrounding slashes removed; prepended to form
# actions and links rendered in templates.
BASE_HREF = app.config['APPLICATION_ROOT'].strip('/')
2020-12-28 09:15:42 -05:00
def superuser(f):
    """Decorator restricting a view to users UserService considers valid.

    Invalid users get a flash message and a 404 response.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        from communicator.services.user_service import UserService
        if not UserService().is_valid_user():
            flash("You do not have permission to view that page", "warning")
            # BUG FIX: get_user_info() returns a user object (not a str), so
            # the original string concatenation raised TypeError here and
            # masked the intended 404. Lazy %s formatting handles any object.
            logging.info("Permission Denied to user %s",
                         UserService().get_user_info())
            abort(404)
        return f(*args, **kwargs)
    return decorated_function
2020-12-18 10:12:15 -05:00
@app.errorhandler(404)
@superuser
def page_not_found(e):
    """Render the custom 404 page for unknown URLs."""
    # note that we set the 404 status explicitly — the original dropped the
    # trailing ", 404", so the error page was served with a 200 status.
    return render_template('pages/404.html'), 404
2020-12-31 08:52:33 -05:00
def daterange(start, stop, days=1, hours=0):
    """Return the list of datetimes from start to stop (inclusive) in steps
    of `days` days plus `hours` hours.

    Arguments that are plain dates (not datetimes) are promoted to datetimes
    at midnight.

    Raises:
        ValueError: if the step is zero (the original looped forever).
    """
    step = timedelta(days=days, hours=hours)
    if not step:
        raise ValueError("daterange step must be non-zero")
    # Exact type check on purpose: datetime subclasses date, and datetimes
    # must pass through unchanged (isinstance would match them too).
    if type(start) == date:
        start = datetime.combine(start, datetime.min.time())
    if type(stop) == date:
        stop = datetime.combine(stop, datetime.min.time())
    date_list = []
    time = start
    while time <= stop:
        date_list.append(time)
        time += step
    return date_list
2020-12-30 09:06:46 -05:00
def date2datetime(_date):
    """Promote a date to a datetime pinned to midnight (00:00:00)."""
    midnight = datetime.min.time()
    return datetime.combine(_date, midnight)
2020-12-31 08:52:33 -05:00
def apply_filters(query, session):
    """Apply the session's "index_filter" criteria to a Sample query.

    Missing start/end dates default to today / tomorrow (end date is treated
    as exclusive by callers). A malformed stored filter is logged and cleared
    from the session. Dates that round-tripped through the session as
    "YYYY-MM-DD" strings are parsed back into date objects before returning.

    Returns:
        (query, filters): the filtered query and the effective filter dict.
    """
    if "index_filter" in session:
        filters = session["index_filter"]
        try:
            if "start_date" in filters:
                query = query.filter(
                    Sample.date >= filters["start_date"])
            else:
                filters["start_date"] = date.today()
                query = query.filter(
                    Sample.date >= filters["start_date"])
            if "end_date" in filters:
                query = query.filter(
                    Sample.date <= filters["end_date"])
            else:
                # Tomorrow, so everything up to "now" falls inside the range.
                filters["end_date"] = date.today() + timedelta(1)
            if "student_id" in filters:
                query = query.filter(
                    Sample.student_id.in_(filters["student_id"].split()))
            if "location" in filters:
                query = query.filter(
                    Sample.location.in_(filters["location"].split()))
            if "station" in filters:
                query = query.filter(
                    Sample.station.in_(filters["station"].split()))
            if "compute_id" in filters:
                # BUG FIX: this branch previously assigned to/filtered an
                # undefined name (`filtered_samples`), raising NameError and
                # silently clearing the whole filter whenever a compute_id
                # was present.
                query = query.filter(
                    Sample.compute_id.in_(filters["compute_id"].split()))
        except Exception as e:
            logging.error(
                "Encountered an error building filters, so clearing. " + str(e))
            session["index_filter"] = {}
            # Guarantee the keys read below exist even after a failure.
            filters.setdefault("start_date", date.today())
            filters.setdefault("end_date", date.today() + timedelta(1))
    else:
        # Default to Todays Results
        filters = dict()
        filters["start_date"] = date.today()
        filters["end_date"] = date.today() + timedelta(1)
        query = query.filter(
            Sample.date >= filters["start_date"])
    # Session-serialized dates come back as strings; normalize to date objects.
    if type(filters["start_date"]) == str:
        filters["start_date"] = datetime.strptime(filters["start_date"].strip(), "%Y-%m-%d").date()
    if type(filters["end_date"]) == str:
        filters["end_date"] = datetime.strptime(filters["end_date"].strip(), "%Y-%m-%d").date()
    return query, filters
2021-01-04 10:04:22 -05:00
def ingest_form(form):
    # TODO: not yet implemented — presumably intended to ingest a submitted
    # form's data; confirm intent before wiring up callers.
    pass
2021-01-04 12:15:56 -05:00
def group_columns(table):
    # TODO: not yet implemented — presumably intended to group related table
    # columns; confirm intent before wiring up callers.
    pass
@app.route('/', methods=['GET', 'POST'])
@superuser
def index():
    """Dashboard: filter samples, build chart/stat datasets, render the table.

    POST (or ?cancel=true) resets the session filter and, when the form
    validates, repopulates it; GET reuses the session filter so settings
    survive pagination. ?download=true streams the filtered set as a CSV.
    """
    form = forms.SearchForm(request.form)
    action = BASE_HREF + "/"
    if request.method == "POST" or request.args.get('cancel') == 'true':
        session["index_filter"] = {}  # Clear out the session if it is invalid.
        if form.validate():
            session["index_filter"] = {}
            if form.dateRange.data:
                start, end = form.dateRange.data.split("-")
                session["index_filter"]["start_date"] = datetime.strptime(start.strip(), "%m/%d/%Y").date()
                # End date is stored exclusive: one day past the selected end.
                session["index_filter"]["end_date"] = datetime.strptime(end.strip(), "%m/%d/%Y").date() + timedelta(1)
            if form.studentId.data:
                session["index_filter"]["student_id"] = form.studentId.data
            if form.location.data:
                session["index_filter"]["location"] = form.location.data
            if form.compute_id.data:
                session["index_filter"]["compute_id"] = form.compute_id.data
    samples = db.session.query(Sample).order_by(Sample.date.desc())
    # Previous form submission settings live in the session, so they are
    # preserved through pagination.
    filtered_samples, filters = apply_filters(samples, session)

    if request.args.get('download') == 'true':
        # Renamed from `csv` (which shadowed the imported csv module).
        csv_file = __make_csv(filtered_samples)
        return send_file(csv_file, attachment_filename='data_export.csv', as_attachment=True)

    location_charts_data = {}
    hourly_chart_data = {}
    weekday_chart_data = {}
    overall_chart_data = {}
    important_dates = {}
    overall_stat_data = {
        "one_week_ago": 0,
        "two_week_ago": 0,
        "today": 0,
    }
    location_stats_data = {}

    # All reference points are anchored on the (exclusive) end date.
    today = filters["end_date"] - timedelta(1)
    days_in_search = (filters["end_date"] - filters["start_date"]).days
    one_week_ago = filters["end_date"] - timedelta(7)
    two_weeks_ago = one_week_ago - timedelta(7)
    chart_ticks = []

    # Bucket size / tick label format scale with the size of the range.
    if days_in_search <= 1:
        timeFormat = "%I:%M %p"
        hours = 2
        days = 0
    elif days_in_search <= 3:
        timeFormat = "%m/%d %I %p"
        hours = 4
        days = 0
    else:
        timeFormat = "%m/%d"
        hours = 0
        days = 1

    # Count by Day: one case-count column per time bucket.
    bounds = daterange(filters["start_date"], filters["end_date"], days=days, hours=hours)
    chart_ticks = []
    for i in range(len(bounds) - 1):
        chart_ticks.append(f"{bounds[i].strftime(timeFormat)} - {bounds[i+1].strftime(timeFormat)}")
    cases = []
    for i in range(len(bounds) - 1):
        cases.append(func.count(case([(and_(Sample.date >= bounds[i], Sample.date <= bounds[i+1]), 1)])))
    q = db.session.query(Sample.location, Sample.station,
                         *cases
                         ).group_by(Sample.location, Sample.station)
    q, filters = apply_filters(q, session)
    for result in q:
        location, station = result[0], result[1]
        if location not in location_charts_data:
            location_charts_data[location] = dict()
        location_charts_data[location][station] = result[2:]

    # Count by hour
    cases = []
    for i in range(24):
        cases.append(func.count(case([(func.extract('hour', Sample.date) == i, 1)])))
    # BUG FIX: this query previously also selected/grouped by Sample.station
    # while keying results by location only and slicing result[1:], so the
    # station string leaked into the hourly counts and stations overwrote
    # each other. Group by location alone, matching the weekday query below.
    q = db.session.query(Sample.location,
                         *cases
                         ).group_by(Sample.location)
    q, filters = apply_filters(q, session)
    for result in q:
        location = result[0]
        hourly_chart_data[location] = result[1:]

    # Count by weekday (averaged over the number of days searched)
    cases = []
    for i in range(7):
        cases.append(func.count(case([(func.extract('dow', Sample.date) == i, 1)])))
    q = db.session.query(Sample.location,
                         *cases
                         ).group_by(Sample.location)
    q, filters = apply_filters(q, session)
    for result in q:
        location = result[0]
        weekday_chart_data[location] = [i / days_in_search for i in result[1:]]

    # Count by range: rolling two-week / one-week / today totals per location.
    cases = [func.count(case([(and_(Sample.date >= two_weeks_ago, Sample.date <= filters["end_date"]), 1)])),
             func.count(case([(and_(Sample.date >= one_week_ago, Sample.date <= filters["end_date"]), 1)])),
             func.count(case([(and_(Sample.date >= today, Sample.date <= filters["end_date"]), 1)]))]
    q = db.session.query(Sample.location,
                         *cases
                         ).group_by(Sample.location)
    q, filters = apply_filters(q, session)
    for result in q:
        location = result[0]
        if location not in location_stats_data:
            location_stats_data[location] = dict()
        location_stats_data[location]["two_week_ago"] = result[1]
        location_stats_data[location]["one_week_ago"] = result[2]
        location_stats_data[location]["today"] = result[3]

    # Aggregate per-station series into one overall series per location, and
    # roll the per-location stats up into overall totals.
    # NOTE(review): assumes every location in location_stats_data also appears
    # in location_charts_data (both queries use the same filters) — confirm.
    for location in location_stats_data:
        overall_chart_data[location] = np.sum([location_charts_data[location][station] for station in location_charts_data[location]], axis=0).tolist()
        overall_stat_data["one_week_ago"] += location_stats_data[location]["one_week_ago"]
        overall_stat_data["two_week_ago"] += location_stats_data[location]["two_week_ago"]
        overall_stat_data["today"] += location_stats_data[location]["today"]

    important_dates = {
        "today": (filters["end_date"] - timedelta(1)).strftime("%m/%d/%Y"),
        "range": filters["start_date"].strftime("%m/%d/%Y") + " - " + (filters["end_date"] - timedelta(1)).strftime("%m/%d/%Y"),
        "one_week_ago": one_week_ago.strftime("%m/%d/%Y"),
        "two_weeks_ago": two_weeks_ago.strftime("%m/%d/%Y"),
    }

    ################# Raw Samples Table ##############
    page = request.args.get(get_page_parameter(), type=int, default=1)
    pagination = Pagination(page=page, total=filtered_samples.count(
    ), search=False, record_name='samples', css_framework='bootstrap4')

    grouped_data = []
    # BUG FIX: pages are 1-indexed, so the slice must start at (page - 1) * 10.
    # The original `page * 10` skipped the first ten results entirely.
    for entry in filtered_samples[(page - 1) * 10:page * 10]:
        logging.info(entry.notifications)
        grouped_data.append({"barcode": entry.barcode,
                             "date": entry.date,
                             "notifications": entry.notifications,
                             "ids": [dict(type="computing_id",
                                          data=entry.computing_id),
                                     dict(type="student_id",
                                          data=entry.student_id)],
                             "contacts": [dict(type="phone",
                                               data=entry.phone),
                                          dict(type="email",
                                               data=entry.email)],
                             "taken_at": [dict(type="location",
                                               data=entry.location),
                                          dict(type="station",
                                               data=entry.station)],
                             })
    table = SampleTable(grouped_data)

    return render_template('layouts/default.html',
                           base_href=BASE_HREF,
                           content=render_template(
                               'pages/index.html',
                               form=form,
                               dates=important_dates,
                               table=table,
                               action=action,
                               pagination=pagination,
                               chart_ticks=chart_ticks,
                               overall_chart_data=overall_chart_data,
                               location_charts_data=location_charts_data,
                               hourly_chart_data=hourly_chart_data,
                               weekday_chart_data=weekday_chart_data,
                               overall_stat_data=overall_stat_data,
                               location_stats_data=location_stats_data,
                           ))
2020-12-19 21:56:21 -05:00
@app.route('/activate', methods=['GET', 'POST'])
@superuser
def activate_station():
    """Render the station-activation page inside the default layout."""
    inner = render_template('pages/stations.html')
    return render_template('layouts/default.html',
                           base_href=BASE_HREF,
                           content=inner)
def __make_csv(sample_query):
csvfile = io.StringIO()
headers = [
'barcode',
'student_id',
'date',
'location',
'phone',
'email',
'result_code',
'ivy_file',
'email_notified',
'text_notified'
]
writer = csv.DictWriter(csvfile, headers)
writer.writeheader()
for sample in sample_query.all():
writer.writerow(
{
'barcode': sample.barcode,
'student_id': sample.student_id,
'date': sample.date,
'location': sample.location,
'phone': sample.phone,
'email': sample.email,
'result_code': sample.result_code,
'ivy_file': sample.ivy_file,
'email_notified': sample.email_notified,
'text_notified': sample.text_notified,
}
)
# Creating the byteIO object from the StringIO Object
mem = io.BytesIO()
mem.write(csvfile.getvalue().encode('utf-8'))
# seeking was necessary. Python 3.5.2, Flask 0.12.2
mem.seek(0)
csvfile.close()
return mem
@app.route('/invitation', methods=['GET', 'POST'])
@superuser
def send_invitation():
    """Show the invitation form; on a valid POST, email the invitations.

    After a successful send we redirect back here (post/redirect/get);
    otherwise we fall through to render the paged history of invitations.
    """
    from communicator.models.invitation import Invitation
    from communicator.tables import InvitationTable
    invite_form = forms.InvitationForm(request.form)
    action = BASE_HREF + "/invitation"
    title = "Send invitation to students"
    if request.method == 'POST' and invite_form.validate():
        from communicator.services.notification_service import NotificationService
        with NotificationService(app) as notifier:
            notifier.send_invitations(
                invite_form.date.data, invite_form.location.data, invite_form.emails.data)
        return redirect(url_for('send_invitation'))
    # Display the history of previously-sent invitations, newest first.
    page_number = request.args.get(get_page_parameter(), type=int, default=1)
    sent = db.session.query(Invitation).order_by(Invitation.date_sent.desc())
    pager = Pagination(page=page_number, total=sent.count(),
                       search=False, record_name='samples')
    history = InvitationTable(sent.paginate(page_number, 10, error_out=False).items)
    return render_template(
        'form.html',
        form=invite_form,
        table=history,
        pagination=pager,
        action=action,
        title=title,
        description_map={},
        base_href=BASE_HREF
    )
2020-12-16 11:00:01 -05:00
@app.route('/imported_files', methods=['GET'])
@superuser
def list_imported_files_from_ivy():
    """Render a paged table of files imported from Ivy, newest first."""
    from communicator.models.ivy_file import IvyFile
    from communicator.tables import IvyFileTable
    page_number = request.args.get(get_page_parameter(), type=int, default=1)
    imported = db.session.query(IvyFile).order_by(IvyFile.date_added.desc())
    pager = Pagination(page=page_number, total=imported.count(),
                       search=False, record_name='samples')
    file_table = IvyFileTable(imported.paginate(page_number, 10, error_out=False).items)
    return render_template(
        'imported_files.html',
        table=file_table,
        pagination=pager,
        base_href=BASE_HREF
    )
2020-12-16 11:00:01 -05:00
@app.route('/sso')
def sso():
    """Debug endpoint: show the user resolved by the SSO user service."""
    from communicator.services.user_service import UserService
    user = UserService().get_user_info()
    return f"<h1>Current User: {user.display_name} ({user.uid})</h1>"
2020-12-16 11:00:01 -05:00
@app.route('/debug-sentry')
def trigger_error():
    """Deliberately raise ZeroDivisionError to verify Sentry error reporting."""
    _ = 1 / 0
2020-12-10 08:16:01 -05:00
# Access tokens
@app.cli.command()
def globus_token():
    """CLI: obtain a Globus access token via the Ivy service."""
    from communicator.services.ivy_service import IvyService
    IvyService().get_access_token()
@app.cli.command()
def count_files_in_ivy():
    """CLI: report how many files are waiting in Globus for transfer."""
    from communicator.services.ivy_service import IvyService
    pending = IvyService().get_file_count_from_globus()
    print(f"There are {pending} files awaiting transfer")
2020-12-16 11:00:01 -05:00
@app.cli.command()
def transfer():
    """CLI: kick off a Globus transfer via the Ivy service."""
    from communicator.services.ivy_service import IvyService
    IvyService().request_transfer()
@app.cli.command()
def delete():
    """CLI: delete the processed file via the Ivy service."""
    from communicator.services.ivy_service import IvyService
    IvyService().delete_file()