import csv
import io
import json
import logging
import os
from datetime import datetime
from datetime import date
from functools import wraps
import connexion
import sentry_sdk
from connexion import ProblemException
from flask import render_template, request, redirect, url_for, flash, abort, Response, send_file, session
from flask_assets import Environment
from flask_cors import CORS
from flask_mail import Mail
from flask_marshmallow import Marshmallow
from flask_migrate import Migrate
from flask_paginate import Pagination, get_page_parameter
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import func
from sentry_sdk.integrations.flask import FlaskIntegration
from webassets import Bundle
from flask_executor import Executor
import numpy as np
logging.basicConfig(level=logging.INFO)
# API, fully defined in api.yml
connexion_app = connexion.FlaskApp(__name__)
app = connexion_app.app
# Executor for long running tasks
executor = Executor(app)
# Configuration
app.config.from_object('config.default')
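# When the TESTING environment variable is set, layer the testing settings on top
# of the defaults; otherwise load config.py from the application's instance folder.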
if "TESTING" in os.environ and os.environ["TESTING"] == "true":
app.config.from_object('config.testing')
app.config.from_pyfile('../config/testing.py')
else:
app.config.root_path = app.instance_path
app.config.from_pyfile('config.py', silent=True)
# Connexion Error handling
def render_errors(exception):
    return Response(json.dumps({"error": str(exception)}), status=500, mimetype="application/json")
connexion_app.add_error_handler(Exception, render_errors)
# Mail settings
mail = Mail(app)
# Database
db = SQLAlchemy(app)
migrate = Migrate(app, db)
ma = Marshmallow(app)
# Asset management
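# Drop the auto-registered 'static' rule (if any) and re-add it explicitly before
# configuring the webassets environment below.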
url_map = app.url_map
try:
    for rule in url_map.iter_rules('static'):
        url_map._rules.remove(rule)
except ValueError:
    # no static view was created yet
    pass
app.add_url_rule(
    app.static_url_path + '/<path:filename>',
    endpoint='static', view_func=app.send_static_file)
assets = Environment(app)
assets.init_app(app)
assets.url = app.static_url_path
scss = Bundle(
    'assets/scss/argon.scss',
    filters='pyscss',
    output='argon.css'
)
assets.register('app_scss', scss)
import random
from communicator import forms
from communicator import api
from communicator import models
from datetime import date, timedelta
connexion_app.add_api('api.yml', base_path='/v1.0')
# Convert list of allowed origins to list of regexes
origins_re = [r"^https?:\/\/%s(.*)" % o.replace('.', r'\.')
              for o in app.config['CORS_ALLOW_ORIGINS']]
cors = CORS(connexion_app.app, origins=origins_re)
# Sentry error handling
if app.config['ENABLE_SENTRY']:
    sentry_sdk.init(
        dsn="https://048a9b3ac72f476a8c77b910ad4d7f84@o401361.ingest.sentry.io/5454621",
        integrations=[FlaskIntegration()],
        traces_sample_rate=1.0
    )
# HTML Pages
BASE_HREF = app.config['APPLICATION_ROOT'].strip('/')
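# Restricts a view to users that UserService().is_valid_user() accepts;
# everyone else is flashed a warning and shown a 404.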
def superuser(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        from communicator.services.user_service import UserService
        if not UserService().is_valid_user():
            flash("You do not have permission to view that page", "warning")
            logging.info("Permission Denied to user %s",
                         UserService().get_user_info())
            abort(404)
        return f(*args, **kwargs)
    return decorated_function
@app.errorhandler(404)
@superuser
def page_not_found(e):
    # note that we set the 404 status explicitly
    return render_template('pages/404.html'), 404
@app.route('/', methods=['GET', 'POST'])
@superuser
def index():
    from communicator.models import Sample
    from communicator.tables import SampleTable

    download = False

    form = forms.SearchForm(request.form)
    action = BASE_HREF + "/"
    samples = db.session.query(Sample).order_by(Sample.date.desc())
    if request.method == "POST" or request.args.get('cancel') == 'true':
        session["index_filter"] = {}  # Clear out the session if it is invalid.
        if form.validate():
            session["index_filter"] = {}
            if form.startDate.data:
                session["index_filter"]["start_date"] = form.startDate.data
            if form.endDate.data:
                session["index_filter"]["end_date"] = form.endDate.data
            if form.studentId.data:
                session["index_filter"]["student_id"] = form.studentId.data
            if form.location.data:
                session["index_filter"]["location"] = form.location.data
            if form.email.data:
                session["index_filter"]["email"] = form.email.data
            # if form.download.data:
            #     download = True

    # Store previous form submission settings in the session, so they are preserved through pagination.
    filtered_samples = samples
    filters = {}  # Default, so the stats section below works even without a session filter.
    if "index_filter" in session:
        filters = session["index_filter"]
        try:
            if "start_date" in filters:
                filtered_samples = filtered_samples.filter(Sample.date >= filters["start_date"])
            else:
                filtered_samples = filtered_samples.filter(Sample.date >= date.today())
                filters["start_date"] = date.today()
            if "end_date" in filters:
                filtered_samples = filtered_samples.filter(Sample.date <= filters["end_date"])
            if "student_id" in filters:
                filtered_samples = filtered_samples.filter(
                    Sample.student_id.in_(filters["student_id"].split()))
            if "location" in filters:
                filtered_samples = filtered_samples.filter(
                    Sample.location.in_(filters["location"].split()))
            if "email" in filters:
                filtered_samples = filtered_samples.filter(
                    Sample.email.ilike(filters["email"] + "%"))
        except Exception as e:
            logging.error("Encountered an error building filters, so clearing. " + str(e))
            session["index_filter"] = {}
    else:
        # Default to today's results
        filtered_samples = filtered_samples.filter(Sample.date >= date.today())

    if download:
        csv_export = __make_csv(filtered_samples)
        return send_file(csv_export, attachment_filename='data_export.csv', as_attachment=True)
    ############# Build Graphs ######################
    # Analysis
    station_charts = []
    location_chart = {"datasets": []}
    stats = dict()
    weekday_totals = [0 for _ in range(7)]   # Mon, Tues, ...
    hour_totals = [0 for _ in range(24)]     # 12AM, 1AM, ...

    ############# Helper Variables ##################
    start_date = filters["start_date"] if "start_date" in filters else date.today()
    end_date = filters["end_date"] if "end_date" in filters else date.today() + timedelta(7)
    # days = abs(start_date - end_date).days
    # weeks_apart = days // 7 if days > 7 else 1

    # Get Active Locations Info
    active_stations = ["10", "20", "30", "40", "50", "60"]
    # Separate data by location and station
    location_data = dict()
    sample_times = dict()
    active_stations = set()
    for entry in filtered_samples:
        loc_code = str(entry.location)[:2]
        stat_code = str(entry.location)[2:]
        active_stations.add(stat_code)
        if loc_code not in location_data:
            location_data[loc_code] = [entry]
            sample_times[loc_code] = [entry.date.timestamp()]
        else:
            location_data[loc_code].append(entry)
            sample_times[loc_code].append(entry.date.timestamp())
    ############# #######################
    stats["all"] = filtered_samples.count()
    ############# Daily Total #######################
    stats["today"] = samples.filter(Sample.date >= date.today()).count()
    ############# Last 2 Week Average ###############
    stats["weeks"] = round(samples.filter(Sample.date >= (date.today() - timedelta(14))).count() / 14, 2)
    ################# Busiest Days/Hours ############
    if filtered_samples.count() > 0:
        for entry in filtered_samples:
            weekday_totals[entry.date.weekday()] += 1
            hour_totals[entry.date.hour] += 1

        Range = (filtered_samples[-1].date.timestamp(), filtered_samples[0].date.timestamp())
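        # For each location, bin its sample timestamps with np.histogram and emit the
        # bin centers and counts as {x, y} points for the location chart dataset.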
        for loc_code in location_data.keys():
            ############# Build histogram ###################
            color = [hash(loc_code) % 256, 128, (hash(loc_code) % 256 + 128) % 256]
            single_hist = dict({
                "label": loc_code,
                "borderColor": f'rgba({color[0]},{color[1]},{color[2]},.7)',
                "pointBorderColor": f'rgba({color[0]},{color[1]},{color[2]},1)',
                "borderWidth": 8,
                "data": [],
            })
            # https://stackoverflow.com/questions/19442224/getting-information-for-bins-in-matplotlib-histogram-function
            hist, bin_edges = np.histogram(np.array(sample_times[loc_code]), range=Range)
            bins = [bin_edges[i] + (bin_edges[i + 1] - bin_edges[i]) / 2
                    for i in range(len(bin_edges) - 1)]
            for cnt, time in zip(hist, bins):
                single_hist["data"].append({
                    "x": datetime.utcfromtimestamp(time), "y": int(cnt)
                })
            location_chart["datasets"].append(single_hist)

            ############## Build Station Graph ##############
            station_lines = []
            # Read Data by station
            i = 0
            for stat_code in active_stations:
                # ! Inefficient but works for now
                filtered_entries = [_entry for _entry in location_data[loc_code]
                                    if str(_entry.location)[2:] == stat_code]
                if len(filtered_entries) == 0:
                    continue
                station_line = {"label": stat_code,
                                "borderColor": f'rgba(50,255,255,.7)',
                                "pointBorderColor": f'rgba(50,255,255,1)',
                                "borderWidth": 10,
                                "data": [
                                    {"x": filtered_entries[0].date, "y": i},
                                    {"x": filtered_entries[-1].date, "y": i},
                                ],
                                }
                i += 1
                station_lines.append(station_line)
            station_charts.append({"datasets": station_lines, "labels": []})
    #################################################
    # # Check for Unresponsive
    # for loc_code in active_stations:
    #     if loc_code not in location_data:
    #         location_dict["datasets"].append({
    #             "label": loc_code,
    #             "borderColor": f'rgba(128,128,128,.7)',
    #             "pointBorderColor": f'rgba(128,128,128,1)',
    #             "borderWidth": 10,
    #             "data": [{
    #                 "x": session["index_filter"]["start_date"], "y": i
    #             }, ],
    #         })
    #         i += 1
    ################# Raw Samples Table ##############
    page = request.args.get(get_page_parameter(), type=int, default=1)
    pagination = Pagination(page=page, total=filtered_samples.count(),
                            search=False, record_name='samples', css_framework='bootstrap4')
    table = SampleTable(filtered_samples.paginate(page, 10, error_out=False).items)

    return render_template('layouts/default.html',
                           base_href=BASE_HREF,
                           content=render_template(
                               'pages/index.html',
                               form=form,
                               table=table,
                               action=action,
                               pagination=pagination,
                               location_data=location_chart,
                               station_data=station_charts,
                               weekday_totals=weekday_totals,
                               hour_totals=hour_totals,
                               stats=stats
                           ))
def __make_csv(sample_query):
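    """Write the given Sample query out as CSV and return it as an in-memory
    BytesIO suitable for Flask's send_file."""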
    csvfile = io.StringIO()
    headers = [
        'barcode',
        'student_id',
        'date',
        'location',
        'phone',
        'email',
        'result_code',
        'ivy_file',
        'email_notified',
        'text_notified'
    ]
    writer = csv.DictWriter(csvfile, headers)
    writer.writeheader()
    for sample in sample_query.all():
        writer.writerow(
            {
                'barcode': sample.barcode,
                'student_id': sample.student_id,
                'date': sample.date,
                'location': sample.location,
                'phone': sample.phone,
                'email': sample.email,
                'result_code': sample.result_code,
                'ivy_file': sample.ivy_file,
                'email_notified': sample.email_notified,
                'text_notified': sample.text_notified,
            }
        )
    # Create a BytesIO object from the StringIO object
    mem = io.BytesIO()
    mem.write(csvfile.getvalue().encode('utf-8'))
    # seeking was necessary. Python 3.5.2, Flask 0.12.2
    mem.seek(0)
    csvfile.close()
    return mem
@app.route('/invitation', methods=['GET', 'POST'])
@superuser
def send_invitation():
    from communicator.models.invitation import Invitation
    from communicator.tables import InvitationTable

    form = forms.InvitationForm(request.form)
    action = BASE_HREF + "/invitation"
    title = "Send invitation to students"
    if request.method == 'POST' and form.validate():
        from communicator.services.notification_service import NotificationService
        with NotificationService(app) as ns:
            ns.send_invitations(
                form.date.data, form.location.data, form.emails.data)
        return redirect(url_for('send_invitation'))

    # display results
    page = request.args.get(get_page_parameter(), type=int, default=1)
    invites = db.session.query(Invitation).order_by(
        Invitation.date_sent.desc())
    pagination = Pagination(page=page, total=invites.count(),
                            search=False, record_name='samples')
    table = InvitationTable(invites.paginate(page, 10, error_out=False).items)

    return render_template(
        'form.html',
        form=form,
        table=table,
        pagination=pagination,
        action=action,
        title=title,
        description_map={},
        base_href=BASE_HREF
    )
@app.route('/imported_files', methods=['GET'])
@superuser
def list_imported_files_from_ivy():
    from communicator.models.ivy_file import IvyFile
    from communicator.tables import IvyFileTable

    # display results
    page = request.args.get(get_page_parameter(), type=int, default=1)
    files = db.session.query(IvyFile).order_by(IvyFile.date_added.desc())
    pagination = Pagination(page=page, total=files.count(),
                            search=False, record_name='samples')
    table = IvyFileTable(files.paginate(page, 10, error_out=False).items)

    return render_template(
        'imported_files.html',
        table=table,
        pagination=pagination,
        base_href=BASE_HREF
    )
@app.route('/sso')
def sso():
    from communicator.services.user_service import UserService
    user = UserService().get_user_info()
    response = ""
    response += f"<h1>Current User: {user.display_name} ({user.uid})</h1>"
    return response
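# Deliberately raises an error so the Sentry integration can be verified end to end.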
@app.route('/debug-sentry')
def trigger_error():
    division_by_zero = 1 / 0
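# Flask CLI commands (registered via app.cli) for working with the Ivy/Globus transfer service.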
# Access tokens
@app.cli.command()
def globus_token():
    from communicator.services.ivy_service import IvyService
    ivy_service = IvyService()
    ivy_service.get_access_token()


@app.cli.command()
def count_files_in_ivy():
    from communicator.services.ivy_service import IvyService
    ivy_service = IvyService()
    count = ivy_service.get_file_count_from_globus()
    print(f"There are {count} files awaiting transfer")


@app.cli.command()
def transfer():
    from communicator.services.ivy_service import IvyService
    ivy_service = IvyService()
    ivy_service.request_transfer()


@app.cli.command()
def delete():
    from communicator.services.ivy_service import IvyService
    ivy_service = IvyService()
    ivy_service.delete_file()