2020-12-18 10:12:15 -05:00
|
|
|
|
2020-12-28 09:15:42 -05:00
|
|
|
import random
|
2020-11-03 16:01:50 -05:00
|
|
|
import csv
|
|
|
|
import io
|
2020-12-16 11:51:36 -05:00
|
|
|
import json
|
2020-11-03 16:01:50 -05:00
|
|
|
|
2020-09-10 11:28:58 -04:00
|
|
|
import logging
|
|
|
|
import os
|
2020-11-11 10:47:34 -05:00
|
|
|
from datetime import datetime
|
2020-12-18 10:12:15 -05:00
|
|
|
from datetime import date
|
2020-10-02 15:16:28 -04:00
|
|
|
from functools import wraps
|
2020-09-10 11:28:58 -04:00
|
|
|
|
|
|
|
import connexion
|
|
|
|
import sentry_sdk
|
2020-12-16 11:51:36 -05:00
|
|
|
from connexion import ProblemException
|
2020-11-11 10:47:34 -05:00
|
|
|
from flask import render_template, request, redirect, url_for, flash, abort, Response, send_file, session
|
2020-09-23 12:43:58 -04:00
|
|
|
from flask_assets import Environment
|
2020-09-10 11:28:58 -04:00
|
|
|
from flask_cors import CORS
|
|
|
|
from flask_mail import Mail
|
|
|
|
from flask_marshmallow import Marshmallow
|
|
|
|
from flask_migrate import Migrate
|
2020-09-23 12:43:58 -04:00
|
|
|
from flask_paginate import Pagination, get_page_parameter
|
2020-09-10 11:28:58 -04:00
|
|
|
from flask_sqlalchemy import SQLAlchemy
|
2021-01-01 09:00:23 -05:00
|
|
|
from sqlalchemy import func, and_, case
|
2020-09-10 11:28:58 -04:00
|
|
|
from sentry_sdk.integrations.flask import FlaskIntegration
|
2020-09-23 12:43:58 -04:00
|
|
|
from webassets import Bundle
|
2020-10-15 15:29:40 -04:00
|
|
|
from flask_executor import Executor
|
2020-09-10 11:28:58 -04:00
|
|
|
|
2020-12-16 11:00:01 -05:00
|
|
|
import numpy as np
|
2020-09-10 11:28:58 -04:00
|
|
|
# Root logger configuration for the whole application.
logging.basicConfig(level=logging.INFO)

# API, fully defined in api.yml
connexion_app = connexion.FlaskApp(__name__)
# The underlying Flask application wrapped by Connexion; all routes and
# extensions below are registered against this object.
app = connexion_app.app

# Executor for long running tasks
executor = Executor(app)

# Configuration
# Layered configuration: defaults first, then either the testing config
# (when the TESTING env var is set to "true") or the instance config.
app.config.from_object('config.default')
if "TESTING" in os.environ and os.environ["TESTING"] == "true":
    app.config.from_object('config.testing')
    app.config.from_pyfile('../config/testing.py')
else:
    # Point the config loader at the Flask instance folder so the optional
    # deployment-specific config.py can be picked up (silently skipped if absent).
    app.config.root_path = app.instance_path
    app.config.from_pyfile('config.py', silent=True)
|
|
|
|
|
2020-12-10 08:16:01 -05:00
|
|
|
|
2020-12-16 11:51:36 -05:00
|
|
|
# Connexion Error handling
|
|
|
|
def render_errors(exception):
    """Serialize an unhandled API exception into a JSON 500 response."""
    body = json.dumps({"error": str(exception)})
    return Response(body, status=500, mimetype="application/json")
|
|
|
|
|
2020-12-18 10:12:15 -05:00
|
|
|
|
2020-12-16 11:51:36 -05:00
|
|
|
# Route every unhandled exception raised inside the Connexion API through
# render_errors so API clients always receive a JSON error body.
connexion_app.add_error_handler(Exception, render_errors)
|
|
|
|
|
|
|
|
|
2020-09-10 11:28:58 -04:00
|
|
|
# Mail settings
mail = Mail(app)

# Database
db = SQLAlchemy(app)
migrate = Migrate(app, db)  # Alembic migrations via Flask-Migrate
ma = Marshmallow(app)       # serialization schemas
|
|
|
|
|
2020-09-23 12:43:58 -04:00
|
|
|
# Asset management
# Re-register the static route so webassets and the app agree on the static
# URL path. The existing auto-created 'static' rule (if any) is removed first.
url_map = app.url_map
try:
    # NOTE(review): removes rules from the list being iterated -- appears to
    # work because at most one 'static' rule exists; confirm before changing.
    for rule in url_map.iter_rules('static'):
        url_map._rules.remove(rule)
except ValueError:
    # no static view was created yet
    pass
app.add_url_rule(
    app.static_url_path + '/<path:filename>',
    endpoint='static', view_func=app.send_static_file)
assets = Environment(app)
assets.init_app(app)
assets.url = app.static_url_path
# Compile the SCSS theme to CSS through pyscss.
scss = Bundle(
    'assets/scss/argon.scss',
    filters='pyscss',
    output='argon.css'
)
assets.register('app_scss', scss)

# Mount the REST API described in api.yml under /v1.0.
connexion_app.add_api('api.yml', base_path='/v1.0')
|
|
|
|
|
2020-12-19 13:41:15 -05:00
|
|
|
from datetime import date, timedelta
|
2020-12-30 09:06:46 -05:00
|
|
|
from communicator import models
|
|
|
|
from communicator import api
|
|
|
|
from communicator import forms
|
2021-01-04 09:20:09 -05:00
|
|
|
from communicator.models import Sample
|
|
|
|
from communicator.tables import SampleTable
|
2020-09-10 11:28:58 -04:00
|
|
|
# Convert list of allowed origins to list of regexes
# Each configured origin becomes a regex matching http or https plus any
# path/port suffix. FIX: '\.' in a non-raw string is an invalid escape
# sequence (DeprecationWarning today, SyntaxWarning/error in newer Python);
# use a raw string for the replacement text.
origins_re = [r"^https?:\/\/%s(.*)" % o.replace('.', r'\.')
              for o in app.config['CORS_ALLOW_ORIGINS']]
cors = CORS(connexion_app.app, origins=origins_re)
|
|
|
|
|
|
|
|
# Sentry error handling
# Only report to Sentry when explicitly enabled in config (e.g. production).
if app.config['ENABLE_SENTRY']:
    sentry_sdk.init(
        dsn="https://048a9b3ac72f476a8c77b910ad4d7f84@o401361.ingest.sentry.io/5454621",
        integrations=[FlaskIntegration()],
        # 1.0 sends every transaction for performance tracing.
        traces_sample_rate=1.0
    )
|
2020-09-21 14:21:07 -04:00
|
|
|
|
2020-12-16 11:00:01 -05:00
|
|
|
# HTML Pages
# Application root with leading/trailing slashes stripped; used to build
# form actions and links in the server-rendered pages below.
BASE_HREF = app.config['APPLICATION_ROOT'].strip('/')
|
|
|
|
|
2020-12-28 09:15:42 -05:00
|
|
|
|
2020-10-02 15:16:28 -04:00
|
|
|
def superuser(f):
    """Decorator restricting a view to valid users.

    Unauthorized visitors get a warning flash, an info-level log entry,
    and a 404 (rather than 403) so the page's existence is not revealed.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        from communicator.services.user_service import UserService
        if UserService().is_valid_user():
            return f(*args, **kwargs)
        flash("You do not have permission to view that page", "warning")
        logging.info("Permission Denied to user " +
                     UserService().get_user_info())
        abort(404)
    return decorated_function
|
|
|
|
|
2020-12-18 10:12:15 -05:00
|
|
|
@app.errorhandler(404)
@superuser
def page_not_found(e):
    """Render the custom 404 page.

    :param e: the NotFound exception raised by Flask.
    """
    # note that we set the 404 status explicitly
    # FIX: the status code was missing, so the error page was previously
    # served with 200 OK despite the comment above.
    # NOTE(review): @superuser calls abort(404) for invalid users, which
    # re-enters this handler -- confirm that is the intended behavior.
    return render_template('pages/404.html'), 404
|
2020-09-22 16:22:15 -04:00
|
|
|
|
2020-12-31 08:52:33 -05:00
|
|
|
def daterange(start, stop, days = 1, hours = 0):
    """Return the list of datetimes from start to stop (inclusive) in steps
    of the given number of days and hours.

    Plain ``date`` values are promoted to midnight datetimes first. The
    exact ``type(...) == date`` check is deliberate: ``datetime`` is a
    subclass of ``date`` and must not be re-converted.
    """
    if type(start) == date:
        start = date2datetime(start)
    if type(stop) == date:
        stop = date2datetime(stop)
    step = timedelta(days=days, hours=hours)
    points = []
    cursor = start
    while cursor <= stop:
        points.append(cursor)
        cursor = cursor + step
    return points
|
|
|
|
|
2020-12-30 09:06:46 -05:00
|
|
|
def date2datetime(_date):
    """Promote a date to a datetime at midnight (00:00:00) of that day."""
    midnight = datetime.min.time()
    return datetime.combine(_date, midnight)
|
2020-12-31 08:52:33 -05:00
|
|
|
|
2021-01-04 09:20:09 -05:00
|
|
|
def apply_filters(query, session):
    """Narrow a Sample query using the search filters saved in the session.

    Reads the "index_filter" dict from the Flask session (start_date,
    end_date, student_id, location, station, compute_id). Missing dates
    default to today / tomorrow. If anything goes wrong while building the
    filters, the saved filter is cleared so the next request starts fresh.

    :param query: a SQLAlchemy query over Sample (or Sample columns).
    :param session: the Flask session (any mapping with "index_filter").
    :returns: tuple of (filtered query, effective filters dict).
    """
    if "index_filter" in session:
        filters = session["index_filter"]
        try:
            if "start_date" not in filters:
                filters["start_date"] = date.today()
            query = query.filter(
                Sample.date >= filters["start_date"])
            if "end_date" in filters:
                query = query.filter(
                    Sample.date <= filters["end_date"])
            else:
                # Default the end of the range to tomorrow. No upper-bound
                # filter is applied in this branch (matching the original
                # behavior -- future-dated samples are not expected).
                filters["end_date"] = date.today() + timedelta(1)
            # The remaining filters are whitespace-separated lists of values.
            if "student_id" in filters:
                query = query.filter(
                    Sample.student_id.in_(filters["student_id"].split()))
            if "location" in filters:
                query = query.filter(
                    Sample.location.in_(filters["location"].split()))
            if "station" in filters:
                query = query.filter(
                    Sample.station.in_(filters["station"].split()))
            if "compute_id" in filters:
                # BUG FIX: this branch previously assigned to the undefined
                # name "filtered_samples", raising a NameError which the
                # except below silently turned into "clear all filters".
                query = query.filter(
                    Sample.compute_id.in_(filters["compute_id"].split()))
        except Exception as e:
            logging.error(
                "Encountered an error building filters, so clearing. " + str(e))
            session["index_filter"] = {}
    else:
        # Default to Todays Results
        filters = dict()
        filters["start_date"] = date.today()
        filters["end_date"] = date.today() + timedelta(1)
        query = query.filter(
            Sample.date >= filters["start_date"])
    # Dates can come back out of the session as ISO strings; normalize them
    # to date objects so downstream arithmetic works.
    if type(filters["start_date"]) == str:
        filters["start_date"] = datetime.strptime(filters["start_date"].strip(), "%Y-%m-%d").date()
    if type(filters["end_date"]) == str:
        filters["end_date"] = datetime.strptime(filters["end_date"].strip(), "%Y-%m-%d").date()

    return query, filters
|
|
|
|
|
2021-01-04 10:04:22 -05:00
|
|
|
def ingest_form(form):
    """Ingest a submitted form. Placeholder -- not implemented yet."""
    pass
|
2021-01-04 09:20:09 -05:00
|
|
|
@app.route('/', methods=['GET', 'POST'])
@superuser
def index():
    """Main dashboard: paginated sample table plus summary charts and stats.

    GET renders the dashboard with any filters saved in the session;
    POST (search form) or ?cancel=true replaces those filters.
    """
    form = forms.SearchForm(request.form)
    action = BASE_HREF + "/"

    if request.method == "POST" or request.args.get('cancel') == 'true':
        session["index_filter"] = {}  # Clear out the session if it is invalid.

        if form.validate():
            session["index_filter"] = {}
            if form.dateRange.data:
                start, end = form.dateRange.data.split("-")
                session["index_filter"]["start_date"] = datetime.strptime(start.strip(), "%m/%d/%Y").date()
                # End date is treated as exclusive, so push it one day past
                # the selected day to include the whole final day.
                session["index_filter"]["end_date"] = datetime.strptime(end.strip(), "%m/%d/%Y").date() + timedelta(1)
            if form.studentId.data:
                session["index_filter"]["student_id"] = form.studentId.data
            if form.location.data:
                session["index_filter"]["location"] = form.location.data
            if form.compute_id.data:
                session["index_filter"]["compute_id"] = form.compute_id.data

    samples = db.session.query(Sample).order_by(Sample.date.desc())
    # Store previous form submission settings in the session, so they are preseved through pagination.
    filtered_samples, filters = apply_filters(samples, session)

    # if request.args.get('download') == 'true':
    #     csv = __make_csv(filtered_samples)
    #     return send_file(csv, attachment_filename='data_export.csv', as_attachment=True)

    weekday_totals = [0 for _ in range(7)]  # Mon, Tues, ...
    hour_totals = [0 for _ in range(24)]  # 12AM, 1AM, ...

    # Per-location (and per-station) chart series, filled below.
    location_charts_data = {}
    hourly_chart_data = {}
    weekday_chart_data = {}

    overall_chart_data = {}
    dates = {}  # NOTE(review): appears unused below -- candidate for removal.
    overall_stat_data = {
        "one_week_ago": 0,
        "two_week_ago": 0,
        "today": 0,
    }
    location_stats_data = {}

    # Key dates derived from the active filter range (end_date is exclusive).
    today = filters["end_date"] - timedelta(1)
    days_in_search = (filters["end_date"] - filters["start_date"]).days
    one_week_ago = filters["end_date"] - timedelta(7)
    two_weeks_ago = one_week_ago - timedelta(7)
    chart_ticks = []

    # Choose a time-bucket size and tick-label format that suits the range.
    if days_in_search <= 1:
        timeFormat = "%I:%M %p"
        hours = 2
        days = 0
    elif days_in_search <= 3:
        timeFormat = "%m/%d %I %p"
        hours = 4
        days = 0
    else:
        timeFormat = "%m/%d"
        hours = 0
        days = 1

    # Count by Day
    # One COUNT(CASE ...) column per time bucket between consecutive bounds.
    bounds = daterange(filters["start_date"], filters["end_date"], days=days, hours=hours)
    cases = []
    for i in range(len(bounds) - 1):
        cases.append(func.count(case([(and_(Sample.date >= bounds[i], Sample.date <= bounds[i+1]), 1)])))

    q = db.session.query(Sample.location, Sample.station,
                         *cases
                         ).group_by(Sample.location, Sample.station)

    q, filters = apply_filters(q, session)

    for result in q:
        location, station = result[0], result[1]
        if location not in location_charts_data: location_charts_data[location] = dict()
        location_charts_data[location][station] = result[2:]
    # Recompute bounds in case apply_filters normalized the filter dates.
    bounds = daterange(filters["start_date"], filters["end_date"], days=days, hours=hours)

    chart_ticks = []
    for i in range(len(bounds) - 1):
        chart_ticks.append(f"{bounds[i].strftime(timeFormat)} - {bounds[i+1].strftime(timeFormat)}")

    # Count by hour
    cases = []
    for i in range(24):
        cases.append(func.count(case([(func.extract('hour', Sample.date) == i, 1)])))

    q = db.session.query(Sample.location, Sample.station,
                         *cases
                         ).group_by(Sample.location, Sample.station)

    q, filters = apply_filters(q, session)

    for result in q:
        location = result[0]
        # NOTE(review): grouped by (location, station) but keyed by location
        # only, so a later station's row overwrites an earlier one -- confirm.
        hourly_chart_data[location] = result[1:]

    # Count by weekday
    cases = []
    for i in range(7):
        cases.append(func.count(case([(func.extract('dow', Sample.date) == i, 1)])))

    q = db.session.query(Sample.location,
                         *cases
                         ).group_by(Sample.location)

    q, filters = apply_filters(q, session)

    for result in q:
        location = result[0]
        weekday_chart_data[location] = result[1:]

    # Count by sfs
    # Rolling totals: last two weeks, last week, and the final day of range.
    cases = [func.count(case([(and_(Sample.date >= two_weeks_ago, Sample.date <= filters["end_date"]), 1)])),
             func.count(case([(and_(Sample.date >= one_week_ago, Sample.date <= filters["end_date"]), 1)])),
             func.count(case([(and_(Sample.date >= today, Sample.date <= filters["end_date"]), 1)]))]

    q = db.session.query(Sample.location,
                         *cases
                         ).group_by(Sample.location)

    q, filters = apply_filters(q, session)

    for result in q:
        location = result[0]
        logging.info(result)
        if location not in location_stats_data: location_stats_data[location] = dict()
        location_stats_data[location]["two_week_ago"] = result[1]
        location_stats_data[location]["one_week_ago"] = result[2]
        location_stats_data[location]["today"] = result[3]

        # NOTE(review): this overwrites the weekday counts computed above
        # with the rolling stats row -- looks like a copy/paste slip; confirm.
        weekday_chart_data[location] = result[1:]

    # Aggregate results
    for location in location_stats_data:
        # Sum the per-station series into one overall series per location.
        overall_chart_data[location] = np.sum([location_charts_data[location][station] for station in location_charts_data[location]], axis=0).tolist()

        overall_stat_data["one_week_ago"] += location_stats_data[location]["one_week_ago"]
        overall_stat_data["two_week_ago"] += location_stats_data[location]["two_week_ago"]
        overall_stat_data["today"] += location_stats_data[location]["today"]

    # Pre-formatted date labels for the template.
    important_dates = {
        "today": (filters["end_date"] - timedelta(1)).strftime("%m/%d/%Y"),
        "range": filters["start_date"].strftime("%m/%d/%Y") + " - " + (filters["end_date"] - timedelta(1)).strftime("%m/%d/%Y"),
        "one_week_ago": one_week_ago.strftime("%m/%d/%Y"),
        "two_weeks_ago": two_weeks_ago.strftime("%m/%d/%Y"),
    }

    ################# Raw Samples Table ##############
    page = request.args.get(get_page_parameter(), type=int, default=1)
    pagination = Pagination(page=page, total=filtered_samples.count(
    ), search=False, record_name='samples', css_framework='bootstrap4')

    table = SampleTable(filtered_samples.paginate(
        page, 10, error_out=False).items)

    return render_template('layouts/default.html',
                           base_href=BASE_HREF,
                           content=render_template(
                               'pages/index.html',
                               form=form,
                               dates=important_dates,
                               table=table,
                               action=action,
                               pagination=pagination,
                               chart_ticks=chart_ticks,
                               overall_chart_data=overall_chart_data,
                               location_charts_data=location_charts_data,
                               hourly_chart_data=hourly_chart_data,
                               weekday_chart_data=weekday_chart_data,
                               overall_stat_data=overall_stat_data,
                               location_stats_data=location_stats_data,
                               weekday_totals=weekday_totals,
                               hour_totals=hour_totals,
                           ))
|
|
|
|
|
2020-12-19 21:56:21 -05:00
|
|
|
@app.route('/activate', methods=['GET', 'POST'])
@superuser
def activate_station():
    """Render the station activation page inside the default layout."""
    content = render_template('pages/stations.html')
    return render_template('layouts/default.html',
                           base_href=BASE_HREF,
                           content=content)
|
|
|
|
|
2020-11-03 16:01:50 -05:00
|
|
|
|
|
|
|
def __make_csv(sample_query):
    """Serialize every sample in *sample_query* into an in-memory CSV.

    :param sample_query: a query whose .all() yields Sample-like objects.
    :returns: a BytesIO of UTF-8 CSV bytes, seeked to 0 (ready for send_file).
    """
    headers = [
        'barcode',
        'student_id',
        'date',
        'location',
        'phone',
        'email',
        'result_code',
        'ivy_file',
        'email_notified',
        'text_notified'
    ]
    text_buffer = io.StringIO()
    writer = csv.DictWriter(text_buffer, headers)
    writer.writeheader()
    for sample in sample_query.all():
        # Every header name is also an attribute on the sample record.
        writer.writerow({column: getattr(sample, column) for column in headers})

    # Creating the byteIO object from the StringIO Object
    mem = io.BytesIO()
    mem.write(text_buffer.getvalue().encode('utf-8'))
    # seeking was necessary. Python 3.5.2, Flask 0.12.2
    mem.seek(0)
    text_buffer.close()
    return mem
|
|
|
|
|
|
|
|
|
2020-09-22 16:22:15 -04:00
|
|
|
@app.route('/invitation', methods=['GET', 'POST'])
@superuser
def send_invitation():
    """Send testing invitations to students and list previously sent ones."""
    from communicator.models.invitation import Invitation
    from communicator.tables import InvitationTable

    form = forms.InvitationForm(request.form)
    action = BASE_HREF + "/invitation"
    title = "Send invitation to students"
    if request.method == 'POST' and form.validate():
        from communicator.services.notification_service import NotificationService
        with NotificationService(app) as ns:
            ns.send_invitations(
                form.date.data, form.location.data, form.emails.data)
        # Redirect so a browser refresh does not re-send the invitations
        # (POST/redirect/GET pattern).
        return redirect(url_for('send_invitation'))
    # display results
    page = request.args.get(get_page_parameter(), type=int, default=1)
    invites = db.session.query(Invitation).order_by(
        Invitation.date_sent.desc())
    pagination = Pagination(page=page, total=invites.count(),
                            search=False, record_name='samples')

    table = InvitationTable(invites.paginate(page, 10, error_out=False).items)

    return render_template(
        'form.html',
        form=form,
        table=table,
        pagination=pagination,
        action=action,
        title=title,
        description_map={},
        base_href=BASE_HREF
    )
|
|
|
|
|
2020-12-16 11:00:01 -05:00
|
|
|
|
2020-09-23 16:01:31 -04:00
|
|
|
@app.route('/imported_files', methods=['GET'])
@superuser
def list_imported_files_from_ivy():
    """Show a paginated table of files imported from Ivy, newest first."""
    from communicator.models.ivy_file import IvyFile
    from communicator.tables import IvyFileTable

    # display results
    page = request.args.get(get_page_parameter(), type=int, default=1)
    ivy_files = db.session.query(IvyFile).order_by(IvyFile.date_added.desc())
    pagination = Pagination(page=page, total=ivy_files.count(),
                            search=False, record_name='samples')

    table = IvyFileTable(ivy_files.paginate(page, 10, error_out=False).items)
    return render_template(
        'imported_files.html',
        table=table,
        pagination=pagination,
        base_href=BASE_HREF
    )
|
|
|
|
|
2020-12-16 11:00:01 -05:00
|
|
|
|
2020-09-23 14:43:24 -04:00
|
|
|
@app.route('/sso')
def sso():
    """Debug page showing the SSO-authenticated user's name and uid."""
    from communicator.services.user_service import UserService
    user = UserService().get_user_info()
    return f"<h1>Current User: {user.display_name} ({user.uid})</h1>"
|
2020-09-22 16:22:15 -04:00
|
|
|
|
2020-12-16 11:00:01 -05:00
|
|
|
|
2020-10-02 15:16:28 -04:00
|
|
|
@app.route('/debug-sentry')
def trigger_error():
    """Deliberately raise ZeroDivisionError to verify Sentry reporting."""
    return 1 / 0
|
|
|
|
|
2020-12-10 08:16:01 -05:00
|
|
|
|
2020-09-21 14:21:07 -04:00
|
|
|
# Access tokens
|
|
|
|
@app.cli.command()
def globus_token():
    """CLI command: obtain an access token via the IvyService."""
    from communicator.services.ivy_service import IvyService
    IvyService().get_access_token()
|
|
|
|
|
2020-09-22 16:22:15 -04:00
|
|
|
|
2020-09-21 14:21:07 -04:00
|
|
|
@app.cli.command()
def count_files_in_ivy():
    """CLI command: report how many files await transfer from Globus."""
    from communicator.services.ivy_service import IvyService
    count = IvyService().get_file_count_from_globus()
    print(f"There are {count} files awaiting transfer")
|
2020-09-22 16:22:15 -04:00
|
|
|
|
2020-12-16 11:00:01 -05:00
|
|
|
|
2020-09-21 14:21:07 -04:00
|
|
|
@app.cli.command()
def transfer():
    """CLI command: request a file transfer via the IvyService."""
    from communicator.services.ivy_service import IvyService
    IvyService().request_transfer()
|
|
|
|
|
2020-09-22 16:22:15 -04:00
|
|
|
|
2020-09-21 14:21:07 -04:00
|
|
|
@app.cli.command()
def delete():
    """CLI command: delete the processed file via the IvyService."""
    from communicator.services.ivy_service import IvyService
    IvyService().delete_file()
|