Merge pull request #153 from sartography/feature/documents_publishing

GitHub integration with admin (Documents publishing) - WIP
Dan Funk 2020-07-30 11:15:46 -04:00 committed by GitHub
commit 0b5e5e065a
7 changed files with 228 additions and 35 deletions


@@ -45,6 +45,7 @@ webtest = "*"
werkzeug = "*"
xlrd = "*"
xlsxwriter = "*"
pygithub = "*"
[requires]
python_version = "3.7"

Pipfile.lock (generated)

@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "2057a84011229daa6b8a9491d729a0bae5225e6ce11c7ca45136d3c1fad85ec0"
"sha256": "381d29428eb328ad6167774b510b9d818bd1505b95f50454a19f1564782326cc"
},
"pipfile-spec": 6,
"requires": {
@@ -207,6 +207,13 @@
"index": "pypi",
"version": "==5.2.1"
},
"deprecated": {
"hashes": [
"sha256:525ba66fb5f90b07169fdd48b6373c18f1ee12728ca277ca44567a367d9d7f74",
"sha256:a766c1dccb30c5f6eb2b203f87edd1d8588847709c78589e1521d769addc8218"
],
"version": "==1.2.10"
},
"docutils": {
"hashes": [
"sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
@@ -546,25 +553,25 @@
},
"pandas": {
"hashes": [
"sha256:02f1e8f71cd994ed7fcb9a35b6ddddeb4314822a0e09a9c5b2d278f8cb5d4096",
"sha256:13f75fb18486759da3ff40f5345d9dd20e7d78f2a39c5884d013456cec9876f0",
"sha256:35b670b0abcfed7cad76f2834041dcf7ae47fd9b22b63622d67cdc933d79f453",
"sha256:4c73f373b0800eb3062ffd13d4a7a2a6d522792fa6eb204d67a4fad0a40f03dc",
"sha256:5759edf0b686b6f25a5d4a447ea588983a33afc8a0081a0954184a4a87fd0dd7",
"sha256:5a7cf6044467c1356b2b49ef69e50bf4d231e773c3ca0558807cdba56b76820b",
"sha256:69c5d920a0b2a9838e677f78f4dde506b95ea8e4d30da25859db6469ded84fa8",
"sha256:8778a5cc5a8437a561e3276b85367412e10ae9fff07db1eed986e427d9a674f8",
"sha256:9871ef5ee17f388f1cb35f76dc6106d40cb8165c562d573470672f4cdefa59ef",
"sha256:9c31d52f1a7dd2bb4681d9f62646c7aa554f19e8e9addc17e8b1b20011d7522d",
"sha256:ab8173a8efe5418bbe50e43f321994ac6673afc5c7c4839014cf6401bbdd0705",
"sha256:ae961f1f0e270f1e4e2273f6a539b2ea33248e0e3a11ffb479d757918a5e03a9",
"sha256:b3c4f93fcb6e97d993bf87cdd917883b7dab7d20c627699f360a8fb49e9e0b91",
"sha256:c9410ce8a3dee77653bc0684cfa1535a7f9c291663bd7ad79e39f5ab58f67ab3",
"sha256:f69e0f7b7c09f1f612b1f8f59e2df72faa8a6b41c5a436dde5b615aaf948f107",
"sha256:faa42a78d1350b02a7d2f0dbe3c80791cf785663d6997891549d0f86dc49125e"
"sha256:0210f8fe19c2667a3817adb6de2c4fd92b1b78e1975ca60c0efa908e0985cbdb",
"sha256:0227e3a6e3a22c0e283a5041f1e3064d78fbde811217668bb966ed05386d8a7e",
"sha256:0bc440493cf9dc5b36d5d46bbd5508f6547ba68b02a28234cd8e81fdce42744d",
"sha256:16504f915f1ae424052f1e9b7cd2d01786f098fbb00fa4e0f69d42b22952d798",
"sha256:182a5aeae319df391c3df4740bb17d5300dcd78034b17732c12e62e6dd79e4a4",
"sha256:35db623487f00d9392d8af44a24516d6cb9f274afaf73cfcfe180b9c54e007d2",
"sha256:40ec0a7f611a3d00d3c666c4cceb9aa3f5bf9fbd81392948a93663064f527203",
"sha256:47a03bfef80d6812c91ed6fae43f04f2fa80a4e1b82b35aa4d9002e39529e0b8",
"sha256:4b21d46728f8a6be537716035b445e7ef3a75dbd30bd31aa1b251323219d853e",
"sha256:4d1a806252001c5db7caecbe1a26e49a6c23421d85a700960f6ba093112f54a1",
"sha256:60e20a4ab4d4fec253557d0fc9a4e4095c37b664f78c72af24860c8adcd07088",
"sha256:9f61cca5262840ff46ef857d4f5f65679b82188709d0e5e086a9123791f721c8",
"sha256:a15835c8409d5edc50b4af93be3377b5dd3eb53517e7f785060df1f06f6da0e2",
"sha256:b39508562ad0bb3f384b0db24da7d68a2608b9ddc85b1d931ccaaa92d5e45273",
"sha256:ed60848caadeacecefd0b1de81b91beff23960032cded0ac1449242b506a3b3f",
"sha256:fc714895b6de6803ac9f661abb316853d0cd657f5d23985222255ad76ccedc25"
],
"index": "pypi",
"version": "==1.0.5"
"version": "==1.1.0"
},
"psycopg2-binary": {
"hashes": [
@@ -616,6 +623,14 @@
],
"version": "==2.20"
},
"pygithub": {
"hashes": [
"sha256:8375a058ec651cc0774244a3bc7395cf93617298735934cdd59e5bcd9a1df96e",
"sha256:d2d17d1e3f4474e070353f201164685a95b5a92f5ee0897442504e399c7bc249"
],
"index": "pypi",
"version": "==1.51"
},
"pygments": {
"hashes": [
"sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",
@@ -869,12 +884,18 @@
"index": "pypi",
"version": "==1.0.1"
},
"wrapt": {
"hashes": [
"sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"
],
"version": "==1.12.1"
},
"wtforms": {
"hashes": [
"sha256:6ff8635f4caeed9f38641d48cfe019d0d3896f41910ab04494143fc027866e1b",
"sha256:861a13b3ae521d6700dac3b2771970bd354a63ba7043ecc3a82b5288596a1972"
"sha256:43f19879b2a9b8dfd81d2e4e427ce44d3e5c09dbe08f2af8f4be9586b7dfc33d",
"sha256:715ebd303f47384bf6468fd9dfff52c6acc400e71204df8acfa6ef7bf40e1c27"
],
"version": "==2.3.1"
"version": "==2.3.2"
},
"xlrd": {
"hashes": [
@@ -886,11 +907,11 @@
},
"xlsxwriter": {
"hashes": [
"sha256:828b3285fc95105f5b1946a6a015b31cf388bd5378fdc6604e4d1b7839df2e77",
"sha256:82a3b0e73e3913483da23791d1a25e4d2dbb3837d1be4129473526b9a270a5cc"
"sha256:3015f707cf237d277cf1b2d7805f409f0387e32bc52f3c76db9f85098980e828",
"sha256:ee3fc2f32890246aba44dd14d777d6b3135e3454f865d8cc669618e20152296b"
],
"index": "pypi",
"version": "==1.2.9"
"version": "==1.3.0"
},
"zipp": {
"hashes": [
@@ -956,6 +977,12 @@
"markers": "python_version < '3.8'",
"version": "==1.7.0"
},
"iniconfig": {
"hashes": [
"sha256:aa0b40f50a00e72323cb5d41302f9c6165728fd764ac8822aa3fff00a40d56b4"
],
"version": "==1.0.0"
},
"more-itertools": {
"hashes": [
"sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5",
@@ -1001,11 +1028,11 @@
},
"pytest": {
"hashes": [
"sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1",
"sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"
"sha256:85228d75db9f45e06e57ef9bf4429267f81ac7c0d742cc9ed63d09886a9fe6f4",
"sha256:8b6007800c53fdacd5a5c192203f4e531eb2a1540ad9c752e052ec0f7143dbad"
],
"index": "pypi",
"version": "==5.4.3"
"version": "==6.0.1"
},
"six": {
"hashes": [
@@ -1014,12 +1041,12 @@
],
"version": "==1.15.0"
},
"wcwidth": {
"toml": {
"hashes": [
"sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784",
"sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"
"sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f",
"sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"
],
"version": "==0.2.5"
"version": "==0.10.1"
},
"zipp": {
"hashes": [


@@ -46,6 +46,9 @@ PB_STUDY_DETAILS_URL = environ.get('PB_STUDY_DETAILS_URL', default=PB_BASE_URL +
LDAP_URL = environ.get('LDAP_URL', default="ldap.virginia.edu").strip('/') # No trailing slash or http://
LDAP_TIMEOUT_SEC = int(environ.get('LDAP_TIMEOUT_SEC', default=1))
# GitHub token
GITHUB_TOKEN = environ.get('GITHUB_TOKEN', None)
# Email configuration
DEFAULT_SENDER = 'askresearch@virginia.edu'
FALLBACK_EMAILS = ['askresearch@virginia.edu', 'sartographysupport@googlegroups.com']
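
The new GITHUB_TOKEN setting is only read from the environment; nothing validates it at startup. A minimal sanity-check sketch, assuming the crc app object and the pygithub dependency added in the Pipfile above (the script form and print call are illustrative, not part of this change):

from github import Github
from crc import app

# The config above loads the token from the GITHUB_TOKEN environment variable.
gh = Github(app.config['GITHUB_TOKEN'])
print(gh.get_user().login)  # raises if the token is missing or rejected by GitHub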


@@ -3,19 +3,22 @@ import json
from flask import url_for
from flask_admin import Admin
from flask_admin.actions import action
from flask_admin.contrib import sqla
from flask_admin.contrib.sqla import ModelView
from sqlalchemy import desc
from werkzeug.utils import redirect
from jinja2 import Markup
from crc import db, app
from crc.api.user import verify_token, verify_token_admin
from crc.models.approval import ApprovalModel
from crc.models.file import FileModel
from crc.models.file import FileModel, FileDataModel
from crc.models.task_event import TaskEventModel
from crc.models.study import StudyModel
from crc.models.user import UserModel
from crc.models.workflow import WorkflowModel
from crc.services.file_service import FileService


class AdminModelView(sqla.ModelView):
@@ -34,26 +37,40 @@ class AdminModelView(sqla.ModelView):
        # redirect to login page if user doesn't have access
        return redirect(url_for('home'))


class UserView(AdminModelView):
    column_filters = ['uid']


class StudyView(AdminModelView):
    column_filters = ['id', 'primary_investigator_id']
    column_searchable_list = ['title']


class ApprovalView(AdminModelView):
    column_filters = ['study_id', 'approver_uid']


class WorkflowView(AdminModelView):
    column_filters = ['study_id', 'id']


class FileView(AdminModelView):
    column_filters = ['workflow_id']
    column_filters = ['workflow_id', 'type']

    # flask-admin actions receive the primary keys of the rows selected in the list view.
    @action('publish', 'Publish', 'Are you sure you want to publish the selected file(s)?')
    def action_publish(self, ids):
        FileService.publish_to_github(ids)

    @action('update', 'Update', 'Are you sure you want to update the selected file(s)?')
    def action_update(self, ids):
        FileService.update_from_github(ids)


def json_formatter(view, context, model, name):
    value = getattr(model, name)
    json_value = json.dumps(value, ensure_ascii=False, indent=2)
    return Markup('<pre>{}</pre>'.format(json_value))
    return Markup(f'<pre>{json_value}</pre>')


class TaskEventView(AdminModelView):
    column_filters = ['workflow_id', 'action']
@@ -62,6 +79,7 @@ class TaskEventView(AdminModelView):
        'form_data': json_formatter,
    }


admin = Admin(app)
admin.add_view(StudyView(StudyModel, db.session))
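
The hunk above only shows the StudyView registration; wiring the new FileView actions into the admin UI would follow the same add_view pattern further down the file. A hedged sketch of that registration (these exact lines are not part of the visible diff):

admin.add_view(ApprovalView(ApprovalModel, db.session))
admin.add_view(WorkflowView(WorkflowModel, db.session))
admin.add_view(FileView(FileModel, db.session))
admin.add_view(TaskEventView(TaskEventModel, db.session))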


@@ -2,6 +2,7 @@ import hashlib
import json
import os
from datetime import datetime
from github import Github, UnknownObjectException
from uuid import UUID
from lxml import etree
@@ -332,3 +333,51 @@ class FileService(object):
            file_model.archived = True
            session.commit()
            app.logger.info("Failed to delete file, so archiving it instead. %i, due to %s" % (file_id, str(ie)))

    @staticmethod
    def update_from_github(file_ids):
        gh_token = app.config['GITHUB_TOKEN']
        _github = Github(gh_token)
        repo = _github.get_user().get_repo('crispy-fiesta')
        for file_id in file_ids:
            file_data_model = FileDataModel.query.filter_by(
                file_model_id=file_id
            ).order_by(
                desc(FileDataModel.version)
            ).first()
            try:
                repo_file = repo.get_contents(file_data_model.file_model.name)
            except UnknownObjectException:
                # TODO: Add message indicating file is not in the repo
                pass
            else:
                file_data_model.data = repo_file.decoded_content
                session.add(file_data_model)
                session.commit()

    @staticmethod
    def publish_to_github(file_ids):
        gh_token = app.config['GITHUB_TOKEN']
        _github = Github(gh_token)
        repo = _github.get_user().get_repo('crispy-fiesta')
        for file_id in file_ids:
            file_data_model = FileDataModel.query.filter_by(file_model_id=file_id).first()
            try:
                repo_file = repo.get_contents(file_data_model.file_model.name)
            except UnknownObjectException:
                repo.create_file(
                    path=file_data_model.file_model.name,
                    message=f'Creating {file_data_model.file_model.name}',
                    content=file_data_model.data
                )
                return {'created': True}
            else:
                updated = repo.update_file(
                    path=repo_file.path,
                    message=f'Updating {file_data_model.file_model.name}',
                    content=file_data_model.data,
                    sha=repo_file.sha
                )
                return {'updated': True}
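
Both methods hard-code the 'crispy-fiesta' repository and look files up by FileModel id, so they can be exercised directly from a script or shell. A usage sketch, assuming a valid GITHUB_TOKEN in the environment and an existing file_model row (the variable name is illustrative):

from crc.services.file_service import FileService

# Push the stored document content to the configured GitHub repo...
FileService.publish_to_github([file_model.id])
# ...and later pull any edits made on GitHub back into the latest FileDataModel version.
FileService.update_from_github([file_model.id])

Note that publish_to_github returns from inside the loop, so a multi-file selection currently publishes only the first id, consistent with the WIP label in the commit title.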


@@ -1,9 +1,45 @@
from github import UnknownObjectException
from sqlalchemy import desc
from tests.base_test import BaseTest
from unittest.mock import patch, Mock
from crc import db
from crc.models.file import FileDataModel
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor


class FakeGithubCreates(Mock):
    def get_user(self):
        class FakeUser(Mock):
            def get_repo(self, name):
                class FakeRepo(Mock):
                    def get_contents(self, filename):
                        raise UnknownObjectException(status='Failure', data='Failed data')

                    def update_file(self, path, message, content, sha):
                        pass
                return FakeRepo()
        return FakeUser()


class FakeGithub(Mock):
    def get_user(self):
        class FakeUser(Mock):
            def get_repo(self, name):
                class FakeRepo(Mock):
                    def get_contents(self, filename):
                        fake_file = Mock()
                        fake_file.decoded_content = b'Some bytes'
                        fake_file.path = '/el/path/'
                        fake_file.data = 'Serious data'
                        fake_file.sha = 'Sha'
                        return fake_file

                    def update_file(self, path, message, content, sha):
                        pass
                return FakeRepo()
        return FakeUser()


class TestFileService(BaseTest):
    """Largely tested via the test_file_api, and time is tight, but adding new tests here."""
@@ -103,3 +139,62 @@ class TestFileService(BaseTest):
                                                 binary_data=b'5678')
        file_models = FileService.get_workflow_files(workflow_id=workflow.id)
        self.assertEqual(2, len(file_models))

    @patch('crc.services.file_service.Github')
    def test_update_from_github(self, mock_github):
        mock_github.return_value = FakeGithub()
        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('file_upload_form')
        processor = WorkflowProcessor(workflow)
        task = processor.next_task()
        irb_code = "UVACompl_PRCAppr"  # The first file referenced in pb required docs.
        file_model = FileService.add_workflow_file(workflow_id=workflow.id,
                                                   irb_doc_code=irb_code,
                                                   name="anything.png", content_type="text",
                                                   binary_data=b'1234')
        FileService.update_from_github([file_model.id])

        file_model_data = FileDataModel.query.filter_by(
            file_model_id=file_model.id
        ).order_by(
            desc(FileDataModel.version)
        ).first()
        self.assertEqual(file_model_data.data, b'Some bytes')

    @patch('crc.services.file_service.Github')
    def test_publish_to_github_creates(self, mock_github):
        mock_github.return_value = FakeGithubCreates()
        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('file_upload_form')
        processor = WorkflowProcessor(workflow)
        task = processor.next_task()
        irb_code = "UVACompl_PRCAppr"  # The first file referenced in pb required docs.
        file_model = FileService.add_workflow_file(workflow_id=workflow.id,
                                                   irb_doc_code=irb_code,
                                                   name="anything.png", content_type="text",
                                                   binary_data=b'1234')
        result = FileService.publish_to_github([file_model.id])
        self.assertEqual(result['created'], True)

    @patch('crc.services.file_service.Github')
    def test_publish_to_github_updates(self, mock_github):
        mock_github.return_value = FakeGithub()
        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('file_upload_form')
        processor = WorkflowProcessor(workflow)
        task = processor.next_task()
        irb_code = "UVACompl_PRCAppr"  # The first file referenced in pb required docs.
        file_model = FileService.add_workflow_file(workflow_id=workflow.id,
                                                   irb_doc_code=irb_code,
                                                   name="anything.png", content_type="text",
                                                   binary_data=b'1234')
        result = FileService.publish_to_github([file_model.id])
        self.assertEqual(result['updated'], True)
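
The nested FakeGithub / FakeGithubCreates classes above emulate the Github().get_user().get_repo() call chain by hand, which lets the create path raise a real UnknownObjectException. An equivalent, more compact sketch for the update path using plain MagicMock chaining (illustrative only; attribute names mirror the PyGithub objects the service touches):

from unittest.mock import MagicMock, patch
from crc.services.file_service import FileService

mock_repo = MagicMock()
mock_repo.get_contents.return_value.decoded_content = b'Some bytes'
with patch('crc.services.file_service.Github') as mock_github:
    mock_github.return_value.get_user.return_value.get_repo.return_value = mock_repo
    FileService.update_from_github([file_model.id])  # file_model as created in the tests above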


@@ -30,4 +30,4 @@ class TestLdapService(BaseTest):
            user_info = LdapService.user_info("nosuch")
            self.assertFalse(True, "An API error should be raised.")
        except ApiError as ae:
            self.assertEqual("missing_ldap_record", ae.code)
            self.assertEqual("missing_ldap_record", ae.code)