Mirror of https://github.com/sartography/cr-connect-workflow.git (synced 2025-02-20 11:48:16 +00:00)

Commit 50b207307a: Merge branch 'dev' into feature/update_study_status_latest

Pipfile (5 lines changed)
@@ -38,13 +38,14 @@ recommonmark = "*"
requests = "*"
sentry-sdk = {extras = ["flask"],version = "==0.14.4"}
sphinx = "*"
spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow.git",ref = "master"}
#spiffworkflow = {editable = true,path="/home/kelly/sartography/SpiffWorkflow/"}
swagger-ui-bundle = "*"
spiffworkflow = {editable = true, git = "https://github.com/sartography/SpiffWorkflow.git", ref = "master"}
webtest = "*"
werkzeug = "*"
xlrd = "*"
xlsxwriter = "*"
pygithub = "*"
python-box = "*"

[requires]
python_version = "3.7"
Pipfile.lock (generated, 128 lines changed)
@ -1,7 +1,7 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "2057a84011229daa6b8a9491d729a0bae5225e6ce11c7ca45136d3c1fad85ec0"
|
||||
"sha256": "096abf7ce152358489282a004ed634ca64730cb98276f3a513ed2d5b8a6635c6"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
@ -32,11 +32,11 @@
|
||||
},
|
||||
"amqp": {
|
||||
"hashes": [
|
||||
"sha256:24dbaff8ce4f30566bb88976b398e8c4e77637171af3af6f1b9650f48890e60b",
|
||||
"sha256:bb68f8d2bced8f93ccfd07d96c689b716b3227720add971be980accfc2952139"
|
||||
"sha256:70cdb10628468ff14e57ec2f751c7aa9e48e7e3651cfd62d431213c0c4e58f21",
|
||||
"sha256:aa7f313fb887c91f15474c1229907a04dac0b8135822d6603437803424c0aa59"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
|
||||
"version": "==2.6.0"
|
||||
"version": "==2.6.1"
|
||||
},
|
||||
"aniso8601": {
|
||||
"hashes": [
|
||||
@ -242,6 +242,14 @@
|
||||
"index": "pypi",
|
||||
"version": "==5.2.1"
|
||||
},
|
||||
"deprecated": {
|
||||
"hashes": [
|
||||
"sha256:525ba66fb5f90b07169fdd48b6373c18f1ee12728ca277ca44567a367d9d7f74",
|
||||
"sha256:a766c1dccb30c5f6eb2b203f87edd1d8588847709c78589e1521d769addc8218"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==1.2.10"
|
||||
},
|
||||
"docutils": {
|
||||
"hashes": [
|
||||
"sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
|
||||
@ -371,14 +379,6 @@
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==1.2.0"
|
||||
},
|
||||
"importlib-metadata": {
|
||||
"hashes": [
|
||||
"sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83",
|
||||
"sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"
|
||||
],
|
||||
"markers": "python_version < '3.8'",
|
||||
"version": "==1.7.0"
|
||||
},
|
||||
"inflection": {
|
||||
"hashes": [
|
||||
"sha256:88b101b2668a1d81d6d72d4c2018e53bc6c7fc544c987849da1c7f77545c3bc9",
|
||||
@ -611,25 +611,25 @@
|
||||
},
|
||||
"pandas": {
|
||||
"hashes": [
|
||||
"sha256:02f1e8f71cd994ed7fcb9a35b6ddddeb4314822a0e09a9c5b2d278f8cb5d4096",
|
||||
"sha256:13f75fb18486759da3ff40f5345d9dd20e7d78f2a39c5884d013456cec9876f0",
|
||||
"sha256:35b670b0abcfed7cad76f2834041dcf7ae47fd9b22b63622d67cdc933d79f453",
|
||||
"sha256:4c73f373b0800eb3062ffd13d4a7a2a6d522792fa6eb204d67a4fad0a40f03dc",
|
||||
"sha256:5759edf0b686b6f25a5d4a447ea588983a33afc8a0081a0954184a4a87fd0dd7",
|
||||
"sha256:5a7cf6044467c1356b2b49ef69e50bf4d231e773c3ca0558807cdba56b76820b",
|
||||
"sha256:69c5d920a0b2a9838e677f78f4dde506b95ea8e4d30da25859db6469ded84fa8",
|
||||
"sha256:8778a5cc5a8437a561e3276b85367412e10ae9fff07db1eed986e427d9a674f8",
|
||||
"sha256:9871ef5ee17f388f1cb35f76dc6106d40cb8165c562d573470672f4cdefa59ef",
|
||||
"sha256:9c31d52f1a7dd2bb4681d9f62646c7aa554f19e8e9addc17e8b1b20011d7522d",
|
||||
"sha256:ab8173a8efe5418bbe50e43f321994ac6673afc5c7c4839014cf6401bbdd0705",
|
||||
"sha256:ae961f1f0e270f1e4e2273f6a539b2ea33248e0e3a11ffb479d757918a5e03a9",
|
||||
"sha256:b3c4f93fcb6e97d993bf87cdd917883b7dab7d20c627699f360a8fb49e9e0b91",
|
||||
"sha256:c9410ce8a3dee77653bc0684cfa1535a7f9c291663bd7ad79e39f5ab58f67ab3",
|
||||
"sha256:f69e0f7b7c09f1f612b1f8f59e2df72faa8a6b41c5a436dde5b615aaf948f107",
|
||||
"sha256:faa42a78d1350b02a7d2f0dbe3c80791cf785663d6997891549d0f86dc49125e"
|
||||
"sha256:0210f8fe19c2667a3817adb6de2c4fd92b1b78e1975ca60c0efa908e0985cbdb",
|
||||
"sha256:0227e3a6e3a22c0e283a5041f1e3064d78fbde811217668bb966ed05386d8a7e",
|
||||
"sha256:0bc440493cf9dc5b36d5d46bbd5508f6547ba68b02a28234cd8e81fdce42744d",
|
||||
"sha256:16504f915f1ae424052f1e9b7cd2d01786f098fbb00fa4e0f69d42b22952d798",
|
||||
"sha256:182a5aeae319df391c3df4740bb17d5300dcd78034b17732c12e62e6dd79e4a4",
|
||||
"sha256:35db623487f00d9392d8af44a24516d6cb9f274afaf73cfcfe180b9c54e007d2",
|
||||
"sha256:40ec0a7f611a3d00d3c666c4cceb9aa3f5bf9fbd81392948a93663064f527203",
|
||||
"sha256:47a03bfef80d6812c91ed6fae43f04f2fa80a4e1b82b35aa4d9002e39529e0b8",
|
||||
"sha256:4b21d46728f8a6be537716035b445e7ef3a75dbd30bd31aa1b251323219d853e",
|
||||
"sha256:4d1a806252001c5db7caecbe1a26e49a6c23421d85a700960f6ba093112f54a1",
|
||||
"sha256:60e20a4ab4d4fec253557d0fc9a4e4095c37b664f78c72af24860c8adcd07088",
|
||||
"sha256:9f61cca5262840ff46ef857d4f5f65679b82188709d0e5e086a9123791f721c8",
|
||||
"sha256:a15835c8409d5edc50b4af93be3377b5dd3eb53517e7f785060df1f06f6da0e2",
|
||||
"sha256:b39508562ad0bb3f384b0db24da7d68a2608b9ddc85b1d931ccaaa92d5e45273",
|
||||
"sha256:ed60848caadeacecefd0b1de81b91beff23960032cded0ac1449242b506a3b3f",
|
||||
"sha256:fc714895b6de6803ac9f661abb316853d0cd657f5d23985222255ad76ccedc25"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.0.5"
|
||||
"version": "==1.1.0"
|
||||
},
|
||||
"psycopg2-binary": {
|
||||
"hashes": [
|
||||
@ -693,6 +693,14 @@
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==2.20"
|
||||
},
|
||||
"pygithub": {
|
||||
"hashes": [
|
||||
"sha256:8375a058ec651cc0774244a3bc7395cf93617298735934cdd59e5bcd9a1df96e",
|
||||
"sha256:d2d17d1e3f4474e070353f201164685a95b5a92f5ee0897442504e399c7bc249"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.51"
|
||||
},
|
||||
"pygments": {
|
||||
"hashes": [
|
||||
"sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",
|
||||
@ -728,7 +736,7 @@
|
||||
"sha256:bcb057e8960f4d888a4caf8f668eeca3c5c61ad349d8d81c4339414984fa9454",
|
||||
"sha256:f02e059a299cac0515687aafec7543d401b12759d6578e53fae74154e0cbaa79"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"index": "pypi",
|
||||
"version": "==5.1.0"
|
||||
},
|
||||
"python-dateutil": {
|
||||
@ -940,8 +948,9 @@
|
||||
"version": "==1.1.4"
|
||||
},
|
||||
"spiffworkflow": {
|
||||
"editable": true,
|
||||
"git": "https://github.com/sartography/SpiffWorkflow.git",
|
||||
"ref": "11ad40bbcb0fbd3c5bc1078e4989dc38b749f7f3"
|
||||
"ref": "7c8d59e7b9a978795bc8d1f354002fdc89540672"
|
||||
},
|
||||
"sqlalchemy": {
|
||||
"hashes": [
|
||||
@ -1033,12 +1042,18 @@
|
||||
"index": "pypi",
|
||||
"version": "==1.0.1"
|
||||
},
|
||||
"wrapt": {
|
||||
"hashes": [
|
||||
"sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"
|
||||
],
|
||||
"version": "==1.12.1"
|
||||
},
|
||||
"wtforms": {
|
||||
"hashes": [
|
||||
"sha256:6ff8635f4caeed9f38641d48cfe019d0d3896f41910ab04494143fc027866e1b",
|
||||
"sha256:861a13b3ae521d6700dac3b2771970bd354a63ba7043ecc3a82b5288596a1972"
|
||||
"sha256:7b504fc724d0d1d4d5d5c114e778ec88c37ea53144683e084215eed5155ada4c",
|
||||
"sha256:81195de0ac94fbc8368abbaf9197b88c4f3ffd6c2719b5bf5fc9da744f3d829c"
|
||||
],
|
||||
"version": "==2.3.1"
|
||||
"version": "==2.3.3"
|
||||
},
|
||||
"xlrd": {
|
||||
"hashes": [
|
||||
@ -1050,19 +1065,11 @@
|
||||
},
|
||||
"xlsxwriter": {
|
||||
"hashes": [
|
||||
"sha256:828b3285fc95105f5b1946a6a015b31cf388bd5378fdc6604e4d1b7839df2e77",
|
||||
"sha256:82a3b0e73e3913483da23791d1a25e4d2dbb3837d1be4129473526b9a270a5cc"
|
||||
"sha256:3015f707cf237d277cf1b2d7805f409f0387e32bc52f3c76db9f85098980e828",
|
||||
"sha256:ee3fc2f32890246aba44dd14d777d6b3135e3454f865d8cc669618e20152296b"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.2.9"
|
||||
},
|
||||
"zipp": {
|
||||
"hashes": [
|
||||
"sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
|
||||
"sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==3.1.0"
|
||||
"version": "==1.3.0"
|
||||
}
|
||||
},
|
||||
"develop": {
|
||||
@ -1114,13 +1121,12 @@
|
||||
"index": "pypi",
|
||||
"version": "==5.2.1"
|
||||
},
|
||||
"importlib-metadata": {
|
||||
"iniconfig": {
|
||||
"hashes": [
|
||||
"sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83",
|
||||
"sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"
|
||||
"sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437",
|
||||
"sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69"
|
||||
],
|
||||
"markers": "python_version < '3.8'",
|
||||
"version": "==1.7.0"
|
||||
"version": "==1.0.1"
|
||||
},
|
||||
"more-itertools": {
|
||||
"hashes": [
|
||||
@ -1172,11 +1178,11 @@
|
||||
},
|
||||
"pytest": {
|
||||
"hashes": [
|
||||
"sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1",
|
||||
"sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"
|
||||
"sha256:85228d75db9f45e06e57ef9bf4429267f81ac7c0d742cc9ed63d09886a9fe6f4",
|
||||
"sha256:8b6007800c53fdacd5a5c192203f4e531eb2a1540ad9c752e052ec0f7143dbad"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==5.4.3"
|
||||
"version": "==6.0.1"
|
||||
},
|
||||
"six": {
|
||||
"hashes": [
|
||||
@ -1186,20 +1192,12 @@
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==1.15.0"
|
||||
},
|
||||
"wcwidth": {
|
||||
"toml": {
|
||||
"hashes": [
|
||||
"sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784",
|
||||
"sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"
|
||||
"sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f",
|
||||
"sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"
|
||||
],
|
||||
"version": "==0.2.5"
|
||||
},
|
||||
"zipp": {
|
||||
"hashes": [
|
||||
"sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
|
||||
"sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==3.1.0"
|
||||
"version": "==0.10.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -18,6 +18,9 @@ Make sure all of the following are properly installed on your system:
- [Install pipenv](https://pipenv-es.readthedocs.io/es/stable/)
- [Add ${HOME}/.local/bin to your PATH](https://github.com/pypa/pipenv/issues/2122#issue-319600584)

### Running Postgres


### Project Initialization
1. Clone this repository.
2. In PyCharm:
@@ -46,6 +46,9 @@ PB_STUDY_DETAILS_URL = environ.get('PB_STUDY_DETAILS_URL', default=PB_BASE_URL +
LDAP_URL = environ.get('LDAP_URL', default="ldap.virginia.edu").strip('/')  # No trailing slash or http://
LDAP_TIMEOUT_SEC = int(environ.get('LDAP_TIMEOUT_SEC', default=1))

# Github token
GITHUB_TOKEN = environ.get('GITHUB_TOKEN', None)

# Email configuration
DEFAULT_SENDER = 'askresearch@virginia.edu'
FALLBACK_EMAILS = ['askresearch@virginia.edu', 'sartographysupport@googlegroups.com']
crc/api.yml (26 lines changed)
@@ -31,6 +31,13 @@ paths:
        '304':
          description: Redirection to the hosted frontend with an auth_token header.
  /user:
    parameters:
      - name: admin_impersonate_uid
        in: query
        required: false
        description: For admins, the unique uid of an existing user to impersonate.
        schema:
          type: string
    get:
      operationId: crc.api.user.get_current_user
      summary: Returns the current user.
@@ -38,11 +45,27 @@
        - Users
      responses:
        '200':
          description: The currently authenticated user.
          description: The currently-authenticated user, or, if the current user is an admin and admin_impersonate_uid is provided, this will be the user with the given uid.
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/User"
  /list_users:
    get:
      operationId: crc.api.user.get_all_users
      security:
        - auth_admin: ['secret']
      summary: Returns a list of all users in the database.
      tags:
        - Users
      responses:
        '200':
          description: All users in the database.
          content:
            application/json:
              schema:
                type: array
                $ref: "#/components/schemas/User"
  # /v1.0/study
  /study:
    get:
@@ -56,6 +79,7 @@ paths:
          content:
            application/json:
              schema:
                type: array
                $ref: "#/components/schemas/Study"
    post:
      operationId: crc.api.study.add_study
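The admin impersonation parameter and the new /list_users endpoint are easiest to see from a client call. A minimal sketch, assuming the API is mounted under /v1.0 and that the auth token is sent as a Bearer Authorization header (both assumptions; the exact base path and header scheme are not shown in this diff):

import requests  # hypothetical client-side sketch, not part of the commit

BASE = "https://crconnect.example.org/api/v1.0"      # assumed base URL
HEADERS = {"Authorization": "Bearer <auth_token>"}   # assumed header scheme

# As an admin, fetch the current user while impersonating the (made-up) uid 'abc1de'
resp = requests.get(f"{BASE}/user", params={"admin_impersonate_uid": "abc1de"}, headers=HEADERS)
print(resp.json())

# List every user in the database (admin-only endpoint added in this commit)
print(requests.get(f"{BASE}/list_users", headers=HEADERS).json())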
@ -3,19 +3,22 @@ import json
|
||||
|
||||
from flask import url_for
|
||||
from flask_admin import Admin
|
||||
from flask_admin.actions import action
|
||||
from flask_admin.contrib import sqla
|
||||
from flask_admin.contrib.sqla import ModelView
|
||||
from sqlalchemy import desc
|
||||
from werkzeug.utils import redirect
|
||||
from jinja2 import Markup
|
||||
|
||||
from crc import db, app
|
||||
from crc.api.user import verify_token, verify_token_admin
|
||||
from crc.models.approval import ApprovalModel
|
||||
from crc.models.file import FileModel
|
||||
from crc.models.file import FileModel, FileDataModel
|
||||
from crc.models.task_event import TaskEventModel
|
||||
from crc.models.study import StudyModel
|
||||
from crc.models.user import UserModel
|
||||
from crc.models.workflow import WorkflowModel
|
||||
from crc.services.file_service import FileService
|
||||
|
||||
|
||||
class AdminModelView(sqla.ModelView):
|
||||
@ -34,26 +37,40 @@ class AdminModelView(sqla.ModelView):
|
||||
# redirect to login page if user doesn't have access
|
||||
return redirect(url_for('home'))
|
||||
|
||||
|
||||
class UserView(AdminModelView):
|
||||
column_filters = ['uid']
|
||||
|
||||
|
||||
class StudyView(AdminModelView):
|
||||
column_filters = ['id', 'primary_investigator_id']
|
||||
column_searchable_list = ['title']
|
||||
|
||||
|
||||
class ApprovalView(AdminModelView):
|
||||
column_filters = ['study_id', 'approver_uid']
|
||||
|
||||
|
||||
class WorkflowView(AdminModelView):
|
||||
column_filters = ['study_id', 'id']
|
||||
|
||||
|
||||
class FileView(AdminModelView):
|
||||
column_filters = ['workflow_id']
|
||||
column_filters = ['workflow_id', 'type']
|
||||
|
||||
@action('publish', 'Publish', 'Are you sure you want to publish this file(s)?')
|
||||
def action_publish(self, ids):
|
||||
FileService.publish_to_github(ids)
|
||||
|
||||
@action('update', 'Update', 'Are you sure you want to update this file(s)?')
|
||||
def action_update(self, ids):
|
||||
FileService.update_from_github(ids)
|
||||
|
||||
|
||||
def json_formatter(view, context, model, name):
|
||||
value = getattr(model, name)
|
||||
json_value = json.dumps(value, ensure_ascii=False, indent=2)
|
||||
return Markup('<pre>{}</pre>'.format(json_value))
|
||||
return Markup(f'<pre>{json_value}</pre>')
|
||||
|
||||
class TaskEventView(AdminModelView):
|
||||
column_filters = ['workflow_id', 'action']
|
||||
@ -62,6 +79,7 @@ class TaskEventView(AdminModelView):
|
||||
'form_data': json_formatter,
|
||||
}
|
||||
|
||||
|
||||
admin = Admin(app)
|
||||
|
||||
admin.add_view(StudyView(StudyModel, db.session))
|
||||
|
@@ -1,5 +1,6 @@
from SpiffWorkflow import WorkflowException
from SpiffWorkflow.exceptions import WorkflowTaskExecException
from flask import g

from crc import ma, app

@@ -65,3 +66,5 @@ class ApiErrorSchema(ma.Schema):
def handle_invalid_usage(error):
    response = ApiErrorSchema().dump(error)
    return response, error.status_code

@@ -8,6 +8,7 @@ from crc.api.common import ApiError, ApiErrorSchema
from crc.models.protocol_builder import ProtocolBuilderStatus
from crc.models.study import Study, StudyModel, StudySchema, StudyStatus
from crc.services.study_service import StudyService
from crc.services.user_service import UserService


def add_study(body):
@@ -17,7 +18,7 @@ def add_study(body):
    if 'title' not in body:
        raise ApiError("missing_title", "Can't create a new study without a title.")

    study_model = StudyModel(user_uid=g.user.uid,
    study_model = StudyModel(user_uid=UserService.current_user().uid,
                             title=body['title'],
                             primary_investigator_id=body['primary_investigator_id'],
                             last_updated=datetime.now(),
@@ -65,8 +66,9 @@ def delete_study(study_id):

def user_studies():
    """Returns all the studies associated with the current user. """
    StudyService.synch_with_protocol_builder_if_enabled(g.user)
    studies = StudyService.get_studies_for_user(g.user)
    user = UserService.current_user(allow_admin_impersonate=True)
    StudyService.synch_with_protocol_builder_if_enabled(user)
    studies = StudyService.get_studies_for_user(user)
    results = StudySchema(many=True).dump(studies)
    return results
@ -1,10 +1,11 @@
|
||||
import flask
|
||||
from flask import g, request
|
||||
|
||||
from crc import app, db
|
||||
from crc import app, session
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.user import UserModel, UserModelSchema
|
||||
from crc.services.ldap_service import LdapService, LdapModel
|
||||
from crc.services.user_service import UserService
|
||||
|
||||
"""
|
||||
.. module:: crc.api.user
|
||||
@ -35,6 +36,10 @@ def verify_token(token=None):
|
||||
try:
|
||||
token_info = UserModel.decode_auth_token(token)
|
||||
g.user = UserModel.query.filter_by(uid=token_info['sub']).first()
|
||||
|
||||
# If the user is valid, store the token for this session
|
||||
if g.user:
|
||||
g.token = token
|
||||
except:
|
||||
raise failure_error
|
||||
if g.user is not None:
|
||||
@ -49,27 +54,31 @@ def verify_token(token=None):
|
||||
if uid is not None:
|
||||
db_user = UserModel.query.filter_by(uid=uid).first()
|
||||
|
||||
# If the user is valid, store the user and token for this session
|
||||
if db_user is not None:
|
||||
g.user = db_user
|
||||
token = g.user.encode_auth_token().decode()
|
||||
g.token = token
|
||||
token_info = UserModel.decode_auth_token(token)
|
||||
return token_info
|
||||
|
||||
else:
|
||||
raise ApiError("no_user", "User not found. Please login via the frontend app before accessing this feature.",
|
||||
status_code=403)
|
||||
raise ApiError("no_user",
|
||||
"User not found. Please login via the frontend app before accessing this feature.",
|
||||
status_code=403)
|
||||
|
||||
else:
|
||||
# Fall back to a default user if this is not production.
|
||||
g.user = UserModel.query.first()
|
||||
token = g.user.encode_auth_token()
|
||||
|
||||
token_info = UserModel.decode_auth_token(token)
|
||||
return token_info
|
||||
|
||||
|
||||
def verify_token_admin(token=None):
|
||||
"""
|
||||
Verifies the token for the user (if provided) in non-production environment. If in production environment,
|
||||
checks that the user is in the list of authorized admins
|
||||
Verifies the token for the user (if provided) in non-production environment.
|
||||
If in production environment, checks that the user is in the list of authorized admins
|
||||
|
||||
Args:
|
||||
token: Optional[str]
|
||||
@ -77,21 +86,44 @@ def verify_token_admin(token=None):
|
||||
Returns:
|
||||
token: str
|
||||
"""
|
||||
|
||||
# If this is production, check that the user is in the list of admins
|
||||
if _is_production():
|
||||
uid = _get_request_uid(request)
|
||||
|
||||
if uid is not None and uid in app.config['ADMIN_UIDS']:
|
||||
return verify_token()
|
||||
|
||||
# If we're not in production, just use the normal verify_token method
|
||||
else:
|
||||
return verify_token(token)
|
||||
verify_token(token)
|
||||
if "user" in g and g.user.is_admin():
|
||||
token = g.user.encode_auth_token()
|
||||
token_info = UserModel.decode_auth_token(token)
|
||||
return token_info
|
||||
|
||||
|
||||
def get_current_user():
|
||||
return UserModelSchema().dump(g.user)
|
||||
def start_impersonating(uid):
|
||||
if uid is not None and UserService.user_is_admin():
|
||||
UserService.start_impersonating(uid)
|
||||
|
||||
user = UserService.current_user(allow_admin_impersonate=True)
|
||||
return UserModelSchema().dump(user)
|
||||
|
||||
|
||||
def stop_impersonating():
|
||||
if UserService.user_is_admin():
|
||||
UserService.stop_impersonating()
|
||||
|
||||
user = UserService.current_user(allow_admin_impersonate=False)
|
||||
return UserModelSchema().dump(user)
|
||||
|
||||
|
||||
def get_current_user(admin_impersonate_uid=None):
|
||||
if UserService.user_is_admin():
|
||||
if admin_impersonate_uid is not None:
|
||||
UserService.start_impersonating(admin_impersonate_uid)
|
||||
else:
|
||||
UserService.stop_impersonating()
|
||||
|
||||
user = UserService.current_user(UserService.user_is_admin() and UserService.admin_is_impersonating())
|
||||
return UserModelSchema().dump(user)
|
||||
|
||||
|
||||
def get_all_users():
|
||||
if "user" in g and g.user.is_admin():
|
||||
all_users = session.query(UserModel).all()
|
||||
return UserModelSchema(many=True).dump(all_users)
|
||||
|
||||
|
||||
def login(
|
||||
@ -134,7 +166,6 @@ def login(
|
||||
# X-Forwarded-Server: dev.crconnect.uvadcos.io
|
||||
# Connection: Keep-Alive
|
||||
|
||||
|
||||
# If we're in production, override any uid with the uid from the SSO request headers
|
||||
if _is_production():
|
||||
uid = _get_request_uid(request)
|
||||
@ -182,6 +213,8 @@ def _handle_login(user_info: LdapModel, redirect_url=None):
|
||||
|
||||
# Return the frontend auth callback URL, with auth token appended.
|
||||
auth_token = user.encode_auth_token().decode()
|
||||
g.token = auth_token
|
||||
|
||||
if redirect_url is not None:
|
||||
if redirect_url.find("http://") != 0 and redirect_url.find("https://") != 0:
|
||||
redirect_url = "http://" + redirect_url
|
||||
@ -194,13 +227,13 @@ def _handle_login(user_info: LdapModel, redirect_url=None):
|
||||
|
||||
|
||||
def _upsert_user(user_info):
|
||||
user = db.session.query(UserModel).filter(UserModel.uid == user_info.uid).first()
|
||||
user = session.query(UserModel).filter(UserModel.uid == user_info.uid).first()
|
||||
|
||||
if user is None:
|
||||
# Add new user
|
||||
user = UserModel()
|
||||
else:
|
||||
user = db.session.query(UserModel).filter(UserModel.uid == user_info.uid).with_for_update().first()
|
||||
user = session.query(UserModel).filter(UserModel.uid == user_info.uid).with_for_update().first()
|
||||
|
||||
user.uid = user_info.uid
|
||||
user.display_name = user_info.display_name
|
||||
@ -208,8 +241,8 @@ def _upsert_user(user_info):
|
||||
user.affiliation = user_info.affiliation
|
||||
user.title = user_info.title
|
||||
|
||||
db.session.add(user)
|
||||
db.session.commit()
|
||||
session.add(user)
|
||||
session.commit()
|
||||
return user
|
||||
|
||||
|
||||
|
@ -13,6 +13,7 @@ from crc.models.workflow import WorkflowModel, WorkflowSpecModelSchema, Workflow
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.lookup_service import LookupService
|
||||
from crc.services.study_service import StudyService
|
||||
from crc.services.user_service import UserService
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
from crc.services.workflow_service import WorkflowService
|
||||
|
||||
@ -141,7 +142,7 @@ def set_current_task(workflow_id, task_id):
|
||||
task_id = uuid.UUID(task_id)
|
||||
spiff_task = processor.bpmn_workflow.get_task(task_id)
|
||||
_verify_user_and_role(processor, spiff_task)
|
||||
user_uid = g.user.uid
|
||||
user_uid = UserService.current_user(allow_admin_impersonate=True).uid
|
||||
if spiff_task.state != spiff_task.COMPLETED and spiff_task.state != spiff_task.READY:
|
||||
raise ApiError("invalid_state", "You may not move the token to a task whose state is not "
"currently set to COMPLETE or READY.")
|
||||
@ -184,7 +185,8 @@ def update_task(workflow_id, task_id, body, terminate_loop=None):
|
||||
processor.save()
|
||||
|
||||
# Log the action, and any pending task assignments in the event of lanes in the workflow.
|
||||
WorkflowService.log_task_action(g.user.uid, processor, spiff_task, WorkflowService.TASK_ACTION_COMPLETE)
|
||||
user = UserService.current_user(allow_admin_impersonate=False) # Always log as the real user.
|
||||
WorkflowService.log_task_action(user.uid, processor, spiff_task, WorkflowService.TASK_ACTION_COMPLETE)
|
||||
WorkflowService.update_task_assignments(processor)
|
||||
|
||||
workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
|
||||
@ -244,19 +246,11 @@ def lookup(workflow_id, field_id, query=None, value=None, limit=10):
|
||||
|
||||
def _verify_user_and_role(processor, spiff_task):
|
||||
"""Assures the currently logged in user can access the given workflow and task, or
|
||||
raises an error.
|
||||
Allow administrators to modify tasks, otherwise assure that the current user
|
||||
is allowed to edit or update the task. Will raise the appropriate error if user
|
||||
is not authorized. """
|
||||
|
||||
if 'user' not in g:
|
||||
raise ApiError("logged_out", "You are no longer logged in.", status_code=401)
|
||||
|
||||
if g.user.uid in app.config['ADMIN_UIDS']:
|
||||
return g.user.uid
|
||||
raises an error. """
|
||||
|
||||
user = UserService.current_user(allow_admin_impersonate=True)
|
||||
allowed_users = WorkflowService.get_users_assigned_to_task(processor, spiff_task)
|
||||
if g.user.uid not in allowed_users:
|
||||
if user.uid not in allowed_users:
|
||||
raise ApiError.from_task("permission_denied",
|
||||
f"This task must be completed by '{allowed_users}', "
|
||||
f"but you are {g.user.uid}", spiff_task)
|
||||
f"but you are {user.uid}", spiff_task)
|
||||
|
@ -1,6 +1,7 @@
|
||||
import datetime
|
||||
|
||||
import jwt
|
||||
from marshmallow import fields
|
||||
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
|
||||
|
||||
from crc import db, app
|
||||
@ -21,6 +22,10 @@ class UserModel(db.Model):
|
||||
|
||||
# TODO: Add Department and School
|
||||
|
||||
def is_admin(self):
|
||||
# Currently admin abilities are set in the configuration, but this
|
||||
# may change in the future.
|
||||
return self.uid in app.config['ADMIN_UIDS']
|
||||
|
||||
def encode_auth_token(self):
|
||||
"""
|
||||
@ -60,4 +65,14 @@ class UserModelSchema(SQLAlchemyAutoSchema):
|
||||
model = UserModel
|
||||
load_instance = True
|
||||
include_relationships = True
|
||||
is_admin = fields.Method('get_is_admin', dump_only=True)
|
||||
|
||||
def get_is_admin(self, obj):
|
||||
return obj.is_admin()
|
||||
|
||||
|
||||
class AdminSessionModel(db.Model):
|
||||
__tablename__ = 'admin_session'
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
token = db.Column(db.String, unique=True)
|
||||
admin_impersonate_uid = db.Column(db.String)
|
||||
|
@@ -40,7 +40,7 @@ class FactService(Script):
        else:
            details = "unknown fact type."

        self.add_data_to_task(task, details)
        #self.add_data_to_task(task, details)

        print(details)
        return details
crc/scripts/ldap.py (new file, 50 lines)
@@ -0,0 +1,50 @@
import copy

from crc import app
from crc.api.common import ApiError
from crc.scripts.script import Script
from crc.services.ldap_service import LdapService


class Ldap(Script):
    """This script can be introduced as part of a workflow and called from there,
    taking a UID as input and looking it up through LDAP to return the person's details. """

    def get_description(self):
        return """
Attempts to create a dictionary with person details, using the
provided argument (a UID) and looking it up through LDAP.

Examples:
supervisor_info = ldap(supervisor_uid) // Sets the supervisor information to ldap details for the given uid.
"""

    def do_task_validate_only(self, task, *args, **kwargs):
        return self.set_users_info_in_task(task, args)

    def do_task(self, task, study_id, workflow_id, *args, **kwargs):
        return self.set_users_info_in_task(task, args)

    def set_users_info_in_task(self, task, args):
        if len(args) != 1:
            raise ApiError(code="missing_argument",
                           message="Ldap takes a single argument, the "
                                   "UID for the person we want to look up")
        uid = args[0]
        user_info_dict = {}

        user_info = LdapService.user_info(uid)
        user_info_dict = {
            "display_name": user_info.display_name,
            "given_name": user_info.given_name,
            "email_address": user_info.email_address,
            "telephone_number": user_info.telephone_number,
            "title": user_info.title,
            "department": user_info.department,
            "affiliation": user_info.affiliation,
            "sponsor_type": user_info.sponsor_type,
            "uid": user_info.uid,
            "proper_name": user_info.proper_name()
        }

        return user_info_dict
@@ -11,7 +11,7 @@ class RequestApproval(Script):
        return """
Creates an approval request on this workflow, by the given approver_uid(s),"
Takes multiple arguments, which should point to data located in current task
or be quoted strings. The order is important. Approvals will be processed
or be quoted strings. The order is important. Approvals will be processed
in this order.

Example:
@ -23,6 +23,53 @@ class Script(object):
|
||||
"This is an internal error. The script you are trying to execute '%s' " % self.__class__.__name__ +
|
||||
"does must provide a validate_only option that mimics the do_task, " +
|
||||
"but does not make external calls or database updates." )
|
||||
@staticmethod
|
||||
def generate_augmented_list(task, study_id,workflow_id):
|
||||
"""
|
||||
this makes a dictionary of lambda functions that are closed over the class instance that
|
||||
They represent. This is passed into PythonScriptParser as a list of helper functions that are
|
||||
available for running. In general, they maintain the do_task call structure that they had, but
|
||||
they always return a value rather than updating the task data.
|
||||
|
||||
We may be able to remove the task for each of these calls if we are not using it other than potentially
|
||||
updating the task data.
|
||||
"""
|
||||
def make_closure(subclass,task,study_id,workflow_id):
|
||||
instance = subclass()
|
||||
return lambda *a : subclass.do_task(instance,task,study_id,workflow_id,*a)
|
||||
execlist = {}
|
||||
subclasses = Script.get_all_subclasses()
|
||||
for x in range(len(subclasses)):
|
||||
subclass = subclasses[x]
|
||||
execlist[subclass.__module__.split('.')[-1]] = make_closure(subclass,task,study_id,
|
||||
workflow_id)
|
||||
return execlist
|
||||
|
||||
@staticmethod
|
||||
def generate_augmented_validate_list(task, study_id, workflow_id):
|
||||
"""
|
||||
this makes a dictionary of lambda functions that are closed over the class instance that
|
||||
They represent. This is passed into PythonScriptParser as a list of helper functions that are
|
||||
available for running. In general, they maintain the do_task call structure that they had, but
|
||||
they always return a value rather than updating the task data.
|
||||
|
||||
We may be able to remove the task for each of these calls if we are not using it other than potentially
|
||||
updating the task data.
|
||||
"""
|
||||
|
||||
def make_closure_validate(subclass,task,study_id,workflow_id):
|
||||
instance = subclass()
|
||||
return lambda *a : subclass.do_task_validate_only(instance,task,study_id,workflow_id,*a)
|
||||
execlist = {}
|
||||
subclasses = Script.get_all_subclasses()
|
||||
for x in range(len(subclasses)):
|
||||
subclass = subclasses[x]
|
||||
execlist[subclass.__module__.split('.')[-1]] = make_closure_validate(subclass,task,study_id,
|
||||
workflow_id)
|
||||
return execlist
|
||||
|
||||
|
||||
|
||||
|
||||
@staticmethod
|
||||
def get_all_subclasses():
|
||||
|
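To make the generate_augmented_list mechanism above concrete, here is a minimal, self-contained sketch (hypothetical ExampleScript class and values, not code from the commit) showing how each generated entry forwards to a Script subclass's do_task with the captured task, study and workflow ids:

# Hypothetical illustration of the closure pattern used by generate_augmented_list.
class ExampleScript:                       # stands in for a crc.scripts.script.Script subclass
    def do_task(self, task, study_id, workflow_id, *args):
        return {"task": task, "study": study_id, "workflow": workflow_id, "args": args}

def make_closure(subclass, task, study_id, workflow_id):
    instance = subclass()
    return lambda *a: subclass.do_task(instance, task, study_id, workflow_id, *a)

execlist = {"example_script": make_closure(ExampleScript, task="t1", study_id=12, workflow_id=99)}

# Inside a workflow script the entry is then callable like a plain function:
print(execlist["example_script"]("details"))   # {'task': 't1', 'study': 12, 'workflow': 99, 'args': ('details',)}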
@ -8,7 +8,7 @@ from crc.scripts.script import Script
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.protocol_builder import ProtocolBuilderService
|
||||
from crc.services.study_service import StudyService
|
||||
|
||||
from box import Box
|
||||
|
||||
class StudyInfo(Script):
|
||||
"""Please see the detailed description that is provided below. """
|
||||
@ -149,11 +149,11 @@ Returns information specific to the protocol.
|
||||
|
||||
def do_task_validate_only(self, task, study_id, workflow_id, *args, **kwargs):
|
||||
"""For validation only, pretend no results come back from pb"""
|
||||
self.check_args(args)
|
||||
self.check_args(args,2)
|
||||
# Assure the reference file exists (a bit hacky, but we want to raise this error early, and cleanly.)
|
||||
FileService.get_reference_file_data(FileService.DOCUMENT_LIST)
|
||||
FileService.get_reference_file_data(FileService.INVESTIGATOR_LIST)
|
||||
data = {
|
||||
data = Box({
|
||||
"study":{
|
||||
"info": {
|
||||
"id": 12,
|
||||
@ -195,38 +195,50 @@ Returns information specific to the protocol.
|
||||
'id': 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
self.add_data_to_task(task=task, data=data["study"])
|
||||
self.add_data_to_task(task, {"documents": StudyService().get_documents_status(study_id)})
|
||||
})
|
||||
if args[0]=='documents':
|
||||
return StudyService().get_documents_status(study_id)
|
||||
return data['study'][args[0]]
|
||||
#self.add_data_to_task(task=task, data=data["study"])
|
||||
#self.add_data_to_task(task, {"documents": StudyService().get_documents_status(study_id)})
|
||||
|
||||
def do_task(self, task, study_id, workflow_id, *args, **kwargs):
|
||||
self.check_args(args)
|
||||
|
||||
self.check_args(args,2)
|
||||
prefix = None
|
||||
if len(args) > 1:
|
||||
prefix = args[1]
|
||||
cmd = args[0]
|
||||
study_info = {}
|
||||
if self.__class__.__name__ in task.data:
|
||||
study_info = task.data[self.__class__.__name__]
|
||||
|
||||
# study_info = {}
|
||||
# if self.__class__.__name__ in task.data:
|
||||
# study_info = task.data[self.__class__.__name__]
|
||||
retval = None
|
||||
if cmd == 'info':
|
||||
study = session.query(StudyModel).filter_by(id=study_id).first()
|
||||
schema = StudySchema()
|
||||
self.add_data_to_task(task, {cmd: schema.dump(study)})
|
||||
retval = schema.dump(study)
|
||||
if cmd == 'investigators':
|
||||
self.add_data_to_task(task, {cmd: StudyService().get_investigators(study_id)})
|
||||
retval = StudyService().get_investigators(study_id)
|
||||
if cmd == 'roles':
|
||||
self.add_data_to_task(task, {cmd: StudyService().get_investigators(study_id, all=True)})
|
||||
retval = StudyService().get_investigators(study_id, all=True)
|
||||
if cmd == 'details':
|
||||
self.add_data_to_task(task, {cmd: self.pb.get_study_details(study_id)})
|
||||
retval = self.pb.get_study_details(study_id)
|
||||
if cmd == 'approvals':
|
||||
self.add_data_to_task(task, {cmd: StudyService().get_approvals(study_id)})
|
||||
retval = StudyService().get_approvals(study_id)
|
||||
if cmd == 'documents':
|
||||
self.add_data_to_task(task, {cmd: StudyService().get_documents_status(study_id)})
|
||||
retval = StudyService().get_documents_status(study_id)
|
||||
if cmd == 'protocol':
|
||||
self.add_data_to_task(task, {cmd: StudyService().get_protocol(study_id)})
|
||||
retval = StudyService().get_protocol(study_id)
|
||||
if isinstance(retval,dict) and prefix is not None:
|
||||
return Box({x:retval[x] for x in retval.keys() if x[:len(prefix)] == prefix})
|
||||
elif isinstance(retval,dict):
|
||||
return Box(retval)
|
||||
else:
|
||||
return retval
|
||||
|
||||
|
||||
def check_args(self, args):
|
||||
if len(args) != 1 or (args[0] not in StudyInfo.type_options):
|
||||
|
||||
def check_args(self, args, maxlen=1):
|
||||
if len(args) < 1 or len(args) > maxlen or (args[0] not in StudyInfo.type_options):
|
||||
raise ApiError(code="missing_argument",
|
||||
message="The StudyInfo script requires a single argument which must be "
|
||||
"one of %s" % ",".join(StudyInfo.type_options))
|
||||
|
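Given the rewritten do_task above, a workflow script now receives the requested section back as a return value instead of having it pushed into task.data. A short sketch of the new call style ('IS_' is a hypothetical example of the optional second argument, which filters the returned keys by prefix):

# Inside a BPMN script task, with the generated helper functions in scope:
StudyInfo = {}
StudyInfo['details'] = study_info('details')        # full details, returned as a Box
StudyInfo['documents'] = study_info('documents')    # document status dictionary
flags = study_info('details', 'IS_')                # hypothetical prefix filter: keeps keys starting with 'IS_'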
@ -2,6 +2,7 @@ import hashlib
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime
|
||||
from github import Github, UnknownObjectException
|
||||
from uuid import UUID
|
||||
from lxml import etree
|
||||
|
||||
@ -333,3 +334,51 @@ class FileService(object):
|
||||
file_model.archived = True
|
||||
session.commit()
|
||||
app.logger.info("Failed to delete file, so archiving it instead. %i, due to %s" % (file_id, str(ie)))
|
||||
|
||||
@staticmethod
|
||||
def update_from_github(file_ids):
|
||||
gh_token = app.config['GITHUB_TOKEN']
|
||||
_github = Github(gh_token)
|
||||
repo = _github.get_user().get_repo('crispy-fiesta')
|
||||
|
||||
for file_id in file_ids:
|
||||
file_data_model = FileDataModel.query.filter_by(
|
||||
file_model_id=file_id
|
||||
).order_by(
|
||||
desc(FileDataModel.version)
|
||||
).first()
|
||||
try:
|
||||
repo_file = repo.get_contents(file_data_model.file_model.name)
|
||||
except UnknownObjectException:
|
||||
# TODO: Add message indicating file is not in the repo
|
||||
pass
|
||||
else:
|
||||
file_data_model.data = repo_file.decoded_content
|
||||
session.add(file_data_model)
|
||||
session.commit()
|
||||
|
||||
@staticmethod
|
||||
def publish_to_github(file_ids):
|
||||
gh_token = app.config['GITHUB_TOKEN']
|
||||
_github = Github(gh_token)
|
||||
repo = _github.get_user().get_repo('crispy-fiesta')
|
||||
|
||||
for file_id in file_ids:
|
||||
file_data_model = FileDataModel.query.filter_by(file_model_id=file_id).first()
|
||||
try:
|
||||
repo_file = repo.get_contents(file_data_model.file_model.name)
|
||||
except UnknownObjectException:
|
||||
repo.create_file(
|
||||
path=file_data_model.file_model.name,
|
||||
message=f'Creating {file_data_model.file_model.name}',
|
||||
content=file_data_model.data
|
||||
)
|
||||
return {'created': True}
|
||||
else:
|
||||
updated = repo.update_file(
|
||||
path=repo_file.path,
|
||||
message=f'Updating {file_data_model.file_model.name}',
|
||||
content=file_data_model.data,
|
||||
sha=repo_file.sha
|
||||
)
|
||||
return {'updated': True}
|
||||
|
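A usage sketch tying the new GitHub methods to the rest of the commit (the repository name 'crispy-fiesta' is hard-coded above; the file id below is a made-up example, and GITHUB_TOKEN must be present in the app config per the config.py change earlier in this diff):

from crc.services.file_service import FileService

# e.g. what the Flask-Admin FileView 'Publish'/'Update' actions added in this commit end up calling:
FileService.publish_to_github([42])      # 42 is an illustrative FileModel id
FileService.update_from_github([42])     # pulls the GitHub copy back into the latest FileDataModel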
crc/services/user_service.py (new file, 117 lines)
@ -0,0 +1,117 @@
|
||||
from flask import g
|
||||
|
||||
from crc import session
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.user import UserModel, AdminSessionModel
|
||||
|
||||
|
||||
class UserService(object):
|
||||
"""Provides common tools for working with users"""
|
||||
|
||||
# Returns true if the current user is logged in.
|
||||
@staticmethod
|
||||
def has_user():
|
||||
return 'token' in g and \
|
||||
bool(g.token) and \
|
||||
'user' in g and \
|
||||
bool(g.user)
|
||||
|
||||
# Returns true if the current user is an admin.
|
||||
@staticmethod
|
||||
def user_is_admin():
|
||||
return UserService.has_user() and g.user.is_admin()
|
||||
|
||||
# Returns true if the current admin user is impersonating another user.
|
||||
@staticmethod
|
||||
def admin_is_impersonating():
|
||||
if UserService.user_is_admin():
|
||||
adminSession: AdminSessionModel = UserService.get_admin_session()
|
||||
return adminSession is not None
|
||||
|
||||
else:
|
||||
raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)
|
||||
|
||||
# Returns true if the given user uid is different from the current user's uid.
|
||||
@staticmethod
|
||||
def is_different_user(uid):
|
||||
return UserService.has_user() and uid is not None and uid is not g.user.uid
|
||||
|
||||
@staticmethod
|
||||
def current_user(allow_admin_impersonate=False) -> UserModel:
|
||||
if not UserService.has_user():
|
||||
raise ApiError("logged_out", "You are no longer logged in.", status_code=401)
|
||||
|
||||
# Admins can pretend to be different users and act on a user's behalf in
|
||||
# some circumstances.
|
||||
if UserService.user_is_admin() and allow_admin_impersonate and UserService.admin_is_impersonating():
|
||||
return UserService.get_admin_session_user()
|
||||
else:
|
||||
return g.user
|
||||
|
||||
# Admins can pretend to be different users and act on a user's behalf in some circumstances.
|
||||
# This method allows an admin user to start impersonating another user with the given uid.
|
||||
# Stops impersonating if the uid is None or invalid.
|
||||
@staticmethod
|
||||
def start_impersonating(uid=None):
|
||||
if not UserService.has_user():
|
||||
raise ApiError("logged_out", "You are no longer logged in.", status_code=401)
|
||||
|
||||
if not UserService.user_is_admin():
|
||||
raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)
|
||||
|
||||
if uid is None:
|
||||
raise ApiError("invalid_uid", "Please provide a valid user uid.")
|
||||
|
||||
if not UserService.admin_is_impersonating() and UserService.is_different_user(uid):
|
||||
# Impersonate the user if the given uid is valid.
|
||||
impersonate_user = session.query(UserModel).filter(UserModel.uid == uid).first()
|
||||
|
||||
if impersonate_user is not None:
|
||||
g.impersonate_user = impersonate_user
|
||||
|
||||
# Store the uid and user session token.
|
||||
session.add(AdminSessionModel(token=g.token, admin_impersonate_uid=uid))
|
||||
session.commit()
|
||||
else:
|
||||
raise ApiError("invalid_uid", "The uid provided is not valid.")
|
||||
|
||||
@staticmethod
|
||||
def stop_impersonating():
|
||||
if not UserService.has_user():
|
||||
raise ApiError("logged_out", "You are no longer logged in.", status_code=401)
|
||||
|
||||
# Clear out the current impersonating user.
|
||||
if 'impersonate_user' in g:
|
||||
del g.impersonate_user
|
||||
|
||||
admin_session: AdminSessionModel = UserService.get_admin_session()
|
||||
if admin_session:
|
||||
session.delete(admin_session)
|
||||
session.commit()
|
||||
|
||||
@staticmethod
|
||||
def in_list(uids, allow_admin_impersonate=False):
|
||||
"""Returns true if the current user's id is in the given list of ids. False if there
|
||||
is no user, or the user is not in the list."""
|
||||
if UserService.has_user(): # If someone is logged in, lock tasks that don't belong to them.
|
||||
user = UserService.current_user(allow_admin_impersonate)
|
||||
if user.uid in uids:
|
||||
return True
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def get_admin_session() -> AdminSessionModel:
|
||||
if UserService.user_is_admin():
|
||||
return session.query(AdminSessionModel).filter(AdminSessionModel.token == g.token).first()
|
||||
else:
|
||||
raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)
|
||||
|
||||
@staticmethod
|
||||
def get_admin_session_user() -> UserModel:
|
||||
if UserService.user_is_admin():
|
||||
admin_session = UserService.get_admin_session()
|
||||
|
||||
if admin_session is not None:
|
||||
return session.query(UserModel).filter(UserModel.uid == admin_session.admin_impersonate_uid).first()
|
||||
else:
|
||||
raise ApiError("unauthorized", "You do not have permissions to do this.", status_code=403)
|
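As a usage sketch of the service above (assuming a Flask request context in which verify_token has already populated g.user and g.token; 'abc1de' is a made-up uid):

from crc.services.user_service import UserService

if UserService.user_is_admin():
    UserService.start_impersonating('abc1de')
    acting = UserService.current_user(allow_admin_impersonate=True)    # the impersonated user
    actual = UserService.current_user(allow_admin_impersonate=False)   # still the admin
    UserService.stop_impersonating()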
@ -17,6 +17,7 @@ from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
|
||||
from SpiffWorkflow.exceptions import WorkflowTaskExecException
|
||||
from SpiffWorkflow.specs import WorkflowSpec
|
||||
|
||||
import crc
|
||||
from crc import session, app
|
||||
from crc.api.common import ApiError
|
||||
from crc.models.file import FileDataModel, FileModel, FileType
|
||||
@ -28,64 +29,71 @@ from crc import app
|
||||
|
||||
class CustomBpmnScriptEngine(BpmnScriptEngine):
|
||||
"""This is a custom script processor that can be easily injected into Spiff Workflow.
|
||||
Rather than execute arbitrary code, this assumes the script references a fully qualified python class
|
||||
such as myapp.RandomFact. """
|
||||
It will execute python code read in from the bpmn. It will also make any scripts in the
|
||||
scripts directory available for execution. """
|
||||
|
||||
def execute(self, task: SpiffTask, script, data):
|
||||
"""
|
||||
Functions in two modes.
|
||||
1. If the command is proceeded by #! then this is assumed to be a python script, and will
|
||||
attempt to load that python module and execute the do_task method on that script. Scripts
|
||||
must be located in the scripts package and they must extend the script.py class.
|
||||
2. If not proceeded by the #! this will attempt to execute the script directly and assumes it is
|
||||
valid Python.
|
||||
"""
|
||||
# Shlex splits the whole string while respecting double quoted strings within
|
||||
if not script.startswith('#!'):
|
||||
try:
|
||||
super().execute(task, script, data)
|
||||
except SyntaxError as e:
|
||||
raise ApiError.from_task('syntax_error',
|
||||
f'If you are running a pre-defined script, please'
|
||||
f' proceed the script with "#!", otherwise this is assumed to be'
|
||||
f' pure python: {script}, {e.msg}', task=task)
|
||||
else:
|
||||
self.run_predefined_script(task, script[2:], data) # strip off the first two characters.
|
||||
|
||||
def run_predefined_script(self, task: SpiffTask, script, data):
|
||||
commands = shlex.split(script)
|
||||
path_and_command = commands[0].rsplit(".", 1)
|
||||
if len(path_and_command) == 1:
|
||||
module_name = "crc.scripts." + self.camel_to_snake(path_and_command[0])
|
||||
class_name = path_and_command[0]
|
||||
study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
|
||||
if WorkflowProcessor.WORKFLOW_ID_KEY in task.workflow.data:
|
||||
workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
|
||||
else:
|
||||
module_name = "crc.scripts." + path_and_command[0] + "." + self.camel_to_snake(path_and_command[1])
|
||||
class_name = path_and_command[1]
|
||||
workflow_id = None
|
||||
|
||||
try:
|
||||
mod = __import__(module_name, fromlist=[class_name])
|
||||
klass = getattr(mod, class_name)
|
||||
study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
|
||||
if WorkflowProcessor.WORKFLOW_ID_KEY in task.workflow.data:
|
||||
workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
|
||||
else:
|
||||
workflow_id = None
|
||||
|
||||
if not isinstance(klass(), Script):
|
||||
raise ApiError.from_task("invalid_script",
|
||||
"This is an internal error. The script '%s:%s' you called " %
|
||||
(module_name, class_name) +
|
||||
"does not properly implement the CRC Script class.",
|
||||
task=task)
|
||||
if task.workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY]:
|
||||
"""If this is running a validation, and not a normal process, then we want to
|
||||
mimic running the script, but not make any external calls or database changes."""
|
||||
klass().do_task_validate_only(task, study_id, workflow_id, *commands[1:])
|
||||
augmentMethods = Script.generate_augmented_validate_list(task, study_id, workflow_id)
|
||||
else:
|
||||
klass().do_task(task, study_id, workflow_id, *commands[1:])
|
||||
except ModuleNotFoundError:
|
||||
raise ApiError.from_task("invalid_script",
|
||||
"Unable to locate Script: '%s:%s'" % (module_name, class_name),
|
||||
task=task)
|
||||
augmentMethods = Script.generate_augmented_list(task, study_id, workflow_id)
|
||||
|
||||
super().execute(task, script, data, externalMethods=augmentMethods)
|
||||
except SyntaxError as e:
|
||||
raise ApiError('syntax_error',
|
||||
f'Something is wrong with your python script '
|
||||
f'please correct the following:'
|
||||
f' {script}, {e.msg}')
|
||||
except NameError as e:
|
||||
raise ApiError('name_error',
|
||||
f'something you are referencing does not exist:'
|
||||
f' {script}, {e.name}')
|
||||
|
||||
# else:
|
||||
# self.run_predefined_script(task, script[2:], data) # strip off the first two characters.
|
||||
|
||||
# def run_predefined_script(self, task: SpiffTask, script, data):
|
||||
# commands = shlex.split(script)
|
||||
# path_and_command = commands[0].rsplit(".", 1)
|
||||
# if len(path_and_command) == 1:
|
||||
# module_name = "crc.scripts." + self.camel_to_snake(path_and_command[0])
|
||||
# class_name = path_and_command[0]
|
||||
# else:
|
||||
# module_name = "crc.scripts." + path_and_command[0] + "." + self.camel_to_snake(path_and_command[1])
|
||||
# class_name = path_and_command[1]
|
||||
# try:
|
||||
# mod = __import__(module_name, fromlist=[class_name])
|
||||
# klass = getattr(mod, class_name)
|
||||
# study_id = task.workflow.data[WorkflowProcessor.STUDY_ID_KEY]
|
||||
# if WorkflowProcessor.WORKFLOW_ID_KEY in task.workflow.data:
|
||||
# workflow_id = task.workflow.data[WorkflowProcessor.WORKFLOW_ID_KEY]
|
||||
# else:
|
||||
# workflow_id = None
|
||||
#
|
||||
# if not isinstance(klass(), Script):
|
||||
# raise ApiError.from_task("invalid_script",
|
||||
# "This is an internal error. The script '%s:%s' you called " %
|
||||
# (module_name, class_name) +
|
||||
# "does not properly implement the CRC Script class.",
|
||||
# task=task)
|
||||
# if task.workflow.data[WorkflowProcessor.VALIDATION_PROCESS_KEY]:
|
||||
# """If this is running a validation, and not a normal process, then we want to
|
||||
# mimic running the script, but not make any external calls or database changes."""
|
||||
# klass().do_task_validate_only(task, study_id, workflow_id, *commands[1:])
|
||||
# else:
|
||||
# klass().do_task(task, study_id, workflow_id, *commands[1:])
|
||||
# except ModuleNotFoundError:
|
||||
# raise ApiError.from_task("invalid_script",
|
||||
# "Unable to locate Script: '%s:%s'" % (module_name, class_name),
|
||||
# task=task)
|
||||
|
||||
def evaluate_expression(self, task, expression):
|
||||
"""
|
||||
|
@ -30,6 +30,7 @@ from crc.models.workflow import WorkflowModel, WorkflowStatus, WorkflowSpecModel
|
||||
from crc.services.file_service import FileService
|
||||
from crc.services.lookup_service import LookupService
|
||||
from crc.services.study_service import StudyService
|
||||
from crc.services.user_service import UserService
|
||||
from crc.services.workflow_processor import WorkflowProcessor
|
||||
|
||||
|
||||
@ -238,7 +239,7 @@ class WorkflowService(object):
|
||||
nav_item['title'] = nav_item['task'].title # Prefer the task title.
|
||||
|
||||
user_uids = WorkflowService.get_users_assigned_to_task(processor, spiff_task)
|
||||
if 'user' not in g or not g.user or g.user.uid not in user_uids:
|
||||
if not UserService.in_list(user_uids, allow_admin_impersonate=True):
|
||||
nav_item['state'] = WorkflowService.TASK_STATE_LOCKED
|
||||
|
||||
else:
|
||||
@ -271,7 +272,7 @@ class WorkflowService(object):
|
||||
workflow_api.next_task = WorkflowService.spiff_task_to_api_task(next_task, add_docs_and_forms=True)
|
||||
# Update the state of the task to locked if the current user does not own the task.
|
||||
user_uids = WorkflowService.get_users_assigned_to_task(processor, next_task)
|
||||
if 'user' not in g or not g.user or g.user.uid not in user_uids:
|
||||
if not UserService.in_list(user_uids, allow_admin_impersonate=True):
|
||||
workflow_api.next_task.state = WorkflowService.TASK_STATE_LOCKED
|
||||
return workflow_api
|
||||
|
||||
|
@ -212,7 +212,8 @@
|
||||
<bpmn:scriptTask id="Activity_10nxpt2" name="Load Study Details">
|
||||
<bpmn:incoming>SequenceFlow_1r3yrhy</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_09h1imz</bpmn:outgoing>
|
||||
<bpmn:script>#! StudyInfo details</bpmn:script>
|
||||
<bpmn:script>StudyInfo = {}
|
||||
StudyInfo['details'] = study_info('details')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:businessRuleTask id="Activity_PBMultiSiteCheckQ12" name="PB Multi-Site Check Q12" camunda:decisionRef="Decision_core_info_multi_site_q12">
|
||||
<bpmn:incoming>Flow_09h1imz</bpmn:incoming>
|
||||
|
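The BPMN changes in this and the following files all follow the same pattern: the old #!-prefixed script directives are replaced with plain Python that the new script engine evaluates, with the generated helpers (study_info, complete_template, request_approval, update_study, ldap) in scope. For example:

# Old-style directive, previously dispatched by run_predefined_script (removed above):
#   #! StudyInfo documents
# New style, evaluated directly as Python inside the script task:
StudyInfo = {}
StudyInfo['documents'] = study_info('documents')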
@ -453,7 +453,7 @@ Indicate all the possible formats in which you will transmit your data outside o
|
||||
<bpmn:incoming>SequenceFlow_0k2r83n</bpmn:incoming>
|
||||
<bpmn:incoming>SequenceFlow_0t6xl9i</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_16kyite</bpmn:outgoing>
|
||||
<bpmn:script>#! CompleteTemplate NEW_DSP_template.docx Study_DataSecurityPlan</bpmn:script>
|
||||
<bpmn:script>complete_template('NEW_DSP_template.docx','Study_DataSecurityPlan')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:manualTask id="Task_0q6ir2l" name="View Instructions">
|
||||
<bpmn:documentation>##### Instructions
|
||||
|
@ -53,12 +53,13 @@
|
||||
<bpmn:scriptTask id="Activity_0a14x7j" name="Load Approvals">
|
||||
<bpmn:incoming>Flow_0c7ryff</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_142jtxs</bpmn:outgoing>
|
||||
<bpmn:script>#! StudyInfo approvals</bpmn:script>
|
||||
<bpmn:script>StudyInfo['approvals'] = study_info('approvals')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:scriptTask id="Activity_1aju60t" name="Load Documents">
|
||||
<bpmn:incoming>Flow_1k3su2q</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0c7ryff</bpmn:outgoing>
|
||||
<bpmn:script>#! StudyInfo documents</bpmn:script>
|
||||
<bpmn:script>StudyInfo = {}
|
||||
StudyInfo['documents'] = study_info('documents')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="Flow_142jtxs" sourceRef="Activity_0a14x7j" targetRef="Activity_DisplayDocsAndApprovals" />
|
||||
<bpmn:sequenceFlow id="Flow_0c7ryff" sourceRef="Activity_1aju60t" targetRef="Activity_0a14x7j" />
|
||||
|
@ -36,7 +36,8 @@
|
||||
<bpmn:scriptTask id="ScriptTask_1fn00ox" name="Load IRB Details">
|
||||
<bpmn:incoming>SequenceFlow_1dhb8f4</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1uzcl1f</bpmn:outgoing>
|
||||
<bpmn:script>#! StudyInfo details</bpmn:script>
|
||||
<bpmn:script>StudyInfo = {}
|
||||
StudyInfo['details'] = study_info('details')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1uzcl1f" sourceRef="ScriptTask_1fn00ox" targetRef="Task_SupplementIDE" />
|
||||
<bpmn:exclusiveGateway id="ExclusiveGateway_1fib89p" name="IS_IDE = True and Number Provided? ">
|
||||
|
@ -217,7 +217,8 @@ Protocol Owner: **(need to insert value here)**</bpmn:documentation>
|
||||
<bpmn:scriptTask id="Activity_LoadDocuments" name="Load Documents">
|
||||
<bpmn:incoming>SequenceFlow_1dexemq</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1x9d2mo</bpmn:outgoing>
|
||||
<bpmn:script>#! StudyInfo documents</bpmn:script>
|
||||
<bpmn:script>StudyInfo = {}
|
||||
StudyInfo['documents'] = study_info('documents')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
|
@ -12,7 +12,8 @@
|
||||
<bpmn:scriptTask id="ScriptTask_LoadIRBDetails" name="Load IRB Details">
|
||||
<bpmn:incoming>SequenceFlow_1dhb8f4</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_1uzcl1f</bpmn:outgoing>
|
||||
<bpmn:script>#! StudyInfo details</bpmn:script>
|
||||
<bpmn:script>StudyInfo = {}
|
||||
StudyInfo['details'] = study_info('details')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1uzcl1f" sourceRef="ScriptTask_LoadIRBDetails" targetRef="Task_SupplementIDE" />
|
||||
<bpmn:businessRuleTask id="Task_SupplementIDE" name="Current IND Status" camunda:decisionRef="decision_ind_check">
|
||||
|
@ -8,7 +8,8 @@
|
||||
<bpmn:scriptTask id="ScriptTask_02924vs" name="Load IRB Details">
|
||||
<bpmn:incoming>SequenceFlow_1fmyo77</bpmn:incoming>
|
||||
<bpmn:outgoing>SequenceFlow_18nr0gf</bpmn:outgoing>
|
||||
<bpmn:script>#! StudyInfo details</bpmn:script>
|
||||
<bpmn:script>StudyInfo = {}
|
||||
StudyInfo['details'] = study_info('details')</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="SequenceFlow_1fmyo77" sourceRef="StartEvent_1" targetRef="ScriptTask_02924vs" />
|
||||
<bpmn:sequenceFlow id="SequenceFlow_18nr0gf" sourceRef="ScriptTask_02924vs" targetRef="Activity_FromIRB-API" />
|
||||
|
@@ -7,7 +7,8 @@
<bpmn:scriptTask id="ScriptTask_LoadPersonnel" name="Load IRB Personnel">
<bpmn:incoming>Flow_0kcrx5l</bpmn:incoming>
<bpmn:outgoing>Flow_1dcsioh</bpmn:outgoing>
<bpmn:script>#! StudyInfo investigators</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['investigators'] = study_info('investigators')</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="EndEvent_1qor16n">
<bpmn:documentation>## The following information was gathered:
@@ -598,7 +598,7 @@ Use the EHS [Lab Safety Plan During COVID 19 template](https://www.google.com/ur
This step is internal to the system and do not require and user interaction</bpmn:documentation>
<bpmn:incoming>Flow_11uqavk</bpmn:incoming>
<bpmn:outgoing>Flow_0aqgwvu</bpmn:outgoing>
<bpmn:script>#! CompleteTemplate ResearchRampUpPlan.docx RESEARCH_RAMPUP</bpmn:script>
<bpmn:script>complete_template('ResearchRampUpPlan.docx','RESEARCH_RAMPUP')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0aqgwvu" sourceRef="Activity_GenerateRRP" targetRef="Activity_AcknowledgePlanReview" />
<bpmn:sequenceFlow id="Flow_0j4rs82" sourceRef="Activity_SubmitPlan" targetRef="Activity_0absozl" />
@@ -755,7 +755,7 @@ Notify the Area Monitor for
This step is internal to the system and do not require and user interaction</bpmn:documentation>
<bpmn:incoming>Flow_0j4rs82</bpmn:incoming>
<bpmn:outgoing>Flow_07ge8uf</bpmn:outgoing>
<bpmn:script>#!RequestApproval ApprvlApprvr1 ApprvlApprvr2</bpmn:script>
<bpmn:script>request_approval('ApprvlApprvr1','ApprvlApprvr2')</bpmn:script>
</bpmn:scriptTask>
<bpmn:scriptTask id="Activity_1u58hox" name="Update Request">
<bpmn:documentation>#### Script Task
@@ -764,7 +764,7 @@ This step is internal to the system and do not require and user interaction</bpm
This step is internal to the system and do not require and user interaction</bpmn:documentation>
<bpmn:incoming>Flow_16y8glw</bpmn:incoming>
<bpmn:outgoing>Flow_0uc4o6c</bpmn:outgoing>
<bpmn:script>#! UpdateStudy title:PIComputingID.label pi:PIComputingID.value</bpmn:script>
<bpmn:script>update_study('title:PIComputingID.label','pi:PIComputingID.value')</bpmn:script>
</bpmn:scriptTask>
<bpmn:userTask id="PersonnelSchedule" name="Upload Weekly Personnel Schedule(s)" camunda:formKey="Personnel Weekly Schedule">
<bpmn:documentation>#### Weekly Personnel Schedule(s)
@@ -11,7 +11,8 @@
<bpmn:scriptTask id="Task_Load_Requirements" name="Load Documents From PB">
<bpmn:incoming>SequenceFlow_1ees8ka</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_17ct47v</bpmn:outgoing>
<bpmn:script>#! StudyInfo documents</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['documents'] = study_info('documents')</bpmn:script>
</bpmn:scriptTask>
<bpmn:businessRuleTask id="Activity_1yqy50i" name="Enter Core Info " camunda:decisionRef="enter_core_info">
<bpmn:incoming>Flow_1m8285h</bpmn:incoming>
@@ -62,7 +63,8 @@
<bpmn:scriptTask id="Activity_0f295la" name="Load Details from PB">
<bpmn:incoming>Flow_0pwtiqm</bpmn:incoming>
<bpmn:outgoing>Flow_0eq6px2</bpmn:outgoing>
<bpmn:script>#! StudyInfo details</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['details'] = study_info('details')</bpmn:script>
</bpmn:scriptTask>
<bpmn:businessRuleTask id="Activity_0ahlc3u" name="IDE Supplement" camunda:decisionRef="decision_ide_menu_check">
<bpmn:incoming>Flow_14ce1d7</bpmn:incoming>
@@ -91,7 +93,8 @@
<bpmn:scriptTask id="Activity_0g3qa1c" name="Load Personnel from PB">
<bpmn:incoming>Flow_1qyrmzn</bpmn:incoming>
<bpmn:outgoing>Flow_0vo6ul1</bpmn:outgoing>
<bpmn:script>#! StudyInfo investigators</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['investigators'] = study_info('investigators')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1ybicki" sourceRef="Activity_13ep6ar" targetRef="Event_135x8jg" />
<bpmn:businessRuleTask id="Activity_13ep6ar" name="Personnel" camunda:decisionRef="personnel">
@@ -11,7 +11,7 @@ from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '1c3f88dbccc3'
down_revision = '2e7b377cbc7b'
down_revision = 'ab06a94e5d4c'
branch_labels = None
depends_on = None
34  migrations/versions/ab06a94e5d4c_.py  (new file)
@@ -0,0 +1,34 @@
"""empty message

Revision ID: ab06a94e5d4c
Revises: 2e7b377cbc7b
Create Date: 2020-07-30 11:23:46.601338

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'ab06a94e5d4c'
down_revision = '2e7b377cbc7b'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('admin_session',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('token', sa.String(), nullable=True),
    sa.Column('admin_impersonate_uid', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('token')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('admin_session')
    # ### end Alembic commands ###
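This new migration creates the admin_session table that backs the admin-impersonation feature exercised by the authentication tests further down in this commit. A minimal sketch of a matching SQLAlchemy model, written against a plain declarative base; the real model lives elsewhere in the crc codebase and its class name and base class are assumptions here:

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class AdminSessionModel(Base):
        __tablename__ = 'admin_session'
        id = Column(Integer, primary_key=True)
        token = Column(String, unique=True)                     # the admin's session token
        admin_impersonate_uid = Column(String, nullable=True)   # uid currently being impersonated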
2  setup.py
@@ -1,3 +1,3 @@
from setuptools import setup

setup(setup_requires=["pbr"], pbr=True)
setup(setup_requires=["pbr"], pbr=True, install_requires=['box'])
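The new `box` entry in install_requires corresponds to the python-box package added to the Pipfile. A quick reminder of what the library does — it wraps plain dicts so nested keys can be read with attribute access; the usage below is standard python-box, while the motive (dot-notation access to task and study data in workflows) is an assumption:

    from box import Box

    task_data = Box({'StudyInfo': {'documents': {'count': 2}}})
    assert task_data.StudyInfo.documents.count == 2            # attribute-style access
    assert task_data['StudyInfo']['documents']['count'] == 2   # plain dict access still works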
@@ -18,14 +18,16 @@ from crc.models.file import FileModel, FileDataModel, CONTENT_TYPES
from crc.models.task_event import TaskEventModel
from crc.models.study import StudyModel, StudyStatus
from crc.models.user import UserModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecModelSchema, WorkflowModel, WorkflowSpecCategoryModel
from crc.models.workflow import WorkflowSpecModel, WorkflowSpecCategoryModel
from crc.services.file_service import FileService
from crc.services.study_service import StudyService
from crc.services.user_service import UserService
from crc.services.workflow_service import WorkflowService
from example_data import ExampleDataLoader

#UNCOMMENT THIS FOR DEBUGGING SQL ALCHEMY QUERIES
# UNCOMMENT THIS FOR DEBUGGING SQL ALCHEMY QUERIES
import logging

logging.basicConfig()


@@ -36,22 +38,32 @@ class BaseTest(unittest.TestCase):

    if not app.config['TESTING']:
        raise (Exception("INVALID TEST CONFIGURATION. This is almost always in import order issue."
                        "The first class to import in each test should be the base_test.py file."))
                         "The first class to import in each test should be the base_test.py file."))

    auths = {}
    test_uid = "dhf8r"

    users = [
        {
            'uid':'dhf8r',
            'email_address':'dhf8r@virginia.EDU',
            'display_name':'Daniel Harold Funk',
            'affiliation':'staff@virginia.edu;member@virginia.edu',
            'eppn':'dhf8r@virginia.edu',
            'first_name':'Daniel',
            'last_name':'Funk',
            'title':'SOFTWARE ENGINEER V'
        }
            'uid': 'dhf8r',
            'email_address': 'dhf8r@virginia.EDU',
            'display_name': 'Daniel Harold Funk',
            'affiliation': 'staff@virginia.edu;member@virginia.edu',
            'eppn': 'dhf8r@virginia.edu',
            'first_name': 'Daniel',
            'last_name': 'Funk',
            'title': 'SOFTWARE ENGINEER V'
        },
        {
            'uid': 'lbd3p',
            'email_address': 'lbd3p@virginia.EDU',
            'display_name': 'Laura Barnes',
            'affiliation': 'staff@virginia.edu;member@virginia.edu',
            'eppn': 'lbd3p@virginia.edu',
            'first_name': 'Laura',
            'last_name': 'Barnes',
            'title': 'Associate Professor of Systems and Information Engineering'
        },
    ]

    studies = [
@@ -77,7 +89,6 @@ class BaseTest(unittest.TestCase):
        }
    ]


    @classmethod
    def setUpClass(cls):
        app.config.from_object('config.testing')
@@ -97,7 +108,7 @@ class BaseTest(unittest.TestCase):

    def tearDown(self):
        ExampleDataLoader.clean_db()
        g.user = None
        self.logout()
        self.auths = {}

    def logged_in_headers(self, user=None, redirect_url='http://some/frontend/url'):
@@ -117,7 +128,8 @@ class BaseTest(unittest.TestCase):
        self.assertIsNotNone(user_model.display_name)
        self.assertEqual(user_model.uid, uid)
        self.assertTrue('user' in g, 'User should be in Flask globals')
        self.assertEqual(uid, g.user.uid, 'Logged in user should match given user uid')
        user = UserService.current_user(allow_admin_impersonate=True)
        self.assertEqual(uid, user.uid, 'Logged in user should match given user uid')

        return dict(Authorization='Bearer ' + user_model.encode_auth_token().decode())

@@ -134,16 +146,21 @@ class BaseTest(unittest.TestCase):
        else:
            ExampleDataLoader().load_test_data()

        for user_json in self.users:
            db.session.add(UserModel(**user_json))
        db.session.commit()
        # If in production mode, only add the first user.
        if app.config['PRODUCTION']:
            session.add(UserModel(**self.users[0]))
        else:
            for user_json in self.users:
                session.add(UserModel(**user_json))

        session.commit()
        for study_json in self.studies:
            study_model = StudyModel(**study_json)
            db.session.add(study_model)
            session.add(study_model)
            StudyService._add_all_workflow_specs_to_study(study_model)
            db.session.execute(Sequence(StudyModel.__tablename__ + '_id_seq'))
            db.session.commit()
            db.session.flush()
            session.execute(Sequence(StudyModel.__tablename__ + '_id_seq'))
            session.commit()
            session.flush()

        specs = session.query(WorkflowSpecModel).all()
        self.assertIsNotNone(specs)
@@ -167,8 +184,8 @@ class BaseTest(unittest.TestCase):
        """Loads a spec into the database based on a directory in /tests/data"""
        if category_id is None:
            category = WorkflowSpecCategoryModel(name="test", display_name="Test Workflows", display_order=0)
            db.session.add(category)
            db.session.commit()
            session.add(category)
            session.commit()
            category_id = category.id

        if session.query(WorkflowSpecModel).filter_by(id=dir_name).count() > 0:
@@ -216,14 +233,13 @@ class BaseTest(unittest.TestCase):

        return '?%s' % '&'.join(query_string_list)


    def replace_file(self, name, file_path):
        """Replaces a stored file with the given name with the contents of the file at the given path."""
        file_service = FileService()
        file = open(file_path, "rb")
        data = file.read()

        file_model = db.session.query(FileModel).filter(FileModel.name == name).first()
        file_model = session.query(FileModel).filter(FileModel.name == name).first()
        noise, file_extension = os.path.splitext(file_path)
        content_type = CONTENT_TYPES[file_extension[1:]]
        file_service.update_file(file_model, data, content_type)
@@ -232,18 +248,19 @@ class BaseTest(unittest.TestCase):
        user = session.query(UserModel).filter(UserModel.uid == uid).first()
        if user is None:
            user = UserModel(uid=uid, email_address=email, display_name=display_name)
            db.session.add(user)
            db.session.commit()
            session.add(user)
            session.commit()
        return user

    def create_study(self, uid="dhf8r", title="Beer consumption in the bipedal software engineer", primary_investigator_id="lb3dp"):
    def create_study(self, uid="dhf8r", title="Beer consumption in the bipedal software engineer",
                     primary_investigator_id="lb3dp"):
        study = session.query(StudyModel).filter_by(user_uid=uid).filter_by(title=title).first()
        if study is None:
            user = self.create_user(uid=uid)
            study = StudyModel(title=title, status=StudyStatus.in_progress,
                               user_uid=user.uid, primary_investigator_id=primary_investigator_id)
            db.session.add(study)
            db.session.commit()
            session.add(study)
            session.commit()
        return study

    def _create_study_workflow_approvals(self, user_uid, title, primary_investigator_id, approver_uids, statuses,
@@ -270,8 +287,8 @@ class BaseTest(unittest.TestCase):
        return full_study

    def create_workflow(self, workflow_name, display_name=None, study=None, category_id=None, as_user="dhf8r"):
        db.session.flush()
        spec = db.session.query(WorkflowSpecModel).filter(WorkflowSpecModel.name == workflow_name).first()
        session.flush()
        spec = session.query(WorkflowSpecModel).filter(WorkflowSpecModel.name == workflow_name).first()
        if spec is None:
            if display_name is None:
                display_name = workflow_name
@@ -290,21 +307,22 @@ class BaseTest(unittest.TestCase):
        file.close()

    def create_approval(
            self,
            study=None,
            workflow=None,
            approver_uid=None,
            status=None,
            version=None,
            self,
            study=None,
            workflow=None,
            approver_uid=None,
            status=None,
            version=None,
    ):
        study = study or self.create_study()
        workflow = workflow or self.create_workflow()
        approver_uid = approver_uid or self.test_uid
        status = status or ApprovalStatus.PENDING.value
        version = version or 1
        approval = ApprovalModel(study=study, workflow=workflow, approver_uid=approver_uid, status=status, version=version)
        db.session.add(approval)
        db.session.commit()
        approval = ApprovalModel(study=study, workflow=workflow, approver_uid=approver_uid, status=status,
                                 version=version)
        session.add(approval)
        session.commit()
        return approval

    def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False, do_engine_steps=True, user_uid="dhf8r"):
@@ -322,7 +340,6 @@ class BaseTest(unittest.TestCase):
        self.assertEqual(workflow.workflow_spec_id, workflow_api.workflow_spec_id)
        return workflow_api


    def complete_form(self, workflow_in, task_in, dict_data, error_code=None, terminate_loop=None, user_uid="dhf8r"):
        prev_completed_task_count = workflow_in.completed_tasks
        if isinstance(task_in, dict):
@@ -387,12 +404,18 @@ class BaseTest(unittest.TestCase):

        self.assertEqual(task_in.multi_instance_count, event.mi_count)
        if task_in.multi_instance_type == 'looping' and not terminate_loop:
            self.assertEqual(task_in.multi_instance_index+1, event.mi_index)
            self.assertEqual(task_in.multi_instance_index + 1, event.mi_index)
        else:
            self.assertEqual(task_in.multi_instance_index, event.mi_index)
        self.assertEqual(task_in.process_name, event.process_name)
        self.assertIsNotNone(event.date)


        workflow = WorkflowApiSchema().load(json_data)
        return workflow

    def logout(self):
        if 'user' in g:
            del g.user

        if 'impersonate_user' in g:
            del g.impersonate_user
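The base_test changes above are mostly mechanical: `db.session` becomes the shared `session` imported from `crc`, and a second test user (Laura Barnes) is added so the new admin/non-admin authentication tests have both roles available. A condensed sketch of the get-or-create pattern the helpers now follow, assuming `crc` exposes that shared SQLAlchemy session as the diff indicates:

    from crc import session
    from crc.models.user import UserModel

    def get_or_create_user(uid="dhf8r", email="dhf8r@virginia.edu", display_name="Dan Funk"):
        # Query through the shared session rather than db.session, then commit once.
        user = session.query(UserModel).filter(UserModel.uid == uid).first()
        if user is None:
            user = UserModel(uid=uid, email_address=email, display_name=display_name)
            session.add(user)
            session.commit()
        return user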
@@ -27,7 +27,7 @@
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_1i7hk1a</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_11c35oq</bpmn:outgoing>
<bpmn:script>#! CompleteTemplate Letter.docx AD_CoCApp</bpmn:script>
<bpmn:script>complete_template('Letter.docx','AD_CoCApp')</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="EndEvent_0evb22x">
<bpmn:incoming>SequenceFlow_11c35oq</bpmn:incoming>
@@ -20,7 +20,7 @@ Email content to be delivered to {{ ApprvlApprvr1 }}
---</bpmn:documentation>
<bpmn:incoming>Flow_08n2npe</bpmn:incoming>
<bpmn:outgoing>Flow_1xlrgne</bpmn:outgoing>
<bpmn:script>#! Email "Camunda Email Subject" ApprvlApprvr1 PIComputingID</bpmn:script>
<bpmn:script>email("Camunda Email Subject",'ApprvlApprvr1','PIComputingID')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1synsig" sourceRef="StartEvent_1" targetRef="Activity_1l9vih3" />
<bpmn:sequenceFlow id="Flow_1xlrgne" sourceRef="Activity_0s5v97n" targetRef="Event_0izrcj4" />
@@ -11,7 +11,7 @@
<bpmn:scriptTask id="Invalid_Script_Task" name="An Invalid Script Reference">
<bpmn:incoming>SequenceFlow_1pnq3kg</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_12pf6um</bpmn:outgoing>
<bpmn:script>#! NoSuchScript withArg1</bpmn:script>
<bpmn:script>no_such_script('withArg1')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_12pf6um" sourceRef="Invalid_Script_Task" targetRef="EndEvent_063bpg6" />
</bpmn:process>
70  tests/data/ldap_replace/ldap_replace.bpmn  (new file)
@@ -0,0 +1,70 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_0y2dq4f" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="Process_0tad5ma" name="Set Recipients" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1synsig</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:endEvent id="Event_0izrcj4">
<bpmn:incoming>Flow_11e7jgz</bpmn:incoming>
</bpmn:endEvent>
<bpmn:scriptTask id="Activity_0s5v97n" name="Ldap Replace">
<bpmn:incoming>Flow_08n2npe</bpmn:incoming>
<bpmn:outgoing>Flow_1xlrgne</bpmn:outgoing>
<bpmn:script>Supervisor = ldap(Supervisor)
Investigator = ldap(Investigator)</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1synsig" sourceRef="StartEvent_1" targetRef="Activity_1l9vih3" />
<bpmn:sequenceFlow id="Flow_1xlrgne" sourceRef="Activity_0s5v97n" targetRef="Activity_0f78ek5" />
<bpmn:sequenceFlow id="Flow_08n2npe" sourceRef="Activity_1l9vih3" targetRef="Activity_0s5v97n" />
<bpmn:userTask id="Activity_1l9vih3" name="Set UIDs">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="Supervisor" label="Approver" type="string" />
<camunda:formField id="Investigator" label="Primary Investigator" type="string" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1synsig</bpmn:incoming>
<bpmn:outgoing>Flow_08n2npe</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_11e7jgz" sourceRef="Activity_0f78ek5" targetRef="Event_0izrcj4" />
<bpmn:userTask id="Activity_0f78ek5" name="Read UIDs">
<bpmn:incoming>Flow_1xlrgne</bpmn:incoming>
<bpmn:outgoing>Flow_11e7jgz</bpmn:outgoing>
</bpmn:userTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0tad5ma">
<bpmndi:BPMNEdge id="Flow_11e7jgz_di" bpmnElement="Flow_11e7jgz">
<di:waypoint x="720" y="117" />
<di:waypoint x="802" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_08n2npe_di" bpmnElement="Flow_08n2npe">
<di:waypoint x="370" y="117" />
<di:waypoint x="450" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1xlrgne_di" bpmnElement="Flow_1xlrgne">
<di:waypoint x="550" y="117" />
<di:waypoint x="620" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1synsig_di" bpmnElement="Flow_1synsig">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0izrcj4_di" bpmnElement="Event_0izrcj4">
<dc:Bounds x="802" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_04imfm6_di" bpmnElement="Activity_0s5v97n">
<dc:Bounds x="450" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0xugr62_di" bpmnElement="Activity_1l9vih3">
<dc:Bounds x="270" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_17h05g6_di" bpmnElement="Activity_0f78ek5">
<dc:Bounds x="620" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>
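The "Ldap Replace" script task in this new test workflow swaps a computing ID held in task data for the full LDAP record, which is the contract the new ldap tests later in the commit assert on. A hypothetical helper that illustrates that contract only; the real `ldap` script lives in crc.scripts.ldap and its field names are taken from the test assertions below:

    def replace_uid_with_ldap_record(task_data: dict, field: str, lookup) -> dict:
        # `lookup` is any callable mapping a uid to a dict of LDAP attributes
        # (display_name, email_address, proper_name, ...).
        task_data[field] = lookup(task_data[field])
        return task_data

    record = {'uid': 'dhf8r', 'display_name': 'Dan Funk', 'email_address': 'dhf8r@virginia.edu'}
    data = replace_uid_with_ldap_record({'Supervisor': 'dhf8r'}, 'Supervisor', lambda uid: record)
    assert data['Supervisor']['display_name'] == 'Dan Funk'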
@@ -29,7 +29,8 @@
<bpmn:scriptTask id="Task_1v0e2zu" name="Load Personnel">
<bpmn:incoming>Flow_0t6p1sb</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1p568pp</bpmn:outgoing>
<bpmn:script>#! StudyInfo investigators</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['investigators'] = study_info('investigators')</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
@@ -11,7 +11,7 @@
<bpmn:sequenceFlow id="Flow_0ugjw69" sourceRef="MultiInstanceTask" targetRef="Event_End" />
<bpmn:userTask id="MultiInstanceTask" name="Gather more information" camunda:formKey="GetEmail">
<bpmn:documentation># Please provide addtional information about:
## Investigator ID: {{investigator.user_id}}
## Investigator ID: {{investigator.user_id}}
## Role: {{investigator.type_full}}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
@@ -29,7 +29,8 @@
<bpmn:scriptTask id="Task_1v0e2zu" name="Load Personnel">
<bpmn:incoming>Flow_0t6p1sb</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1p568pp</bpmn:outgoing>
<bpmn:script>#! StudyInfo investigators</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['investigators'] = study_info('investigators')</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
@@ -132,7 +132,7 @@ Autoconverted link https://github.com/nodeca/pica (enable linkify to see)
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0641sh6</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0t29gjo</bpmn:outgoing>
<bpmn:script>#! FactService</bpmn:script>
<bpmn:script>FactService = fact_service()</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="EndEvent_0u1cgrf">
<bpmn:documentation># Great Job!
@@ -8,12 +8,19 @@
<bpmn:scriptTask id="Task_Script_Load_Study_Details" name="Load Study Info">
<bpmn:incoming>SequenceFlow_1nfe5m9</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1bqiin0</bpmn:outgoing>
<bpmn:script>#! StudyInfo info</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['info'] = study_info('info')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_1bqiin0" sourceRef="Task_Script_Load_Study_Details" targetRef="EndEvent_171dj09" />
<bpmn:sequenceFlow id="SequenceFlow_1bqiin0" sourceRef="Task_Script_Load_Study_Details" targetRef="Activity_0w91u9s" />
<bpmn:endEvent id="EndEvent_171dj09">
<bpmn:incoming>SequenceFlow_1bqiin0</bpmn:incoming>
<bpmn:incoming>Flow_0ochvmi</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0ochvmi" sourceRef="Activity_0w91u9s" targetRef="EndEvent_171dj09" />
<bpmn:scriptTask id="Activity_0w91u9s" name="StudyInfo as Script">
<bpmn:incoming>SequenceFlow_1bqiin0</bpmn:incoming>
<bpmn:outgoing>Flow_0ochvmi</bpmn:outgoing>
<bpmn:script>study = study_info('info','p')</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0exnnpv">
@@ -29,10 +36,17 @@
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1bqiin0_di" bpmnElement="SequenceFlow_1bqiin0">
<di:waypoint x="370" y="117" />
<di:waypoint x="402" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="EndEvent_171dj09_di" bpmnElement="EndEvent_171dj09">
<dc:Bounds x="402" y="99" width="36" height="36" />
<dc:Bounds x="622" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0ochvmi_di" bpmnElement="Flow_0ochvmi">
<di:waypoint x="530" y="117" />
<di:waypoint x="622" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_1wtk4bb_di" bpmnElement="Activity_0w91u9s">
<dc:Bounds x="430" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
@@ -11,7 +11,8 @@
<bpmn:scriptTask id="Task_Load_Requirements" name="Load Required Documents From PM">
<bpmn:incoming>SequenceFlow_1ees8ka</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_17ct47v</bpmn:outgoing>
<bpmn:script>#! StudyInfo documents</bpmn:script>
<bpmn:script>StudyInfo = {}
StudyInfo['documents'] = study_info('documents')</bpmn:script>
</bpmn:scriptTask>
<bpmn:businessRuleTask id="Activity_1yqy50i" name="Enter Core Info " camunda:decisionRef="enter_core_info">
<bpmn:incoming>Flow_1m8285h</bpmn:incoming>
@@ -1,9 +1,45 @@
from github import UnknownObjectException
from sqlalchemy import desc
from tests.base_test import BaseTest
from unittest.mock import patch, Mock

from crc import db
from crc.models.file import FileDataModel
from crc.services.file_service import FileService
from crc.services.workflow_processor import WorkflowProcessor


class FakeGithubCreates(Mock):
    def get_user(var):
        class FakeUser(Mock):
            def get_repo(var, name):
                class FakeRepo(Mock):
                    def get_contents(var, filename):
                        raise UnknownObjectException(status='Failure', data='Failed data')
                    def update_file(var, path, message, content, sha):
                        pass
                return FakeRepo()
        return FakeUser()


class FakeGithub(Mock):
    def get_user(var):
        class FakeUser(Mock):
            def get_repo(var, name):
                class FakeRepo(Mock):
                    def get_contents(var, filename):
                        fake_file = Mock()
                        fake_file.decoded_content = b'Some bytes'
                        fake_file.path = '/el/path/'
                        fake_file.data = 'Serious data'
                        fake_file.sha = 'Sha'
                        return fake_file
                    def update_file(var, path, message, content, sha):
                        pass
                return FakeRepo()
        return FakeUser()


class TestFileService(BaseTest):
    """Largely tested via the test_file_api, and time is tight, but adding new tests here."""

@@ -103,3 +139,62 @@ class TestFileService(BaseTest):
                                                   binary_data=b'5678')
        file_models = FileService.get_workflow_files(workflow_id=workflow.id)
        self.assertEqual(2, len(file_models))

    @patch('crc.services.file_service.Github')
    def test_update_from_github(self, mock_github):
        mock_github.return_value = FakeGithub()

        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('file_upload_form')
        processor = WorkflowProcessor(workflow)
        task = processor.next_task()
        irb_code = "UVACompl_PRCAppr"  # The first file referenced in pb required docs.
        file_model = FileService.add_workflow_file(workflow_id=workflow.id,
                                                   irb_doc_code=irb_code,
                                                   name="anything.png", content_type="text",
                                                   binary_data=b'1234')
        FileService.update_from_github([file_model.id])

        file_model_data = FileDataModel.query.filter_by(
            file_model_id=file_model.id
        ).order_by(
            desc(FileDataModel.version)
        ).first()
        self.assertEqual(file_model_data.data, b'Some bytes')

    @patch('crc.services.file_service.Github')
    def test_publish_to_github_creates(self, mock_github):
        mock_github.return_value = FakeGithubCreates()

        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('file_upload_form')
        processor = WorkflowProcessor(workflow)
        task = processor.next_task()
        irb_code = "UVACompl_PRCAppr"  # The first file referenced in pb required docs.
        file_model = FileService.add_workflow_file(workflow_id=workflow.id,
                                                   irb_doc_code=irb_code,
                                                   name="anything.png", content_type="text",
                                                   binary_data=b'1234')
        result = FileService.publish_to_github([file_model.id])

        self.assertEqual(result['created'], True)

    @patch('crc.services.file_service.Github')
    def test_publish_to_github_updates(self, mock_github):
        mock_github.return_value = FakeGithub()

        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('file_upload_form')
        processor = WorkflowProcessor(workflow)
        task = processor.next_task()
        irb_code = "UVACompl_PRCAppr"  # The first file referenced in pb required docs.
        file_model = FileService.add_workflow_file(workflow_id=workflow.id,
                                                   irb_doc_code=irb_code,
                                                   name="anything.png", content_type="text",
                                                   binary_data=b'1234')
        result = FileService.publish_to_github([file_model.id])

        self.assertEqual(result['updated'], True)
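The tests above patch `Github` at the spot where crc.services.file_service imports it and substitute Mock-based fakes. The same pattern reduced to a self-contained example, with a hypothetical fetch function and repository name standing in for the service code (the real tests patch 'crc.services.file_service.Github' instead of the library directly):

    from unittest.mock import Mock, patch

    class FakeContents:
        decoded_content = b'Some bytes'
        sha = 'Sha'

    def fetch_readme():
        # Stand-in for production code that builds a PyGithub client and reads a file.
        from github import Github  # resolved to the patched mock inside the with-block
        repo = Github('token').get_user().get_repo('some-repo')
        return repo.get_contents('README.md').decoded_content

    with patch('github.Github') as mock_github:
        fake_repo = Mock()
        fake_repo.get_contents.return_value = FakeContents()
        mock_github.return_value.get_user.return_value.get_repo.return_value = fake_repo
        assert fetch_readme() == b'Some bytes'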
70  tests/ldap/test_ldap_lookup_script.py  (new file)
@@ -0,0 +1,70 @@
from tests.base_test import BaseTest

from crc.services.workflow_processor import WorkflowProcessor
from crc.scripts.ldap import Ldap
from crc.api.common import ApiError
from crc import db, mail


class TestLdapLookupScript(BaseTest):

    def test_get_existing_user_details(self):
        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('empty_workflow')
        processor = WorkflowProcessor(workflow)
        task = processor.next_task()

        script = Ldap()
        user_details = script.do_task(task, workflow.study_id, workflow.id, "dhf8r")

        self.assertEqual(user_details['display_name'], 'Dan Funk')
        self.assertEqual(user_details['given_name'], 'Dan')
        self.assertEqual(user_details['email_address'], 'dhf8r@virginia.edu')
        self.assertEqual(user_details['telephone_number'], '+1 (434) 924-1723')
        self.assertEqual(user_details['title'], 'E42:He\'s a hoopy frood')
        self.assertEqual(user_details['department'], 'E0:EN-Eng Study of Parallel Universes')
        self.assertEqual(user_details['affiliation'], 'faculty')
        self.assertEqual(user_details['sponsor_type'], 'Staff')
        self.assertEqual(user_details['uid'], 'dhf8r')
        self.assertEqual(user_details['proper_name'], 'Dan Funk - (dhf8r)')

    def test_get_invalid_user_details(self):
        self.load_example_data()
        self.create_reference_document()
        workflow = self.create_workflow('empty_workflow')
        processor = WorkflowProcessor(workflow)
        task = processor.next_task()

        task.data = {
            'PIComputingID': 'rec3z'
        }

        script = Ldap()
        with(self.assertRaises(ApiError)):
            user_details = script.do_task(task, workflow.study_id, workflow.id, "PIComputingID")

    def test_bpmn_task_receives_user_details(self):
        workflow = self.create_workflow('ldap_replace')

        task_data = {
            'Supervisor': 'dhf8r',
            'Investigator': 'lb3dp'
        }
        task = self.get_workflow_api(workflow).next_task

        self.complete_form(workflow, task, task_data)

        task = self.get_workflow_api(workflow).next_task

        self.assertEqual(task.data['Supervisor']['display_name'], 'Dan Funk')
        self.assertEqual(task.data['Supervisor']['given_name'], 'Dan')
        self.assertEqual(task.data['Supervisor']['email_address'], 'dhf8r@virginia.edu')
        self.assertEqual(task.data['Supervisor']['telephone_number'], '+1 (434) 924-1723')
        self.assertEqual(task.data['Supervisor']['title'], 'E42:He\'s a hoopy frood')
        self.assertEqual(task.data['Supervisor']['department'], 'E0:EN-Eng Study of Parallel Universes')
        self.assertEqual(task.data['Supervisor']['affiliation'], 'faculty')
        self.assertEqual(task.data['Supervisor']['sponsor_type'], 'Staff')
        self.assertEqual(task.data['Supervisor']['uid'], 'dhf8r')
        self.assertEqual(task.data['Supervisor']['proper_name'], 'Dan Funk - (dhf8r)')
@@ -5,7 +5,7 @@ from datetime import timezone, datetime, timedelta
import jwt

from tests.base_test import BaseTest
from crc import db, app
from crc import app, session
from crc.api.common import ApiError
from crc.models.protocol_builder import ProtocolBuilderStatus
from crc.models.study import StudySchema, StudyModel, StudyStatus
@@ -13,6 +13,8 @@ from crc.models.user import UserModel


class TestAuthentication(BaseTest):
    admin_uid = 'dhf8r'
    non_admin_uid = 'lb3dp'

    def tearDown(self):
        # Assure we set the production flag back to false.
@@ -58,9 +60,9 @@ class TestAuthentication(BaseTest):
        self.assertTrue(expected_exp_3 - 1000 <= actual_exp_3 <= expected_exp_3 + 1000)

    def test_non_production_auth_creates_user(self):
        new_uid = 'lb3dp'  ## Assure this user id is in the fake responses from ldap.
        new_uid = self.non_admin_uid  ## Assure this user id is in the fake responses from ldap.
        self.load_example_data()
        user = db.session.query(UserModel).filter(UserModel.uid == new_uid).first()
        user = session.query(UserModel).filter(UserModel.uid == new_uid).first()
        self.assertIsNone(user)

        user_info = {'uid': new_uid, 'first_name': 'Cordi', 'last_name': 'Nator',
@@ -72,7 +74,7 @@ class TestAuthentication(BaseTest):
        self.assertTrue(rv_1.status_code == 302)
        self.assertTrue(str.startswith(rv_1.location, redirect_url))

        user = db.session.query(UserModel).filter(UserModel.uid == new_uid).first()
        user = session.query(UserModel).filter(UserModel.uid == new_uid).first()
        self.assertIsNotNone(user)
        self.assertIsNotNone(user.display_name)
        self.assertIsNotNone(user.email_address)
@@ -88,21 +90,20 @@ class TestAuthentication(BaseTest):

        self.load_example_data()

        new_uid = 'lb3dp'  # This user is in the test ldap system.
        user = db.session.query(UserModel).filter_by(uid=new_uid).first()
        # User should not be in the system yet.
        user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first()
        self.assertIsNone(user)
        redirect_url = 'http://worlds.best.website/admin'
        headers = dict(Uid=new_uid)
        db.session.flush()
        rv = self.app.get('v1.0/login', follow_redirects=False, headers=headers)

        self.assert_success(rv)
        user = db.session.query(UserModel).filter_by(uid=new_uid).first()
        self.assertIsNotNone(user)
        self.assertEqual(new_uid, user.uid)
        self.assertEqual("Laura Barnes", user.display_name)
        self.assertEqual("lb3dp@virginia.edu", user.email_address)
        self.assertEqual("E0:Associate Professor of Systems and Information Engineering", user.title)
        # Log in
        non_admin_user = self._login_as_non_admin()

        # User should be in the system now.
        redirect_url = 'http://worlds.best.website/admin'
        rv_user = self.app.get('/v1.0/user', headers=self.logged_in_headers(non_admin_user, redirect_url=redirect_url))
        self.assert_success(rv_user)
        user_data = json.loads(rv_user.get_data(as_text=True))
        self.assertEqual(self.non_admin_uid, user_data['uid'])
        self.assertFalse(user_data['is_admin'])

        # Switch production mode back off
        app.config['PRODUCTION'] = False
@@ -119,6 +120,8 @@ class TestAuthentication(BaseTest):
        user = UserModel(uid="dhf8r", first_name='Dan', last_name='Funk', email_address='dhf8r@virginia.edu')
        rv = self.app.get('/v1.0/user', headers=self.logged_in_headers(user, redirect_url='http://omg.edu/lolwut'))
        self.assert_success(rv)
        user_data = json.loads(rv.get_data(as_text=True))
        self.assertTrue(user_data['is_admin'])

    def test_admin_can_access_admin_only_endpoints(self):
        # Switch production mode on
@@ -126,21 +129,8 @@ class TestAuthentication(BaseTest):

        self.load_example_data()

        admin_uids = app.config['ADMIN_UIDS']
        self.assertGreater(len(admin_uids), 0)
        admin_uid = admin_uids[0]
        self.assertEqual(admin_uid, 'dhf8r')  # This user is in the test ldap system.
        admin_headers = dict(Uid=admin_uid)

        rv = self.app.get('v1.0/login', follow_redirects=False, headers=admin_headers)
        self.assert_success(rv)

        admin_user = db.session.query(UserModel).filter(UserModel.uid == admin_uid).first()
        self.assertIsNotNone(admin_user)
        self.assertEqual(admin_uid, admin_user.uid)

        admin_study = self._make_fake_study(admin_uid)

        admin_user = self._login_as_admin()
        admin_study = self._make_fake_study(admin_user.uid)
        admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode())

        rv_add_study = self.app.post(
@@ -153,7 +143,7 @@ class TestAuthentication(BaseTest):
        self.assert_success(rv_add_study, 'Admin user should be able to add a study')

        new_admin_study = json.loads(rv_add_study.get_data(as_text=True))
        db_admin_study = db.session.query(StudyModel).filter_by(id=new_admin_study['id']).first()
        db_admin_study = session.query(StudyModel).filter_by(id=new_admin_study['id']).first()
        self.assertIsNotNone(db_admin_study)

        rv_del_study = self.app.delete(
@@ -173,26 +163,9 @@ class TestAuthentication(BaseTest):
        self.load_example_data()

        # Non-admin user should not be able to delete a study
        non_admin_uid = 'lb3dp'
        admin_uids = app.config['ADMIN_UIDS']
        self.assertGreater(len(admin_uids), 0)
        self.assertNotIn(non_admin_uid, admin_uids)

        non_admin_headers = dict(Uid=non_admin_uid)

        rv = self.app.get(
            'v1.0/login',
            follow_redirects=False,
            headers=non_admin_headers
        )
        self.assert_success(rv)

        non_admin_user = db.session.query(UserModel).filter_by(uid=non_admin_uid).first()
        self.assertIsNotNone(non_admin_user)

        non_admin_user = self._login_as_non_admin()
        non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode())

        non_admin_study = self._make_fake_study(non_admin_uid)
        non_admin_study = self._make_fake_study(non_admin_user.uid)

        rv_add_study = self.app.post(
            '/v1.0/study',
@@ -203,7 +176,7 @@ class TestAuthentication(BaseTest):
        self.assert_success(rv_add_study, 'Non-admin user should be able to add a study')

        new_non_admin_study = json.loads(rv_add_study.get_data(as_text=True))
        db_non_admin_study = db.session.query(StudyModel).filter_by(id=new_non_admin_study['id']).first()
        db_non_admin_study = session.query(StudyModel).filter_by(id=new_non_admin_study['id']).first()
        self.assertIsNotNone(db_non_admin_study)

        rv_non_admin_del_study = self.app.delete(
@@ -216,6 +189,87 @@ class TestAuthentication(BaseTest):
        # Switch production mode back off
        app.config['PRODUCTION'] = False

    def test_list_all_users(self):
        self.load_example_data()
        rv = self.app.get('/v1.0/user')
        self.assert_failure(rv, 401)

        rv = self.app.get('/v1.0/user', headers=self.logged_in_headers())
        self.assert_success(rv)

        all_users = session.query(UserModel).all()

        rv = self.app.get('/v1.0/list_users', headers=self.logged_in_headers())
        self.assert_success(rv)
        user_data = json.loads(rv.get_data(as_text=True))
        self.assertEqual(len(user_data), len(all_users))

    def test_admin_can_impersonate_another_user(self):
        # Switch production mode on
        app.config['PRODUCTION'] = True

        self.load_example_data()

        admin_user = self._login_as_admin()
        admin_token_headers = dict(Authorization='Bearer ' + admin_user.encode_auth_token().decode())

        # User should not be in the system yet.
        non_admin_user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first()
        self.assertIsNone(non_admin_user)

        # Admin should not be able to impersonate non-existent user
        rv_1 = self.app.get(
            '/v1.0/user?admin_impersonate_uid=' + self.non_admin_uid,
            content_type="application/json",
            headers=admin_token_headers,
            follow_redirects=False
        )
        self.assert_failure(rv_1, 400)

        # Add the non-admin user now
        self.logout()
        non_admin_user = self._login_as_non_admin()
        self.assertEqual(non_admin_user.uid, self.non_admin_uid)
        non_admin_token_headers = dict(Authorization='Bearer ' + non_admin_user.encode_auth_token().decode())

        # Add a study for the non-admin user
        non_admin_study = self._make_fake_study(self.non_admin_uid)
        rv_add_study = self.app.post(
            '/v1.0/study',
            content_type="application/json",
            headers=non_admin_token_headers,
            data=json.dumps(StudySchema().dump(non_admin_study))
        )
        self.assert_success(rv_add_study, 'Non-admin user should be able to add a study')
        self.logout()

        # Admin should be able to impersonate user now
        admin_user = self._login_as_admin()
        rv_2 = self.app.get(
            '/v1.0/user?admin_impersonate_uid=' + self.non_admin_uid,
            content_type="application/json",
            headers=admin_token_headers,
            follow_redirects=False
        )
        self.assert_success(rv_2)
        user_data_2 = json.loads(rv_2.get_data(as_text=True))
        self.assertEqual(user_data_2['uid'], self.non_admin_uid, 'Admin user should impersonate non-admin user')

        # Study endpoint should return non-admin user's studies
        rv_study = self.app.get(
            '/v1.0/study',
            content_type="application/json",
            headers=admin_token_headers,
            follow_redirects=False
        )
        self.assert_success(rv_study, 'Admin user should be able to get impersonated user studies')
        study_data = json.loads(rv_study.get_data(as_text=True))
        self.assertGreaterEqual(len(study_data), 1)
        self.assertEqual(study_data[0]['user_uid'], self.non_admin_uid)

        # Switch production mode back off
        app.config['PRODUCTION'] = False

    def _make_fake_study(self, uid):
        return {
            "title": "blah",
@@ -224,3 +278,42 @@ class TestAuthentication(BaseTest):
            "primary_investigator_id": uid,
            "user_uid": uid,
        }

    def _login_as_admin(self):
        admin_uids = app.config['ADMIN_UIDS']
        self.assertGreater(len(admin_uids), 0)
        self.assertIn(self.admin_uid, admin_uids)
        admin_headers = dict(Uid=self.admin_uid)

        rv = self.app.get('v1.0/login', follow_redirects=False, headers=admin_headers)
        self.assert_success(rv)

        admin_user = session.query(UserModel).filter(UserModel.uid == self.admin_uid).first()
        self.assertIsNotNone(admin_user)
        self.assertEqual(self.admin_uid, admin_user.uid)
        self.assertTrue(admin_user.is_admin())
        return admin_user

    def _login_as_non_admin(self):
        admin_uids = app.config['ADMIN_UIDS']
        self.assertGreater(len(admin_uids), 0)
        self.assertNotIn(self.non_admin_uid, admin_uids)

        non_admin_headers = dict(Uid=self.non_admin_uid)

        rv = self.app.get(
            'v1.0/login?uid=' + self.non_admin_uid,
            follow_redirects=False,
            headers=non_admin_headers
        )
        self.assert_success(rv)

        user = session.query(UserModel).filter(UserModel.uid == self.non_admin_uid).first()
        self.assertIsNotNone(user)
        self.assertFalse(user.is_admin())
        self.assertIsNotNone(user)
        self.assertEqual(self.non_admin_uid, user.uid)
        self.assertEqual("Laura Barnes", user.display_name)
        self.assertEqual("lb3dp@virginia.edu", user.email_address)
        self.assertEqual("E0:Associate Professor of Systems and Information Engineering", user.title)
        return user
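The new impersonation test above pins down the API contract: an admin sends their own bearer token to /v1.0/user with an admin_impersonate_uid query parameter. A small client-side sketch of that call using `requests`; the base URL and token variable are placeholders rather than values from this commit:

    import requests

    def impersonate(base_url, admin_token, target_uid):
        # Returns the impersonated user's record, mirroring test_admin_can_impersonate_another_user.
        resp = requests.get(
            f"{base_url}/v1.0/user",
            params={"admin_impersonate_uid": target_uid},
            headers={"Authorization": "Bearer " + admin_token},
        )
        resp.raise_for_status()
        return resp.json()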
@@ -30,4 +30,4 @@ class TestLdapService(BaseTest):
            user_info = LdapService.user_info("nosuch")
            self.assertFalse(True, "An API error should be raised.")
        except ApiError as ae:
            self.assertEqual("missing_ldap_record", ae.code)
            self.assertEqual("missing_ldap_record", ae.code)
@@ -90,7 +90,7 @@ class TestWorkflowSpecValidation(BaseTest):
        errors = self.validate_workflow("invalid_script")
        self.assertEqual(2, len(errors))
        self.assertEqual("workflow_validation_exception", errors[0]['code'])
        self.assertTrue("NoSuchScript" in errors[0]['message'])
        #self.assertTrue("NoSuchScript" in errors[0]['message'])
        self.assertEqual("Invalid_Script_Task", errors[0]['task_id'])
        self.assertEqual("An Invalid Script Reference", errors[0]['task_name'])
        self.assertEqual("invalid_script.bpmn", errors[0]['file_name'])