Merge branch 'rrt/dev' into feature/emails-enhancement

This commit is contained in:
Carlos Lopez 2020-06-24 21:46:53 -06:00
commit 5327b469f6
15 changed files with 530 additions and 199 deletions

View File

@ -41,6 +41,7 @@ gunicorn = "*"
werkzeug = "*"
sentry-sdk = {extras = ["flask"],version = "==0.14.4"}
flask-mail = "*"
flask-admin = "*"
markdown = "*"
[requires]

129
Pipfile.lock generated
View File

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "f5c922e74e296622c19ecfdd5c22cdcc71841fe81cdd95f407a2eb2ba475e615"
"sha256": "deb9e257fe8240d12bf82940ff22f5ddb338b305491f33655a82adda9438990f"
},
"pipfile-spec": 6,
"requires": {
@ -104,17 +104,17 @@
},
"celery": {
"hashes": [
"sha256:c3f4173f83ceb5a5c986c5fdaefb9456de3b0729a72a5776e46bd405fda7b647",
"sha256:d1762d6065522879f341c3d67c2b9fe4615eb79756d59acb1434601d4aca474b"
"sha256:ef17d7dffde7fc73ecab3a3b6389d93d3213bac53fa7f28e68e33647ad50b916",
"sha256:fd77e4248bb1b7af5f7922dd8e81156f540306e3a5c4b1c24167c1f5f06025da"
],
"version": "==4.4.5"
"version": "==4.4.6"
},
"certifi": {
"hashes": [
"sha256:5ad7e9a056d25ffa5082862e36f119f7f7cec6457fa07ee2f8c339814b80c9b1",
"sha256:9cd41137dc19af6a5e03b630eefe7d1f458d964d406342dd3edf625839b944cc"
"sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3",
"sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"
],
"version": "==2020.4.5.2"
"version": "==2020.6.20"
},
"cffi": {
"hashes": [
@ -261,6 +261,13 @@
"index": "pypi",
"version": "==1.1.2"
},
"flask-admin": {
"hashes": [
"sha256:68c761d8582d59b1f7702013e944a7ad11d7659a72f3006b89b68b0bd8df61b8"
],
"index": "pypi",
"version": "==1.5.6"
},
"flask-bcrypt": {
"hashes": [
"sha256:d71c8585b2ee1c62024392ebdbc447438564e2c8c02b4e57b56a4cafd8d13c5f"
@ -394,10 +401,10 @@
},
"kombu": {
"hashes": [
"sha256:437b9cdea193cc2ed0b8044c85fd0f126bb3615ca2f4d4a35b39de7cacfa3c1a",
"sha256:dc282bb277197d723bccda1a9ba30a27a28c9672d0ab93e9e51bb05a37bd29c3"
"sha256:be48cdffb54a2194d93ad6533d73f69408486483d189fe9f5990ee24255b0e0a",
"sha256:ca1b45faac8c0b18493d02a8571792f3c40291cf2bcf1f55afed3d8f3aa7ba74"
],
"version": "==4.6.10"
"version": "==4.6.11"
},
"ldap3": {
"hashes": [
@ -518,29 +525,34 @@
},
"numpy": {
"hashes": [
"sha256:0172304e7d8d40e9e49553901903dc5f5a49a703363ed756796f5808a06fc233",
"sha256:34e96e9dae65c4839bd80012023aadd6ee2ccb73ce7fdf3074c62f301e63120b",
"sha256:3676abe3d621fc467c4c1469ee11e395c82b2d6b5463a9454e37fe9da07cd0d7",
"sha256:3dd6823d3e04b5f223e3e265b4a1eae15f104f4366edd409e5a5e413a98f911f",
"sha256:4064f53d4cce69e9ac613256dc2162e56f20a4e2d2086b1956dd2fcf77b7fac5",
"sha256:4674f7d27a6c1c52a4d1aa5f0881f1eff840d2206989bae6acb1c7668c02ebfb",
"sha256:7d42ab8cedd175b5ebcb39b5208b25ba104842489ed59fbb29356f671ac93583",
"sha256:965df25449305092b23d5145b9bdaeb0149b6e41a77a7d728b1644b3c99277c1",
"sha256:9c9d6531bc1886454f44aa8f809268bc481295cf9740827254f53c30104f074a",
"sha256:a78e438db8ec26d5d9d0e584b27ef25c7afa5a182d1bf4d05e313d2d6d515271",
"sha256:a7acefddf994af1aeba05bbbafe4ba983a187079f125146dc5859e6d817df824",
"sha256:a87f59508c2b7ceb8631c20630118cc546f1f815e034193dc72390db038a5cb3",
"sha256:ac792b385d81151bae2a5a8adb2b88261ceb4976dbfaaad9ce3a200e036753dc",
"sha256:b03b2c0badeb606d1232e5f78852c102c0a7989d3a534b3129e7856a52f3d161",
"sha256:b39321f1a74d1f9183bf1638a745b4fd6fe80efbb1f6b32b932a588b4bc7695f",
"sha256:cae14a01a159b1ed91a324722d746523ec757357260c6804d11d6147a9e53e3f",
"sha256:cd49930af1d1e49a812d987c2620ee63965b619257bd76eaaa95870ca08837cf",
"sha256:e15b382603c58f24265c9c931c9a45eebf44fe2e6b4eaedbb0d025ab3255228b",
"sha256:e91d31b34fc7c2c8f756b4e902f901f856ae53a93399368d9a0dc7be17ed2ca0",
"sha256:ef627986941b5edd1ed74ba89ca43196ed197f1a206a3f18cc9faf2fb84fd675",
"sha256:f718a7949d1c4f622ff548c572e0c03440b49b9531ff00e4ed5738b459f011e8"
"sha256:13af0184177469192d80db9bd02619f6fa8b922f9f327e077d6f2a6acb1ce1c0",
"sha256:26a45798ca2a4e168d00de75d4a524abf5907949231512f372b217ede3429e98",
"sha256:26f509450db547e4dfa3ec739419b31edad646d21fb8d0ed0734188b35ff6b27",
"sha256:30a59fb41bb6b8c465ab50d60a1b298d1cd7b85274e71f38af5a75d6c475d2d2",
"sha256:33c623ef9ca5e19e05991f127c1be5aeb1ab5cdf30cb1c5cf3960752e58b599b",
"sha256:356f96c9fbec59974a592452ab6a036cd6f180822a60b529a975c9467fcd5f23",
"sha256:3c40c827d36c6d1c3cf413694d7dc843d50997ebffbc7c87d888a203ed6403a7",
"sha256:4d054f013a1983551254e2379385e359884e5af105e3efe00418977d02f634a7",
"sha256:63d971bb211ad3ca37b2adecdd5365f40f3b741a455beecba70fd0dde8b2a4cb",
"sha256:658624a11f6e1c252b2cd170d94bf28c8f9410acab9f2fd4369e11e1cd4e1aaf",
"sha256:76766cc80d6128750075378d3bb7812cf146415bd29b588616f72c943c00d598",
"sha256:7b57f26e5e6ee2f14f960db46bd58ffdca25ca06dd997729b1b179fddd35f5a3",
"sha256:7b852817800eb02e109ae4a9cef2beda8dd50d98b76b6cfb7b5c0099d27b52d4",
"sha256:8cde829f14bd38f6da7b2954be0f2837043e8b8d7a9110ec5e318ae6bf706610",
"sha256:a2e3a39f43f0ce95204beb8fe0831199542ccab1e0c6e486a0b4947256215632",
"sha256:a86c962e211f37edd61d6e11bb4df7eddc4a519a38a856e20a6498c319efa6b0",
"sha256:a8705c5073fe3fcc297fb8e0b31aa794e05af6a329e81b7ca4ffecab7f2b95ef",
"sha256:b6aaeadf1e4866ca0fdf7bb4eed25e521ae21a7947c59f78154b24fc7abbe1dd",
"sha256:be62aeff8f2f054eff7725f502f6228298891fd648dc2630e03e44bf63e8cee0",
"sha256:c2edbb783c841e36ca0fa159f0ae97a88ce8137fb3a6cd82eae77349ba4b607b",
"sha256:cbe326f6d364375a8e5a8ccb7e9cd73f4b2f6dc3b2ed205633a0db8243e2a96a",
"sha256:d34fbb98ad0d6b563b95de852a284074514331e6b9da0a9fc894fb1cdae7a79e",
"sha256:d97a86937cf9970453c3b62abb55a6475f173347b4cde7f8dcdb48c8e1b9952d",
"sha256:dd53d7c4a69e766e4900f29db5872f5824a06827d594427cf1a4aa542818b796",
"sha256:df1889701e2dfd8ba4dc9b1a010f0a60950077fb5242bb92c8b5c7f1a6f2668a",
"sha256:fa1fe75b4a9e18b66ae7f0b122543c42debcf800aaafa0212aaff3ad273c2596"
],
"version": "==1.18.5"
"version": "==1.19.0"
},
"openapi-spec-validator": {
"hashes": [
@ -552,10 +564,10 @@
},
"openpyxl": {
"hashes": [
"sha256:547a9fc6aafcf44abe358b89ed4438d077e9d92e4f182c87e2dc294186dc4b64"
"sha256:6e62f058d19b09b95d20ebfbfb04857ad08d0833190516c1660675f699c6186f"
],
"index": "pypi",
"version": "==3.0.3"
"version": "==3.0.4"
},
"packaging": {
"hashes": [
@ -566,25 +578,25 @@
},
"pandas": {
"hashes": [
"sha256:034185bb615dc96d08fa13aacba8862949db19d5e7804d6ee242d086f07bcc46",
"sha256:0c9b7f1933e3226cc16129cf2093338d63ace5c85db7c9588e3e1ac5c1937ad5",
"sha256:1f6fcf0404626ca0475715da045a878c7062ed39bc859afc4ccf0ba0a586a0aa",
"sha256:1fc963ba33c299973e92d45466e576d11f28611f3549469aec4a35658ef9f4cc",
"sha256:29b4cfee5df2bc885607b8f016e901e63df7ffc8f00209000471778f46cc6678",
"sha256:2a8b6c28607e3f3c344fe3e9b3cd76d2bf9f59bc8c0f2e582e3728b80e1786dc",
"sha256:2bc2ff52091a6ac481cc75d514f06227dc1b10887df1eb72d535475e7b825e31",
"sha256:415e4d52fcfd68c3d8f1851cef4d947399232741cc994c8f6aa5e6a9f2e4b1d8",
"sha256:519678882fd0587410ece91e3ff7f73ad6ded60f6fcb8aa7bcc85c1dc20ecac6",
"sha256:51e0abe6e9f5096d246232b461649b0aa627f46de8f6344597ca908f2240cbaa",
"sha256:698e26372dba93f3aeb09cd7da2bb6dd6ade248338cfe423792c07116297f8f4",
"sha256:83af85c8e539a7876d23b78433d90f6a0e8aa913e37320785cf3888c946ee874",
"sha256:982cda36d1773076a415ec62766b3c0a21cdbae84525135bdb8f460c489bb5dd",
"sha256:a647e44ba1b3344ebc5991c8aafeb7cca2b930010923657a273b41d86ae225c4",
"sha256:b35d625282baa7b51e82e52622c300a1ca9f786711b2af7cbe64f1e6831f4126",
"sha256:bab51855f8b318ef39c2af2c11095f45a10b74cbab4e3c8199efcc5af314c648"
"sha256:02f1e8f71cd994ed7fcb9a35b6ddddeb4314822a0e09a9c5b2d278f8cb5d4096",
"sha256:13f75fb18486759da3ff40f5345d9dd20e7d78f2a39c5884d013456cec9876f0",
"sha256:35b670b0abcfed7cad76f2834041dcf7ae47fd9b22b63622d67cdc933d79f453",
"sha256:4c73f373b0800eb3062ffd13d4a7a2a6d522792fa6eb204d67a4fad0a40f03dc",
"sha256:5759edf0b686b6f25a5d4a447ea588983a33afc8a0081a0954184a4a87fd0dd7",
"sha256:5a7cf6044467c1356b2b49ef69e50bf4d231e773c3ca0558807cdba56b76820b",
"sha256:69c5d920a0b2a9838e677f78f4dde506b95ea8e4d30da25859db6469ded84fa8",
"sha256:8778a5cc5a8437a561e3276b85367412e10ae9fff07db1eed986e427d9a674f8",
"sha256:9871ef5ee17f388f1cb35f76dc6106d40cb8165c562d573470672f4cdefa59ef",
"sha256:9c31d52f1a7dd2bb4681d9f62646c7aa554f19e8e9addc17e8b1b20011d7522d",
"sha256:ab8173a8efe5418bbe50e43f321994ac6673afc5c7c4839014cf6401bbdd0705",
"sha256:ae961f1f0e270f1e4e2273f6a539b2ea33248e0e3a11ffb479d757918a5e03a9",
"sha256:b3c4f93fcb6e97d993bf87cdd917883b7dab7d20c627699f360a8fb49e9e0b91",
"sha256:c9410ce8a3dee77653bc0684cfa1535a7f9c291663bd7ad79e39f5ab58f67ab3",
"sha256:f69e0f7b7c09f1f612b1f8f59e2df72faa8a6b41c5a436dde5b615aaf948f107",
"sha256:faa42a78d1350b02a7d2f0dbe3c80791cf785663d6997891549d0f86dc49125e"
],
"index": "pypi",
"version": "==1.0.4"
"version": "==1.0.5"
},
"psycopg2-binary": {
"hashes": [
@ -810,7 +822,7 @@
"spiffworkflow": {
"editable": true,
"git": "https://github.com/sartography/SpiffWorkflow.git",
"ref": "b8a064a0bb76c705a1be04ee9bb8ac7beee56eb0"
"ref": "5450dc0463a95811d386b7de063d950bf6179d2b"
},
"sqlalchemy": {
"hashes": [
@ -898,6 +910,13 @@
"index": "pypi",
"version": "==1.0.1"
},
"wtforms": {
"hashes": [
"sha256:6ff8635f4caeed9f38641d48cfe019d0d3896f41910ab04494143fc027866e1b",
"sha256:861a13b3ae521d6700dac3b2771970bd354a63ba7043ecc3a82b5288596a1972"
],
"version": "==2.3.1"
},
"xlrd": {
"hashes": [
"sha256:546eb36cee8db40c3eaa46c351e67ffee6eeb5fa2650b71bc4c758a29a1b29b2",
@ -1035,10 +1054,10 @@
},
"wcwidth": {
"hashes": [
"sha256:79375666b9954d4a1a10739315816324c3e73110af9d0e102d906fdb0aec009f",
"sha256:8c6b5b6ee1360b842645f336d9e5d68c55817c26d3050f46b235ef2bc650e48f"
"sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784",
"sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"
],
"version": "==0.2.4"
"version": "==0.2.5"
},
"zipp": {
"hashes": [

View File

@ -4,6 +4,8 @@ import sentry_sdk
import connexion
from jinja2 import Environment, FileSystemLoader
from flask_admin import Admin
from flask_admin.contrib.sqla import ModelView
from flask_cors import CORS
from flask_marshmallow import Marshmallow
from flask_mail import Mail
@ -37,13 +39,16 @@ ma = Marshmallow(app)
from crc import models
from crc import api
from crc.api import admin
connexion_app.add_api('api.yml', base_path='/v1.0')
# Convert list of allowed origins to list of regexes
origins_re = [r"^https?:\/\/%s(.*)" % o.replace('.', '\.') for o in app.config['CORS_ALLOW_ORIGINS']]
cors = CORS(connexion_app.app, origins=origins_re)
# Sentry error handling
if app.config['ENABLE_SENTRY']:
sentry_sdk.init(
dsn="https://25342ca4e2d443c6a5c49707d68e9f40@o401361.ingest.sentry.io/5260915",
@ -88,3 +93,10 @@ def clear_db():
"""Load example data into the database."""
from example_data import ExampleDataLoader
ExampleDataLoader.clean_db()
@app.cli.command()
def rrt_data_fix():
    """Finds all the empty task event logs, and populates
    them with good wholesome data."""
    # Imported locally — presumably to avoid a circular import at module load; confirm.
    from crc.services.workflow_service import WorkflowService
    WorkflowService.fix_legacy_data_model_for_rrt()

72
crc/api/admin.py Normal file
View File

@ -0,0 +1,72 @@
# Admin app
import json
from flask import url_for
from flask_admin import Admin
from flask_admin.contrib import sqla
from flask_admin.contrib.sqla import ModelView
from werkzeug.utils import redirect
from jinja2 import Markup
from crc import db, app
from crc.api.user import verify_token, verify_token_admin
from crc.models.approval import ApprovalModel
from crc.models.file import FileModel
from crc.models.stats import TaskEventModel
from crc.models.study import StudyModel
from crc.models.user import UserModel
from crc.models.workflow import WorkflowModel
class AdminModelView(sqla.ModelView):
    """Base Flask-Admin view: read-only listing with CSV export, guarded by the admin token check."""
    can_create = False
    can_edit = False
    can_delete = False
    page_size = 50  # the number of entries to display on the list view
    column_exclude_list = ['bpmn_workflow_json', ]  # large serialized payload; keep it out of the list view
    column_display_pk = True
    can_export = True

    def is_accessible(self):
        # Only requests that pass the admin token verification may see these views.
        return verify_token_admin()

    def inaccessible_callback(self, name, **kwargs):
        # redirect to login page if user doesn't have access
        return redirect(url_for('home'))
class UserView(AdminModelView):
    """Admin listing of users, filterable by uid."""
    column_filters = ['uid']
class StudyView(AdminModelView):
    """Admin listing of studies; searchable by title, filterable by id and PI."""
    column_filters = ['id', 'primary_investigator_id']
    column_searchable_list = ['title']
class ApprovalView(AdminModelView):
    """Admin listing of approvals, filterable by study and approver."""
    column_filters = ['study_id', 'approver_uid']
class WorkflowView(AdminModelView):
    """Admin listing of workflows, filterable by study and id."""
    column_filters = ['study_id', 'id']
class FileView(AdminModelView):
    """Admin listing of files, filterable by owning workflow."""
    column_filters = ['workflow_id']
def json_formatter(view, context, model, name):
    """Flask-Admin column formatter: render the named model attribute as pretty-printed JSON.

    :param view: the ModelView invoking the formatter (unused)
    :param context: the Jinja rendering context (unused)
    :param model: the row object being rendered
    :param name: the attribute name whose value should be rendered
    :return: a Markup-wrapped ``<pre>`` block with the escaped JSON payload
    """
    value = getattr(model, name)
    json_value = json.dumps(value, ensure_ascii=False, indent=2)
    # Format *through* Markup so the JSON (user-submitted form data) is
    # HTML-escaped. Wrapping the already-interpolated string in Markup()
    # would mark untrusted data as safe and open an XSS hole in the admin UI.
    return Markup('<pre>{}</pre>').format(json_value)
class TaskEventView(AdminModelView):
    """Admin listing of task events; the form_data column is rendered as pretty-printed JSON."""
    column_filters = ['workflow_id', 'action']
    column_list = ['study_id', 'user_id', 'workflow_id', 'action', 'task_title', 'form_data', 'date']
    column_formatters = {
        'form_data': json_formatter,
    }
# Register one read-only admin view per model on the Flask app.
admin = Admin(app)
admin.add_view(StudyView(StudyModel, db.session))
admin.add_view(ApprovalView(ApprovalModel, db.session))
admin.add_view(UserView(UserModel, db.session))
admin.add_view(WorkflowView(WorkflowModel, db.session))
admin.add_view(FileView(FileModel, db.session))
admin.add_view(TaskEventView(TaskEventModel, db.session))

View File

@ -1,7 +1,7 @@
import uuid
from SpiffWorkflow.util.deep_merge import DeepMerge
from flask import g
from crc import session, app
from crc.api.common import ApiError, ApiErrorSchema
from crc.models.api_models import WorkflowApi, WorkflowApiSchema, NavigationItem, NavigationItemSchema
@ -96,59 +96,10 @@ def delete_workflow_specification(spec_id):
session.commit()
def __get_workflow_api_model(processor: WorkflowProcessor, next_task = None):
"""Returns an API model representing the state of the current workflow, if requested, and
possible, next_task is set to the current_task."""
nav_dict = processor.bpmn_workflow.get_nav_list()
navigation = []
for nav_item in nav_dict:
spiff_task = processor.bpmn_workflow.get_task(nav_item['task_id'])
if 'description' in nav_item:
nav_item['title'] = nav_item.pop('description')
# fixme: duplicate code from the workflow_service. Should only do this in one place.
if ' ' in nav_item['title']:
nav_item['title'] = nav_item['title'].partition(' ')[2]
else:
nav_item['title'] = ""
if spiff_task:
nav_item['task'] = WorkflowService.spiff_task_to_api_task(spiff_task, add_docs_and_forms=False)
nav_item['title'] = nav_item['task'].title # Prefer the task title.
else:
nav_item['task'] = None
if not 'is_decision' in nav_item:
nav_item['is_decision'] = False
navigation.append(NavigationItem(**nav_item))
NavigationItemSchema().dump(nav_item)
spec = session.query(WorkflowSpecModel).filter_by(id=processor.workflow_spec_id).first()
workflow_api = WorkflowApi(
id=processor.get_workflow_id(),
status=processor.get_status(),
next_task=None,
navigation=navigation,
workflow_spec_id=processor.workflow_spec_id,
spec_version=processor.get_version_string(),
is_latest_spec=processor.is_latest_spec,
total_tasks=len(navigation),
completed_tasks=processor.workflow_model.completed_tasks,
last_updated=processor.workflow_model.last_updated,
title=spec.display_name
)
if not next_task: # The Next Task can be requested to be a certain task, useful for parallel tasks.
# This may or may not work, sometimes there is no next task to complete.
next_task = processor.next_task()
if next_task:
workflow_api.next_task = WorkflowService.spiff_task_to_api_task(next_task, add_docs_and_forms=True)
return workflow_api
def get_workflow(workflow_id, soft_reset=False, hard_reset=False):
workflow_model: WorkflowModel = session.query(WorkflowModel).filter_by(id=workflow_id).first()
processor = WorkflowProcessor(workflow_model, soft_reset=soft_reset, hard_reset=hard_reset)
workflow_api_model = __get_workflow_api_model(processor)
workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
return WorkflowApiSchema().dump(workflow_api_model)
@ -161,17 +112,20 @@ def set_current_task(workflow_id, task_id):
user_uid = __get_user_uid(workflow_model.study.user_uid)
processor = WorkflowProcessor(workflow_model)
task_id = uuid.UUID(task_id)
task = processor.bpmn_workflow.get_task(task_id)
if task.state != task.COMPLETED and task.state != task.READY:
spiff_task = processor.bpmn_workflow.get_task(task_id)
if spiff_task.state != spiff_task.COMPLETED and spiff_task.state != spiff_task.READY:
raise ApiError("invalid_state", "You may not move the token to a task who's state is not "
"currently set to COMPLETE or READY.")
# Only reset the token if the task doesn't already have it.
if task.state == task.COMPLETED:
task.reset_token(reset_data=False) # we could optionally clear the previous data.
if spiff_task.state == spiff_task.COMPLETED:
spiff_task.reset_token(reset_data=True) # Don't try to copy the existing data back into this task.
processor.save()
WorkflowService.log_task_action(user_uid, processor, task, WorkflowService.TASK_ACTION_TOKEN_RESET)
workflow_api_model = __get_workflow_api_model(processor, task)
WorkflowService.log_task_action(user_uid, workflow_model, spiff_task,
WorkflowService.TASK_ACTION_TOKEN_RESET,
version=processor.get_version_string())
workflow_api_model = WorkflowService.processor_to_workflow_api(processor, spiff_task)
return WorkflowApiSchema().dump(workflow_api_model)
@ -187,17 +141,19 @@ def update_task(workflow_id, task_id, body):
user_uid = __get_user_uid(workflow_model.study.user_uid)
processor = WorkflowProcessor(workflow_model)
task_id = uuid.UUID(task_id)
task = processor.bpmn_workflow.get_task(task_id)
if task.state != task.READY:
spiff_task = processor.bpmn_workflow.get_task(task_id)
if spiff_task.state != spiff_task.READY:
raise ApiError("invalid_state", "You may not update a task unless it is in the READY state. "
"Consider calling a token reset to make this task Ready.")
task.update_data(body)
processor.complete_task(task)
if body: # IF and only if we get the body back, update the task data with the content.
spiff_task.data = body # Accept the data from the front end as complete. Do not merge it in, as then it is impossible to remove items.
processor.complete_task(spiff_task)
processor.do_engine_steps()
processor.save()
WorkflowService.log_task_action(user_uid, processor, task, WorkflowService.TASK_ACTION_COMPLETE)
workflow_api_model = __get_workflow_api_model(processor)
WorkflowService.log_task_action(user_uid, workflow_model, spiff_task, WorkflowService.TASK_ACTION_COMPLETE,
version=processor.get_version_string())
workflow_api_model = WorkflowService.processor_to_workflow_api(processor)
return WorkflowApiSchema().dump(workflow_api_model)

View File

@ -36,6 +36,7 @@ class Task(object):
PROP_OPTIONS_FILE = "spreadsheet.name"
PROP_OPTIONS_VALUE_COLUMN = "spreadsheet.value.column"
PROP_OPTIONS_LABEL_COL = "spreadsheet.label.column"
PROP_OPTIONS_READ_ONLY = "read_only"
PROP_LDAP_LOOKUP = "ldap.lookup"
VALIDATION_REQUIRED = "required"
FIELD_TYPE_AUTO_COMPLETE = "autocomplete"

View File

@ -17,6 +17,7 @@ class TaskEventModel(db.Model):
task_title = db.Column(db.String)
task_type = db.Column(db.String)
task_state = db.Column(db.String)
form_data = db.Column(db.JSON) # And form data submitted when the task was completed.
mi_type = db.Column(db.String)
mi_count = db.Column(db.Integer)
mi_index = db.Column(db.Integer)

View File

@ -102,14 +102,15 @@ class WorkflowProcessor(object):
def __init__(self, workflow_model: WorkflowModel, soft_reset=False, hard_reset=False, validate_only=False):
"""Create a Workflow Processor based on the serialized information available in the workflow model.
If soft_reset is set to true, it will try to use the latest version of the workflow specification.
If hard_reset is set to true, it will create a new Workflow, but embed the data from the last
completed task in the previous workflow.
If soft_reset is set to true, it will try to use the latest version of the workflow specification
without resetting to the beginning of the workflow. This will work for some minor changes to the spec.
If hard_reset is set to true, it will use the latest spec, and start the workflow over from the beginning.
which should work in cases where a soft reset fails.
If neither flag is set, it will use the same version of the specification that was used to originally
create the workflow model. """
self.workflow_model = workflow_model
if soft_reset or len(workflow_model.dependencies) == 0:
if soft_reset or len(workflow_model.dependencies) == 0: # Depenencies of 0 means the workflow was never started.
self.spec_data_files = FileService.get_spec_data_files(
workflow_spec_id=workflow_model.workflow_spec_id)
else:
@ -216,8 +217,6 @@ class WorkflowProcessor(object):
full_version = "v%s (%s)" % (version, files)
return full_version
def update_dependencies(self, spec_data_files):
existing_dependencies = FileService.get_spec_data_files(
workflow_spec_id=self.workflow_model.workflow_spec_id,
@ -299,25 +298,12 @@ class WorkflowProcessor(object):
return WorkflowStatus.waiting
def hard_reset(self):
"""Recreate this workflow, but keep the data from the last completed task and add
it back into the first task. This may be useful when a workflow specification changes,
and users need to review all the prior steps, but they don't need to reenter all the previous data.
Returns the new version.
"""Recreate this workflow. This will be useful when a workflow specification changes.
"""
# Create a new workflow based on the latest specs.
self.spec_data_files = FileService.get_spec_data_files(workflow_spec_id=self.workflow_spec_id)
new_spec = WorkflowProcessor.get_spec(self.spec_data_files, self.workflow_spec_id)
new_bpmn_workflow = BpmnWorkflow(new_spec, script_engine=self._script_engine)
new_bpmn_workflow.data = self.bpmn_workflow.data
# Reset the current workflow to the beginning - which we will consider to be the first task after the root
# element. This feels a little sketchy, but I think it is safe to assume root will have one child.
first_task = self.bpmn_workflow.task_tree.children[0]
first_task.reset_token(reset_data=False)
for task in new_bpmn_workflow.get_tasks(SpiffTask.READY):
task.data = first_task.data
new_bpmn_workflow.do_engine_steps()
self.bpmn_workflow = new_bpmn_workflow

View File

@ -1,3 +1,4 @@
import copy
import string
from datetime import datetime
import random
@ -5,25 +6,26 @@ import random
import jinja2
from SpiffWorkflow import Task as SpiffTask, WorkflowException
from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask
from SpiffWorkflow.bpmn.specs.MultiInstanceTask import MultiInstanceTask
from SpiffWorkflow.bpmn.specs.ScriptTask import ScriptTask
from SpiffWorkflow.bpmn.specs.UserTask import UserTask
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
from SpiffWorkflow.specs import CancelTask, StartTask
from flask import g
from SpiffWorkflow.util.deep_merge import DeepMerge
from jinja2 import Template
from crc import db, app
from crc.api.common import ApiError
from crc.models.api_models import Task, MultiInstanceType
from crc.models.api_models import Task, MultiInstanceType, NavigationItem, NavigationItemSchema, WorkflowApi
from crc.models.file import LookupDataModel
from crc.models.stats import TaskEventModel
from crc.models.study import StudyModel
from crc.models.user import UserModel
from crc.models.workflow import WorkflowModel, WorkflowStatus
from crc.models.workflow import WorkflowModel, WorkflowStatus, WorkflowSpecModel
from crc.services.file_service import FileService
from crc.services.lookup_service import LookupService
from crc.services.study_service import StudyService
from crc.services.workflow_processor import WorkflowProcessor, CustomBpmnScriptEngine
from crc.services.workflow_processor import WorkflowProcessor
class WorkflowService(object):
@ -186,6 +188,77 @@ class WorkflowService(object):
letters = string.ascii_lowercase
return ''.join(random.choice(letters) for i in range(string_length))
@staticmethod
def processor_to_workflow_api(processor: WorkflowProcessor, next_task=None):
    """Returns an API model representing the state of the current workflow, if requested, and
    possible, next_task is set to the current_task."""
    nav_dict = processor.bpmn_workflow.get_nav_list()

    # Convert Spiff's raw navigation entries into API NavigationItem models.
    navigation = []
    for nav_item in nav_dict:
        spiff_task = processor.bpmn_workflow.get_task(nav_item['task_id'])
        if 'description' in nav_item:
            nav_item['title'] = nav_item.pop('description')
            # fixme: duplicate code from the workflow_service. Should only do this in one place.
            if ' ' in nav_item['title']:
                nav_item['title'] = nav_item['title'].partition(' ')[2]
        else:
            nav_item['title'] = ""
        if spiff_task:
            nav_item['task'] = WorkflowService.spiff_task_to_api_task(spiff_task, add_docs_and_forms=False)
            nav_item['title'] = nav_item['task'].title  # Prefer the task title.
        else:
            nav_item['task'] = None
        if not 'is_decision' in nav_item:
            nav_item['is_decision'] = False

        navigation.append(NavigationItem(**nav_item))
        NavigationItemSchema().dump(nav_item)

    spec = db.session.query(WorkflowSpecModel).filter_by(id=processor.workflow_spec_id).first()
    workflow_api = WorkflowApi(
        id=processor.get_workflow_id(),
        status=processor.get_status(),
        next_task=None,  # filled in below once the next task is resolved
        navigation=navigation,
        workflow_spec_id=processor.workflow_spec_id,
        spec_version=processor.get_version_string(),
        is_latest_spec=processor.is_latest_spec,
        total_tasks=len(navigation),
        completed_tasks=processor.workflow_model.completed_tasks,
        last_updated=processor.workflow_model.last_updated,
        title=spec.display_name
    )
    if not next_task:  # The Next Task can be requested to be a certain task, useful for parallel tasks.
        # This may or may not work, sometimes there is no next task to complete.
        next_task = processor.next_task()
    if next_task:
        # Merge previously-submitted form data back into the task so users
        # revisiting a task see their earlier answers.
        previous_form_data = WorkflowService.get_previously_submitted_data(processor.workflow_model.id, next_task)
        DeepMerge.merge(next_task.data, previous_form_data)
        workflow_api.next_task = WorkflowService.spiff_task_to_api_task(next_task, add_docs_and_forms=True)
    return workflow_api
@staticmethod
def get_previously_submitted_data(workflow_id, task):
    """If the user has completed this task previously, return the form data
    from the most recent submission; otherwise return an empty dict.

    :param workflow_id: id of the workflow whose task events are searched
    :param task: the Spiff task whose spec name keys the event lookup
    :return: the stored form_data dict, or {} when no usable event exists
    """
    latest_event = db.session.query(TaskEventModel) \
        .filter_by(workflow_id=workflow_id) \
        .filter_by(task_name=task.task_spec.name) \
        .filter_by(action=WorkflowService.TASK_ACTION_COMPLETE) \
        .order_by(TaskEventModel.date.desc()).first()
    if latest_event is None:
        return {}
    if latest_event.form_data is not None:
        return latest_event.form_data
    # A COMPLETE event with no form_data means we have lost data for this task.
    # Original call passed the message as two positional args with no % placeholder,
    # which breaks logging's lazy formatting and silently dropped the detail text.
    app.logger.error("missing_form_data: We have lost data for workflow %i, task %s, "
                     "it is not in the task event model, and it should be."
                     % (workflow_id, task.task_spec.name))
    return {}
@staticmethod
def spiff_task_to_api_task(spiff_task, add_docs_and_forms=False):
task_type = spiff_task.task_spec.__class__.__name__
@ -317,21 +390,22 @@ class WorkflowService(object):
field.options.append({"id": d.value, "name": d.label})
@staticmethod
def log_task_action(user_uid, processor, spiff_task, action):
def log_task_action(user_uid, workflow_model, spiff_task, action, version):
task = WorkflowService.spiff_task_to_api_task(spiff_task)
workflow_model = processor.workflow_model
form_data = WorkflowService.extract_form_data(spiff_task.data, spiff_task)
task_event = TaskEventModel(
study_id=workflow_model.study_id,
user_uid=user_uid,
workflow_id=workflow_model.id,
workflow_spec_id=workflow_model.workflow_spec_id,
spec_version=processor.get_version_string(),
spec_version=version,
action=action,
task_id=task.id,
task_name=task.name,
task_title=task.title,
task_type=str(task.type),
task_state=task.state,
form_data=form_data,
mi_type=task.multi_instance_type.value, # Some tasks have a repeat behavior.
mi_count=task.multi_instance_count, # This is the number of times the task could repeat.
mi_index=task.multi_instance_index, # And the index of the currently repeating task.
@ -341,3 +415,64 @@ class WorkflowService(object):
db.session.add(task_event)
db.session.commit()
@staticmethod
def fix_legacy_data_model_for_rrt():
    """ Remove this after use! This is just to fix RRT so the data is handled correctly.

    Utility that is likely called via the flask command line, it will loop through all the
    workflows in the system and attempt to add the right data into the task action log so that
    users do not have to re fill out all of the forms if they start over or go back in the workflow.
    Viciously inefficient, but should only have to run one time for RRT"""
    workflows = db.session.query(WorkflowModel).all()
    for workflow_model in workflows:
        # All COMPLETE events for this workflow, newest first.
        task_logs = db.session.query(TaskEventModel) \
            .filter(TaskEventModel.workflow_id == workflow_model.id) \
            .filter(TaskEventModel.action == WorkflowService.TASK_ACTION_COMPLETE) \
            .order_by(TaskEventModel.date.desc()).all()

        processor = WorkflowProcessor(workflow_model)

        # Grab all the data from last task completed, which will be everything in this
        # rrt situation because of how we were keeping all the data at the time.
        latest_data = processor.next_task().data

        # Move forward in the task spec tree, dropping any data that would have been
        # added in subsequent tasks, just looking at form data, will not track the automated
        # task data additions, hopefully this doesn't hang us.
        for log in task_logs:
            # if log.task_data is not None:  # Only do this if the task event does not have data populated in it.
            #     continue
            data = copy.deepcopy(latest_data)  # Or you end up with insane crazy issues.
            # In the simple case of RRT, there is exactly one task for the given task_spec
            task = processor.bpmn_workflow.get_tasks_from_spec_name(log.task_name)[0]
            data = WorkflowService.extract_form_data(data, task)
            log.form_data = data
            db.session.add(log)

    db.session.commit()
@staticmethod
def extract_form_data(latest_data, task):
    """Removes data from latest_data that would be added by the child task or any of its children.

    Returns only the subset of latest_data that this task's form contributed:
    per-field values keyed by field id, repeat groups keyed by their group name,
    and (for multi-instance tasks) the element variable group.
    """
    data = {}
    if hasattr(task.task_spec, 'form'):
        for field in task.task_spec.form.fields:
            if field.has_property(Task.PROP_OPTIONS_READ_ONLY) and \
                    field.get_property(Task.PROP_OPTIONS_READ_ONLY).lower().strip() == "true":
                continue  # Don't add read-only data
            elif field.has_property(Task.PROP_OPTIONS_REPEAT):
                # Repeating fields are stored under their group key, not the field id.
                group = field.get_property(Task.PROP_OPTIONS_REPEAT)
                if group in latest_data:
                    data[group] = latest_data[group]
            elif isinstance(task.task_spec, MultiInstanceTask):
                # NOTE(review): this is a task-level condition evaluated inside the
                # per-field loop, so it repeats for every field — confirm intended.
                group = task.task_spec.elementVar
                if group in latest_data:
                    data[group] = latest_data[group]
            else:
                if field.id in latest_data:
                    data[field.id] = latest_data[field.id]
    return data

View File

@ -23,8 +23,16 @@ if [ "$RESET_DB_RRT" = "true" ]; then
pipenv run flask load-example-rrt-data
fi
# One-time RRT data repair: runs the flask rrt-data-fix CLI command when requested.
if [ "$FIX_RRT_DATA" = "true" ]; then
  echo 'Fixing RRT data...'
  pipenv run flask rrt-data-fix
fi

# THIS MUST BE THE LAST COMMAND!
# Serve the app with gunicorn; mount it under APPLICATION_ROOT unless that is "/".
if [ "$APPLICATION_ROOT" = "/" ]; then
  pipenv run gunicorn --bind 0.0.0.0:$PORT0 wsgi:app
else
  pipenv run gunicorn -e SCRIPT_NAME="$APPLICATION_ROOT" --bind 0.0.0.0:$PORT0 wsgi:app
fi

View File

@ -0,0 +1,28 @@
"""empty message
Revision ID: 1fdd1bdb600e
Revises: 17597692d0b0
Create Date: 2020-06-17 16:44:16.427988
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1fdd1bdb600e'
down_revision = '17597692d0b0'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('task_event', sa.Column('task_data', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('task_event', 'task_data')
# ### end Alembic commands ###

View File

@ -0,0 +1,30 @@
"""empty message
Revision ID: de30304ff5e6
Revises: 1fdd1bdb600e
Create Date: 2020-06-18 16:19:11.133665
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'de30304ff5e6'
down_revision = '1fdd1bdb600e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('task_event', sa.Column('form_data', sa.JSON(), nullable=True))
op.drop_column('task_event', 'task_data')
# ### end Alembic commands ###
def downgrade():
    """Restore ``task_data`` (Postgres JSON) and drop ``form_data``."""
    task_data = sa.Column('task_data',
                          postgresql.JSON(astext_type=sa.Text()),
                          autoincrement=False,
                          nullable=True)
    op.add_column('task_event', task_data)
    op.drop_column('task_event', 'form_data')

View File

@ -4,14 +4,86 @@ import random
from unittest.mock import patch
from tests.base_test import BaseTest
from crc import session, app
from crc.models.api_models import WorkflowApiSchema, MultiInstanceType, TaskSchema
from crc.models.file import FileModelSchema
from crc.models.workflow import WorkflowStatus
from crc.services.workflow_service import WorkflowService
from crc.models.stats import TaskEventModel
class TestTasksApi(BaseTest):
def get_workflow_api(self, workflow, soft_reset=False, hard_reset=False):
    """Fetch the workflow through the API, optionally requesting a soft or hard reset.

    Asserts the request succeeds and that the returned spec id matches the model's.
    """
    url = '/v1.0/workflow/%i?soft_reset=%s&hard_reset=%s' % \
          (workflow.id, str(soft_reset), str(hard_reset))
    rv = self.app.get(url,
                      headers=self.logged_in_headers(),
                      content_type="application/json")
    self.assert_success(rv)
    payload = json.loads(rv.get_data(as_text=True))
    workflow_api = WorkflowApiSchema().load(payload)
    self.assertEqual(workflow.workflow_spec_id, workflow_api.workflow_spec_id)
    return workflow_api
def complete_form(self, workflow_in, task_in, dict_data, error_code=None):
    """Submit form data for a task and verify workflow stats and the task-event log.

    :param workflow_in: current workflow API model (exposes ``id`` and ``completed_tasks``)
    :param task_in: the task being completed — a task API object, or a plain dict
    :param dict_data: form data to PUT to the task-data endpoint
    :param error_code: when set, assert the request fails with this code and return None
    :return: the updated workflow API model, or None when error_code was expected
    """
    prev_completed_task_count = workflow_in.completed_tasks
    # Accept either a serialized task (dict) or a task API object.
    # NOTE(review): when task_in is a dict, the attribute asserts below
    # (task_in.name, .title, ...) would raise — presumably dict inputs are
    # only used with error_code paths; confirm against callers.
    if isinstance(task_in, dict):
        task_id = task_in["id"]
    else:
        task_id = task_in.id
    rv = self.app.put('/v1.0/workflow/%i/task/%s/data' % (workflow_in.id, task_id),
                      headers=self.logged_in_headers(),
                      content_type="application/json",
                      data=json.dumps(dict_data))
    if error_code:
        self.assert_failure(rv, error_code=error_code)
        return
    self.assert_success(rv)
    json_data = json.loads(rv.get_data(as_text=True))

    # Assure stats are updated on the model
    workflow = WorkflowApiSchema().load(json_data)
    # The total number of tasks may change over time, as users move through gateways
    # branches may be pruned. As we hit parallel Multi-Instance new tasks may be created...
    # (assertEquals is a deprecated alias, removed in Python 3.12 — use assertEqual.)
    self.assertIsNotNone(workflow.total_tasks)
    self.assertEqual(prev_completed_task_count + 1, workflow.completed_tasks)

    # Assure a record exists in the Task Events
    task_events = session.query(TaskEventModel) \
        .filter_by(workflow_id=workflow.id) \
        .filter_by(task_id=task_id) \
        .order_by(TaskEventModel.date.desc()).all()
    self.assertGreater(len(task_events), 0)
    event = task_events[0]
    self.assertIsNotNone(event.study_id)
    self.assertEqual("dhf8r", event.user_uid)
    self.assertEqual(workflow.id, event.workflow_id)
    self.assertEqual(workflow.workflow_spec_id, event.workflow_spec_id)
    self.assertEqual(workflow.spec_version, event.spec_version)
    self.assertEqual(WorkflowService.TASK_ACTION_COMPLETE, event.action)
    self.assertEqual(task_in.id, task_id)
    self.assertEqual(task_in.name, event.task_name)
    self.assertEqual(task_in.title, event.task_title)
    self.assertEqual(task_in.type, event.task_type)
    self.assertEqual("COMPLETED", event.task_state)
    # Marshmallow may hand back either the MultiInstanceType enum or its raw value.
    if isinstance(task_in.multi_instance_type, MultiInstanceType):
        self.assertEqual(task_in.multi_instance_type.value, event.mi_type)
    else:
        self.assertEqual(task_in.multi_instance_type, event.mi_type)
    self.assertEqual(task_in.multi_instance_count, event.mi_count)
    self.assertEqual(task_in.multi_instance_index, event.mi_index)
    self.assertEqual(task_in.process_name, event.process_name)
    self.assertIsNotNone(event.date)
    # Assure that there is data in the form_data
    self.assertIsNotNone(event.form_data)
    # The workflow model was already deserialized above — no need to load it twice.
    return workflow
def test_get_current_user_tasks(self):
self.load_example_data()
workflow = self.create_workflow('random_fact')
@ -299,13 +371,13 @@ class TestTasksApi(BaseTest):
self.assertEqual("UserTask", task.type)
self.assertEqual("Activity_A", task.name)
self.assertEqual("My Sub Process", task.process_name)
workflow_api = self.complete_form(workflow, task, {"name": "Dan"})
workflow_api = self.complete_form(workflow, task, {"FieldA": "Dan"})
task = workflow_api.next_task
self.assertIsNotNone(task)
self.assertEqual("Activity_B", task.name)
self.assertEqual("Sub Workflow Example", task.process_name)
workflow_api = self.complete_form(workflow, task, {"name": "Dan"})
workflow_api = self.complete_form(workflow, task, {"FieldB": "Dan"})
self.assertEqual(WorkflowStatus.complete, workflow_api.status)
def test_update_task_resets_token(self):
@ -373,7 +445,9 @@ class TestTasksApi(BaseTest):
for i in random.sample(range(9), 9):
task = TaskSchema().load(ready_items[i]['task'])
self.complete_form(workflow, task, {"investigator":{"email": "dhf8r@virginia.edu"}})
data = workflow_api.next_task.data
data['investigator']['email'] = "dhf8r@virginia.edu"
self.complete_form(workflow, task, data)
#tasks = self.get_workflow_api(workflow).user_tasks
workflow = self.get_workflow_api(workflow)

View File

@ -270,53 +270,6 @@ class TestWorkflowProcessor(BaseTest):
processor = self.get_processor(study, workflow_spec_model)
self.assertTrue(processor.get_version_string().startswith('v2.1.1'))
def test_restart_workflow(self):
    """A hard reset returns the workflow to its first task while keeping the task data."""
    self.load_example_data()
    study = session.query(StudyModel).first()
    spec = self.load_test_spec("two_forms")
    processor = self.get_processor(study, spec)
    self.assertEqual(processor.workflow_model.workflow_spec_id, spec.id)

    first_task = processor.next_task()
    first_task.data = {"key": "Value"}
    processor.complete_task(first_task)
    second_task = processor.next_task()

    processor.hard_reset()
    restarted_task = processor.next_task()
    # We advanced past the first task, so the pre-reset task differs from it...
    self.assertNotEqual(first_task.get_name(), second_task.get_name())
    # ...but after the reset we are back at the first task, data intact.
    self.assertEqual(first_task.get_name(), restarted_task.get_name())
    self.assertEqual(first_task.data, restarted_task.data)
def test_soft_reset(self):
    """A soft reset picks up a minor spec text change without disturbing workflow state."""
    self.load_example_data()

    # Start the two_forms workflow, and enter some data in the first form.
    study = session.query(StudyModel).first()
    spec = self.load_test_spec("two_forms")
    processor = self.get_processor(study, spec)
    self.assertEqual(processor.workflow_model.workflow_spec_id, spec.id)
    task = processor.next_task()
    task.data = {"color": "blue"}
    processor.complete_task(task)

    # Modify the specification, with a minor text change.
    mod_path = os.path.join(app.root_path, '..', 'tests', 'data',
                            'two_forms', 'mods', 'two_forms_text_mod.bpmn')
    self.replace_file("two_forms.bpmn", mod_path)

    # Setting up another processor should not error out, but doesn't pick up the update.
    processor.workflow_model.bpmn_workflow_json = processor.serialize()
    stale = WorkflowProcessor(processor.workflow_model)
    self.assertEqual("Step 1", stale.bpmn_workflow.last_task.task_spec.description)
    self.assertNotEqual("# This is some documentation I wanted to add.",
                        stale.bpmn_workflow.last_task.task_spec.documentation)

    # You can do a soft update and get the right response.
    refreshed = WorkflowProcessor(processor.workflow_model, soft_reset=True)
    self.assertEqual("Step 1", refreshed.bpmn_workflow.last_task.task_spec.description)
    self.assertEqual("# This is some documentation I wanted to add.",
                     refreshed.bpmn_workflow.last_task.task_spec.documentation)
def test_hard_reset(self):
self.load_example_data()
@ -344,8 +297,10 @@ class TestWorkflowProcessor(BaseTest):
# Do a hard reset, which should bring us back to the beginning, but retain the data.
processor3 = WorkflowProcessor(processor.workflow_model, hard_reset=True)
self.assertEqual("Step 1", processor3.next_task().task_spec.description)
self.assertEqual({"color": "blue"}, processor3.next_task().data)
processor3.complete_task(processor3.next_task())
self.assertTrue(processor3.is_latest_spec) # Now at version 2.
task = processor3.next_task()
task.data = {"color": "blue"}
processor3.complete_task(task)
self.assertEqual("New Step", processor3.next_task().task_spec.description)
self.assertEqual("blue", processor3.next_task().data["color"])

View File

@ -1,7 +1,14 @@
import json
from tests.base_test import BaseTest
from crc.services.workflow_processor import WorkflowProcessor
from crc.services.workflow_service import WorkflowService
from SpiffWorkflow import Task as SpiffTask, WorkflowException
from example_data import ExampleDataLoader
from crc import db
from crc.models.stats import TaskEventModel
from crc.models.api_models import Task
class TestWorkflowService(BaseTest):
@ -78,4 +85,50 @@ class TestWorkflowService(BaseTest):
task = processor.next_task()
task_api = WorkflowService.spiff_task_to_api_task(task, add_docs_and_forms=True)
WorkflowService.populate_form_with_random_data(task, task_api, required_only=False)
self.assertTrue(isinstance(task.data["sponsor"], dict))
self.assertTrue(isinstance(task.data["sponsor"], dict))
def test_fix_legacy_data_model_for_rrt(self):
ExampleDataLoader().load_rrt() # Make sure the research_rampup is loaded, as it's not a test spec.
workflow = self.create_workflow('research_rampup')
processor = WorkflowProcessor(workflow, validate_only=True)
# Use the test spec code to complete the workflow of research rampup.
while not processor.bpmn_workflow.is_completed():
processor.bpmn_workflow.do_engine_steps()
tasks = processor.bpmn_workflow.get_tasks(SpiffTask.READY)
for task in tasks:
task_api = WorkflowService.spiff_task_to_api_task(task, add_docs_and_forms=True)
WorkflowService.populate_form_with_random_data(task, task_api, False)
task.complete()
# create the task events
WorkflowService.log_task_action('dhf8r', workflow, task,
WorkflowService.TASK_ACTION_COMPLETE,
version=processor.get_version_string())
processor.save()
db.session.commit()
WorkflowService.fix_legacy_data_model_for_rrt()
# All tasks should now have data associated with them.
task_logs = db.session.query(TaskEventModel) \
.filter(TaskEventModel.workflow_id == workflow.id) \
.filter(TaskEventModel.action == WorkflowService.TASK_ACTION_COMPLETE) \
.order_by(TaskEventModel.date).all() # Get them back in order.
self.assertEqual(17, len(task_logs))
for log in task_logs:
task = processor.bpmn_workflow.get_tasks_from_spec_name(log.task_name)[0]
self.assertIsNotNone(log.form_data)
# Each task should have the data in the form for that task in the task event.
if hasattr(task.task_spec, 'form'):
for field in task.task_spec.form.fields:
if field.has_property(Task.PROP_OPTIONS_REPEAT):
self.assertIn(field.get_property(Task.PROP_OPTIONS_REPEAT), log.form_data)
else:
self.assertIn(field.id, log.form_data)
# Some spot checks:
# The first task should be empty, with all the data removed.
self.assertEqual({}, task_logs[0].form_data)