Lots and lots of random stuff as I organize code and get things to make sense.

This commit is contained in:
Dan Funk 2019-12-18 14:02:17 -05:00
parent d4a138e943
commit 9861f6baf3
27 changed files with 863 additions and 271 deletions

View File

@ -7,7 +7,7 @@ verify_ssl = true
pytest = "*"
[packages]
connexion = {version = "*",extras = ["swagger-ui"]}
connexion = {extras = ["swagger-ui"],version = "*"}
swagger-ui-bundle = "*"
flask = "*"
flask-bcrypt = "*"
@ -26,6 +26,8 @@ requests = "*"
xlsxwriter = "*"
webtest = "*"
spiffworkflow = {editable = true,git = "https://github.com/sartography/SpiffWorkflow.git",ref = "master"}
alembic = "*"
coverage = "*"
[requires]
python_version = "3.7"

96
Pipfile.lock generated
View File

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "0f69f2039d61b7ce74169d57d17f78d7d32708f22135d13f7a453656a13e8825"
"sha256": "9a98e8870797e0de005154d479ed668e6f23a1a912621f87d020e6d86d9fcf59"
},
"pipfile-spec": 6,
"requires": {
@ -18,9 +18,10 @@
"default": {
"alembic": {
"hashes": [
"sha256:49277bb7242192bbb9eac58fed4fe02ec6c3a2a4b4345d2171197459266482b2"
"sha256:3b0cb1948833e062f4048992fbc97ecfaaaac24aaa0d83a1202a99fb58af8c6d"
],
"version": "==1.3.1"
"index": "pypi",
"version": "==1.3.2"
},
"aniso8601": {
"hashes": [
@ -145,11 +146,48 @@
"swagger-ui"
],
"hashes": [
"sha256:6e0569b646f2e6229923dc4e4c6e0325e223978bd19105779fd81e16bcb22fdf",
"sha256:7b4268e9ea837241e530738b35040345b78c8748d05d2c22805350aca0cd5b1c"
"sha256:0fa5776a44b32668f20d59e6e478f15a1dc19def8e4d07d837e10d837379c2ba",
"sha256:4b643821a775927b2ec6220c427779b6d9c3a83ddf43662d69e68dcdad4be603"
],
"index": "pypi",
"version": "==2.4.0"
"version": "==2.5.0"
},
"coverage": {
"hashes": [
"sha256:0cd13a6e98c37b510a2d34c8281d5e1a226aaf9b65b7d770ef03c63169965351",
"sha256:1a4b6b6a2a3a6612e6361130c2cc3dc4378d8c221752b96167ccbad94b47f3cd",
"sha256:2ee55e6dba516ddf6f484aa83ccabbb0adf45a18892204c23486938d12258cde",
"sha256:3be5338a2eb4ef03c57f20917e1d12a1fd10e3853fed060b6d6b677cb3745898",
"sha256:44b783b02db03c4777d8cf71bae19eadc171a6f2a96777d916b2c30a1eb3d070",
"sha256:475bf7c4252af0a56e1abba9606f1e54127cdf122063095c75ab04f6f99cf45e",
"sha256:47c81ee687eafc2f1db7f03fbe99aab81330565ebc62fb3b61edfc2216a550c8",
"sha256:4a7f8e72b18f2aca288ff02255ce32cc830bc04d993efbc87abf6beddc9e56c0",
"sha256:50197163a22fd17f79086e087a787883b3ec9280a509807daf158dfc2a7ded02",
"sha256:56b13000acf891f700f5067512b804d1ec8c301d627486c678b903859d07f798",
"sha256:79388ae29c896299b3567965dbcd93255f175c17c6c7bca38614d12718c47466",
"sha256:79fd5d3d62238c4f583b75d48d53cdae759fe04d4fb18fe8b371d88ad2b6f8be",
"sha256:7fe3e2fde2bf1d7ce25ebcd2d3de3650b8d60d9a73ce6dcef36e20191291613d",
"sha256:81042a24f67b96e4287774014fa27220d8a4d91af1043389e4d73892efc89ac6",
"sha256:81326f1095c53111f8afc95da281e1414185f4a538609a77ca50bdfa39a6c207",
"sha256:8873dc0d8f42142ea9f20c27bbdc485190fff93823c6795be661703369e5877d",
"sha256:88d2cbcb0a112f47eef71eb95460b6995da18e6f8ca50c264585abc2c473154b",
"sha256:91f2491aeab9599956c45a77c5666d323efdec790bfe23fcceafcd91105d585a",
"sha256:979daa8655ae5a51e8e7a24e7d34e250ae8309fd9719490df92cbb2fe2b0422b",
"sha256:9c871b006c878a890c6e44a5b2f3c6291335324b298c904dc0402ee92ee1f0be",
"sha256:a6d092545e5af53e960465f652e00efbf5357adad177b2630d63978d85e46a72",
"sha256:b5ed7837b923d1d71c4f587ae1539ccd96bfd6be9788f507dbe94dab5febbb5d",
"sha256:ba259f68250f16d2444cbbfaddaa0bb20e1560a4fdaad50bece25c199e6af864",
"sha256:be1d89614c6b6c36d7578496dc8625123bda2ff44f224cf8b1c45b810ee7383f",
"sha256:c1b030a79749aa8d1f1486885040114ee56933b15ccfc90049ba266e4aa2139f",
"sha256:c95bb147fab76f2ecde332d972d8f4138b8f2daee6c466af4ff3b4f29bd4c19e",
"sha256:d52c1c2d7e856cecc05aa0526453cb14574f821b7f413cc279b9514750d795c1",
"sha256:d609a6d564ad3d327e9509846c2c47f170456344521462b469e5cb39e48ba31c",
"sha256:e1bad043c12fb58e8c7d92b3d7f2f49977dcb80a08a6d1e7a5114a11bf819fca",
"sha256:e5a675f6829c53c87d79117a8eb656cc4a5f8918185a32fc93ba09778e90f6db",
"sha256:fec32646b98baf4a22fdceb08703965bd16dea09051fbeb31a04b5b6e72b846c"
],
"index": "pypi",
"version": "==5.0"
},
"et-xmlfile": {
"hashes": [
@ -241,11 +279,11 @@
},
"importlib-metadata": {
"hashes": [
"sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21",
"sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742"
"sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45",
"sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f"
],
"markers": "python_version < '3.8'",
"version": "==1.1.0"
"version": "==1.3.0"
},
"inflection": {
"hashes": [
@ -353,11 +391,11 @@
},
"marshmallow": {
"hashes": [
"sha256:1a358beb89c2b4d5555272065a9533591a3eb02f1b854f3c4002d88d8f2a1ddb",
"sha256:eb97c42c5928b5720812c9268865fe863d4807bc1a8b48ddd7d5c9e1779a6af0"
"sha256:0ba81b6da4ae69eb229b74b3c741ff13fe04fb899824377b1aff5aaa1a9fd46e",
"sha256:3e53dd9e9358977a3929e45cdbe4a671f9eff53a7d6a23f33ed3eab8c1890d8f"
],
"index": "pypi",
"version": "==3.2.2"
"version": "==3.3.0"
},
"marshmallow-enum": {
"hashes": [
@ -369,18 +407,18 @@
},
"marshmallow-sqlalchemy": {
"hashes": [
"sha256:0878a01587a09dfa3088ac812522784944bdcfcab9a33fa4cc80ab0ab8ad7691",
"sha256:50b4cce1387adf968928cf8b690664655d5caecb23acf2ec676cde91e93c514b"
"sha256:0d72beaf777f8b420c4dc94684252ae0e0a79556ccc4128129d2588f9ff72888",
"sha256:93fd8fad2b33d92a1ae58328eeb0f39ed174858d82f9e7084a174df7b41fd3a4"
],
"index": "pypi",
"version": "==0.20.0"
"version": "==0.21.0"
},
"more-itertools": {
"hashes": [
"sha256:53ff73f186307d9c8ef17a9600309154a6ae27f25579e80af4db8f047ba14bc2",
"sha256:a0ea684c39bc4315ba7aae406596ef191fd84f873d2d2751f84d64e81a7a2d45"
"sha256:b84b238cce0d9adad5ed87e745778d20a3f8487d0f0cb8b8a586816c7496458d",
"sha256:c833ef592a0324bcc6a60e48440da07645063c453880c9477ceb22490aec1564"
],
"version": "==8.0.0"
"version": "==8.0.2"
},
"openapi-spec-validator": {
"hashes": [
@ -484,9 +522,9 @@
},
"sqlalchemy": {
"hashes": [
"sha256:afa5541e9dea8ad0014251bc9d56171ca3d8b130c9627c6cb3681cff30be3f8a"
"sha256:bfb8f464a5000b567ac1d350b9090cf081180ec1ab4aa87e7bca12dab25320ec"
],
"version": "==1.3.11"
"version": "==1.3.12"
},
"swagger-ui-bundle": {
"hashes": [
@ -559,18 +597,18 @@
},
"importlib-metadata": {
"hashes": [
"sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21",
"sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742"
"sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45",
"sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f"
],
"markers": "python_version < '3.8'",
"version": "==1.1.0"
"version": "==1.3.0"
},
"more-itertools": {
"hashes": [
"sha256:53ff73f186307d9c8ef17a9600309154a6ae27f25579e80af4db8f047ba14bc2",
"sha256:a0ea684c39bc4315ba7aae406596ef191fd84f873d2d2751f84d64e81a7a2d45"
"sha256:b84b238cce0d9adad5ed87e745778d20a3f8487d0f0cb8b8a586816c7496458d",
"sha256:c833ef592a0324bcc6a60e48440da07645063c453880c9477ceb22490aec1564"
],
"version": "==8.0.0"
"version": "==8.0.2"
},
"packaging": {
"hashes": [
@ -602,11 +640,11 @@
},
"pytest": {
"hashes": [
"sha256:63344a2e3bce2e4d522fd62b4fdebb647c019f1f9e4ca075debbd13219db4418",
"sha256:f67403f33b2b1d25a6756184077394167fe5e2f9d8bdaab30707d19ccec35427"
"sha256:6b571215b5a790f9b41f19f3531c53a45cf6bb8ef2988bc1ff9afb38270b25fa",
"sha256:e41d489ff43948babd0fad7ad5e49b8735d5d55e26628a58673c39ff61d95de4"
],
"index": "pypi",
"version": "==5.3.1"
"version": "==5.3.2"
},
"six": {
"hashes": [

View File

@ -1,11 +0,0 @@
# NOTE(review): this module is removed in this commit; the app bootstrap
# appears to move to crc/__init__.py -- confirm nothing still imports app/.
import logging
import connexion
from app.api import workflows

# Configure root logging before the app is created so extension loggers inherit it.
logging.basicConfig(level=logging.INFO)

# Connexion wraps a Flask app and wires routes from the OpenAPI spec file.
connexion_app = connexion.FlaskApp(__name__)
connexion_app.add_api('api.yml')

# Expose the underlying Flask app and load its default configuration.
app = connexion_app.app
app.config.from_object('config.default')

View File

@ -1 +0,0 @@

View File

@ -1,25 +0,0 @@
import datetime
from typing import Tuple, Any, Union, Dict
from connexion import NoContent
# In-memory stand-in for persisted studies, keyed by study id.
workflows = {
    1: {
        'id': 1,
        'tag': 'expedited',
        'name': 'Full IRB Board Review',
        'last_updated': datetime.datetime.now(),
    }
}


def list_all(limit=100):
    """Return up to *limit* studies as a JSON-serializable list."""
    # dict_values is not JSON serializable under Python 3, so materialize a list.
    all_studies = list(workflows.values())
    return all_studies[:limit]


def get(workflow_id):
    """Return the study with the given id, or a 404 response if unknown."""
    key = int(workflow_id)
    try:
        return workflows[key]
    except KeyError:
        return NoContent, 404

View File

@ -1,85 +0,0 @@
import datetime
from connexion import NoContent
workflow_tasks = [
{
'id': 1,
'workflow_id': 1,
'task_id': 1,
'last_updated': datetime.datetime.now(),
'status': 'Complete',
},
{
'id': 2,
'workflow_id': 1,
'task_id': 2,
'last_updated': datetime.datetime.now(),
'status': 'Incomplete',
},
{
'id': 3,
'workflow_id': 1,
'task_id': 3,
'last_updated': datetime.datetime.now(),
'status': 'Disabled',
},
{
'id': 4,
'workflow_id': 1,
'task_id': 4,
'last_updated': datetime.datetime.now(),
'status': 'Incomplete',
},
]
def start(workflow_id):
# spec = TrainingWorkflowSpec()
# wf = Workflow(spec)
id_ = len(workflow_tasks)
workflow_tasks.append({
'id': id_,
'workflow_id': workflow_id,
'task_id': 1,
'last_updated': datetime.datetime.now(),
'status': 'Incomplete',
})
return workflow_tasks[id_]
def get(workflow_id, task_id):
i = _get_workflow_task_index(workflow_id, task_id)
print(i)
return workflow_tasks[i] if i is not None else NoContent, 404
def post(workflow_id, task_id, body):
i = _get_workflow_task_index(workflow_id, task_id)
if i is not None:
workflow_tasks[i]['last_updated'] = datetime.datetime.now()
workflow_tasks[i]['status'] = body['status']
return workflow_tasks[i]
else:
return NoContent, 404
def delete(workflow_id, task_id):
i = _get_workflow_task_index(workflow_id, task_id)
if i is not None:
del workflow_tasks[i]
return NoContent, 204
else:
return NoContent, 404
def _get_workflow_task_index(workflow_id, task_id):
workflow_id = int(workflow_id)
task_id = int(task_id)
for i, wt in enumerate(workflow_tasks):
if wt['workflow_id'] == workflow_id and wt['task_id'] == task_id:
return i
return None

View File

@ -1,6 +0,0 @@
class Fact:
    """Simple value holder for the random-fact demo workflow.

    NOTE(review): removed in this commit; presumably superseded by the
    FactService script task -- confirm nothing still imports it.
    """
    # The fact categories the demo understands.
    types = ["cat", "buzzword", "norris"]
    # Currently selected category; defaults to "cat".
    type = "cat"
    # The fact text itself, filled in later.
    details = "not yet set."

View File

@ -1,3 +1,7 @@
import os

# Absolute path of the directory containing this config file; anchors the
# on-disk SQLite database location.
basedir = os.path.abspath(os.path.dirname(__file__))

NAME = "CR Connect Workflow"
CORS_ENABLED = False
DEVELOPMENT = True
# NOTE(review): "sqlite:////" plus an absolute basedir yields a doubled
# leading slash on POSIX -- confirm the URI is intended.
SQLALCHEMY_DATABASE_URI = "sqlite:////" + os.path.join(basedir, "cr_connect.db")

View File

@ -1,4 +1,8 @@
import os

# Anchor the throwaway test database next to this config file.
basedir = os.path.abspath(os.path.dirname(__file__))

NAME = "CR Connect Workflow"
CORS_ENABLED = False
DEVELOPMENT = True
TESTING = True
# NOTE(review): "sqlite:////" plus an absolute basedir yields a doubled
# leading slash on POSIX -- confirm the URI is intended.
SQLALCHEMY_DATABASE_URI = "sqlite:////" + os.path.join(basedir, "test.db")

33
crc/__init__.py Normal file
View File

@ -0,0 +1,33 @@
import logging
import os
import connexion
from flask_marshmallow import Marshmallow
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
# Configure root logging before any extensions are initialized.
logging.basicConfig(level=logging.INFO)

# Connexion wraps a Flask app and routes requests per the OpenAPI spec.
connexion_app = connexion.FlaskApp(__name__)
app = connexion_app.app
app.config.from_object('config.default')
#app.config.from_pyfile('config.py')

# Switch to the testing configuration when the TESTING env var is "true".
if "TESTING" in os.environ and os.environ["TESTING"] == "true":
    app.config.from_object('config.testing')
    # NOTE(review): from_pyfile('testing.py') appears to load the same
    # settings a second time -- confirm both calls are needed.
    app.config.from_pyfile('testing.py')

db = SQLAlchemy(app)
migrate = Migrate(app, db)
ma = Marshmallow(app)

# Import models before registering the API so the SQLAlchemy tables exist.
from crc import models

connexion_app.add_api('api.yml')
@app.cli.command()
def load_example_data():
    """Load example data into the database.

    Bug fix: the loader class lives in example_data.py (not study.py) and its
    entry point is load_all() -- load_studies() does not exist on it.
    """
    from example_data import ExampleDataLoader
    ExampleDataLoader().load_all()

72
crc/api.py Normal file
View File

@ -0,0 +1,72 @@
from connexion import NoContent
from flask_marshmallow import Schema
from crc import db, ma
from crc.models import WorkflowModel, WorkflowSchema, StudySchema, StudyModel, WorkflowSpecSchema, WorkflowSpecModel
from crc.workflow_processor import WorkflowProcessor
class ApiError:
    """Lightweight error payload returned to API clients."""

    def __init__(self, code, message):
        # Machine-readable identifier plus a human-readable explanation.
        self.code, self.message = code, message
class ApiErrorSchema(ma.Schema):
    """Marshmallow schema used to serialize ApiError instances."""
    class Meta:
        # Only these attributes are rendered into the JSON payload.
        fields = ("code", "message")
def all_studies():
    """Return every study in the database as a list of serialized dicts."""
    # TODO: limit returned studies to the current user.
    schema = StudySchema(many=True)
    return schema.dump(db.session.query(StudyModel).all())
def get_study(study_id):
    """Look up a single study by primary key; (NoContent, 404) when absent."""
    study = db.session.query(StudyModel).filter_by(id=study_id).first()
    schema = StudySchema()
    if study is None:
        return NoContent, 404
    return schema.dump(study)
def all_specifications():
    """Return every workflow specification as a list of serialized dicts."""
    schema = WorkflowSpecSchema(many=True)
    return schema.dump(db.session.query(WorkflowSpecModel).all())
def post_update_study_from_protocol_builder(study_id):
    """Placeholder Protocol Builder sync; always reports 304 Not Modified."""
    # TODO: actually get data from an external service here.
    return NoContent, 304
def get_study_workflows(study_id):
    """Return all workflows attached to the given study, serialized."""
    workflows = db.session.query(WorkflowModel).filter_by(study_id=study_id).all()
    schema = WorkflowSchema(many=True)
    return schema.dump(workflows)
def add_workflow_to_study(study_id, body):
    """Start a workflow for a study from the spec id given in *body*.

    Returns a serialized ApiError with a 404 when the spec id is unknown.
    NOTE(review): the success path creates a processor but persists nothing
    and returns None -- confirm the intended response before release.
    """
    workflow_spec_model = db.session.query(WorkflowSpecModel).filter_by(id=body["id"]).first()
    if workflow_spec_model is None:
        error = ApiError('unknown_spec', 'The specification "' + body['id'] + '" is not recognized.')
        # Bug fix: dump() must be called on a schema instance, not the class.
        return ApiErrorSchema().dump(error), 404
    processor = WorkflowProcessor.create(workflow_spec_model.id)
def get_workflow(workflow_id):
    """Fetch a workflow row by id (None when not found).

    NOTE(review): returns the raw model rather than schema.dump output,
    unlike the other endpoints here -- confirm connexion can serialize it.
    """
    return db.session.query(WorkflowModel).filter_by(id=workflow_id).first()
def get_task(workflow_id, task_id):
    """Return one task from a workflow's underlying BPMN workflow.

    NOTE(review): WorkflowModel (crc/models.py) defines no bpmn_workflow()
    method in this commit, so this will raise AttributeError -- verify.
    """
    workflow = db.session.query(WorkflowModel).filter_by(id=workflow_id).first()
    return workflow.bpmn_workflow().get_task(task_id)
def update_task(workflow_id, task_id, body):
    """Echo back the submitted task body after logging its form fields.

    Bug fix: dropped the stray `global bpmn_workflow` -- no module-level name
    of that kind exists here, so the declaration was dead and misleading.
    TODO: actually apply the form values to the running workflow.
    """
    for field in body["task"]["form"]:
        # Temporary trace of the submitted values until real handling lands.
        print("Setting " + field["id"] + " to " + field["value"])
    return body

View File

@ -10,10 +10,10 @@ paths:
# /v1.0/study
/study:
get:
operationId: app.api.study.list_all
operationId: crc.api.all_studies
summary: Provides a list of studies related to the current user.
tags:
- Studies and Requirements
- Studies
responses:
'200':
description: An array of studies, ordered by the last modified date.
@ -27,13 +27,12 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/Error"
/study/{study_id}/requirement:
/study/{study_id}:
get:
operationId: app.api.requirements.list_all
summary: Provides a list of requirements for the study.
operationId: crc.api.get_study
summary: Provides a single study
tags:
- Studies and Requirements
- Studies
parameters:
- name: study_id
in: path
@ -44,65 +43,131 @@ paths:
format: int32
responses:
'200':
description: An array of requirements
description: A Study object
content:
application/json:
schema:
$ref: "#/components/schemas/Study"
default:
description: unexpected error
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
/study-update/{study_id}:
post:
operationId: crc.api.post_update_study_from_protocol_builder
summary: If the study is up-to-date with Protocol Builder, returns a 304 Not Modified. If out of date, returns a 202 Accepted and the study state changes to updating.
tags:
- Study Status
parameters:
- name: study_id
in: path
required: true
description: The id of the study that should be checked for updates.
schema:
type: integer
format: int32
responses:
'304':
description: Study is currently up to date and does not need to be reloaded from Protocol Builder
'202':
description: Request accepted; an update will be performed. Study state set to "updating"
default:
description: unexpected error
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
/study/{study_id}/workflows:
get:
operationId: crc.api.get_study_workflows
summary: Provides a list of workflows to be completed for the given study.
tags:
- Studies
parameters:
- name: study_id
in: path
required: true
description: The id of the study for which workflows should be returned.
schema:
type: integer
format: int32
responses:
'200':
description: An array of workflows
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Requirement"
$ref: "#/components/schemas/Workflow"
default:
description: unexpected error
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
/study/{study_id}/requirement/{requirement_id}:
post:
operationId: app.api.study.create_workflow
summary: Generate a new instance of a workflow, based on a requirement.
operationId: crc.api.add_workflow_to_study
summary: Starts a new workflow for the given study using the provided spec. This is atypical, and should be left to the protocol builder.
tags:
- Studies and Requirements
- Studies
parameters:
- in: body
name: workflow_spec
schema:
$ref: '#/components/schemas/WorkflowSpec'
- name: study_id
in: path
required: true
description: The id of the study for which workflows should be returned.
schema:
type: integer
format: int32
- name: requirement_id
in: path
required: true
description: The id of the requirement for which to generate or start a new workflow.
description: The id of the study for which a workflow should start
schema:
type: integer
format: int32
responses:
'200':
description: Returns a new workflow instance on which to work.
description: An array of workflows
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Workflow"
'412':
description: You already have a workflow underway to complete this requirement.
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
default:
description: unexpected error
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
# /v1.0/workflow/0
/workflow-specification:
get:
operationId: crc.api.all_specifications
summary: Provides a list of workflow specifications that can be added to a study manually. Please note that Protocol Builder will handle this most of the time.
tags:
- Workflow Specifications
responses:
'200':
description: An array of workflow specifications
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/WorkflowSpec"
default:
description: unexpected error
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
# /v1.0/workflow/0
/workflow/{workflow_id}:
get:
operationId: app.api.workflows.getWorkflow
summary: Status info for a specific workflow instance
operationId: crc.api.get_workflow
summary: Detailed information for a specific workflow instance
tags:
- Workflows and Tasks
parameters:
@ -127,11 +192,11 @@ paths:
schema:
$ref: "#/components/schemas/Error"
# /v1.0/workflow/0/tasks/0
/workflow/{workflow_id}/tasks/{task_id}:
# /v1.0/workflow/0/task/0
/workflow/{workflow_id}/task/{task_id}:
get:
operationId: app.api.workflows_tasks.get
summary: Get status of specific task in specific workflow instance
operationId: crc.api.get_task
summary: Get details of specific task in specific workflow instance
tags:
- Workflows and Tasks
parameters:
@ -147,8 +212,7 @@ paths:
required: true
description: The id of the task
schema:
type: integer
format: int32
type: string
responses:
'200':
description: Expected response to a valid request
@ -163,12 +227,16 @@ paths:
schema:
$ref: "#/components/schemas/Error"
post:
operationId: app.api.workflows_tasks.post
put:
operationId: crc.api.update_task
summary: Update, attempt to complete a workflow task
tags:
- Workflows and Tasks
parameters:
- in: body
name: task
schema:
$ref: '#/components/schemas/Task'
- name: workflow_id
in: path
required: true
@ -181,14 +249,7 @@ paths:
required: true
description: The id of the task
schema:
type: integer
format: int32
requestBody:
description: Task status to update
content:
application/json:
schema:
$ref: "#/components/schemas/Task"
type: string
responses:
'201':
description: Null response
@ -236,69 +297,55 @@ components:
ind_number:
type: string
example: "27b-6-42"
Requirement:
required:
- id
- name
- type
- status
- description
WorkflowSpec:
properties:
id:
type: string
name:
display_name:
type: string
type:
type: string
enum: [workflow, non-functional]
status:
type: string
enum: [new, in-process, complete]
description:
type: string
workflow:
type: integer
example:
id: ids_submission
name: Investigational Drug Services
type: workflow
status: in-process
description: This workflow will help determine what application needs be provided to IDS, create the template, and submit it for approval.
workflow_id: 21
bpmn_url:
type: string
svg_url:
type: string
Workflow:
required:
- id
- messages
- current_status
- current_task_id
properties:
id:
readOnly: true
type: integer
format: int64
name:
type: string
current_status:
status:
type: enum
enum: ['user_input_required','waiting','complete']
$ref: "#/components/schemas/Task"
current_task_id:
readOnly: true,
type: string
study_id:
readOnly: true
type: integer
workflow_spec:
$ref: "#/components/schemas/WorkflowSpec"
current_task_ids:
type: array
items:
type: String
messages:
type: array
items:
type: String
example:
id: 291234
requirement_id: ids_submission
workflow_status: 'user_input_required'
current_task_id: study_identification
status: 'user_input_required'
current_task_ids: ['study_identification','Detailed Reports']
workflow_spec:
id: 'prot_def'
display_name: 'Protocol Definition'
description: 'Collect some additional information about your protocol to complete forms and processes.'
bpmn_url: 'https://crconnect.virginia.edu/prot_def.bpmn'
svg_url: 'https://crconnect.virginia.edu/prot_def.svg'
messages: [
"Protocol Builder reports that the protocol process is complete for this study.",
"IDS Submission Template was generated successfully."
]
Task:
required:
- id
- name
- type
properties:
id:
readOnly: true
@ -309,7 +356,7 @@ components:
type:
type: string
form:
$ref: "#/components/schemas/Task"
$ref: "#/components/schemas/Form"
example:
{
id: study_identification,
@ -325,6 +372,31 @@ components:
}
}
}
Form:
properties:
fields:
type: array
items:
$ref: "#/components/schemas/Field"
Field:
properties:
id:
type: string
readOnly: true
label:
type: string
readOnly: true
type:
type: enum
enum: ['text','number', 'enum']
readOnly: true
options:
type: array
items:
type: string
readOnly: true
value:
type: string
Error:
required:
- code

66
crc/models.py Normal file
View File

@ -0,0 +1,66 @@
import enum
from flask_marshmallow.sqla import ModelSchema
from marshmallow_enum import EnumField
from sqlalchemy import func
from crc import db
class ProtocolBuilderStatus(enum.Enum):
    """Sync state of a study relative to the external Protocol Builder."""
    out_of_date = "out_of_date"
    in_process = "in_process"
    complete = "complete"
    updating = "updating"
class StudyModel(db.Model):
    """Database row for a research study tracked by CR Connect."""
    __tablename__ = 'study'
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String)
    # Defaults to the database's now() at insert time.
    last_updated = db.Column(db.DateTime(timezone=True), default=func.now())
    protocol_builder_status = db.Column(db.Enum(ProtocolBuilderStatus))
    primary_investigator_id = db.Column(db.String)
    sponsor = db.Column(db.String)
    ind_number = db.Column(db.String)
class StudySchema(ModelSchema):
    """Serializer for StudyModel."""
    class Meta:
        model = StudyModel
    # Explicit field so the enum column serializes by name instead of object.
    protocol_builder_status = EnumField(ProtocolBuilderStatus)
class WorkflowSpecModel(db.Model):
    """A workflow specification (BPMN definition) that can be instantiated."""
    __tablename__ = 'workflow_spec'
    # String primary key, e.g. "random_fact" (see example_data.py).
    id = db.Column(db.String, primary_key=True)
    display_name = db.Column(db.String)
    description = db.Column(db.Text)
class WorkflowSpecSchema(ModelSchema):
    """Serializer for WorkflowSpecModel."""
    class Meta:
        model = WorkflowSpecModel
class WorkflowStatus(enum.Enum):
    """Lifecycle states reported by WorkflowProcessor.get_status()."""
    new = "new"
    user_input_required = "user_input_required"
    waiting = "waiting"
    complete = "complete"
class WorkflowModel(db.Model):
    """A running workflow instance tied to a study and a specification."""
    __tablename__ = 'workflow'
    id = db.Column(db.Integer, primary_key=True)
    # Serialized SpiffWorkflow state (see crc/workflow_processor.py).
    bpmn_workflow_json = db.Column(db.TEXT)
    status = db.Column(db.Enum(WorkflowStatus))
    study_id = db.Column(db.Integer, db.ForeignKey('study.id'))
    # NOTE(review): workflow_spec.id is a String column; Integer here is a
    # type mismatch -- confirm.
    workflow_spec_id = db.Column(db.Integer, db.ForeignKey('workflow_spec.id'))
    # NOTE(review): bare annotation, not an assignment -- this creates no
    # table column and no attribute; likely meant `messages = db.Column(...)`.
    messages: db.Column
class WorkflowSchema(ModelSchema):
    """Serializer for WorkflowModel."""
    class Meta:
        model = WorkflowModel
    # Explicit field so the enum column serializes by name instead of object.
    status = EnumField(WorkflowStatus)

View File

@ -30,4 +30,4 @@ class FactService:
details = self.get_buzzword()
else:
details = "unknown fact type."
print("The fact is : " + details)
data['details'] = details

View File

@ -19,7 +19,7 @@
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0ik56h0</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1wl4cli</bpmn:outgoing>
<bpmn:outgoing>SequenceFlow_1291h6i</bpmn:outgoing>
</bpmn:userTask>
<bpmn:scriptTask id="Task_Get_Fact_From_API" name="Display Fact">
<bpmn:extensionElements>
@ -27,15 +27,15 @@
<camunda:inputParameter name="Fact.type" />
</camunda:inputOutput>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_1wl4cli</bpmn:incoming>
<bpmn:incoming>SequenceFlow_1291h6i</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0am07in</bpmn:outgoing>
<bpmn:script>scripts.FactService</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="SequenceFlow_1wl4cli" sourceRef="Task_User_Select_Type" targetRef="Task_Get_Fact_From_API" />
<bpmn:endEvent id="EndEvent_0u1cgrf">
<bpmn:incoming>SequenceFlow_0am07in</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_0am07in" sourceRef="Task_Get_Fact_From_API" targetRef="EndEvent_0u1cgrf" />
<bpmn:sequenceFlow id="SequenceFlow_1291h6i" sourceRef="Task_User_Select_Type" targetRef="Task_Get_Fact_From_API" />
<bpmn:textAnnotation id="TextAnnotation_09fq7kh">
<bpmn:text>User sets the Fact.type to cat, norris, or buzzword</bpmn:text>
</bpmn:textAnnotation>
@ -60,10 +60,6 @@
<bpmndi:BPMNShape id="ScriptTask_10keafb_di" bpmnElement="Task_Get_Fact_From_API">
<dc:Bounds x="480" y="210" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_1wl4cli_di" bpmnElement="SequenceFlow_1wl4cli">
<di:waypoint x="370" y="250" />
<di:waypoint x="480" y="250" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="TextAnnotation_09fq7kh_di" bpmnElement="TextAnnotation_09fq7kh">
<dc:Bounds x="330" y="116" width="99.99202297383536" height="68.28334396936822" />
</bpmndi:BPMNShape>
@ -85,6 +81,10 @@
<di:waypoint x="580" y="250" />
<di:waypoint x="692" y="250" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1291h6i_di" bpmnElement="SequenceFlow_1291h6i">
<di:waypoint x="370" y="250" />
<di:waypoint x="480" y="250" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

79
crc/workflow_processor.py Normal file
View File

@ -0,0 +1,79 @@
import os
from SpiffWorkflow.bpmn.BpmnScriptEngine import BpmnScriptEngine
from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
from SpiffWorkflow.bpmn.serializer.CompactWorkflowSerializer import CompactWorkflowSerializer
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.camunda.serializer.CamundaSerializer import CamundaSerializer
from SpiffWorkflow.serializer.json import JSONSerializer
from crc import app
from crc.models import WorkflowModel, WorkflowStatus, WorkflowSpecModel
class CustomBpmnScriptEngine(BpmnScriptEngine):
    """This is a custom script processor that can be easily injected into Spiff Workflow.

    Rather than execute arbitrary code, this assumes the script references a
    fully qualified python class such as myapp.RandomFact.
    """

    def execute(self, task, script, **kwargs):
        """
        Assume that the script read in from the BPMN file is a fully qualified python class. Instantiate
        that class, pass in any data available to the current task so that it might act on it.
        Assume that the class implements the "do_task" method.
        This allows us to reference custom code from the BPMN diagram.
        """
        # e.g. script "scripts.FactService" resolves to module crc.scripts.FactService.
        module_name = "crc." + script
        class_name = module_name.split(".")[-1]
        # NOTE(review): assumes the module's file name matches the class it
        # contains (crc/scripts/FactService.py defining FactService) -- confirm.
        mod = __import__(module_name, fromlist=[class_name])
        klass = getattr(mod, class_name)
        # do_task mutates task.data in place; its return value is ignored.
        klass().do_task(task.data)
class WorkflowProcessor:
    """Wraps a SpiffWorkflow BpmnWorkflow with (de)serialization helpers."""
    # Shared across instances: custom script engine and compact serializer.
    script_engine = CustomBpmnScriptEngine()
    serializer = CompactWorkflowSerializer()

    def __init__(self, workflow_spec_id, bpmn_json):
        """Restore a workflow from its serialized JSON plus its spec id."""
        self.bpmn_workflow = self.serializer.deserialize_workflow(bpmn_json, self.get_spec(workflow_spec_id))
        self.bpmn_workflow.script_engine = self.script_engine

    @staticmethod
    def get_spec(workflow_spec_id):
        """Load the BPMN spec stored under static/bpmn/<workflow_spec_id>."""
        filename = os.path.join(app.root_path, 'static', 'bpmn', workflow_spec_id)
        return CamundaSerializer().deserialize_workflow_spec(filename)

    @classmethod
    def create(cls, workflow_spec_id):
        """Start a fresh workflow for the given spec.

        Runs the engine steps, then round-trips the state through the
        serializer so the returned processor matches a reloaded one.
        """
        spec = cls.get_spec(workflow_spec_id)
        bpmn_workflow = BpmnWorkflow(spec, script_engine=cls.script_engine)
        bpmn_workflow.do_engine_steps()
        json = cls.serializer.serialize_workflow(bpmn_workflow)
        processor = cls(workflow_spec_id, json)
        return processor

    def get_status(self):
        """Map the underlying workflow state onto the WorkflowStatus enum."""
        if self.bpmn_workflow.is_completed():
            return WorkflowStatus.complete
        user_tasks = self.bpmn_workflow.get_ready_user_tasks()
        if len(user_tasks) > 0:
            return WorkflowStatus.user_input_required
        else:
            return WorkflowStatus.waiting

    def do_engine_steps(self):
        # Advance all automatic (non-user) tasks.
        self.bpmn_workflow.do_engine_steps()

    def serialize(self):
        # NOTE(review): uses JSONSerializer here but CompactWorkflowSerializer
        # elsewhere -- confirm both formats are interchangeable on reload.
        return self.bpmn_workflow.serialize(JSONSerializer())

    def next_user_tasks(self):
        """Return the tasks currently ready for user input."""
        return self.bpmn_workflow.get_ready_user_tasks()

    def complete_task(self, task):
        """Mark the given task complete by its id."""
        self.bpmn_workflow.complete_task_from_id(task.id)

    def get_data(self):
        """Return the workflow-level data dictionary."""
        return self.bpmn_workflow.data

25
example_data.py Normal file
View File

@ -0,0 +1,25 @@
import datetime
from crc import db
from crc.models import StudyModel, WorkflowSpecModel
class ExampleDataLoader:
    """Seeds the database with a demo study and workflow spec.

    NOTE(review): the seed rows are class-level lists shared by every
    instance; calling load_all() twice would re-insert the same ids -- verify.
    """
    # One demo study (id=1); protocol_builder_status is passed as the raw
    # string value of the ProtocolBuilderStatus enum.
    studies = [StudyModel(id=1,
                          title='The impact of fried pickles on beer consumption in bipedal software developers.',
                          last_updated=datetime.datetime.now(),
                          protocol_builder_status='in_process',
                          primary_investigator_id='dhf8r',
                          sponsor='Sartography Pharmaceuticals',
                          ind_number='1234')]
    # One demo workflow spec; id matches the BPMN file loaded by
    # WorkflowProcessor.get_spec (static/bpmn/random_fact).
    workflow_specs = [WorkflowSpecModel(
        id="random_fact",
        display_name="Random Fact Generator",
        description='Displays a random fact about a topic of your choosing.',
    )]

    def load_all(self):
        """Bulk-insert all example rows and commit in one transaction."""
        db.session.bulk_save_objects(ExampleDataLoader.studies)
        db.session.bulk_save_objects(ExampleDataLoader.workflow_specs)
        db.session.commit()

1
migrations/README Normal file
View File

@ -0,0 +1 @@
Generic single-database configuration.

45
migrations/alembic.ini Normal file
View File

@ -0,0 +1,45 @@
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

96
migrations/env.py Normal file
View File

@ -0,0 +1,96 @@
from __future__ import with_statement
import logging
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
config.set_main_option(
'sqlalchemy.url', current_app.config.get(
'SQLALCHEMY_DATABASE_URI').replace('%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine, though an
    Engine is acceptable here as well.  By skipping the Engine creation we
    don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    # literal_binds inlines parameter values so the emitted SQL is standalone.
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a connection
    with the context.
    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                # Emptying the directives list suppresses the revision file.
                directives[:] = []
                logger.info('No changes in schema detected.')

    # NullPool: migrations run once, so no connection pooling is needed.
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives,
            # Extra keyword args supplied by Flask-Migrate, if any.
            **current_app.extensions['migrate'].configure_args
        )

        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic imports this module and we dispatch on whether it
# was invoked in offline (SQL-script) or online (live-connection) mode.
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

24
migrations/script.py.mako Normal file
View File

@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,54 @@
"""empty message
Revision ID: 4363c8bb8f1b
Revises:
Create Date: 2019-12-16 11:25:16.540952
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4363c8bb8f1b'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial schema: requirement, study, user and workflow."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'requirement',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'study',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(), nullable=True),
        sa.Column('last_updated', sa.DateTime(timezone=True), nullable=True),
        sa.Column('protocol_builder_status', sa.String(), nullable=True),
        sa.Column('primary_investigator_id', sa.String(), nullable=True),
        sa.Column('sponsor', sa.String(), nullable=True),
        sa.Column('ind_number', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'user',
        sa.Column('id', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'workflow',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('requirement', sa.String(), nullable=True),
        sa.Column('bpmn_workflow_json', sa.TEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop every table created by this revision, in reverse creation order."""
    # ### commands auto generated by Alembic - please adjust! ###
    for table_name in ('workflow', 'user', 'study', 'requirement'):
        op.drop_table(table_name)
    # ### end Alembic commands ###

View File

@ -1,4 +1,17 @@
from app import app
# Set environment variable to testing before loading.
# IMPORTANT - Environment must be loaded before app, models, etc....
import json
import os
os.environ["TESTING"] = "true"
from crc import app, db
def clean_db():
    """Delete every row from every table without dropping the schema.

    Iterating sorted_tables in reverse deletes dependent (child) tables
    before the tables they reference.
    """
    tables_children_first = reversed(db.metadata.sorted_tables)
    for table in tables_children_first:
        db.session.execute(table.delete())
    db.session.flush()
# Great class to inherit from, as it sets up and tears down
# classes efficiently when we have a database in place.
@ -11,27 +24,33 @@ class BaseTest:
app.config.from_object('config.testing')
cls.ctx = app.test_request_context()
cls.app = app.test_client()
# Great place to do a db.create_all()
db.create_all()
@classmethod
def tearDownClass(cls):
    """Tear down the class-level database: drop all tables and release
    the session created in setUpClass."""
    db.drop_all()
    db.session.remove()
def setUp(self):
# Push the request context so tests can use Flask context-bound objects.
self.ctx.push()
def tearDown(self):
    """Reset per-test state: wipe all table rows, pop the request
    context and clear any cached auth headers."""
    # Deleting all rows is cheaper than a drop_all/create_all per test.
    # FIXME: clean_db() does not seem to work here reliably — possible
    # collision between sessions; verify against the session setup.
    clean_db()
    self.ctx.pop()
    self.auths = {}
def load_example_data(self):
# Start from an empty database, then load the canned example fixtures.
clean_db()
from example_data import ExampleDataLoader
ExampleDataLoader().load_all()
def assert_success(self, rv, msg=""):
    """Assert that *rv* has a 2xx status code.

    When the response body parses as JSON it is included in the failure
    message; otherwise only the status code is reported.

    The original wrapped the first assertTrue in a bare ``except:``, so
    a failing assertion (AssertionError) was swallowed and re-raised via
    the second assertTrue, always discarding the detailed JSON message.
    Here only the JSON parse is inside the try block.
    """
    is_success = 200 <= rv.status_code < 300
    try:
        data = json.loads(rv.get_data(as_text=True))
    except ValueError:
        # Body is not valid JSON; fall back to the status-only message.
        self.assertTrue(is_success,
                        "BAD Response: %i." % rv.status_code + ". " + msg)
    else:
        self.assertTrue(is_success,
                        "BAD Response: %i. \n %s" %
                        (rv.status_code, json.dumps(data)) + ". " + msg)

54
tests/test_api.py Normal file
View File

@ -0,0 +1,54 @@
import json
import unittest
from crc import db
from crc.models import StudyModel, StudySchema, WorkflowSpecModel, WorkflowSpecSchema
from tests.base_test import BaseTest
class TestStudy(BaseTest, unittest.TestCase):
# API-level tests for the study and workflow-specification endpoints.
def test_study_basics(self):
# The example fixtures must load and contain at least one study.
self.load_example_data()
study = db.session.query(StudyModel).first()
self.assertIsNotNone(study)
def test_study_api_get_single_study(self):
# GET /study/<id> round-trips every field of a study through the schema.
self.load_example_data()
study = db.session.query(StudyModel).first()
rv = self.app.get('/v1.0/study/%i' % study.id,
follow_redirects=True,
content_type="application/json")
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
study2 = StudySchema().load(json_data, session=db.session)
self.assertEqual(study, study2)
self.assertEqual(study.id, study2.id)
self.assertEqual(study.title, study2.title)
self.assertEqual(study.last_updated, study2.last_updated)
self.assertEqual(study.protocol_builder_status, study2.protocol_builder_status)
self.assertEqual(study.primary_investigator_id, study2.primary_investigator_id)
self.assertEqual(study.sponsor, study2.sponsor)
self.assertEqual(study.ind_number, study2.ind_number)
def test_list_workflow_specifications(self):
# GET /workflow-specification returns the known specs, matching the db row.
self.load_example_data()
spec = db.session.query(WorkflowSpecModel).first()
rv = self.app.get('/v1.0/workflow-specification',
follow_redirects=True,
content_type="application/json")
self.assert_success(rv)
json_data = json.loads(rv.get_data(as_text=True))
specs = WorkflowSpecSchema(many=True).load(json_data, session=db.session)
spec2 = specs[0]
self.assertEqual(spec.id, spec2.id)
self.assertEqual(spec.display_name, spec2.display_name)
self.assertEqual(spec.description, spec2.description)
def test_add_workflow_to_study(self):
# POST a spec to /study/<id>/workflows; only checks for a 2xx response.
self.load_example_data()
study = db.session.query(StudyModel).first()
spec = db.session.query(WorkflowSpecModel).first()
rv = self.app.post('/v1.0/study/%i/workflows' % study.id, data=WorkflowSpecSchema().dump(spec))
self.assert_success(rv)

View File

@ -0,0 +1,32 @@
import json
import unittest
from crc import db
from crc.workflow_processor import WorkflowProcessor
from crc.models import StudyModel, WorkflowModel, WorkflowSpecModel, WorkflowStatus
from tests.base_test import BaseTest
class TestWorkflowProcessor(BaseTest, unittest.TestCase):
def test_create_and_complete_workflow(self):
self.load_example_data()
workflow_spec_model = db.session.query(WorkflowSpecModel).filter_by(id="random_fact").first()
processor = WorkflowProcessor.create(workflow_spec_model.id)
self.assertIsNotNone(processor)
self.assertEqual(WorkflowStatus.user_input_required, processor.get_status())
next_user_tasks = processor.next_user_tasks()
self.assertEqual(1, len(next_user_tasks))
task = next_user_tasks[0]
self.assertEqual("Task_User_Select_Type", task.get_name())
model = {"Fact.type": "cat"}
if task.data is None:
task.data = {}
task.data.update(model)
processor.complete_task(task)
self.assertEqual(WorkflowStatus.waiting, processor.get_status())
processor.do_engine_steps()
self.assertEqual(WorkflowStatus.complete, processor.get_status())
data = processor.get_data()
self.assertIsNotNone(data)
self.assertIn("details", data)